content
stringlengths 7
928k
| avg_line_length
float64 3.5
33.8k
| max_line_length
int64 6
139k
| alphanum_fraction
float64 0.08
0.96
| licenses
sequence | repository_name
stringlengths 7
104
| path
stringlengths 4
230
| size
int64 7
928k
| lang
stringclasses 1
value |
---|---|---|---|---|---|---|---|---|
# encoding: utf-8
import datetime
import uuid
import logging
from sqlalchemy.orm import class_mapper
from six import string_types
import ckan.lib.dictization as d
import ckan.lib.helpers as h
import ckan.authz as authz
log = logging.getLogger(__name__)
def resource_dict_save(res_dict, context):
    """Save a resource dict to the database, returning the Resource object.

    Creates a new Resource if ``res_dict`` carries no (or an unknown) id,
    otherwise updates the existing row. Keys that are not columns of the
    resource table are stored directly in the resource's ``extras`` dict;
    extras not submitted are removed from the existing extras.
    """
    model = context["model"]
    session = context["session"]

    # Renamed from `id` to avoid shadowing the builtin.
    res_id = res_dict.get("id")
    obj = None
    if res_id:
        obj = session.query(model.Resource).get(res_id)
    if not obj:
        new = True
        obj = model.Resource()
    else:
        new = False

    table = class_mapper(model.Resource).mapped_table
    fields = [field.name for field in table.c]

    # Resource extras not submitted will be removed from the existing extras
    # dict
    new_extras = {}
    # BUG FIX: dict.iteritems() does not exist on Python 3 -- this module
    # already targets 2/3 compatibility (it imports six), so use items().
    for key, value in res_dict.items():
        if isinstance(value, list):
            continue
        if key in ('extras', 'revision_timestamp', 'tracking_summary'):
            continue
        if key in fields:
            if isinstance(getattr(obj, key), datetime.datetime):
                # Skip no-op updates of datetime columns submitted as
                # ISO-format strings.
                if getattr(obj, key).isoformat() == value:
                    continue
                if key == 'last_modified' and not new:
                    obj.url_changed = True
            if key == 'url' and not new and obj.url != value:
                # Flag URL changes on existing resources.
                obj.url_changed = True
            setattr(obj, key, value)
        else:
            # resources save extras directly onto the object, instead
            # of in a separate extras field like packages and groups
            new_extras[key] = value

    obj.state = u'active'
    obj.extras = new_extras

    session.add(obj)
    return obj
def package_resource_list_save(res_dicts, package, context):
    """Save the package's resource list, soft-deleting dropped resources."""
    allow_partial_update = context.get("allow_partial_update", False)
    if res_dicts is None and allow_partial_update:
        return

    resource_list = package.resources_all
    old_list = package.resources_all[:]

    obj_list = []
    for res_dict in res_dicts or []:
        # Fill in the package id whenever it is missing or empty.
        if not res_dict.get(u'package_id'):
            res_dict[u'package_id'] = package.id
        obj_list.append(resource_dict_save(res_dict, context))

    # Assign into the ORM relation *in place* (note the [:] slice): this sets
    # the package's resources at the table level -- each resource in obj_list
    # gets its resource.package_id pointed at this package (needed for new
    # resources) and its resource.position set to ascending integers
    # following the obj_list ordering. Without the slice we would merely
    # rebind the local variable.
    resource_list[:] = obj_list

    # Any resources that were on the package before but are absent from the
    # new list are kept around in the 'deleted' state.
    for resource in set(old_list) - set(obj_list):
        resource.state = 'deleted'
        resource_list.append(resource)
def package_extras_save(extra_dicts, obj, context):
    """Sync the package's extras with ``extra_dicts``.

    New keys are added, changed keys updated, and keys absent from the
    submitted extras are soft-deleted (state='deleted'). Extras marked
    deleted or holding a None value are skipped. Mutates the session
    directly and returns None.
    """
    allow_partial_update = context.get("allow_partial_update", False)
    if extra_dicts is None and allow_partial_update:
        return
    model = context["model"]
    session = context["session"]

    extras_list = obj.extras_list
    old_extras = dict((extra.key, extra) for extra in extras_list)

    new_extras = {}
    for extra_dict in extra_dicts or []:
        if extra_dict.get("deleted"):
            continue
        # Extras with a None value are dropped rather than stored
        # (original used an empty `pass` branch for this).
        if extra_dict['value'] is not None:
            new_extras[extra_dict["key"]] = extra_dict["value"]

    # new keys
    for key in set(new_extras) - set(old_extras):
        extra = model.PackageExtra(state='active', key=key,
                                   value=new_extras[key])
        session.add(extra)
        extras_list.append(extra)
    # changed keys (or previously-deleted keys being resurrected)
    for key in set(new_extras) & set(old_extras):
        extra = old_extras[key]
        if new_extras[key] == extra.value and extra.state != 'deleted':
            continue
        extra.value = new_extras[key]
        extra.state = 'active'
        session.add(extra)
    # removed keys: soft-delete
    for key in set(old_extras) - set(new_extras):
        extra = old_extras[key]
        if extra.state == 'deleted':
            continue
        extra.state = 'deleted'
def group_extras_save(extras_dicts, context):
    """Return a {key: value} dict of the given extras, skipping deleted ones.

    Unlike package extras, group extras are returned as a plain dict for the
    caller to assign; nothing is written to the session here.
    """
    # NOTE: `context` is kept for interface symmetry with the other *_save
    # helpers; the unused model/session locals were removed.
    result_dict = {}
    for extra_dict in extras_dicts:
        if extra_dict.get("deleted"):
            continue
        result_dict[extra_dict["key"]] = extra_dict["value"]
    return result_dict
def package_tag_list_save(tag_dicts, package, context):
    """Sync the package's tags with ``tag_dicts``.

    Existing PackageTag rows are diffed against the submitted tags: tags no
    longer present are soft-deleted, brand-new tags get a new PackageTag,
    and previously-deleted tags are reactivated.
    """
    allow_partial_update = context.get("allow_partial_update", False)
    if tag_dicts is None and allow_partial_update:
        return

    model = context["model"]
    session = context["session"]

    # Map each Tag object to its current PackageTag link row.
    tag_package_tag = dict((package_tag.tag, package_tag)
                           for package_tag in
                           package.package_tag_all)

    # Subset of the above whose link row is currently soft-deleted.
    tag_package_tag_inactive = {tag: pt for tag,pt in tag_package_tag.items() if
                                pt.state in ['deleted']}

    # De-duplicate submitted tags on (name, vocabulary_id) before saving.
    tag_name_vocab = set()
    tags = set()
    for tag_dict in tag_dicts or []:
        if (tag_dict.get('name'), tag_dict.get('vocabulary_id')) not in tag_name_vocab:
            tag_obj = d.table_dict_save(tag_dict, model.Tag, context)
            tags.add(tag_obj)
            tag_name_vocab.add((tag_obj.name, tag_obj.vocabulary_id))

    # 3 cases
    # case 1: currently active but not in new list
    for tag in set(tag_package_tag.keys()) - tags:
        package_tag = tag_package_tag[tag]
        package_tag.state = 'deleted'

    # case 2: in new list but never used before
    for tag in tags - set(tag_package_tag.keys()):
        state = 'active'
        package_tag_obj = model.PackageTag(package, tag, state)
        session.add(package_tag_obj)
        tag_package_tag[tag] = package_tag_obj

    # case 3: in new list and already used but in deleted state
    for tag in tags.intersection(set(tag_package_tag_inactive.keys())):
        state = 'active'
        package_tag = tag_package_tag[tag]
        package_tag.state = state

    # In-place slice assignment updates the ORM relation itself.
    package.package_tag_all[:] = tag_package_tag.values()
def package_membership_list_save(group_dicts, package, context):
    """Sync the package's group memberships with ``group_dicts``.

    Organization memberships (capacity 'organization') are excluded from
    both the existing-member query and the submitted dicts. Memberships the
    user lacks 'read' permission on are left untouched.
    """
    allow_partial_update = context.get("allow_partial_update", False)
    if group_dicts is None and allow_partial_update:
        return

    capacity = 'public'
    model = context["model"]
    session = context["session"]
    user = context.get('user')

    members = session.query(model.Member) \
        .filter(model.Member.table_id == package.id) \
        .filter(model.Member.capacity != 'organization')

    group_member = dict((member.group, member)
                        for member in
                        members)
    groups = set()
    for group_dict in group_dicts or []:
        id = group_dict.get("id")
        name = group_dict.get("name")
        # NOTE(review): `capacity` is rebound on every iteration and the
        # value from the *last* group_dict is later reused in the removal
        # loop below -- looks accidental, but preserved as-is; confirm
        # before changing.
        capacity = group_dict.get("capacity", "public")
        if capacity == 'organization':
            continue
        if id:
            group = session.query(model.Group).get(id)
        else:
            group = session.query(model.Group).filter_by(name=name).first()
        if group:
            groups.add(group)

    ## need to flush so we can get out the package id
    model.Session.flush()

    # Remove any groups we are no longer in
    for group in set(group_member.keys()) - groups:
        member_obj = group_member[group]
        if member_obj and member_obj.state == 'deleted':
            continue
        if authz.has_user_permission_for_group_or_org(
                member_obj.group_id, user, 'read'):
            member_obj.capacity = capacity
            member_obj.state = 'deleted'
            session.add(member_obj)

    # Add any new groups
    for group in groups:
        member_obj = group_member.get(group)
        if member_obj and member_obj.state == 'active':
            continue
        if authz.has_user_permission_for_group_or_org(
                group.id, user, 'read'):
            member_obj = group_member.get(group)
            if member_obj:
                # Reactivate an existing (deleted) membership.
                member_obj.capacity = capacity
                member_obj.state = 'active'
            else:
                member_obj = model.Member(table_id=package.id,
                                          table_name='package',
                                          group=group,
                                          capacity=capacity,
                                          group_id=group.id,
                                          state = 'active')
            session.add(member_obj)
def relationship_list_save(relationship_dicts, package, attr, context):
    """Save the package relationships held in the ``attr`` relation,
    soft-deleting any that were dropped from the submitted list."""
    allow_partial_update = context.get("allow_partial_update", False)
    if relationship_dicts is None and allow_partial_update:
        return
    model = context["model"]
    session = context["session"]

    relationship_list = getattr(package, attr)
    old_list = relationship_list[:]

    saved = [d.table_dict_save(rel_dict, model.PackageRelationship, context)
             for rel_dict in relationship_dicts or []]

    # In-place slice assignment replaces the relation's contents; dropped
    # relationships are then re-appended in the 'deleted' state.
    relationship_list[:] = saved
    for dropped in set(old_list) - set(relationship_list):
        dropped.state = 'deleted'
        relationship_list.append(dropped)
def package_dict_save(pkg_dict, context):
    """Save a package dict plus its resources, tags, groups, relationships
    and extras; return the Package object."""
    model = context["model"]
    package = context.get("package")
    allow_partial_update = context.get("allow_partial_update", False)
    if package:
        pkg_dict["id"] = package.id
    Package = model.Package

    # These timestamps are managed by the model; never take them from input.
    # (pop with default replaces the non-idiomatic `if key in d: del d[key]`.)
    pkg_dict.pop('metadata_created', None)
    pkg_dict.pop('metadata_modified', None)

    pkg = d.table_dict_save(pkg_dict, Package, context)

    if not pkg.id:
        pkg.id = str(uuid.uuid4())

    package_resource_list_save(pkg_dict.get("resources"), pkg, context)
    package_tag_list_save(pkg_dict.get("tags"), pkg, context)
    package_membership_list_save(pkg_dict.get("groups"), pkg, context)

    # relationships are not considered 'part' of the package, so only
    # process this if the key is provided
    if 'relationships_as_subject' in pkg_dict:
        subjects = pkg_dict.get('relationships_as_subject')
        relationship_list_save(subjects, pkg, 'relationships_as_subject', context)
    if 'relationships_as_object' in pkg_dict:
        objects = pkg_dict.get('relationships_as_object')
        relationship_list_save(objects, pkg, 'relationships_as_object', context)

    # package_extras_save mutates the session directly and returns None,
    # so its result is not captured (removed the dead `extras =` binding).
    package_extras_save(pkg_dict.get("extras"), pkg, context)

    return pkg
def group_member_save(context, group_dict, member_table_name):
    """Sync the group's Member rows for one member type ('packages',
    'users', 'groups', 'tags' or 'organizations').

    Returns {'added': [...], 'removed': [...]} listing the table ids whose
    membership state actually changed.
    """
    model = context["model"]
    session = context["session"]
    group = context['group']
    entity_list = group_dict.get(member_table_name, None)

    if entity_list is None:
        if context.get('allow_partial_update', False):
            return {'added': [], 'removed': []}
        else:
            entity_list = []

    entities = {}
    Member = model.Member
    # Singularize the table name to find the model class ('packages' ->
    # 'Package').
    classname = member_table_name[:-1].capitalize()
    if classname == 'Organization':
        # Organizations use the model.Group class
        classname = 'Group'
    ModelClass = getattr(model, classname)

    # Key submitted entities by (id, capacity); skip unknown names/ids and
    # duplicate objects.
    for entity_dict in entity_list:
        name_or_id = entity_dict.get('id') or entity_dict.get('name')
        obj = ModelClass.get(name_or_id)
        if obj and obj not in entities.values():
            entities[(obj.id, entity_dict.get('capacity', 'public'))] = obj

    members = session.query(Member).filter_by(
        table_name=member_table_name[:-1],
        group_id=group.id,
    ).all()

    processed = {
        'added': [],
        'removed': []
    }

    # Existing Member rows keyed the same way as `entities` above.
    entity_member = dict(((member.table_id, member.capacity), member) for member in members)
    # Existing but not submitted: soft-delete.
    for entity_id in set(entity_member.keys()) - set(entities.keys()):
        if entity_member[entity_id].state != 'deleted':
            processed['removed'].append(entity_id[0])
        entity_member[entity_id].state = 'deleted'
        session.add(entity_member[entity_id])

    # Existing and submitted: make sure the row is active.
    for entity_id in set(entity_member.keys()) & set(entities.keys()):
        if entity_member[entity_id].state != 'active':
            processed['added'].append(entity_id[0])
        entity_member[entity_id].state = 'active'
        session.add(entity_member[entity_id])

    # Submitted but no existing row: create a new Member.
    for entity_id in set(entities.keys()) - set(entity_member.keys()):
        member = Member(group=group, group_id=group.id, table_id=entity_id[0],
                        table_name=member_table_name[:-1],
                        capacity=entity_id[1])
        processed['added'].append(entity_id[0])
        session.add(member)

    return processed
def group_dict_save(group_dict, context, prevent_packages_update=False):
    """Save a group dict: the group row itself, its memberships (packages,
    users, groups, tags) and its extras; return the Group object.

    If packages were added to or removed from the group, the affected
    packages are committed and re-indexed in the search index.
    """
    from ckan.lib.search import rebuild

    model = context["model"]
    session = context["session"]
    group = context.get("group")
    allow_partial_update = context.get("allow_partial_update", False)

    Group = model.Group
    if group:
        group_dict["id"] = group.id

    group = d.table_dict_save(group_dict, Group, context)
    if not group.id:
        group.id = str(uuid.uuid4())

    context['group'] = group

    # Under the new org rules we do not want to be able to update datasets
    # via group edit so we need a way to prevent this. It may be more
    # sensible in future to send a list of allowed/disallowed updates for
    # groups, users, tabs etc.
    if not prevent_packages_update:
        pkgs_edited = group_member_save(context, group_dict, 'packages')
    else:
        pkgs_edited = {
            'added': [],
            'removed': []
        }
    group_users_changed = group_member_save(context, group_dict, 'users')
    group_groups_changed = group_member_save(context, group_dict, 'groups')
    group_tags_changed = group_member_save(context, group_dict, 'tags')
    log.debug('Group save membership changes - Packages: %r Users: %r '
              'Groups: %r Tags: %r', pkgs_edited, group_users_changed,
              group_groups_changed, group_tags_changed)

    extras = group_extras_save(group_dict.get("extras", {}), context)
    if extras or not allow_partial_update:
        old_extras = set(group.extras.keys())
        new_extras = set(extras.keys())
        for key in old_extras - new_extras:
            del group.extras[key]
        for key in new_extras:
            group.extras[key] = extras[key]

    # We will get a list of packages that we have either added or
    # removed from the group, and trigger a re-index.
    package_ids = pkgs_edited['removed']
    package_ids.extend(pkgs_edited['added'])
    if package_ids:
        session.commit()
        # BUG FIX: on Python 3, map() returns a lazy iterator, so the old
        # ``map(rebuild, package_ids)`` never actually called rebuild.
        # Iterate explicitly so the re-index really happens.
        for package_id in package_ids:
            rebuild(package_id)

    return group
def user_dict_save(user_dict, context):
    """Create or update a User from ``user_dict``; return the User object."""
    model = context['model']
    session = context['session']
    user = context.get('user_obj')

    User = model.User
    if user:
        user_dict['id'] = user.id

    # An empty password means "don't change the password" -- drop it so
    # table_dict_save does not overwrite the stored hash. Truthiness instead
    # of the old `not len(...)` also copes with a None value, which would
    # previously have raised TypeError.
    if not user_dict.get('password'):
        user_dict.pop('password', None)

    user = d.table_dict_save(user_dict, User, context)

    return user
def package_api_to_dict(api1_dict, context):
    """Convert a v1-API package dict into the standard dictized-package form.

    Tags become [{'name': ...}], extras become [{'key': ..., 'value': ...}]
    (merged over the existing package's extras when one is in context), and
    groups become name- or id-dicts depending on the api_version. A legacy
    'download_url' key is turned into a single resource.
    """
    package = context.get("package")
    api_version = context.get('api_version')
    assert api_version, 'No api_version supplied in context'

    dictized = {}

    # BUG FIX: dict.iteritems() does not exist on Python 3; use items().
    for key, value in api1_dict.items():
        new_value = value
        if key == 'tags':
            if isinstance(value, string_types):
                new_value = [{"name": item} for item in value.split()]
            else:
                new_value = [{"name": item} for item in value]
        if key == 'extras':
            updated_extras = {}
            if package:
                updated_extras.update(package.extras)
            updated_extras.update(value)

            new_value = []
            for extras_key, extras_value in updated_extras.items():
                new_value.append({"key": extras_key,
                                  "value": extras_value})
        if key == 'groups' and len(value):
            if api_version == 1:
                new_value = [{'name': item} for item in value]
            else:
                new_value = [{'id': item} for item in value]
        dictized[key] = new_value

    download_url = dictized.pop('download_url', None)
    if download_url and not dictized.get('resources'):
        dictized["resources"] = [{'url': download_url}]
    # BUG FIX: removed a duplicated, dead second pop of 'download_url'.

    return dictized
def group_api_to_dict(api1_dict, context):
    """Convert a v1-API group dict into the standard dictized-group form.

    'packages' becomes a list of {'id': ...} dicts and 'extras' a list of
    {'key': ..., 'value': ...} dicts; all other keys pass through unchanged.
    The ``context`` argument is unused but kept for interface symmetry.
    """
    dictized = {}

    # BUG FIX: dict.iteritems() does not exist on Python 3; use items().
    for key, value in api1_dict.items():
        new_value = value
        if key == 'packages':
            new_value = [{"id": item} for item in value]
        if key == 'extras':
            new_value = [{"key": extra_key, "value": value[extra_key]}
                         for extra_key in value]
        dictized[key] = new_value

    return dictized
def task_status_dict_save(task_status_dict, context):
    """Create or update a TaskStatus row from the given dict."""
    model = context["model"]
    existing = context.get("task_status")
    allow_partial_update = context.get("allow_partial_update", False)
    if existing:
        # Target the existing row so table_dict_save performs an update.
        task_status_dict["id"] = existing.id

    return d.table_dict_save(task_status_dict, model.TaskStatus, context)
def activity_dict_save(activity_dict, context):
    """Build an Activity object from ``activity_dict``, add it to the
    session and return it. The optional 'data' key defaults to None."""
    model = context['model']
    session = context['session']
    user_id = activity_dict['user_id']
    object_id = activity_dict['object_id']
    revision_id = activity_dict['revision_id']
    activity_type = activity_dict['activity_type']
    # BUG FIX: dict.has_key() was removed in Python 3; .get() covers both
    # the present and absent cases in one call.
    data = activity_dict.get('data')
    activity_obj = model.Activity(user_id, object_id, revision_id,
                                  activity_type, data)
    session.add(activity_obj)
    # TODO: Handle activity details.
    return activity_obj
def vocabulary_tag_list_save(new_tag_dicts, vocabulary_obj, context):
    """Sync the vocabulary's tags with ``new_tag_dicts``: delete tags absent
    from the new list, then create any tags not already present."""
    model = context['model']
    session = context['session']

    # PERF: new_tag_dicts never changes, so build the name set once instead
    # of rebuilding a list per existing tag inside the loop.
    new_tag_names = {t['name'] for t in new_tag_dicts}

    # First delete any tags not in new_tag_dicts.
    for tag in vocabulary_obj.tags:
        if tag.name not in new_tag_names:
            tag.delete()
    # Now add any new tags.
    for tag_dict in new_tag_dicts:
        # NOTE(review): vocabulary_obj.tags is deliberately re-read each
        # iteration -- presumably the ORM relation can change as tags are
        # saved; confirm before hoisting this too.
        current_tag_names = [tag.name for tag in vocabulary_obj.tags]
        if tag_dict['name'] not in current_tag_names:
            # Make sure the tag belongs to this vocab..
            tag_dict['vocabulary_id'] = vocabulary_obj.id
            # then add it.
            tag_dict_save(tag_dict, {'model': model, 'session': session})
def vocabulary_dict_save(vocabulary_dict, context):
    """Create a new Vocabulary (and optionally its tags) from a dict;
    return the Vocabulary object."""
    model = context['model']
    session = context['session']
    vocabulary_name = vocabulary_dict['name']

    vocabulary_obj = model.Vocabulary(vocabulary_name)
    session.add(vocabulary_obj)

    # BUG FIX: dict.has_key() was removed in Python 3; use `in` instead.
    if 'tags' in vocabulary_dict:
        vocabulary_tag_list_save(vocabulary_dict['tags'], vocabulary_obj,
                                 context)

    return vocabulary_obj
def vocabulary_dict_update(vocabulary_dict, context):
    """Update an existing Vocabulary's name and/or tags from a dict;
    return the Vocabulary object."""
    model = context['model']
    session = context['session']

    vocabulary_obj = model.vocabulary.Vocabulary.get(vocabulary_dict['id'])

    # BUG FIX: dict.has_key() was removed in Python 3; use `in` instead.
    if 'name' in vocabulary_dict:
        vocabulary_obj.name = vocabulary_dict['name']

    if 'tags' in vocabulary_dict:
        vocabulary_tag_list_save(vocabulary_dict['tags'], vocabulary_obj,
                                 context)

    return vocabulary_obj
def tag_dict_save(tag_dict, context):
    """Create or update a Tag from ``tag_dict``; return the Tag object."""
    model = context['model']
    existing_tag = context.get('tag')
    if existing_tag:
        # Target the existing row so table_dict_save performs an update.
        tag_dict['id'] = existing_tag.id
    return d.table_dict_save(tag_dict, model.Tag, context)
def follower_dict_save(data_dict, context, FollowerClass):
    """Create and queue a follower relationship between the context user
    and the object identified by ``data_dict['id']``."""
    model = context['model']
    session = context['session']
    follower_id = model.User.get(context['user']).id
    follower_obj = FollowerClass(follower_id=follower_id,
                                 object_id=data_dict['id'])
    session.add(follower_obj)
    return follower_obj
def resource_view_dict_save(data_dict, context):
    """Save a ResourceView, packing any keys that are not ResourceView
    columns into its 'config' dict; return the saved object."""
    model = context['model']
    resource_view = context.get('resource_view')
    if resource_view:
        data_dict['id'] = resource_view.id
    config = {}
    # BUG FIX: dict.iteritems() does not exist on Python 3; use items().
    for key, value in data_dict.items():
        if key not in model.ResourceView.get_columns():
            config[key] = value
    data_dict['config'] = config

    return d.table_dict_save(data_dict, model.ResourceView, context)
| 33.578352 | 92 | 0.637864 | [
"Apache-2.0"
] | NP-compete/ckan | ckan/lib/dictization/model_save.py | 20,785 | Python |
""" ************************************************
* fileName: train.py
* desc: The training file for SimDeblur,
pay much attention to your constructed configs.
* author: mingdeng_cao
* date: 2021/07/14 17:26
* last revised: Reformat the file
************************************************ """
from simdeblur.config import build_config, merge_args
from simdeblur.engine.parse_arguments import parse_arguments
from simdeblur.engine.trainer import Trainer
def main():
    """SimDeblur entry point: parse CLI arguments, build the merged config
    and launch the trainer."""
    args = parse_arguments()
    cfg = merge_args(build_config(args.config_file), args)
    # Keep the raw argparse namespace around for downstream consumers.
    cfg.args = args
    Trainer(cfg).train()


if __name__ == "__main__":
    main()
| 23.896552 | 60 | 0.613276 | [
"MIT"
] | Wang-jiahao/SimDeblur | train.py | 693 | Python |
# Simple interactive text adventure. All branching is driven by user input.
answer = input("Would you like to play?")
# Normalize every answer the same way so capitalization/whitespace never
# changes the outcome (the original only normalized some prompts).
if answer.lower().strip() == "yes":
    print("Yay! Let's get started.")
    answer = input("You have reached an apple tree, would you like to pick an apple?").lower().strip()
    if answer == "yes":
        answer = input("would you like to eat the apple?").lower().strip()
        if answer == "yes":
            print("That was not a great idea!")
        else:
            print("good choice, you made it out safely.")
            # BUG FIX: corrected user-facing typos "accussed" -> "accused"
            # and "arressted" -> "arrested".
            answer = input("you encounter the apple tree owner and are accused of stealing. would you like to? (run/apologize)").lower().strip()
            if answer == "run":
                print("you have been arrested! Game Over!")
            else:
                print("you have won! Congratulations!")
    elif answer == "no":
        print("congratulations you have won!")
    else:
        print("Invalid choice, you lost!")
else:
    print("Aww that's so sad")
| 33.285714 | 131 | 0.562232 | [
"MIT"
] | sabdllah/03-Text-adventure | game.py | 932 | Python |
import operator
import os
from collections import OrderedDict
from pandas import DataFrame
from cave.analyzer.parameter_importance.base_parameter_importance import BaseParameterImportance
class Fanova(BaseParameterImportance):
    """
    fANOVA (functional analysis of variance) computes the fraction of the variance in the cost space explained by
    changing a parameter by marginalizing over all other parameters, for each parameter (or for pairs of
    parameters). Parameters with high importance scores will have a large impact on the performance. To this end, a
    random forest is trained as an empirical performance model on the available empirical data from the available
    runhistories.
    """

    def __init__(self,
                 runscontainer,
                 marginal_threshold=0.05):
        """Wrapper for parameter_importance to save the importance-object/ extract the results. We want to show the
        top X most important parameter-fanova-plots.

        Parameters
        ----------
        runscontainer: RunsContainer
            contains all important information about the configurator runs
        marginal_threshold: float
            parameter/s must be at least this important to be mentioned
        """
        super().__init__(runscontainer)
        self.marginal_threshold = marginal_threshold
        # Runs the evaluator immediately; results are picked up by
        # postprocess() below via the base class.
        self.parameter_importance("fanova")

    def get_name(self):
        """Human-readable analyzer name used in reports."""
        return 'fANOVA'

    def postprocess(self, pimp, output_dir):
        """Turn pimp's evaluation results into an OrderedDict with an HTML
        importance table and the paths to single/pairwise marginal plots."""
        result = OrderedDict()

        def parse_pairwise(p):
            """parse pimp's way of having pairwise parameters as key as str and return list of individuals"""
            res = [tmp.strip('\' ') for tmp in p.strip('[]').split(',')]
            return res

        # Scale importances from fractions to percentages.
        parameter_imp = {k: v * 100 for k, v in pimp.evaluator.evaluated_parameter_importance.items()}
        param_imp_std = {}
        if hasattr(pimp.evaluator, 'evaluated_parameter_importance_uncertainty'):
            param_imp_std = {k: v * 100 for k, v in pimp.evaluator.evaluated_parameter_importance_uncertainty.items()}

        for k in parameter_imp.keys():
            self.logger.debug("fanova-importance for %s: mean (over trees): %f, std: %s", k, parameter_imp[k],
                              str(param_imp_std[k]) if param_imp_std else 'N/A')

        # Split single and pairwise (pairwise are string: "['p1','p2']")
        single_imp = {k: v for k, v in parameter_imp.items() if not k.startswith('[') and v > self.marginal_threshold}
        pairwise_imp = {k: v for k, v in parameter_imp.items() if k.startswith('[') and v > self.marginal_threshold}

        # Set internal parameter importance for further analysis (such as parallel coordinates)
        self.fanova_single_importance = single_imp
        # NOTE(review): this assigns single_imp, not pairwise_imp -- looks
        # like a possible copy-paste slip; confirm intent before changing.
        self.fanova_pairwise_importance = single_imp

        # Dicts to lists of tuples, sorted descending after importance
        single_imp = OrderedDict(sorted(single_imp.items(), key=operator.itemgetter(1), reverse=True))
        pairwise_imp = OrderedDict(sorted(pairwise_imp.items(), key=operator.itemgetter(1), reverse=True))

        # Create table
        table = []
        if len(single_imp) > 0:
            table.extend([(20*"-"+" Single importance: "+20*"-", 20*"-")])
            for k, v in single_imp.items():
                value = str(round(v, 4))
                if param_imp_std:
                    value += " +/- " + str(round(param_imp_std[k], 4))
                table.append((k, value))
        if len(pairwise_imp) > 0:
            table.extend([(20*"-"+" Pairwise importance: "+20*"-", 20*"-")])
            for k, v in pairwise_imp.items():
                name = ' & '.join(parse_pairwise(k))
                value = str(round(v, 4))
                if param_imp_std:
                    value += " +/- " + str(round(param_imp_std[k], 4))
                table.append((name, value))

        keys, fanova_table = [k[0] for k in table], [k[1:] for k in table]
        df = DataFrame(data=fanova_table, index=keys)
        result['Importance'] = {'table': df.to_html(escape=False, header=False, index=True, justify='left')}

        # Get plot-paths
        result['Marginals'] = {p: {'figure': os.path.join(output_dir, "fanova", p + '.png')} for p in single_imp.keys()}
        # Right now no way to access paths of the plots -> file issue
        pairwise_plots = {" & ".join(parse_pairwise(p)):
                          os.path.join(output_dir, 'fanova', '_'.join(parse_pairwise(p)) + '.png')
                          for p in pairwise_imp.keys()}
        result['Pairwise Marginals'] = {p: {'figure': path}
                                        for p, path in pairwise_plots.items() if os.path.exists(path)}
        return result

    def get_jupyter(self):
        """Render tables and marginal plots inline in a Jupyter notebook,
        one section per budget in self.result."""
        from IPython.core.display import HTML, Image, display
        for b, result in self.result.items():
            error = self.result[b]['else'] if 'else' in self.result[b] else None
            if error:
                display(HTML(error))
            else:
                # Show table
                display(HTML(self.result[b]["Importance"]["table"]))
                # Show plots
                display(*list([Image(filename=d["figure"]) for d in self.result[b]['Marginals'].values()]))
                display(*list([Image(filename=d["figure"]) for d in self.result[b]['Pairwise Marginals'].values()]))
                # While working for a prettier solution, this might be an option:
                # display(HTML(figure_to_html([d["figure"] for d in self.result[b]['Marginals'].values()] +
                #                             [d["figure"] for d in self.result[b]['Pairwise Marginals'].values()],
                #                             max_in_a_row=3, true_break_between_rows=True)))
| 49.067797 | 120 | 0.604836 | [
"BSD-3-Clause"
] | automl/CAVE | cave/analyzer/parameter_importance/fanova.py | 5,790 | Python |
# --------------------------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for license information.
# --------------------------------------------------------------------------------------------
# Generated file, DO NOT EDIT
# Changes may cause incorrect behavior and will be lost if the code is regenerated.
# --------------------------------------------------------------------------------------------
from msrest.serialization import Model
class ReleaseTaskAttachment(Model):
    """ReleaseTaskAttachment.

    NOTE: this file is generated (see the header); only documentation is
    added here. The msrest ``_attribute_map`` drives (de)serialization,
    mapping Python attribute names to wire-format keys and types.

    :param _links:
    :type _links: :class:`ReferenceLinks <release.v4_1.models.ReferenceLinks>`
    :param created_on:
    :type created_on: datetime
    :param modified_by:
    :type modified_by: :class:`IdentityRef <release.v4_1.models.IdentityRef>`
    :param modified_on:
    :type modified_on: datetime
    :param name:
    :type name: str
    :param record_id:
    :type record_id: str
    :param timeline_id:
    :type timeline_id: str
    :param type:
    :type type: str
    """

    _attribute_map = {
        '_links': {'key': '_links', 'type': 'ReferenceLinks'},
        'created_on': {'key': 'createdOn', 'type': 'iso-8601'},
        'modified_by': {'key': 'modifiedBy', 'type': 'IdentityRef'},
        'modified_on': {'key': 'modifiedOn', 'type': 'iso-8601'},
        'name': {'key': 'name', 'type': 'str'},
        'record_id': {'key': 'recordId', 'type': 'str'},
        'timeline_id': {'key': 'timelineId', 'type': 'str'},
        'type': {'key': 'type', 'type': 'str'}
    }

    def __init__(self, _links=None, created_on=None, modified_by=None, modified_on=None, name=None, record_id=None, timeline_id=None, type=None):
        super(ReleaseTaskAttachment, self).__init__()
        self._links = _links
        self.created_on = created_on
        self.modified_by = modified_by
        self.modified_on = modified_on
        self.name = name
        self.record_id = record_id
        self.timeline_id = timeline_id
        self.type = type
| 40.37037 | 146 | 0.544495 | [
"Unlicense",
"MIT"
] | amcclead7336/Enterprise_Data_Science_Final | venv/lib/python3.8/site-packages/vsts/release/v4_1/models/release_task_attachment.py | 2,180 | Python |
try:
from maya import cmds
except ImportError:
pass
from menus import typeIDs as nem_typeids, base as nem_base
import logging
logging.basicConfig()
logger = logging.getLogger(__name__)
def createOutputJnts(*args):
    """For each selected hermite node, create one joint per output index and
    connect the node's translate/rotate/scale outputs to it.

    ``*args`` absorbs the extra value Maya passes to menu callbacks; it is
    not used. Prompts the user with a confirm dialog before creating.
    """
    ## Create outputs for the selected hermite nodes
    exitB = "Exit"
    doitB = "doIt"
    if not cmds.ls(sl=True):
        logger.warning("You must have a {} selected!".format(nem_typeids.HA_NODENAME))
        return

    confirm = cmds.confirmDialog(title="Create?", message="Ok?", button=doitB, db=doitB, b=exitB, cb=exitB)
    if confirm == doitB:
        for e in cmds.ls(sl=True):
            outCount = cmds.getAttr("{}.outputCount".format(e))
            for x in range(outCount):
                loc = cmds.joint(n='{}_out{}'.format(e, x))
                # Clear the selection -- presumably so the next joint is not
                # parented under this one (cmds.joint parents under the
                # active selection); confirm against Maya docs.
                cmds.select(clear=True)
                cmds.connectAttr("{}.outputs[{}].translate".format(e, x), "{}.translate".format(loc), f=True)
                cmds.connectAttr("{}.outputs[{}].rotate".format(e, x), "{}.rotate".format(loc), f=True)
                cmds.connectAttr("{}.outputs[{}].scale".format(e, x), "{}.scale".format(loc), f=True)
class HermiteArraySOUTH(nem_base.MenuBase):
    """South-position radial menu entry for hermite array nodes; its action
    builds output joints for the selected node."""

    ID = nem_typeids.HASOUTH
    MENUNAME = nem_typeids.HASOUTH_MENUNAME
    NODENAME = nem_typeids.HA_NODENAME
    FUNCTION = createOutputJnts

    def __init__(self):
        # Explicit base call kept (matches the module's existing pattern).
        nem_base.MenuBase.__init__(
            self,
            isRadial=nem_typeids.HASOUTH_ISRADIAL,
            radialPos=nem_typeids.HASOUTH_RADIALPOS,
        )
class HermiteArrayNORTH(nem_base.MenuBase):
    """North-position radial menu entry for hermite array nodes.

    Note: unlike the SOUTH entry, no FUNCTION attribute is defined here;
    behavior is inherited from MenuBase.
    """

    ID = nem_typeids.HANORTH
    MENUNAME = nem_typeids.HANORTH_MENUNAME
    NODENAME = nem_typeids.HA_NODENAME

    def __init__(self):
        # Explicit base call kept (matches the module's existing pattern).
        nem_base.MenuBase.__init__(
            self,
            isRadial=nem_typeids.HANORTH_ISRADIAL,
            radialPos=nem_typeids.HANORTH_RADIALPOS,
        )
| 37.490196 | 109 | 0.625 | [
"Apache-2.0"
] | fsanges/neMenuManager | menus/nodes/hermite.py | 1,912 | Python |
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import collections
import copy
import datetime
import json
import logging
import time
from unittest import mock
import eventlet
import fixtures
from oslo_config import cfg
from heat.common import context
from heat.common import exception
from heat.common import template_format
from heat.common import timeutils
from heat.db.sqlalchemy import api as db_api
from heat.engine.clients.os import keystone
from heat.engine.clients.os.keystone import fake_keystoneclient as fake_ks
from heat.engine.clients.os import nova
from heat.engine import environment
from heat.engine import function
from heat.engine import node_data
from heat.engine import resource
from heat.engine import scheduler
from heat.engine import service
from heat.engine import stack
from heat.engine import stk_defn
from heat.engine import template
from heat.engine import update
from heat.objects import raw_template as raw_template_object
from heat.objects import resource as resource_objects
from heat.objects import stack as stack_object
from heat.objects import stack_tag as stack_tag_object
from heat.objects import user_creds as ucreds_object
from heat.tests import common
from heat.tests import fakes
from heat.tests import generic_resource as generic_rsrc
from heat.tests import utils
empty_template = template_format.parse('''{
"HeatTemplateFormatVersion" : "2012-12-12",
}''')
class StackTest(common.HeatTestCase):
    def setUp(self):
        """Create an empty template, a dummy request context and stubbed
        keystone auth shared by every test in this class."""
        super(StackTest, self).setUp()
        self.tmpl = template.Template(copy.deepcopy(empty_template))
        self.ctx = utils.dummy_context()
        self.stub_auth()
    def test_stack_reads_tenant(self):
        """An explicit tenant_id argument is stored as-is on the stack."""
        self.stack = stack.Stack(self.ctx, 'test_stack', self.tmpl,
                                 tenant_id='bar')
        self.assertEqual('bar', self.stack.tenant_id)
    def test_stack_reads_tenant_from_context_if_empty(self):
        """With tenant_id=None the stack falls back to the context tenant."""
        self.ctx.tenant = 'foo'
        self.stack = stack.Stack(self.ctx, 'test_stack', self.tmpl,
                                 tenant_id=None)
        self.assertEqual('foo', self.stack.tenant_id)
    def test_stack_reads_username(self):
        """An explicit username argument is stored as-is on the stack."""
        self.stack = stack.Stack(self.ctx, 'test_stack', self.tmpl,
                                 username='bar')
        self.assertEqual('bar', self.stack.username)
    def test_stack_reads_username_from_context_if_empty(self):
        """With username=None the stack falls back to the context username."""
        self.ctx.username = 'foo'
        self.stack = stack.Stack(self.ctx, 'test_stack', self.tmpl,
                                 username=None)
        self.assertEqual('foo', self.stack.username)
    def test_stack_string_repr(self):
        """str(stack) renders as 'Stack "<name>" [<id>]'."""
        self.stack = stack.Stack(self.ctx, 'test_stack', self.tmpl)
        expected = 'Stack "%s" [%s]' % (self.stack.name, self.stack.id)
        observed = str(self.stack)
        self.assertEqual(expected, observed)
    def test_state_defaults(self):
        """A fresh Stack starts as CREATE IN_PROGRESS with an empty reason."""
        self.stack = stack.Stack(self.ctx, 'test_stack', self.tmpl)
        self.assertEqual(('CREATE', 'IN_PROGRESS'), self.stack.state)
        self.assertEqual('', self.stack.status_reason)
    def test_timeout_secs_default(self):
        """Without an explicit timeout, the stack_action_timeout config
        option supplies timeout_secs() and timeout_mins stays None."""
        cfg.CONF.set_override('stack_action_timeout', 1000)
        self.stack = stack.Stack(self.ctx, 'test_stack', self.tmpl)
        self.assertIsNone(self.stack.timeout_mins)
        self.assertEqual(1000, self.stack.timeout_secs())
    def test_timeout_secs(self):
        """timeout_secs() converts an explicit timeout_mins to seconds."""
        self.stack = stack.Stack(self.ctx, 'test_stack', self.tmpl,
                                 timeout_mins=10)
        self.assertEqual(600, self.stack.timeout_secs())
    @mock.patch.object(stack, 'oslo_timeutils')
    def test_time_elapsed(self, mock_tu):
        """time_elapsed() is the wall-clock delta (seconds) since
        created_time."""
        self.stack = stack.Stack(self.ctx, 'test_stack', self.tmpl)
        # dummy create time 10:00:00
        self.stack.created_time = datetime.datetime(2015, 7, 27, 10, 0, 0)
        # mock utcnow set to 10:10:00 (600s offset)
        mock_tu.utcnow.return_value = datetime.datetime(2015, 7, 27, 10, 10, 0)
        self.assertEqual(600, self.stack.time_elapsed())
    @mock.patch.object(stack, 'oslo_timeutils')
    def test_time_elapsed_negative(self, mock_tu):
        """time_elapsed() goes negative when "now" precedes created_time."""
        self.stack = stack.Stack(self.ctx, 'test_stack', self.tmpl)
        # dummy create time 10:00:00
        self.stack.created_time = datetime.datetime(2015, 7, 27, 10, 0, 0)
        # mock utcnow set to 09:59:50 (-10s offset)
        mock_tu.utcnow.return_value = datetime.datetime(2015, 7, 27, 9, 59, 50)
        self.assertEqual(-10, self.stack.time_elapsed())
    @mock.patch.object(stack, 'oslo_timeutils')
    def test_time_elapsed_ms(self, mock_tu):
        """time_elapsed() preserves sub-second (microsecond) resolution."""
        self.stack = stack.Stack(self.ctx, 'test_stack', self.tmpl)
        # dummy create time 10:05:00 (original comment wrongly said 10:00:00)
        self.stack.created_time = datetime.datetime(2015, 7, 27, 10, 5, 0)
        # mock utcnow set to 250ms before created_time (-0.25s offset)
        mock_tu.utcnow.return_value = datetime.datetime(2015, 7, 27,
                                                        10, 4, 59, 750000)
        self.assertEqual(-0.25, self.stack.time_elapsed())
    @mock.patch.object(stack, 'oslo_timeutils')
    def test_time_elapsed_with_updated_time(self, mock_tu):
        """When updated_time is set it replaces created_time as baseline."""
        self.stack = stack.Stack(self.ctx, 'test_stack', self.tmpl)
        # dummy create time 10:00:00
        self.stack.created_time = datetime.datetime(2015, 7, 27, 10, 0, 0)
        # dummy updated time 11:00:00; should consider this not created_time
        self.stack.updated_time = datetime.datetime(2015, 7, 27, 11, 0, 0)
        # mock utcnow set to 11:10:00 (600s offset)
        mock_tu.utcnow.return_value = datetime.datetime(2015, 7, 27, 11, 10, 0)
        self.assertEqual(600, self.stack.time_elapsed())
    @mock.patch.object(stack.Stack, 'time_elapsed')
    def test_time_remaining(self, mock_te):
        """time_remaining() is the stack timeout minus time_elapsed()."""
        self.stack = stack.Stack(self.ctx, 'test_stack', self.tmpl)
        # mock time elapsed; set to 600 seconds
        mock_te.return_value = 600
        # default stack timeout is 3600 seconds; remaining time 3000 secs
        self.assertEqual(3000, self.stack.time_remaining())
    @mock.patch.object(stack.Stack, 'time_elapsed')
    def test_has_timed_out(self, mock_te):
        """has_timed_out() is True only while IN_PROGRESS past the timeout."""
        self.stack = stack.Stack(self.ctx, 'test_stack', self.tmpl)
        self.stack.status = self.stack.IN_PROGRESS
        # test with timed out stack
        mock_te.return_value = 3601
        # default stack timeout is 3600 seconds; stack should time out
        self.assertTrue(self.stack.has_timed_out())
        # mock time elapsed; set to 600 seconds
        mock_te.return_value = 600
        # default stack timeout is 3600 seconds; remaining time 3000 secs
        self.assertFalse(self.stack.has_timed_out())
        # has_timed_out has no meaning when stack completes/fails;
        # should return false
        self.stack.status = self.stack.COMPLETE
        self.assertFalse(self.stack.has_timed_out())
        self.stack.status = self.stack.FAILED
        self.assertFalse(self.stack.has_timed_out())
    def test_no_auth_token(self):
        """Stack creation with a token-less context uses the auth plugin.

        The dummy context's auth plugin supplies the 'abcd1234' token
        when ctx.auth_token is None — presumably exercised during stack
        construction (not directly asserted here).
        """
        ctx = utils.dummy_context()
        ctx.auth_token = None
        self.stack = stack.Stack(ctx, 'test_stack', self.tmpl)
        self.assertEqual('abcd1234',
                         ctx.auth_plugin.auth_token)
    def test_state_deleted(self):
        """state_set() on an already-deleted stack is a no-op (None)."""
        self.stack = stack.Stack(self.ctx, 'test_stack', self.tmpl,
                                 action=stack.Stack.CREATE,
                                 status=stack.Stack.IN_PROGRESS)
        self.stack.id = '1234'
        self.stack.delete()
        self.assertIsNone(self.stack.state_set(stack.Stack.CREATE,
                                               stack.Stack.COMPLETE,
                                               'test'))
def test_load_nonexistant_id(self):
self.assertRaises(exception.NotFound, stack.Stack.load,
self.ctx, -1)
def test_total_resources_empty(self):
self.stack = stack.Stack(self.ctx, 'test_stack', self.tmpl,
status_reason='flimflam')
self.stack.store()
self.assertEqual(0, self.stack.total_resources(self.stack.id))
self.assertEqual(0, self.stack.total_resources())
    @mock.patch.object(db_api, 'stack_count_total_resources')
    def test_total_resources_not_stored(self, sctr):
        """An unstored stack reports zero resources without querying the DB."""
        self.stack = stack.Stack(self.ctx, 'test_stack', self.tmpl,
                                 status_reason='flimflam')
        self.assertEqual(0, self.stack.total_resources())
        sctr.assert_not_called()
def test_total_resources_not_found(self):
self.stack = stack.Stack(self.ctx, 'test_stack', self.tmpl,
status_reason='flimflam')
self.assertEqual(0, self.stack.total_resources('1234'))
    @mock.patch.object(db_api, 'stack_count_total_resources')
    def test_total_resources_generic(self, sctr):
        """A stored stack's total_resources() returns the DB count."""
        tpl = {'HeatTemplateFormatVersion': '2012-12-12',
               'Resources':
               {'A': {'Type': 'GenericResourceType'}}}
        self.stack = stack.Stack(self.ctx, 'test_stack',
                                 template.Template(tpl),
                                 status_reason='blarg')
        self.stack.store()
        sctr.return_value = 1
        self.assertEqual(1, self.stack.total_resources(self.stack.id))
        self.assertEqual(1, self.stack.total_resources())
    def test_resource_get(self):
        """resource_get() returns the named resource, or None if absent."""
        tpl = {'HeatTemplateFormatVersion': '2012-12-12',
               'Resources':
               {'A': {'Type': 'GenericResourceType'}}}
        self.stack = stack.Stack(self.ctx, 'test_stack',
                                 template.Template(tpl),
                                 status_reason='blarg')
        self.stack.store()
        self.assertEqual('A', self.stack.resource_get('A').name)
        # Same object as dict-style access on the stack.
        self.assertEqual(self.stack['A'], self.stack.resource_get('A'))
        self.assertIsNone(self.stack.resource_get('B'))
    @mock.patch.object(resource_objects.Resource, 'get_all_by_stack')
    def test_resource_get_db_fallback(self, gabs):
        """resource_get() falls back to DB records for unknown names.

        'B' is found via the DB fallback; 'C', which exists only in the
        DB record set, is ignored and 'D' is entirely unknown.
        """
        tpl = {'HeatTemplateFormatVersion': '2012-12-12',
               'Resources':
               {'A': {'Type': 'GenericResourceType'}}}
        self.stack = stack.Stack(self.ctx, 'test_stack',
                                 template.Template(tpl),
                                 status_reason='blarg')
        self.stack.store()
        tpl2 = {'HeatTemplateFormatVersion': '2012-12-12',
                'Resources':
                {'A': {'Type': 'GenericResourceType'},
                 'B': {'Type': 'GenericResourceType'}}}
        t2 = template.Template(tpl2)
        t2.store(self.ctx)
        db_resources = {
            'A': mock.MagicMock(),
            'B': mock.MagicMock(current_template_id=t2.id),
            'C': mock.MagicMock(current_template_id=t2.id)
        }
        # MagicMock(name=...) would set the mock's own name, so assign it.
        db_resources['A'].name = 'A'
        db_resources['B'].name = 'B'
        db_resources['C'].name = 'C'
        gabs.return_value = db_resources
        self.assertEqual('A', self.stack.resource_get('A').name)
        self.assertEqual('B', self.stack.resource_get('B').name)
        # Ignore the resource if only in db
        self.assertIsNone(self.stack.resource_get('C'))
        self.assertIsNone(self.stack.resource_get('D'))
    @mock.patch.object(resource_objects.Resource, 'get_all_by_stack')
    def test_iter_resources(self, mock_db_call):
        """iter_resources() yields every resource from the DB record set."""
        tpl = {'HeatTemplateFormatVersion': '2012-12-12',
               'Resources':
               {'A': {'Type': 'GenericResourceType'},
                'B': {'Type': 'GenericResourceType'}}}
        self.stack = stack.Stack(self.ctx, 'test_stack',
                                 template.Template(tpl),
                                 status_reason='blarg')
        self.stack.store()
        mock_rsc_a = mock.MagicMock(current_template_id=self.stack.t.id)
        mock_rsc_a.name = 'A'
        mock_rsc_b = mock.MagicMock(current_template_id=self.stack.t.id)
        mock_rsc_b.name = 'B'
        mock_db_call.return_value = {
            'A': mock_rsc_a,
            'B': mock_rsc_b
        }
        all_resources = list(self.stack.iter_resources())
        # Verify, the DB query is called with expected filter
        mock_db_call.assert_called_once_with(self.ctx, self.stack.id)
        # And returns the resources
        names = sorted([r.name for r in all_resources])
        self.assertEqual(['A', 'B'], names)
@mock.patch.object(resource_objects.Resource, 'get_all_by_stack')
def test_iter_resources_with_nested(self, mock_db_call):
tpl = {'HeatTemplateFormatVersion': '2012-12-12',
'Resources':
{'A': {'Type': 'StackResourceType'},
'B': {'Type': 'GenericResourceType'}}}
self.stack = stack.Stack(self.ctx, 'test_stack',
template.Template(tpl),
status_reason='blarg')
self.stack.store()
mock_rsc_a = mock.MagicMock(current_template_id=self.stack.t.id)
mock_rsc_a.name = 'A'
mock_rsc_b = mock.MagicMock(current_template_id=self.stack.t.id)
mock_rsc_b.name = 'B'
mock_db_call.return_value = {
'A': mock_rsc_a,
'B': mock_rsc_b
}
def get_more(nested_depth=0, filters=None):
yield 'X'
yield 'Y'
yield 'Z'
mock_nested = self.patchobject(generic_rsrc.StackResourceType,
'nested')
mock_nested.return_value.iter_resources = mock.MagicMock(
side_effect=get_more)
resource_generator = self.stack.iter_resources()
self.assertIsNot(resource_generator, list)
first_level_resources = list(resource_generator)
self.assertEqual(2, len(first_level_resources))
all_resources = list(self.stack.iter_resources(1))
self.assertEqual(5, len(all_resources))
    @mock.patch.object(resource_objects.Resource, 'get_all_by_stack')
    def test_iter_resources_with_filters(self, mock_db_call):
        """iter_resources(filters=...) passes the filter to the DB query."""
        tpl = {'HeatTemplateFormatVersion': '2012-12-12',
               'Resources':
               {'A': {'Type': 'GenericResourceType'},
                'B': {'Type': 'GenericResourceType'}}}
        self.stack = stack.Stack(self.ctx, 'test_stack',
                                 template.Template(tpl),
                                 status_reason='blarg')
        self.stack.store()
        mock_rsc = mock.MagicMock()
        mock_rsc.name = 'A'
        mock_rsc.current_template_id = self.stack.t.id
        mock_db_call.return_value = {'A': mock_rsc}
        all_resources = list(self.stack.iter_resources(
            filters=dict(name=['A'])
        ))
        # Verify, the DB query is called with expected filter
        mock_db_call.assert_has_calls([
            mock.call(self.ctx, self.stack.id, dict(name=['A'])),
            mock.call(self.ctx, self.stack.id),
        ])
        # Make sure it returns only one resource.
        self.assertEqual(1, len(all_resources))
        # And returns the resource A
        self.assertEqual('A', all_resources[0].name)
    @mock.patch.object(resource_objects.Resource, 'get_all_by_stack')
    def test_iter_resources_with_nonexistent_template(self, mock_db_call):
        """Resources pointing at a different template id are skipped."""
        tpl = {'HeatTemplateFormatVersion': '2012-12-12',
               'Resources':
               {'A': {'Type': 'GenericResourceType'},
                'B': {'Type': 'GenericResourceType'}}}
        self.stack = stack.Stack(self.ctx, 'test_stack',
                                 template.Template(tpl),
                                 status_reason='blarg')
        self.stack.store()
        mock_rsc_a = mock.MagicMock(current_template_id=self.stack.t.id)
        mock_rsc_a.name = 'A'
        # 'B' references a template id that does not match the stack's.
        mock_rsc_b = mock.MagicMock(current_template_id=self.stack.t.id + 1)
        mock_rsc_b.name = 'B'
        mock_db_call.return_value = {
            'A': mock_rsc_a,
            'B': mock_rsc_b
        }
        all_resources = list(self.stack.iter_resources())
        self.assertEqual(1, len(all_resources))
    @mock.patch.object(resource_objects.Resource, 'get_all_by_stack')
    def test_iter_resources_nested_with_filters(self, mock_db_call):
        """Filters are applied at the first level and forwarded to nested."""
        tpl = {'HeatTemplateFormatVersion': '2012-12-12',
               'Resources':
               {'A': {'Type': 'StackResourceType'},
                'B': {'Type': 'GenericResourceType'}}}
        self.stack = stack.Stack(self.ctx, 'test_stack',
                                 template.Template(tpl),
                                 status_reason='blarg')
        self.stack.store()
        mock_rsc_a = mock.MagicMock(current_template_id=self.stack.t.id)
        mock_rsc_a.name = 'A'
        mock_rsc_b = mock.MagicMock(current_template_id=self.stack.t.id)
        mock_rsc_b.name = 'B'
        mock_db_call.return_value = {
            'A': mock_rsc_a,
            'B': mock_rsc_b
        }
        # Nested iteration only yields when filters are forwarded.
        def get_more(nested_depth=0, filters=None):
            if filters:
                yield 'X'
        mock_nested = self.patchobject(generic_rsrc.StackResourceType,
                                       'nested')
        mock_nested.return_value.iter_resources = mock.MagicMock(
            side_effect=get_more)
        all_resources = list(self.stack.iter_resources(
            nested_depth=1,
            filters=dict(name=['A'])
        ))
        # Verify, the DB query is called with expected filter
        mock_db_call.assert_has_calls([
            mock.call(self.ctx, self.stack.id, dict(name=['A'])),
            mock.call(self.ctx, self.stack.id),
        ])
        # Returns three resources (1 first level + 2 second level)
        self.assertEqual(3, len(all_resources))
    def test_load_parent_resource(self):
        """Stack.load passes all stored attributes to the constructor."""
        self.stack = stack.Stack(self.ctx, 'load_parent_resource', self.tmpl,
                                 parent_resource='parent')
        self.stack.store()
        stk = stack_object.Stack.get_by_id(self.ctx, self.stack.id)
        t = template.Template.load(self.ctx, stk.raw_template_id)
        # Stub __init__ so load() only records the constructor call.
        self.patchobject(template.Template, 'load', return_value=t)
        self.patchobject(stack.Stack, '__init__', return_value=None)
        stack.Stack.load(self.ctx, stack_id=self.stack.id)
        stack.Stack.__init__.assert_called_once_with(
            self.ctx, stk.name, t, stack_id=stk.id,
            action=stk.action, status=stk.status,
            status_reason=stk.status_reason,
            timeout_mins=stk.timeout,
            disable_rollback=stk.disable_rollback,
            parent_resource='parent', owner_id=None,
            stack_user_project_id=None,
            created_time=mock.ANY,
            updated_time=None,
            user_creds_id=stk.user_creds_id,
            tenant_id='test_tenant_id',
            use_stored_context=False,
            username=mock.ANY,
            convergence=False,
            current_traversal=self.stack.current_traversal,
            prev_raw_template_id=None,
            current_deps=None, cache_data=None,
            nested_depth=0,
            deleted_time=None, refresh_cred=False)
        template.Template.load.assert_called_once_with(
            self.ctx, stk.raw_template_id, stk.raw_template)
def test_identifier(self):
self.stack = stack.Stack(self.ctx, 'identifier_test', self.tmpl)
self.stack.store()
identifier = self.stack.identifier()
self.assertEqual(self.stack.tenant_id, identifier.tenant)
self.assertEqual('identifier_test', identifier.stack_name)
self.assertTrue(identifier.stack_id)
self.assertFalse(identifier.path)
    def test_get_stack_abandon_data(self):
        """prepare_abandon() exports state, template, resources and tags."""
        tpl = {'HeatTemplateFormatVersion': '2012-12-12',
               'Parameters': {'param1': {'Type': 'String'}},
               'Resources':
               {'A': {'Type': 'GenericResourceType'},
                'B': {'Type': 'GenericResourceType'}}}
        # Expected JSON export of the two uncreated (INIT) resources.
        resources = '''{"A": {"status": "COMPLETE", "name": "A",
        "resource_data": {}, "resource_id": null, "action": "INIT",
        "type": "GenericResourceType", "metadata": {}},
        "B": {"status": "COMPLETE", "name": "B", "resource_data": {},
        "resource_id": null, "action": "INIT", "type": "GenericResourceType",
        "metadata": {}}}'''
        env = environment.Environment({'parameters': {'param1': 'test'}})
        self.ctx.tenant_id = '123'
        self.stack = stack.Stack(self.ctx, 'stack_details_test',
                                 template.Template(tpl, env=env),
                                 tenant_id=self.ctx.tenant_id,
                                 stack_user_project_id='234',
                                 tags=['tag1', 'tag2'])
        self.stack.store()
        info = self.stack.prepare_abandon()
        self.assertEqual('CREATE', info['action'])
        self.assertIn('id', info)
        self.assertEqual('stack_details_test', info['name'])
        self.assertEqual(json.loads(resources), info['resources'])
        self.assertEqual('IN_PROGRESS', info['status'])
        self.assertEqual(tpl, info['template'])
        self.assertEqual('123', info['project_id'])
        self.assertEqual('234', info['stack_user_project_id'])
        self.assertEqual(env.params, info['environment']['parameters'])
        self.assertEqual(['tag1', 'tag2'], info['tags'])
    def test_set_param_id(self):
        """The AWS::StackId pseudo parameter becomes the ARN after store()."""
        self.stack = stack.Stack(self.ctx, 'param_arn_test', self.tmpl)
        exp_prefix = ('arn:openstack:heat::test_tenant_id'
                      ':stacks/param_arn_test/')
        # Before store() the id portion of the ARN is still 'None'.
        self.assertEqual(self.stack.parameters['AWS::StackId'],
                         exp_prefix + 'None')
        self.stack.store()
        identifier = self.stack.identifier()
        self.assertEqual(exp_prefix + self.stack.id,
                         self.stack.parameters['AWS::StackId'])
        self.assertEqual(self.stack.parameters['AWS::StackId'],
                         identifier.arn())
    def test_set_param_id_update(self):
        """AWS::StackId stays stable across a stack update."""
        tmpl = {'HeatTemplateFormatVersion': '2012-12-12',
                'Resources': {
                    'AResource': {'Type': 'ResourceWithPropsType',
                                  'Metadata': {'Bar': {'Ref': 'AWS::StackId'}},
                                  'Properties': {'Foo': 'abc'}}}}
        self.stack = stack.Stack(self.ctx, 'update_stack_arn_test',
                                 template.Template(tmpl))
        self.stack.store()
        self.stack.create()
        self.assertEqual((stack.Stack.CREATE, stack.Stack.COMPLETE),
                         self.stack.state)
        stack_arn = self.stack.parameters['AWS::StackId']
        tmpl2 = {'HeatTemplateFormatVersion': '2012-12-12',
                 'Resources': {
                     'AResource': {'Type': 'ResourceWithPropsType',
                                   'Metadata': {'Bar':
                                                {'Ref': 'AWS::StackId'}},
                                   'Properties': {'Foo': 'xyz'}}}}
        updated_stack = stack.Stack(self.ctx, 'updated_stack',
                                    template.Template(tmpl2))
        self.stack.update(updated_stack)
        self.assertEqual((stack.Stack.UPDATE, stack.Stack.COMPLETE),
                         self.stack.state)
        self.assertEqual('xyz', self.stack['AResource'].properties['Foo'])
        # The metadata reference still resolves to the original ARN.
        self.assertEqual(
            stack_arn, self.stack['AResource'].metadata_get()['Bar'])
    def test_load_param_id(self):
        """A loaded stack retains the same AWS::StackId ARN parameter."""
        self.stack = stack.Stack(self.ctx, 'param_load_arn_test', self.tmpl)
        self.stack.store()
        identifier = self.stack.identifier()
        self.assertEqual(self.stack.parameters['AWS::StackId'],
                         identifier.arn())
        newstack = stack.Stack.load(self.ctx, stack_id=self.stack.id)
        self.assertEqual(identifier.arn(), newstack.parameters['AWS::StackId'])
def test_load_reads_tenant_id(self):
self.ctx.tenant = 'foobar'
self.stack = stack.Stack(self.ctx, 'stack_name', self.tmpl)
self.stack.store()
stack_id = self.stack.id
self.ctx.tenant = None
self.stack = stack.Stack.load(self.ctx, stack_id=stack_id)
self.assertEqual('foobar', self.stack.tenant_id)
    def test_load_reads_username_from_db(self):
        """Stack.load uses the stored username regardless of the context."""
        self.ctx.username = 'foobar'
        self.stack = stack.Stack(self.ctx, 'stack_name', self.tmpl)
        self.stack.store()
        stack_id = self.stack.id
        # Context username cleared: stored value must win.
        self.ctx.username = None
        stk = stack.Stack.load(self.ctx, stack_id=stack_id)
        self.assertEqual('foobar', stk.username)
        # Context username different: stored value must still win.
        self.ctx.username = 'not foobar'
        stk = stack.Stack.load(self.ctx, stack_id=stack_id)
        self.assertEqual('foobar', stk.username)
    def test_load_all(self):
        """load_all() hides nested and backup stacks unless requested."""
        stack1 = stack.Stack(self.ctx, 'stack1', self.tmpl)
        stack1.store()
        stack2 = stack.Stack(self.ctx, 'stack2', self.tmpl)
        stack2.store()
        stacks = list(stack.Stack.load_all(self.ctx))
        self.assertEqual(2, len(stacks))
        # Add another, nested, stack
        stack3 = stack.Stack(self.ctx, 'stack3', self.tmpl,
                             owner_id=stack2.id)
        stack3.store()
        # Should still be 2 without show_nested
        stacks = list(stack.Stack.load_all(self.ctx))
        self.assertEqual(2, len(stacks))
        stacks = list(stack.Stack.load_all(self.ctx, show_nested=True))
        self.assertEqual(3, len(stacks))
        # A backup stack should not be returned
        stack1._backup_stack()
        stacks = list(stack.Stack.load_all(self.ctx))
        self.assertEqual(2, len(stacks))
        stacks = list(stack.Stack.load_all(self.ctx, show_nested=True))
        self.assertEqual(3, len(stacks))
    def test_load_all_not_found(self):
        """load_all() skips stacks whose template fails to load."""
        stack1 = stack.Stack(self.ctx, 'stack1', self.tmpl)
        stack1.store()
        tmpl2 = template.Template(copy.deepcopy(empty_template))
        stack2 = stack.Stack(self.ctx, 'stack2', tmpl2)
        stack2.store()

        # Raise NotFound only for stack2's template.
        def fake_load(ctx, template_id, tmpl):
            if template_id == stack2.t.id:
                raise exception.NotFound()
            else:
                return tmpl2
        with mock.patch.object(template.Template, 'load') as tmpl_load:
            tmpl_load.side_effect = fake_load
            stacks = list(stack.Stack.load_all(self.ctx))
            self.assertEqual(1, len(stacks))
def test_created_time(self):
self.stack = stack.Stack(self.ctx, 'creation_time_test', self.tmpl)
self.assertIsNone(self.stack.created_time)
self.stack.store()
self.assertIsNotNone(self.stack.created_time)
    def test_updated_time(self):
        """updated_time is set once the stack goes through an update."""
        self.stack = stack.Stack(self.ctx, 'updated_time_test',
                                 self.tmpl)
        self.assertIsNone(self.stack.updated_time)
        self.stack.store()
        self.stack.create()
        tmpl = {'HeatTemplateFormatVersion': '2012-12-12',
                'Resources': {'R1': {'Type': 'GenericResourceType'}}}
        newstack = stack.Stack(self.ctx, 'updated_time_test',
                               template.Template(tmpl))
        self.stack.update(newstack)
        self.assertIsNotNone(self.stack.updated_time)
    def test_update_prev_raw_template(self):
        """Only the most recent previous raw template is retained.

        After a second update, prev_raw_template_id points at the new
        previous template and the older one has been deleted.
        """
        self.stack = stack.Stack(self.ctx, 'updated_time_test',
                                 self.tmpl)
        self.assertIsNone(self.stack.updated_time)
        self.stack.store()
        self.stack.create()
        self.assertIsNone(self.stack.prev_raw_template_id)
        tmpl = {'HeatTemplateFormatVersion': '2012-12-12',
                'Resources': {'R1': {'Type': 'GenericResourceType'}}}
        newstack = stack.Stack(self.ctx, 'updated_time_test',
                               template.Template(tmpl))
        self.stack.update(newstack)
        self.assertIsNotNone(self.stack.prev_raw_template_id)
        prev_t = template.Template.load(self.ctx,
                                        self.stack.prev_raw_template_id)
        self.assertEqual(tmpl, prev_t.t)
        prev_id = self.stack.prev_raw_template_id
        tmpl2 = {'HeatTemplateFormatVersion': '2012-12-12',
                 'Resources': {'R2': {'Type': 'GenericResourceType'}}}
        newstack2 = stack.Stack(self.ctx, 'updated_time_test',
                                template.Template(tmpl2))
        self.stack.update(newstack2)
        self.assertIsNotNone(self.stack.prev_raw_template_id)
        self.assertNotEqual(prev_id, self.stack.prev_raw_template_id)
        prev_t2 = template.Template.load(self.ctx,
                                         self.stack.prev_raw_template_id)
        self.assertEqual(tmpl2, prev_t2.t)
        # The first previous template must have been cleaned up.
        self.assertRaises(exception.NotFound,
                          template.Template.load, self.ctx, prev_id)
    def test_access_policy_update(self):
        """An AccessPolicy resource can be updated with new AllowedResources."""
        tmpl = {'HeatTemplateFormatVersion': '2012-12-12',
                'Resources': {
                    'R1': {'Type': 'GenericResourceType'},
                    'Policy': {
                        'Type': 'OS::Heat::AccessPolicy',
                        'Properties': {
                            'AllowedResources': ['R1']
                        }}}}
        self.stack = stack.Stack(self.ctx, 'update_stack_access_policy_test',
                                 template.Template(tmpl))
        self.stack.store()
        self.stack.create()
        self.assertEqual((stack.Stack.CREATE, stack.Stack.COMPLETE),
                         self.stack.state)
        tmpl2 = {'HeatTemplateFormatVersion': '2012-12-12',
                 'Resources': {
                     'R1': {'Type': 'GenericResourceType'},
                     'R2': {'Type': 'GenericResourceType'},
                     'Policy': {
                         'Type': 'OS::Heat::AccessPolicy',
                         'Properties': {
                             'AllowedResources': ['R1', 'R2'],
                         }}}}
        updated_stack = stack.Stack(self.ctx, 'updated_stack',
                                    template.Template(tmpl2))
        self.stack.update(updated_stack)
        self.assertEqual((stack.Stack.UPDATE, stack.Stack.COMPLETE),
                         self.stack.state)
    def test_abandon_nodelete_project(self):
        """delete(abandon=True) removes the stack's DB row.

        Per the test name the stack user project is presumably left in
        place — that side is not asserted here.
        """
        self.stack = stack.Stack(self.ctx, 'delete_trust', self.tmpl)
        stack_id = self.stack.store()
        self.stack.set_stack_user_project_id(project_id='aproject456')
        db_s = stack_object.Stack.get_by_id(self.ctx, stack_id)
        self.assertIsNotNone(db_s)
        self.stack.delete(abandon=True)
        db_s = stack_object.Stack.get_by_id(self.ctx, stack_id)
        self.assertIsNone(db_s)
        self.assertEqual((stack.Stack.DELETE, stack.Stack.COMPLETE),
                         self.stack.state)
    def test_suspend_resume(self):
        """suspend() then resume() succeed and bump updated_time each time."""
        tmpl = {'HeatTemplateFormatVersion': '2012-12-12',
                'Resources': {'AResource': {'Type': 'GenericResourceType'}}}
        self.stack = stack.Stack(self.ctx, 'suspend_test',
                                 template.Template(tmpl))
        self.stack.store()
        self.stack.create()
        self.assertEqual((self.stack.CREATE, self.stack.COMPLETE),
                         self.stack.state)
        self.assertIsNone(self.stack.updated_time)
        self.stack.suspend()
        self.assertEqual((self.stack.SUSPEND, self.stack.COMPLETE),
                         self.stack.state)
        stack_suspend_time = self.stack.updated_time
        self.assertIsNotNone(stack_suspend_time)
        self.stack.resume()
        self.assertEqual((self.stack.RESUME, self.stack.COMPLETE),
                         self.stack.state)
        self.assertNotEqual(stack_suspend_time, self.stack.updated_time)
    def test_suspend_stack_suspended_ok(self):
        """suspend() on an already-suspended stack skips resource suspend."""
        tmpl = {'HeatTemplateFormatVersion': '2012-12-12',
                'Resources': {'AResource': {'Type': 'GenericResourceType'}}}
        self.stack = stack.Stack(self.ctx, 'suspend_test',
                                 template.Template(tmpl))
        self.stack.store()
        self.stack.create()
        self.assertEqual((self.stack.CREATE, self.stack.COMPLETE),
                         self.stack.state)
        self.stack.suspend()
        self.assertEqual((self.stack.SUSPEND, self.stack.COMPLETE),
                         self.stack.state)
        # unexpected to call Resource.suspend
        self.patchobject(generic_rsrc.GenericResource, 'suspend')
        self.stack.suspend()
        self.assertEqual((self.stack.SUSPEND, self.stack.COMPLETE),
                         self.stack.state)
        generic_rsrc.GenericResource.suspend.assert_not_called()
    # NOTE: 'resumeed' typo kept — renaming would change the test id.
    def test_resume_stack_resumeed_ok(self):
        """resume() on an already-resumed stack skips resource resume."""
        tmpl = {'HeatTemplateFormatVersion': '2012-12-12',
                'Resources': {'AResource': {'Type': 'GenericResourceType'}}}
        self.stack = stack.Stack(self.ctx, 'suspend_test',
                                 template.Template(tmpl))
        self.stack.store()
        self.stack.create()
        self.assertEqual((self.stack.CREATE, self.stack.COMPLETE),
                         self.stack.state)
        self.stack.suspend()
        self.assertEqual((self.stack.SUSPEND, self.stack.COMPLETE),
                         self.stack.state)
        self.stack.resume()
        self.assertEqual((self.stack.RESUME, self.stack.COMPLETE),
                         self.stack.state)
        # unexpected to call Resource.resume
        self.patchobject(generic_rsrc.GenericResource, 'resume')
        self.stack.resume()
        self.assertEqual((self.stack.RESUME, self.stack.COMPLETE),
                         self.stack.state)
        generic_rsrc.GenericResource.resume.assert_not_called()
    def test_suspend_fail(self):
        """A resource exception during suspend puts the stack in FAILED."""
        tmpl = {'HeatTemplateFormatVersion': '2012-12-12',
                'Resources': {'AResource': {'Type': 'GenericResourceType'}}}
        exc = Exception('foo')
        self.patchobject(generic_rsrc.GenericResource, 'handle_suspend',
                         side_effect=exc)
        self.stack = stack.Stack(self.ctx, 'suspend_test_fail',
                                 template.Template(tmpl))
        self.stack.store()
        self.stack.create()
        self.assertEqual((self.stack.CREATE, self.stack.COMPLETE),
                         self.stack.state)
        self.stack.suspend()
        self.assertEqual((self.stack.SUSPEND, self.stack.FAILED),
                         self.stack.state)
        self.assertEqual('Resource SUSPEND failed: Exception: '
                         'resources.AResource: foo',
                         self.stack.status_reason)
        generic_rsrc.GenericResource.handle_suspend.assert_called_once_with()
    def test_resume_fail(self):
        """A resource exception during resume puts the stack in FAILED."""
        tmpl = {'HeatTemplateFormatVersion': '2012-12-12',
                'Resources': {'AResource': {'Type': 'GenericResourceType'}}}
        self.patchobject(generic_rsrc.GenericResource, 'handle_resume',
                         side_effect=Exception('foo'))
        self.stack = stack.Stack(self.ctx, 'resume_test_fail',
                                 template.Template(tmpl))
        self.stack.store()
        self.stack.create()
        self.assertEqual((self.stack.CREATE, self.stack.COMPLETE),
                         self.stack.state)
        self.stack.suspend()
        self.assertEqual((self.stack.SUSPEND, self.stack.COMPLETE),
                         self.stack.state)
        self.stack.resume()
        self.assertEqual((self.stack.RESUME, self.stack.FAILED),
                         self.stack.state)
        self.assertEqual('Resource RESUME failed: Exception: '
                         'resources.AResource: foo',
                         self.stack.status_reason)
    def test_suspend_timeout(self):
        """A scheduler.Timeout during suspend marks the stack FAILED."""
        tmpl = {'HeatTemplateFormatVersion': '2012-12-12',
                'Resources': {'AResource': {'Type': 'GenericResourceType'}}}
        exc = scheduler.Timeout('foo', 0)
        self.patchobject(generic_rsrc.GenericResource, 'handle_suspend',
                         side_effect=exc)
        self.stack = stack.Stack(self.ctx, 'suspend_test_fail_timeout',
                                 template.Template(tmpl))
        self.stack.store()
        self.stack.create()
        self.assertEqual((self.stack.CREATE, self.stack.COMPLETE),
                         self.stack.state)
        self.stack.suspend()
        self.assertEqual((self.stack.SUSPEND, self.stack.FAILED),
                         self.stack.state)
        self.assertEqual('Suspend timed out', self.stack.status_reason)
        generic_rsrc.GenericResource.handle_suspend.assert_called_once_with()
    def test_resume_timeout(self):
        """A scheduler.Timeout during resume marks the stack FAILED."""
        tmpl = {'HeatTemplateFormatVersion': '2012-12-12',
                'Resources': {'AResource': {'Type': 'GenericResourceType'}}}
        exc = scheduler.Timeout('foo', 0)
        self.patchobject(generic_rsrc.GenericResource, 'handle_resume',
                         side_effect=exc)
        self.stack = stack.Stack(self.ctx, 'resume_test_fail_timeout',
                                 template.Template(tmpl))
        self.stack.store()
        self.stack.create()
        self.assertEqual((self.stack.CREATE, self.stack.COMPLETE),
                         self.stack.state)
        self.stack.suspend()
        self.assertEqual((self.stack.SUSPEND, self.stack.COMPLETE),
                         self.stack.state)
        self.stack.resume()
        self.assertEqual((self.stack.RESUME, self.stack.FAILED),
                         self.stack.state)
        self.assertEqual('Resume timed out', self.stack.status_reason)
        generic_rsrc.GenericResource.handle_resume.assert_called_once_with()
def _get_stack_to_check(self, name):
tpl = {"HeatTemplateFormatVersion": "2012-12-12",
"Resources": {
"A": {"Type": "GenericResourceType"},
"B": {"Type": "GenericResourceType"}}}
self.stack = stack.Stack(self.ctx, name, template.Template(tpl),
status_reason=name)
self.stack.store()
def _mock_check(res):
res.handle_check = mock.Mock()
[_mock_check(res) for res in self.stack.resources.values()]
return self.stack
def test_check_supported(self):
stack1 = self._get_stack_to_check('check-supported')
stack1['A'].state_set(stack1['A'].CREATE, stack1['A'].COMPLETE)
stack1['B'].state_set(stack1['B'].CREATE, stack1['B'].COMPLETE)
stack1.check()
self.assertEqual(stack1.COMPLETE, stack1.status)
self.assertEqual(stack1.CHECK, stack1.action)
[self.assertTrue(res.handle_check.called)
for res in stack1.resources.values()]
self.assertNotIn('not fully supported', stack1.status_reason)
    def test_check_not_supported(self):
        """check() completes but warns when a resource lacks handle_check."""
        stack1 = self._get_stack_to_check('check-not-supported')
        # Remove the mocked handle_check so 'B' appears unsupported.
        del stack1['B'].handle_check
        stack1['A'].state_set(stack1['A'].CREATE, stack1['A'].COMPLETE)
        stack1.check()
        self.assertEqual(stack1.COMPLETE, stack1.status)
        self.assertEqual(stack1.CHECK, stack1.action)
        self.assertTrue(stack1['A'].handle_check.called)
        self.assertIn('not fully supported', stack1.status_reason)
    def test_check_fail(self):
        """check() fails for uncreated resources and for check exceptions."""
        stk = self._get_stack_to_check('check-fail')
        # if resource not created, check fail
        stk.check()
        self.assertEqual(stk.FAILED, stk.status)
        self.assertEqual(stk.CHECK, stk.action)
        self.assertFalse(stk['A'].handle_check.called)
        self.assertFalse(stk['B'].handle_check.called)
        self.assertIn('Resource A not created yet',
                      stk.status_reason)
        self.assertIn('Resource B not created yet',
                      stk.status_reason)
        # check if resource created
        stk['A'].handle_check.side_effect = Exception('fail-A')
        stk['B'].handle_check.side_effect = Exception('fail-B')
        stk['A'].state_set(stk['A'].CREATE, stk['A'].COMPLETE)
        stk['B'].state_set(stk['B'].CREATE, stk['B'].COMPLETE)
        stk.check()
        self.assertEqual(stk.FAILED, stk.status)
        self.assertEqual(stk.CHECK, stk.action)
        self.assertTrue(stk['A'].handle_check.called)
        self.assertTrue(stk['B'].handle_check.called)
        # Both failure messages are aggregated into the status reason.
        self.assertIn('fail-A', stk.status_reason)
        self.assertIn('fail-B', stk.status_reason)
    def test_adopt_stack(self):
        """adopt() imports existing resources from abandon-style data."""
        adopt_data = '''{
        "action": "CREATE",
        "status": "COMPLETE",
        "name": "my-test-stack-name",
        "resources": {
        "AResource": {
        "status": "COMPLETE",
        "name": "AResource",
        "resource_data": {},
        "metadata": {},
        "resource_id": "test-res-id",
        "action": "CREATE",
        "type": "GenericResourceType"
          }
         }
        }'''
        tmpl = {
            'HeatTemplateFormatVersion': '2012-12-12',
            'Resources': {'AResource': {'Type': 'GenericResourceType'}},
            'Outputs': {'TestOutput': {'Value': {
                'Fn::GetAtt': ['AResource', 'Foo']}}
            }
        }
        self.stack = stack.Stack(utils.dummy_context(), 'test_stack',
                                 template.Template(tmpl),
                                 adopt_stack_data=json.loads(adopt_data))
        self.stack.store()
        self.stack.adopt()
        res = self.stack['AResource']
        # The resource id comes from the adopt data, not from creation.
        self.assertEqual(u'test-res-id', res.resource_id)
        self.assertEqual('AResource', res.name)
        self.assertEqual('COMPLETE', res.status)
        self.assertEqual('ADOPT', res.action)
        self.assertEqual((self.stack.ADOPT, self.stack.COMPLETE),
                         self.stack.state)
        loaded_stack = stack.Stack.load(self.ctx, self.stack.id)
        loaded_stack._update_all_resource_data(False, True)
        self.assertEqual('AResource',
                         loaded_stack.outputs['TestOutput'].get_value())
        # Adopted resources carry no stored properties data.
        self.assertIsNone(loaded_stack['AResource']._stored_properties_data)
    def test_adopt_stack_fails(self):
        """adopt() fails when a template resource has no adopt data."""
        adopt_data = '''{
                "action": "CREATE",
                "status": "COMPLETE",
                "name": "my-test-stack-name",
                "resources": {}
                }'''
        tmpl = template.Template({
            'HeatTemplateFormatVersion': '2012-12-12',
            'Resources': {
                'foo': {'Type': 'GenericResourceType'},
            }
        })
        self.stack = stack.Stack(utils.dummy_context(), 'test_stack',
                                 tmpl,
                                 adopt_stack_data=json.loads(adopt_data))
        self.stack.store()
        self.stack.adopt()
        self.assertEqual((self.stack.ADOPT, self.stack.FAILED),
                         self.stack.state)
        expected = ('Resource ADOPT failed: Exception: resources.foo: '
                    'Resource ID was not provided.')
        self.assertEqual(expected, self.stack.status_reason)
    def test_adopt_stack_rollback(self):
        """A failed adopt with rollback enabled deletes with abandon=True."""
        adopt_data = '''{
                "name": "my-test-stack-name",
                "resources": {}
                }'''
        tmpl = template.Template({
            'HeatTemplateFormatVersion': '2012-12-12',
            'Resources': {
                'foo': {'Type': 'GenericResourceType'},
            }
        })
        self.stack = stack.Stack(utils.dummy_context(),
                                 'test_stack',
                                 tmpl,
                                 disable_rollback=False,
                                 adopt_stack_data=json.loads(adopt_data))
        self.stack.store()
        # Wrap delete so the rollback call can be asserted while still
        # performing the real deletion.
        with mock.patch.object(self.stack, 'delete',
                               side_effect=self.stack.delete) as mock_delete:
            self.stack.adopt()
        self.assertEqual((self.stack.ROLLBACK, self.stack.COMPLETE),
                         self.stack.state)
        mock_delete.assert_called_once_with(action=self.stack.ROLLBACK,
                                            abandon=True)
    def test_resource_by_refid(self):
        """resource_by_refid() matches in live states but not during DELETE."""
        tmpl = {'HeatTemplateFormatVersion': '2012-12-12',
                'Resources': {'AResource': {'Type': 'GenericResourceType'}}}
        self.stack = stack.Stack(self.ctx, 'resource_by_refid_stack',
                                 template.Template(tmpl))
        self.stack.store()
        self.stack.create()
        self.assertEqual((stack.Stack.CREATE, stack.Stack.COMPLETE),
                         self.stack.state)
        self.assertIn('AResource', self.stack)
        rsrc = self.stack['AResource']
        rsrc.resource_id_set('aaaa')
        # The refid lookup succeeds in each of these action/status pairs.
        for action, status in (
                (rsrc.INIT, rsrc.COMPLETE),
                (rsrc.CREATE, rsrc.IN_PROGRESS),
                (rsrc.CREATE, rsrc.COMPLETE),
                (rsrc.RESUME, rsrc.IN_PROGRESS),
                (rsrc.RESUME, rsrc.COMPLETE),
                (rsrc.UPDATE, rsrc.IN_PROGRESS),
                (rsrc.UPDATE, rsrc.COMPLETE),
                (rsrc.CHECK, rsrc.COMPLETE)):
            rsrc.state_set(action, status)
            stk_defn.update_resource_data(self.stack.defn, rsrc.name,
                                          rsrc.node_data())
            self.assertEqual(rsrc, self.stack.resource_by_refid('aaaa'))
        rsrc.state_set(rsrc.DELETE, rsrc.IN_PROGRESS)
        stk_defn.update_resource_data(self.stack.defn, rsrc.name,
                                      rsrc.node_data())
        try:
            # Mid-delete the refid no longer resolves; unknown ids never do.
            self.assertIsNone(self.stack.resource_by_refid('aaaa'))
            self.assertIsNone(self.stack.resource_by_refid('bbbb'))
        finally:
            rsrc.state_set(rsrc.CREATE, rsrc.COMPLETE)
    def test_resource_name_ref_by_depends_on(self):
        """A property referencing a resource name resolves via refid lookup."""
        tmpl = {'HeatTemplateFormatVersion': '2012-12-12',
                'Resources': {
                    'AResource': {'Type': 'GenericResourceType'},
                    'BResource': {'Type': 'ResourceWithPropsType',
                                  'Properties': {'Foo': 'AResource'},
                                  'DependsOn': 'AResource'}}}
        self.stack = stack.Stack(self.ctx, 'resource_by_name_ref_stack',
                                 template.Template(tmpl))
        self.stack.store()
        self.stack.create()
        self.assertEqual((stack.Stack.CREATE, stack.Stack.COMPLETE),
                         self.stack.state)
        self.assertIn('AResource', self.stack)
        self.assertIn('BResource', self.stack)
        rsrc = self.stack['AResource']
        rsrc.resource_id_set('aaaa')
        b_rsrc = self.stack['BResource']
        b_rsrc.resource_id_set('bbbb')
        b_foo_ref = b_rsrc.properties.get('Foo')
        for action, status in (
                (rsrc.INIT, rsrc.COMPLETE),
                (rsrc.CREATE, rsrc.IN_PROGRESS),
                (rsrc.CREATE, rsrc.COMPLETE),
                (rsrc.RESUME, rsrc.IN_PROGRESS),
                (rsrc.RESUME, rsrc.COMPLETE),
                (rsrc.UPDATE, rsrc.IN_PROGRESS),
                (rsrc.UPDATE, rsrc.COMPLETE)):
            rsrc.state_set(action, status)
            ref_rsrc = self.stack.resource_by_refid(b_foo_ref)
            self.assertEqual(rsrc, ref_rsrc)
            # The DependsOn edge is visible from the referenced resource.
            self.assertIn(b_rsrc.name, ref_rsrc.required_by())
    def test_create_failure_recovery(self):
        """Check that rollback still works with dynamic metadata.

        This test fails the second instance.

        handle_create raises on the first call and succeeds on the
        retry, so the initial create FAILs; a subsequent update with the
        same template must recover to UPDATE/COMPLETE and resolve
        BResource's Ref to AResource's refid.
        """
        tmpl = {'HeatTemplateFormatVersion': '2012-12-12',
                'Resources': {
                    'AResource': {'Type': 'OverwrittenFnGetRefIdType',
                                  'Properties': {'Foo': 'abc'}},
                    'BResource': {'Type': 'ResourceWithPropsType',
                                  'Properties': {
                                      'Foo': {'Ref': 'AResource'}}}}}
        self.stack = stack.Stack(self.ctx, 'update_test_stack',
                                 template.Template(tmpl),
                                 disable_rollback=True)

        class FakeException(Exception):
            # to avoid pep8 check
            pass

        # First create attempt fails, the retry (during update) succeeds.
        mock_create = self.patchobject(generic_rsrc.ResourceWithFnGetRefIdType,
                                       'handle_create',
                                       side_effect=[FakeException, None])
        mock_delete = self.patchobject(generic_rsrc.ResourceWithFnGetRefIdType,
                                       'handle_delete', return_value=None)
        self.stack.store()
        self.stack.create()
        self.assertEqual((stack.Stack.CREATE, stack.Stack.FAILED),
                         self.stack.state)
        self.assertEqual('abc', self.stack['AResource'].properties['Foo'])
        updated_stack = stack.Stack(self.ctx, 'updated_stack',
                                    template.Template(tmpl),
                                    disable_rollback=True)
        self.stack.update(updated_stack)
        self.assertEqual((stack.Stack.UPDATE, stack.Stack.COMPLETE),
                         self.stack.state)
        self.assertEqual(
            'abc',
            self.stack['AResource']._stored_properties_data['Foo'])
        self.assertEqual(
            'ID-AResource',
            self.stack['BResource']._stored_properties_data['Foo'])
        mock_delete.assert_called_once_with()
        self.assertEqual(2, mock_create.call_count)
    def test_create_bad_attribute(self):
        """An InvalidTemplateAttribute during create fails the stack.

        The status_reason must carry the attribute error's message.
        """
        tmpl = {'HeatTemplateFormatVersion': '2012-12-12',
                'Resources': {
                    'AResource': {'Type': 'GenericResourceType'},
                    'BResource': {'Type': 'ResourceWithPropsType',
                                  'Properties': {
                                      'Foo': {'Fn::GetAtt': ['AResource',
                                                             'Foo']}}}}}
        self.stack = stack.Stack(self.ctx, 'bad_attr_test_stack',
                                 template.Template(tmpl),
                                 disable_rollback=True)
        self.patchobject(generic_rsrc.ResourceWithProps,
                         '_update_stored_properties',
                         side_effect=exception.InvalidTemplateAttribute(
                             resource='a', key='foo'))
        self.stack.store()
        self.stack.create()
        self.assertEqual((stack.Stack.CREATE, stack.Stack.FAILED),
                         self.stack.state)
        self.assertEqual('Resource CREATE failed: The Referenced Attribute '
                         '(a foo) is incorrect.', self.stack.status_reason)
    def test_stack_create_timeout(self):
        """A create that never progresses fails once the timeout elapses."""
        def dummy_task():
            # Never-terminating task so only the clock can end the create.
            while True:
                yield

        self.patchobject(scheduler.DependencyTaskGroup, '__call__',
                         return_value=dummy_task())

        stk = stack.Stack(self.ctx, 's', self.tmpl)
        start_time = time.time()
        # Third wallclock reading is past the stack's timeout.
        self.patchobject(timeutils, 'wallclock',
                         side_effect=[start_time, start_time + 1,
                                      start_time + stk.timeout_secs() + 1])
        stk.create()

        self.assertEqual((stack.Stack.CREATE, stack.Stack.FAILED), stk.state)
        self.assertEqual('Create timed out', stk.status_reason)
        self.assertEqual(3, timeutils.wallclock.call_count)
def test_stack_name_valid(self):
stk = stack.Stack(self.ctx, 's', self.tmpl)
self.assertIsInstance(stk, stack.Stack)
stk = stack.Stack(self.ctx, 'stack123', self.tmpl)
self.assertIsInstance(stk, stack.Stack)
stk = stack.Stack(self.ctx, 'test.stack', self.tmpl)
self.assertIsInstance(stk, stack.Stack)
stk = stack.Stack(self.ctx, 'test_stack', self.tmpl)
self.assertIsInstance(stk, stack.Stack)
stk = stack.Stack(self.ctx, 'TEST', self.tmpl)
self.assertIsInstance(stk, stack.Stack)
stk = stack.Stack(self.ctx, 'test-stack', self.tmpl)
self.assertIsInstance(stk, stack.Stack)
    def test_stack_name_invalid(self):
        """Illegal characters or over-long names raise on construction."""
        # Names longer than 255 characters are rejected too.
        gt_255_chars = ('abcdefghijklmnopqrstuvwxyzabcdefghijklmnopqrstuvwxyz'
                        'abcdefghijklmnopqrstuvwxyzabcdefghijklmnopqrstuvwxyz'
                        'abcdefghijklmnopqrstuvwxyzabcdefghijklmnopqrstuvwxyz'
                        'abcdefghijklmnopqrstuvwxyzabcdefghijklmnopqrstuvwxyz'
                        'abcdefghijklmnopqrstuvwxyzabcdefghijklmnopqrstuv')
        stack_names = ['_foo', '1bad', '.kcats', 'test stack', ' teststack',
                       '^-^', '"stack"', '1234', 'cat|dog', '$(foo)',
                       'test/stack', 'test\\stack', 'test::stack',
                       'test;stack', 'test~stack', '#test', gt_255_chars]
        for stack_name in stack_names:
            ex = self.assertRaises(
                exception.StackValidationFailed, stack.Stack,
                self.ctx, stack_name, self.tmpl)
            self.assertIn("Invalid stack name %s must contain" % stack_name,
                          str(ex))
    def test_stack_name_invalid_type(self):
        """Non-string stack names raise StackValidationFailed."""
        stack_names = [{"bad": 123}, ["no", "lists"]]
        for stack_name in stack_names:
            ex = self.assertRaises(
                exception.StackValidationFailed, stack.Stack,
                self.ctx, stack_name, self.tmpl)
            self.assertIn("Invalid stack name %s, must be a string"
                          % stack_name, str(ex))
    def test_resource_state_get_att(self):
        """Fn::GetAtt in an output resolves regardless of resource state."""
        tmpl = {
            'HeatTemplateFormatVersion': '2012-12-12',
            'Resources': {'AResource': {'Type': 'GenericResourceType'}},
            'Outputs': {'TestOutput': {'Value': {
                'Fn::GetAtt': ['AResource', 'Foo']}}
            }
        }
        self.stack = stack.Stack(self.ctx, 'resource_state_get_att',
                                 template.Template(tmpl))
        self.stack.store()
        self.stack.create()
        self.assertEqual((stack.Stack.CREATE, stack.Stack.COMPLETE),
                         self.stack.state)
        self.assertIn('AResource', self.stack)
        rsrc = self.stack['AResource']
        rsrc.resource_id_set('aaaa')
        self.assertEqual('AResource', rsrc.FnGetAtt('Foo'))
        # Walk through every action/status combination and check the
        # output still resolves to the attribute value each time.
        for action, status in (
                (rsrc.CREATE, rsrc.IN_PROGRESS),
                (rsrc.CREATE, rsrc.COMPLETE),
                (rsrc.CREATE, rsrc.FAILED),
                (rsrc.SUSPEND, rsrc.IN_PROGRESS),
                (rsrc.SUSPEND, rsrc.COMPLETE),
                (rsrc.RESUME, rsrc.IN_PROGRESS),
                (rsrc.RESUME, rsrc.COMPLETE),
                (rsrc.UPDATE, rsrc.IN_PROGRESS),
                (rsrc.UPDATE, rsrc.FAILED),
                (rsrc.UPDATE, rsrc.COMPLETE),
                (rsrc.DELETE, rsrc.IN_PROGRESS),
                (rsrc.DELETE, rsrc.FAILED),
                (rsrc.DELETE, rsrc.COMPLETE)):
            rsrc.state_set(action, status)
            self.stack._update_all_resource_data(False, True)
            self.assertEqual('AResource',
                             self.stack.outputs['TestOutput'].get_value())
    def test_resource_required_by(self):
        """required_by() reflects direct DependsOn relationships only."""
        tmpl = {'HeatTemplateFormatVersion': '2012-12-12',
                'Resources': {'AResource': {'Type': 'GenericResourceType'},
                              'BResource': {'Type': 'GenericResourceType',
                                            'DependsOn': 'AResource'},
                              'CResource': {'Type': 'GenericResourceType',
                                            'DependsOn': 'BResource'},
                              'DResource': {'Type': 'GenericResourceType',
                                            'DependsOn': 'BResource'}}}

        self.stack = stack.Stack(self.ctx, 'depends_test_stack',
                                 template.Template(tmpl))
        self.stack.store()
        self.stack.create()
        self.assertEqual((stack.Stack.CREATE, stack.Stack.COMPLETE),
                         self.stack.state)

        # Only B depends directly on A; nothing depends on C.
        self.assertEqual(['BResource'],
                         self.stack['AResource'].required_by())
        self.assertEqual([],
                         self.stack['CResource'].required_by())
        required_by = self.stack['BResource'].required_by()
        self.assertEqual(2, len(required_by))
        for r in ['CResource', 'DResource']:
            self.assertIn(r, required_by)
    def test_resource_multi_required_by(self):
        """A list-valued DependsOn registers the dependent on every target."""
        tmpl = {'HeatTemplateFormatVersion': '2012-12-12',
                'Resources': {'AResource': {'Type': 'GenericResourceType'},
                              'BResource': {'Type': 'GenericResourceType'},
                              'CResource': {'Type': 'GenericResourceType'},
                              'DResource': {'Type': 'GenericResourceType',
                                            'DependsOn': ['AResource',
                                                          'BResource',
                                                          'CResource']}}}

        self.stack = stack.Stack(self.ctx, 'depends_test_stack',
                                 template.Template(tmpl))
        self.stack.store()
        self.stack.create()
        self.assertEqual((stack.Stack.CREATE, stack.Stack.COMPLETE),
                         self.stack.state)

        for r in ['AResource', 'BResource', 'CResource']:
            self.assertEqual(['DResource'],
                             self.stack[r].required_by())
def test_store_saves_owner(self):
"""owner_id attribute of Store is saved to the database when stored."""
self.stack = stack.Stack(self.ctx, 'owner_stack', self.tmpl)
stack_ownee = stack.Stack(self.ctx, 'ownee_stack', self.tmpl,
owner_id=self.stack.id)
stack_ownee.store()
db_stack = stack_object.Stack.get_by_id(self.ctx, stack_ownee.id)
self.assertEqual(self.stack.id, db_stack.owner_id)
    def test_init_user_creds_id(self):
        """A user_creds_id passed at init is stored and used for context."""
        ctx_init = utils.dummy_context(user='my_user',
                                       password='my_pass')
        ctx_init.request_id = self.ctx.request_id
        creds = ucreds_object.UserCreds.create(ctx_init)
        self.stack = stack.Stack(self.ctx, 'creds_init', self.tmpl,
                                 user_creds_id=creds.id)
        self.stack.store()
        self.assertEqual(creds.id, self.stack.user_creds_id)
        ctx_expected = ctx_init.to_dict()
        # The auth token is deliberately not persisted with the creds.
        ctx_expected['auth_token'] = None
        self.assertEqual(ctx_expected, self.stack.stored_context().to_dict())
    def test_tags_property_get_set(self):
        """The tags property lazy-loads from the DB and caches in _tags."""
        self.stack = stack.Stack(self.ctx, 'stack_tags', self.tmpl)
        self.stack.store()

        stack_id = self.stack.id
        test_stack = stack.Stack.load(self.ctx, stack_id=stack_id)
        # _tags stays unset until the tags property is first read.
        self.assertIsNone(test_stack._tags)
        self.assertEqual([], test_stack.tags)

        self.stack = stack.Stack(self.ctx, 'stack_name', self.tmpl)
        self.stack.tags = ['tag1', 'tag2']
        self.assertEqual(['tag1', 'tag2'], self.stack._tags)
        self.stack.store()

        stack_id = self.stack.id
        test_stack = stack.Stack.load(self.ctx, stack_id=stack_id)
        self.assertIsNone(test_stack._tags)
        self.assertEqual(['tag1', 'tag2'], test_stack.tags)
        # After the first read the value is cached on the instance.
        self.assertEqual(['tag1', 'tag2'], test_stack._tags)
    def test_load_reads_tags(self):
        """Stack.load returns the tags that were stored with the stack."""
        self.stack = stack.Stack(self.ctx, 'stack_tags', self.tmpl)
        self.stack.store()
        stack_id = self.stack.id
        test_stack = stack.Stack.load(self.ctx, stack_id=stack_id)
        self.assertEqual([], test_stack.tags)

        self.stack = stack.Stack(self.ctx, 'stack_name', self.tmpl,
                                 tags=['tag1', 'tag2'])
        self.stack.store()
        stack_id = self.stack.id
        test_stack = stack.Stack.load(self.ctx, stack_id=stack_id)
        self.assertEqual(['tag1', 'tag2'], test_stack.tags)
    def test_store_saves_tags(self):
        """store() persists tags to the stack_tag table (None when absent)."""
        self.stack = stack.Stack(self.ctx, 'tags_stack', self.tmpl)
        self.stack.store()
        db_tags = stack_tag_object.StackTagList.get(self.stack.context,
                                                    self.stack.id)
        self.assertIsNone(db_tags)

        self.stack = stack.Stack(self.ctx, 'tags_stack2', self.tmpl,
                                 tags=['tag1', 'tag2'])
        self.stack.store()
        db_tags = stack_tag_object.StackTagList.get(self.stack.context,
                                                    self.stack.id)
        self.assertEqual('tag1', db_tags[0].tag)
        self.assertEqual('tag2', db_tags[1].tag)
    def test_store_saves_creds(self):
        """A user_creds entry is created on first stack store.

        With the 'password' deferred auth method, the username/password
        from the request context are persisted and the auth token is not.
        """
        cfg.CONF.set_default('deferred_auth_method', 'password')
        self.stack = stack.Stack(self.ctx, 'creds_stack', self.tmpl)
        self.stack.store()

        # The store should've created a user_creds row and set user_creds_id
        db_stack = stack_object.Stack.get_by_id(self.ctx, self.stack.id)
        user_creds_id = db_stack.user_creds_id
        self.assertIsNotNone(user_creds_id)

        # should've stored the username/password in the context
        user_creds = ucreds_object.UserCreds.get_by_id(self.ctx, user_creds_id)
        self.assertEqual(self.ctx.username, user_creds.get('username'))
        self.assertEqual(self.ctx.password, user_creds.get('password'))
        self.assertIsNone(user_creds.get('trust_id'))
        self.assertIsNone(user_creds.get('trustor_user_id'))

        # Check the stored_context is as expected
        expected_context = context.RequestContext.from_dict(self.ctx.to_dict())
        expected_context.auth_token = None
        stored_context = self.stack.stored_context().to_dict()
        self.assertEqual(expected_context.to_dict(), stored_context)

        # Store again, ID should not change
        self.stack.store()
        self.assertEqual(user_creds_id, db_stack.user_creds_id)
    def test_store_saves_creds_trust(self):
        """A user_creds entry is created on first stack store.

        With the 'trusts' deferred auth method, the trust_id and
        trustor_user_id are persisted instead of username/password.
        """
        cfg.CONF.set_override('deferred_auth_method', 'trusts')

        self.patchobject(keystone.KeystoneClientPlugin, '_create',
                         return_value=fake_ks.FakeKeystoneClient(
                             user_id='auser123'))

        self.stack = stack.Stack(self.ctx, 'creds_stack', self.tmpl)
        self.stack.store()

        # The store should've created a user_creds row and set user_creds_id
        db_stack = stack_object.Stack.get_by_id(self.ctx, self.stack.id)
        user_creds_id = db_stack.user_creds_id
        self.assertIsNotNone(user_creds_id)

        # should've stored the trust_id and trustor_user_id returned from
        # FakeKeystoneClient.create_trust_context, username/password should
        # not have been stored
        user_creds = ucreds_object.UserCreds.get_by_id(self.ctx, user_creds_id)
        self.assertIsNone(user_creds.get('username'))
        self.assertIsNone(user_creds.get('password'))
        self.assertEqual('atrust', user_creds.get('trust_id'))
        self.assertEqual('auser123', user_creds.get('trustor_user_id'))

        auth = self.patchobject(context.RequestContext,
                                'trusts_auth_plugin')
        self.patchobject(auth, 'get_access',
                         return_value=fakes.FakeAccessInfo([], None, None))

        # Check the stored_context is as expected
        expected_context = context.RequestContext(
            trust_id='atrust', trustor_user_id='auser123',
            request_id=self.ctx.request_id, is_admin=False).to_dict()
        stored_context = self.stack.stored_context().to_dict()
        self.assertEqual(expected_context, stored_context)

        # Store again, ID should not change
        self.stack.store()
        self.assertEqual(user_creds_id, db_stack.user_creds_id)

        keystone.KeystoneClientPlugin._create.assert_called_with()
    def test_backup_copies_user_creds_id(self):
        """A backup stack inherits the original stack's user_creds_id."""
        ctx_init = utils.dummy_context(user='my_user',
                                       password='my_pass')
        ctx_init.request_id = self.ctx.request_id
        creds = ucreds_object.UserCreds.create(ctx_init)
        self.stack = stack.Stack(self.ctx, 'creds_init', self.tmpl,
                                 user_creds_id=creds.id)
        self.stack.store()
        self.assertEqual(creds.id, self.stack.user_creds_id)
        backup = self.stack._backup_stack()
        self.assertEqual(creds.id, backup.user_creds_id)
    def test_stored_context_err(self):
        """Test stored_context error path.

        Calling stored_context with no user_creds_id set raises Error.
        """
        self.stack = stack.Stack(self.ctx, 'creds_stack', self.tmpl)
        ex = self.assertRaises(exception.Error, self.stack.stored_context)
        expected_err = 'Attempt to use stored_context with no user_creds'
        self.assertEqual(expected_err, str(ex))
    def test_store_gets_username_from_stack(self):
        """The explicit username kwarg wins over the context's username."""
        self.stack = stack.Stack(self.ctx, 'username_stack',
                                 self.tmpl, username='foobar')
        self.ctx.username = 'not foobar'
        self.stack.store()
        db_stack = stack_object.Stack.get_by_id(self.ctx, self.stack.id)
        self.assertEqual('foobar', db_stack.username)
def test_store_backup_true(self):
self.stack = stack.Stack(self.ctx, 'username_stack',
self.tmpl, username='foobar')
self.ctx.username = 'not foobar'
self.stack.store(backup=True)
db_stack = stack_object.Stack.get_by_id(self.ctx, self.stack.id)
self.assertTrue(db_stack.backup)
def test_store_backup_false(self):
self.stack = stack.Stack(self.ctx, 'username_stack',
self.tmpl, username='foobar')
self.ctx.username = 'not foobar'
self.stack.store(backup=False)
db_stack = stack_object.Stack.get_by_id(self.ctx, self.stack.id)
self.assertFalse(db_stack.backup)
    def test_init_stored_context_false(self):
        """With use_stored_context=False, the caller's context is kept."""
        ctx_init = utils.dummy_context(user='mystored_user',
                                       password='mystored_pass')
        ctx_init.request_id = self.ctx.request_id
        creds = ucreds_object.UserCreds.create(ctx_init)
        self.stack = stack.Stack(self.ctx, 'creds_store1', self.tmpl,
                                 user_creds_id=creds.id,
                                 use_stored_context=False)
        ctx_expected = self.ctx.to_dict()
        self.assertEqual(ctx_expected, self.stack.context.to_dict())
        self.stack.store()
        self.assertEqual(ctx_expected, self.stack.context.to_dict())
    def test_init_stored_context_true(self):
        """With use_stored_context=True, the stored creds' context is used."""
        ctx_init = utils.dummy_context(user='mystored_user',
                                       password='mystored_pass')
        ctx_init.request_id = self.ctx.request_id
        creds = ucreds_object.UserCreds.create(ctx_init)
        self.stack = stack.Stack(self.ctx, 'creds_store2', self.tmpl,
                                 user_creds_id=creds.id,
                                 use_stored_context=True)
        ctx_expected = ctx_init.to_dict()
        # Auth tokens are never part of the stored credentials.
        ctx_expected['auth_token'] = None
        self.assertEqual(ctx_expected, self.stack.context.to_dict())
        self.stack.store()
        self.assertEqual(ctx_expected, self.stack.context.to_dict())
    def test_load_stored_context_false(self):
        """Stack.load with use_stored_context=False keeps the load context."""
        ctx_init = utils.dummy_context(user='mystored_user',
                                       password='mystored_pass')
        ctx_init.request_id = self.ctx.request_id
        creds = ucreds_object.UserCreds.create(ctx_init)
        self.stack = stack.Stack(self.ctx, 'creds_store3', self.tmpl,
                                 user_creds_id=creds.id)
        self.stack.store()

        load_stack = stack.Stack.load(self.ctx, stack_id=self.stack.id,
                                      use_stored_context=False)
        self.assertEqual(self.ctx.to_dict(), load_stack.context.to_dict())
    def test_load_stored_context_true(self):
        """Stack.load with use_stored_context=True restores the creds ctx."""
        ctx_init = utils.dummy_context(user='mystored_user',
                                       password='mystored_pass')
        ctx_init.request_id = self.ctx.request_id
        creds = ucreds_object.UserCreds.create(ctx_init)
        self.stack = stack.Stack(self.ctx, 'creds_store4', self.tmpl,
                                 user_creds_id=creds.id)
        self.stack.store()
        ctx_expected = ctx_init.to_dict()
        # Auth tokens are never part of the stored credentials.
        ctx_expected['auth_token'] = None

        load_stack = stack.Stack.load(self.ctx, stack_id=self.stack.id,
                                      use_stored_context=True)
        self.assertEqual(ctx_expected, load_stack.context.to_dict())
def test_load_honors_owner(self):
"""Loading a stack from the database will set the owner_id.
Loading a stack from the database will set the owner_id of the
resultant stack appropriately.
"""
self.stack = stack.Stack(self.ctx, 'owner_stack', self.tmpl)
stack_ownee = stack.Stack(self.ctx, 'ownee_stack', self.tmpl,
owner_id=self.stack.id)
stack_ownee.store()
saved_stack = stack.Stack.load(self.ctx, stack_id=stack_ownee.id)
self.assertEqual(self.stack.id, saved_stack.owner_id)
    def _test_load_with_refresh_cred(self, refresh=True):
        """Helper: load with check_refresh_cred and verify refresh_cred flag.

        :param refresh: when True, the stored trustor differs from the
            current user, so the loaded stack should flag its credentials
            for refresh; when False, they match and no refresh is needed.
        """
        cfg.CONF.set_override('deferred_auth_method', 'trusts')
        self.patchobject(self.ctx.auth_plugin, 'get_user_id',
                         return_value='old_trustor_user_id')
        self.patchobject(self.ctx.auth_plugin, 'get_project_id',
                         return_value='test_tenant_id')
        old_context = utils.dummy_context()
        old_context.trust_id = 'atrust123'
        old_context.trustor_user_id = (
            'trustor_user_id' if refresh else 'old_trustor_user_id')
        m_sc = self.patchobject(context, 'StoredContext')
        m_sc.from_dict.return_value = old_context
        self.stack = stack.Stack(self.ctx, 'test_regenerate_trust', self.tmpl)
        self.stack.store()
        load_stack = stack.Stack.load(self.ctx, stack_id=self.stack.id,
                                      check_refresh_cred=True)
        self.assertEqual(refresh, load_stack.refresh_cred)
    def test_load_with_refresh_cred(self):
        """Mismatched trustor triggers a credential refresh on load."""
        self._test_load_with_refresh_cred()
    def test_load_with_no_refresh_cred(self):
        """Matching trustor does not trigger a credential refresh on load."""
        self._test_load_with_refresh_cred(refresh=False)
    def test_requires_deferred_auth(self):
        """The stack needs deferred auth iff any resource requires it."""
        tmpl = {'HeatTemplateFormatVersion': '2012-12-12',
                'Resources': {'AResource': {'Type': 'GenericResourceType'},
                              'BResource': {'Type': 'GenericResourceType'},
                              'CResource': {'Type': 'GenericResourceType'}}}

        self.stack = stack.Stack(self.ctx, 'update_test_stack',
                                 template.Template(tmpl),
                                 disable_rollback=False)

        self.assertFalse(self.stack.requires_deferred_auth())

        # A single resource requiring deferred auth is enough.
        self.stack['CResource'].requires_deferred_auth = True
        self.assertTrue(self.stack.requires_deferred_auth())
    def test_stack_user_project_id_default(self):
        """stack_user_project_id defaults to None in memory and in the DB."""
        self.stack = stack.Stack(self.ctx, 'user_project_none', self.tmpl)
        self.stack.store()
        self.assertIsNone(self.stack.stack_user_project_id)
        db_stack = stack_object.Stack.get_by_id(self.ctx, self.stack.id)
        self.assertIsNone(db_stack.stack_user_project_id)
    def test_stack_user_project_id_constructor(self):
        """stack_user_project_id passed at init is persisted on store."""
        self.stub_keystoneclient()

        self.stack = stack.Stack(self.ctx, 'user_project_init',
                                 self.tmpl,
                                 stack_user_project_id='aproject1234')
        self.stack.store()
        self.assertEqual('aproject1234', self.stack.stack_user_project_id)
        db_stack = stack_object.Stack.get_by_id(self.ctx, self.stack.id)
        self.assertEqual('aproject1234', db_stack.stack_user_project_id)

        self.stack.delete()
        self.assertEqual((stack.Stack.DELETE, stack.Stack.COMPLETE),
                         self.stack.state)
    def test_stack_user_project_id_setter(self):
        """set_stack_user_project_id updates the attribute and the DB row."""
        self.stub_keystoneclient()

        self.stack = stack.Stack(self.ctx, 'user_project_init', self.tmpl)
        self.stack.store()
        self.assertIsNone(self.stack.stack_user_project_id)
        self.stack.set_stack_user_project_id(project_id='aproject456')
        self.assertEqual('aproject456', self.stack.stack_user_project_id)
        db_stack = stack_object.Stack.get_by_id(self.ctx, self.stack.id)
        self.assertEqual('aproject456', db_stack.stack_user_project_id)

        self.stack.delete()
        self.assertEqual((stack.Stack.DELETE, stack.Stack.COMPLETE),
                         self.stack.state)
    def test_stack_user_project_id_create(self):
        """create_stack_user_project_id sets and persists the project id."""
        self.stub_keystoneclient()

        self.stack = stack.Stack(self.ctx, 'user_project_init', self.tmpl)
        self.stack.store()
        self.assertIsNone(self.stack.stack_user_project_id)
        self.stack.create_stack_user_project_id()

        # 'aprojectid' is what the stubbed keystone client returns.
        self.assertEqual('aprojectid', self.stack.stack_user_project_id)
        db_stack = stack_object.Stack.get_by_id(self.ctx, self.stack.id)
        self.assertEqual('aprojectid', db_stack.stack_user_project_id)

        self.stack.delete()
        self.assertEqual((stack.Stack.DELETE, stack.Stack.COMPLETE),
                         self.stack.state)
    def test_stack_eager_or_lazy_load_templ(self):
        """raw_template is eagerly loaded or lazy-loaded on first access."""
        self.stack = stack.Stack(self.ctx, 'test_stack_eager_or_lazy_tmpl',
                                 self.tmpl)
        self.stack.store()

        ctx1 = utils.dummy_context()
        s1_db_result = db_api.stack_get(ctx1, self.stack.id, eager_load=True)
        s1_obj = stack_object.Stack._from_db_object(ctx1, stack_object.Stack(),
                                                    s1_db_result)
        self.assertIsNotNone(s1_obj._raw_template)
        self.assertIsNotNone(s1_obj.raw_template)

        ctx2 = utils.dummy_context()
        s2_db_result = db_api.stack_get(ctx2, self.stack.id, eager_load=False)
        s2_obj = stack_object.Stack._from_db_object(ctx2, stack_object.Stack(),
                                                    s2_db_result)
        # _raw_template has not been set since it not eagerly loaded
        self.assertFalse(hasattr(s2_obj, "_raw_template"))
        # accessing raw_template lazy loads it
        self.assertIsNotNone(s2_obj.raw_template)
        self.assertIsNotNone(s2_obj._raw_template)
def test_preview_resources_returns_list_of_resource_previews(self):
tmpl = {'HeatTemplateFormatVersion': '2012-12-12',
'Resources': {'AResource': {'Type': 'GenericResourceType'}}}
self.stack = stack.Stack(self.ctx, 'preview_stack',
template.Template(tmpl))
res = mock.Mock()
res.preview.return_value = 'foo'
self.stack._resources = {'r1': res}
resources = self.stack.preview_resources()
self.assertEqual(['foo'], resources)
    def test_correct_outputs(self):
        """A valid Fn::GetAtt output resolves after create and delete works."""
        tmpl = {'HeatTemplateFormatVersion': '2012-12-12',
                'Resources': {
                    'AResource': {'Type': 'ResourceWithPropsType',
                                  'Properties': {'Foo': 'abc'}},
                    'BResource': {'Type': 'ResourceWithPropsType',
                                  'Properties': {'Foo': 'def'}}},
                'Outputs': {
                    'Resource_attr': {
                        'Value': {
                            'Fn::GetAtt': ['AResource', 'Foo']}}}}
        self.stack = stack.Stack(self.ctx, 'stack_with_correct_outputs',
                                 template.Template(tmpl))
        self.stack.store()
        self.stack.create()
        self.assertEqual((stack.Stack.CREATE, stack.Stack.COMPLETE),
                         self.stack.state)

        self.assertEqual('abc', self.stack['AResource'].properties['Foo'])
        # According _resolve_attribute method in GenericResource output
        # value will be equal with name AResource.
        self.stack._update_all_resource_data(False, True)
        self.assertEqual('AResource',
                         self.stack.outputs['Resource_attr'].get_value())

        self.stack.delete()

        self.assertEqual((self.stack.DELETE, self.stack.COMPLETE),
                         self.stack.state)
    def test_incorrect_outputs(self):
        """Resolving an output with an unknown attribute raises at get_value."""
        tmpl = {'HeatTemplateFormatVersion': '2012-12-12',
                'Resources': {
                    'AResource': {'Type': 'ResourceWithPropsType',
                                  'Properties': {'Foo': 'abc'}}},
                'Outputs': {
                    'Resource_attr': {
                        'Value': {
                            'Fn::GetAtt': ['AResource', 'Bar']}}}}
        self.stack = stack.Stack(self.ctx, 'stack_with_incorrect_outputs',
                                 template.Template(tmpl))
        self.stack.store()
        self.stack.create()
        self.assertEqual((stack.Stack.CREATE, stack.Stack.COMPLETE),
                         self.stack.state)

        ex = self.assertRaises(exception.InvalidTemplateAttribute,
                               self.stack.outputs['Resource_attr'].get_value)
        self.assertIn('The Referenced Attribute (AResource Bar) is '
                      'incorrect.',
                      str(ex))

        self.stack.delete()

        self.assertEqual((self.stack.DELETE, self.stack.COMPLETE),
                         self.stack.state)
    def test_stack_load_no_param_value_validation(self):
        """Test stack loading with disabled parameter value validation.

        Stack.load must not re-run the custom constraint; the flavor
        lookup from the initial create/validate remains the only call.
        """
        tmpl = template_format.parse('''
        heat_template_version: 2013-05-23
        parameters:
            flavor:
                type: string
                description: A flavor.
                constraints:
                    - custom_constraint: nova.flavor
        resources:
            a_resource:
                type: GenericResourceType
        ''')

        # Mock objects so the query for flavors in server.FlavorConstraint
        # works for stack creation
        fc = fakes.FakeClient()
        self.patchobject(nova.NovaClientPlugin, 'client', return_value=fc)
        fc.flavors = mock.Mock()
        flavor = collections.namedtuple("Flavor", ["id", "name"])
        flavor.id = "1234"
        flavor.name = "dummy"
        fc.flavors.get.return_value = flavor

        test_env = environment.Environment({'flavor': '1234'})
        self.stack = stack.Stack(self.ctx, 'stack_with_custom_constraint',
                                 template.Template(tmpl, env=test_env))

        self.stack.validate()
        self.stack.store()
        self.stack.create()
        stack_id = self.stack.id

        self.assertEqual((stack.Stack.CREATE, stack.Stack.COMPLETE),
                         self.stack.state)

        loaded_stack = stack.Stack.load(self.ctx, stack_id=self.stack.id)
        self.assertEqual(stack_id, loaded_stack.parameters['OS::stack_id'])

        # verify that fc.flavors.get was called from the constraint
        # exactly once (i.e. not again during load)
        fc.flavors.get.assert_called_once_with('1234')
    def test_snapshot_delete(self):
        """delete_snapshot invokes handle_delete_snapshot per resource."""
        snapshots = []

        class ResourceDeleteSnapshot(generic_rsrc.ResourceWithProps):

            def handle_delete_snapshot(self, data):
                # Record the per-resource snapshot data it was given.
                snapshots.append(data)

        resource._register_class(
            'ResourceDeleteSnapshot', ResourceDeleteSnapshot)
        tmpl = {'HeatTemplateFormatVersion': '2012-12-12',
                'Resources': {'AResource': {'Type': 'ResourceDeleteSnapshot'}}}

        self.stack = stack.Stack(self.ctx, 'snapshot_stack',
                                 template.Template(tmpl))
        data = self.stack.prepare_abandon()
        fake_snapshot = collections.namedtuple('Snapshot', ('data',))(data)
        self.stack.delete_snapshot(fake_snapshot)
        self.assertEqual([data['resources']['AResource']], snapshots)
    def test_delete_snapshot_without_data(self):
        """delete_snapshot is a no-op for a snapshot whose data is None."""
        tmpl = {'HeatTemplateFormatVersion': '2012-12-12',
                'Resources': {'R1': {'Type': 'GenericResourceType'}}}
        self.stack = stack.Stack(self.ctx, 'snapshot_stack',
                                 template.Template(tmpl))
        fake_snapshot = collections.namedtuple('Snapshot', ('data',))(None)
        self.assertIsNone(self.stack.delete_snapshot(fake_snapshot))
    def test_incorrect_outputs_cfn_get_attr(self):
        """validate() rejects an output Fn::GetAtt naming a bad attribute."""
        tmpl = {'HeatTemplateFormatVersion': '2012-12-12',
                'Resources': {
                    'AResource': {'Type': 'ResourceWithPropsType',
                                  'Properties': {'Foo': 'abc'}}},
                'Outputs': {
                    'Resource_attr': {
                        'Value': {
                            'Fn::GetAtt': ['AResource', 'Bar']}}}}
        self.stack = stack.Stack(self.ctx, 'stack_with_correct_outputs',
                                 template.Template(tmpl))

        self.assertRaisesRegex(
            exception.StackValidationFailed,
            ('Outputs.Resource_attr.Value.Fn::GetAtt: The Referenced '
             r'Attribute \(AResource Bar\) is incorrect.'),
            self.stack.validate)
    def test_incorrect_outputs_cfn_incorrect_reference(self):
        """A cfn output referencing an undefined resource fails validation."""
        tmpl = template_format.parse("""
        HeatTemplateFormatVersion: '2012-12-12'
        Outputs:
          Output:
            Value:
              Fn::GetAtt:
                - Resource
                - Foo
        """)
        self.stack = stack.Stack(self.ctx, 'stack_with_incorrect_outputs',
                                 template.Template(tmpl))

        ex = self.assertRaises(exception.StackValidationFailed,
                               self.stack.validate)
        self.assertIn('The specified reference "Resource" '
                      '(in unknown) is incorrect.', str(ex))
    def test_incorrect_outputs_incorrect_reference(self):
        """A HOT output referencing an undefined resource fails validation."""
        tmpl = template_format.parse("""
        heat_template_version: 2013-05-23
        outputs:
          output:
            value: { get_attr: [resource, foo] }
        """)
        self.stack = stack.Stack(self.ctx, 'stack_with_incorrect_outputs',
                                 template.Template(tmpl))

        ex = self.assertRaises(exception.StackValidationFailed,
                               self.stack.validate)
        self.assertIn('The specified reference "resource" '
                      '(in unknown) is incorrect.', str(ex))
    def test_incorrect_outputs_cfn_missing_value(self):
        """An output definition without a Value key fails validation."""
        tmpl = template_format.parse("""
        HeatTemplateFormatVersion: '2012-12-12'
        Resources:
          AResource:
            Type: ResourceWithPropsType
            Properties:
              Foo: abc
        Outputs:
          Resource_attr:
            Description: the attr
        """)
        self.stack = stack.Stack(self.ctx, 'stack_with_correct_outputs',
                                 template.Template(tmpl))

        ex = self.assertRaises(exception.StackValidationFailed,
                               self.stack.validate)

        self.assertIn('Each output definition must contain a Value key.',
                      str(ex))
        self.assertIn('Outputs.Resource_attr', str(ex))
    def test_incorrect_outputs_cfn_empty_value(self):
        """An output with an empty-string Value passes validation."""
        tmpl = template_format.parse("""
        HeatTemplateFormatVersion: '2012-12-12'
        Resources:
          AResource:
            Type: ResourceWithPropsType
            Properties:
              Foo: abc
        Outputs:
          Resource_attr:
            Value: ''
        """)
        self.stack = stack.Stack(self.ctx, 'stack_with_correct_outputs',
                                 template.Template(tmpl))

        self.assertIsNone(self.stack.validate())
    def test_incorrect_outputs_cfn_none_value(self):
        """An output whose Value is null/None passes validation."""
        tmpl = template_format.parse("""
        HeatTemplateFormatVersion: '2012-12-12'
        Resources:
          AResource:
            Type: ResourceWithPropsType
            Properties:
              Foo: abc
        Outputs:
          Resource_attr:
            Value:
        """)
        self.stack = stack.Stack(self.ctx, 'stack_with_correct_outputs',
                                 template.Template(tmpl))

        self.assertIsNone(self.stack.validate())
    def test_incorrect_outputs_cfn_string_data(self):
        """An output defined as a bare string (not a map) fails validation."""
        tmpl = template_format.parse("""
        HeatTemplateFormatVersion: '2012-12-12'
        Resources:
          AResource:
            Type: ResourceWithPropsType
            Properties:
              Foo: abc
        Outputs:
          Resource_attr:
            This is wrong data
        """)
        self.stack = stack.Stack(self.ctx, 'stack_with_correct_outputs',
                                 template.Template(tmpl))

        ex = self.assertRaises(exception.StackValidationFailed,
                               self.stack.validate)

        self.assertIn('Found a %s instead' % str.__name__,
                      str(ex))
        self.assertIn('Outputs.Resource_attr', str(ex))
    def test_prop_validate_value(self):
        """Bad property values fail strict validation but pass when relaxed."""
        tmpl = template_format.parse("""
        HeatTemplateFormatVersion: '2012-12-12'
        Resources:
          AResource:
            Type: ResourceWithPropsType
            Properties:
              FooInt: notanint
        """)

        self.stack = stack.Stack(self.ctx, 'stack_with_bad_property',
                                 template.Template(tmpl))

        ex = self.assertRaises(exception.StackValidationFailed,
                               self.stack.validate)

        self.assertIn("'notanint' is not an integer",
                      str(ex))

        # With strict validation disabled the same template validates.
        self.stack.strict_validate = False
        self.assertIsNone(self.stack.validate())
    def test_disable_validate_required_param(self):
        """Missing required params fail unless only templates are validated.

        Both strict and relaxed validation reject the missing parameter
        (with different exception types); validate_res_tmpl_only=True
        skips parameter resolution entirely.
        """
        tmpl = template_format.parse("""
        heat_template_version: 2013-05-23
        parameters:
          aparam:
            type: number
        resources:
          AResource:
            type: ResourceWithPropsRefPropOnValidate
            properties:
              FooInt: {get_param: aparam}
        """)

        self.stack = stack.Stack(self.ctx, 'stack_with_reqd_param',
                                 template.Template(tmpl))

        ex = self.assertRaises(exception.UserParameterMissing,
                               self.stack.validate)
        self.assertIn("The Parameter (aparam) was not provided",
                      str(ex))

        self.stack.strict_validate = False
        ex = self.assertRaises(exception.StackValidationFailed,
                               self.stack.validate)
        self.assertIn("The Parameter (aparam) was not provided",
                      str(ex))

        self.assertIsNone(self.stack.validate(validate_res_tmpl_only=True))
    def test_nodisable_validate_tmpl_err(self):
        """Template errors are reported by every validation mode.

        Unlike parameter errors, a bad depends_on reference fails even
        with strict_validate off and with validate_res_tmpl_only=True.
        """
        tmpl = template_format.parse("""
        heat_template_version: 2013-05-23
        resources:
          AResource:
            type: ResourceWithPropsRefPropOnValidate
            depends_on: noexist
            properties:
              FooInt: 123
        """)

        self.stack = stack.Stack(self.ctx, 'stack_with_tmpl_err',
                                 template.Template(tmpl))

        ex = self.assertRaises(exception.InvalidTemplateReference,
                               self.stack.validate)
        self.assertIn(
            "The specified reference \"noexist\" (in AResource) is incorrect",
            str(ex))

        self.stack.strict_validate = False
        ex = self.assertRaises(exception.InvalidTemplateReference,
                               self.stack.validate)
        self.assertIn(
            "The specified reference \"noexist\" (in AResource) is incorrect",
            str(ex))

        ex = self.assertRaises(exception.InvalidTemplateReference,
                               self.stack.validate,
                               validate_res_tmpl_only=True)
        self.assertIn(
            "The specified reference \"noexist\" (in AResource) is incorrect",
            str(ex))
    def test_validate_property_getatt(self):
        """A property built from another resource's attribute validates."""
        tmpl = {
            'HeatTemplateFormatVersion': '2012-12-12',
            'Resources': {
                'R1': {'Type': 'ResourceWithPropsType'},
                'R2': {'Type': 'ResourceWithPropsType',
                       'Properties': {'Foo': {'Fn::GetAtt': ['R1', 'Foo']}}}}
        }
        self.stack = stack.Stack(self.ctx, 'test_stack',
                                 template.Template(tmpl))
        self.assertIsNone(self.stack.validate())
    def test_param_validate_value(self):
        """Bad parameter values fail strict validation but pass when relaxed."""
        tmpl = template_format.parse("""
        HeatTemplateFormatVersion: '2012-12-12'
        Parameters:
          foo:
            Type: Number
        """)
        env1 = environment.Environment({'parameters': {'foo': 'abc'}})
        self.stack = stack.Stack(self.ctx, 'stack_with_bad_param',
                                 template.Template(tmpl, env=env1))

        ex = self.assertRaises(exception.StackValidationFailed,
                               self.stack.validate)

        self.assertIn("Parameter 'foo' is invalid: could not convert "
                      "string to float:", str(ex))
        self.assertIn("abc", str(ex))

        self.stack.strict_validate = False
        self.assertIsNone(self.stack.validate())
def test_incorrect_outputs_cfn_list_data(self):
tmpl = template_format.parse("""
HeatTemplateFormatVersion: '2012-12-12'
Resources:
AResource:
Type: ResourceWithPropsType
Properties:
Foo: abc
Outputs:
Resource_attr:
- Data is not what it seems
""")
self.stack = stack.Stack(self.ctx, 'stack_with_correct_outputs',
template.Template(tmpl))
ex = self.assertRaises(exception.StackValidationFailed,
self.stack.validate)
self.assertIn('Found a list', str(ex))
self.assertIn('Outputs.Resource_attr', str(ex))
def test_incorrect_deletion_policy(self):
tmpl = template_format.parse("""
HeatTemplateFormatVersion: '2012-12-12'
Parameters:
Deletion_Policy:
Type: String
Default: [1, 2]
Resources:
AResource:
Type: ResourceWithPropsType
DeletionPolicy: {Ref: Deletion_Policy}
Properties:
Foo: abc
""")
self.stack = stack.Stack(self.ctx, 'stack_bad_delpol',
template.Template(tmpl))
ex = self.assertRaises(exception.StackValidationFailed,
self.stack.validate)
self.assertIn('Invalid deletion policy "[1, 2]"',
str(ex))
def test_deletion_policy_apply_ref(self):
tmpl = template_format.parse("""
HeatTemplateFormatVersion: '2012-12-12'
Parameters:
Deletion_Policy:
Type: String
Default: Delete
Resources:
AResource:
Type: ResourceWithPropsType
DeletionPolicy: wibble
Properties:
Foo: abc
DeletionPolicy: {Ref: Deletion_Policy}
""")
self.stack = stack.Stack(self.ctx, 'stack_delpol_get_param',
template.Template(tmpl))
self.stack.validate()
self.stack.store()
self.stack.create()
self.assertEqual((self.stack.CREATE, self.stack.COMPLETE),
self.stack.state)
def test_deletion_policy_apply_get_param(self):
tmpl = template_format.parse("""
heat_template_version: 2016-04-08
parameters:
deletion_policy:
type: string
default: Delete
resources:
AResource:
type: ResourceWithPropsType
deletion_policy: {get_param: deletion_policy}
properties:
Foo: abc
""")
self.stack = stack.Stack(self.ctx, 'stack_delpol_get_param',
template.Template(tmpl))
self.stack.validate()
self.stack.store()
self.stack.create()
self.assertEqual((self.stack.CREATE, self.stack.COMPLETE),
self.stack.state)
def test_incorrect_deletion_policy_hot(self):
tmpl = template_format.parse("""
heat_template_version: 2013-05-23
parameters:
deletion_policy:
type: string
default: [1, 2]
resources:
AResource:
type: ResourceWithPropsType
deletion_policy: {get_param: deletion_policy}
properties:
Foo: abc
""")
self.stack = stack.Stack(self.ctx, 'stack_bad_delpol',
template.Template(tmpl))
ex = self.assertRaises(exception.StackValidationFailed,
self.stack.validate)
self.assertIn('Invalid deletion policy "[1, 2]',
str(ex))
def test_incorrect_outputs_hot_get_attr(self):
tmpl = {'heat_template_version': '2013-05-23',
'resources': {
'AResource': {'type': 'ResourceWithPropsType',
'properties': {'Foo': 'abc'}}},
'outputs': {
'resource_attr': {
'value': {
'get_attr': ['AResource', 'Bar']}}}}
self.stack = stack.Stack(self.ctx, 'stack_with_correct_outputs',
template.Template(tmpl))
self.assertRaisesRegex(
exception.StackValidationFailed,
('outputs.resource_attr.value.get_attr: The Referenced Attribute '
r'\(AResource Bar\) is incorrect.'),
self.stack.validate)
def test_snapshot_save_called_first(self):
def snapshotting_called_first(stack, action, status, reason):
self.assertEqual(stack.status, stack.IN_PROGRESS)
self.assertEqual(stack.action, stack.SNAPSHOT)
tmpl = {'HeatTemplateFormatVersion': '2012-12-12',
'Resources': {
'A': {'Type': 'GenericResourceType'},
'B': {'Type': 'GenericResourceType'}}}
self.stack = stack.Stack(self.ctx, 'stack_details_test',
template.Template(tmpl))
self.stack.store()
self.stack.create()
self.stack.snapshot(save_snapshot_func=snapshotting_called_first)
def test_restore(self):
tmpl = {'HeatTemplateFormatVersion': '2012-12-12',
'Resources': {
'A': {'Type': 'GenericResourceType'},
'B': {'Type': 'GenericResourceType'}}}
self.stack = stack.Stack(self.ctx, 'stack_details_test',
template.Template(tmpl))
self.stack.store()
self.stack.create()
data = copy.deepcopy(self.stack.prepare_abandon())
fake_snapshot = collections.namedtuple(
'Snapshot', ('data', 'stack_id'))(data, self.stack.id)
new_tmpl = {'HeatTemplateFormatVersion': '2012-12-12',
'Resources': {'A': {'Type': 'GenericResourceType'}}}
updated_stack = stack.Stack(self.ctx, 'updated_stack',
template.Template(new_tmpl))
self.stack.update(updated_stack)
self.assertEqual(1, len(self.stack.resources))
self.stack.restore(fake_snapshot)
self.assertEqual((stack.Stack.RESTORE, stack.Stack.COMPLETE),
self.stack.state)
self.assertEqual(2, len(self.stack.resources))
def test_restore_with_original_env(self):
tmpl = {
'heat_template_version': '2013-05-23',
'parameters': {
'foo': {'type': 'string'}
},
'resources': {
'A': {
'type': 'ResourceWithPropsType',
'properties': {'Foo': {'get_param': 'foo'}}
}
}
}
self.stack = stack.Stack(self.ctx, 'stack_restore_test',
template.Template(
tmpl,
env=environment.Environment(
{'foo': 'abc'})))
self.stack.store()
self.stack.create()
self.assertEqual('abc',
self.stack.resources['A'].properties['Foo'])
data = copy.deepcopy(self.stack.prepare_abandon())
fake_snapshot = collections.namedtuple(
'Snapshot', ('data', 'stack_id'))(data, self.stack.id)
updated_stack = stack.Stack(self.ctx, 'updated_stack',
template.Template(
tmpl,
env=environment.Environment(
{'foo': 'xyz'})))
self.stack.update(updated_stack)
self.assertEqual('xyz',
self.stack.resources['A'].properties['Foo'])
self.stack.restore(fake_snapshot)
self.assertEqual((stack.Stack.RESTORE, stack.Stack.COMPLETE),
self.stack.state)
self.assertEqual('abc',
self.stack.resources['A'].properties['Foo'])
def test_hot_restore(self):
tpl = {'heat_template_version': '2013-05-23',
'resources':
{'A': {'type': 'ResourceWithRestoreType'}}}
self.stack = stack.Stack(self.ctx, 'stack_details_test',
template.Template(tpl))
self.stack.store()
self.stack.create()
data = self.stack.prepare_abandon()
data['resources']['A']['resource_data']['a_string'] = 'foo'
fake_snapshot = collections.namedtuple(
'Snapshot', ('data', 'stack_id'))(data, self.stack.id)
self.stack.restore(fake_snapshot)
self.assertEqual((stack.Stack.RESTORE, stack.Stack.COMPLETE),
self.stack.state)
self.assertEqual(
'foo', self.stack.resources['A'].properties['a_string'])
@mock.patch.object(stack.Stack, 'db_resource_get')
def test_lightweight_stack_getatt(self, mock_drg):
tmpl = template.Template({
'HeatTemplateFormatVersion': '2012-12-12',
'Resources': {
'foo': {'Type': 'GenericResourceType'},
'bar': {
'Type': 'ResourceWithPropsType',
'Properties': {
'Foo': {'Fn::GetAtt': ['foo', 'bar']},
}
}
}
})
rsrcs_data = {'foo': {'reference_id': 'foo-id',
'attrs': {'bar': 'baz'}, 'uuid': mock.ANY,
'id': mock.ANY, 'action': 'CREATE',
'status': 'COMPLETE'},
'bar': {'reference_id': 'bar-id', 'uuid': mock.ANY,
'id': mock.ANY, 'action': 'CREATE',
'status': 'COMPLETE'}}
cache_data = {n: node_data.NodeData.from_dict(d)
for n, d in rsrcs_data.items()}
tmpl_stack = stack.Stack(self.ctx, 'test', tmpl)
tmpl_stack.store()
lightweight_stack = stack.Stack.load(self.ctx, stack_id=tmpl_stack.id,
cache_data=cache_data)
# Check if the property has the appropriate resolved value.
bar = resource.Resource(
'bar',
lightweight_stack.defn.resource_definition('bar'),
lightweight_stack)
self.assertEqual('baz', bar.properties['Foo'])
# Make sure FnGetAtt returns the cached value.
attr_value = lightweight_stack.defn['foo'].FnGetAtt('bar')
self.assertEqual('baz', attr_value)
# Make sure calls are not made to the database to retrieve the
# resource state.
self.assertFalse(mock_drg.called)
@mock.patch.object(stack.Stack, 'db_resource_get')
def test_lightweight_stack_getrefid(self, mock_drg):
tmpl = template.Template({
'HeatTemplateFormatVersion': '2012-12-12',
'Resources': {
'foo': {'Type': 'GenericResourceType'},
'bar': {
'Type': 'ResourceWithPropsType',
'Properties': {
'Foo': {'Ref': 'foo'},
}
}
}
})
rsrcs_data = {'foo': {'reference_id': 'physical-resource-id',
'uuid': mock.ANY, 'id': mock.ANY,
'action': 'CREATE', 'status': 'COMPLETE'},
'bar': {'reference_id': 'bar-id', 'uuid': mock.ANY,
'id': mock.ANY, 'action': 'CREATE',
'status': 'COMPLETE'}}
cache_data = {n: node_data.NodeData.from_dict(d)
for n, d in rsrcs_data.items()}
tmpl_stack = stack.Stack(self.ctx, 'test', tmpl)
tmpl_stack.store()
lightweight_stack = stack.Stack.load(self.ctx, stack_id=tmpl_stack.id,
cache_data=cache_data)
# Check if the property has the appropriate resolved value.
bar = resource.Resource(
'bar',
lightweight_stack.defn.resource_definition('bar'),
lightweight_stack)
self.assertEqual('physical-resource-id', bar.properties['Foo'])
# Make sure FnGetRefId returns the cached value.
resource_id = lightweight_stack.defn['foo'].FnGetRefId()
self.assertEqual('physical-resource-id', resource_id)
# Make sure calls are not made to the database to retrieve the
# resource state.
self.assertFalse(mock_drg.called)
def test_encrypt_parameters_false_parameters_stored_plaintext(self):
"""Test stack loading with disabled parameter value validation."""
tmpl = template_format.parse('''
heat_template_version: 2013-05-23
parameters:
param1:
type: string
description: value1.
param2:
type: string
description: value2.
hidden: true
resources:
a_resource:
type: GenericResourceType
''')
env1 = environment.Environment({'param1': 'foo', 'param2': 'bar'})
self.stack = stack.Stack(self.ctx, 'test',
template.Template(tmpl, env=env1))
cfg.CONF.set_override('encrypt_parameters_and_properties', False)
# Verify that hidden parameters stored in plain text
self.stack.store()
db_stack = stack_object.Stack.get_by_id(self.ctx, self.stack.id)
params = db_stack.raw_template.environment['parameters']
self.assertEqual('foo', params['param1'])
self.assertEqual('bar', params['param2'])
def test_parameters_stored_encrypted_decrypted_on_load(self):
"""Test stack loading with disabled parameter value validation."""
tmpl = template_format.parse('''
heat_template_version: 2013-05-23
parameters:
param1:
type: string
description: value1.
param2:
type: string
description: value2.
hidden: true
resources:
a_resource:
type: GenericResourceType
''')
env1 = environment.Environment({'param1': 'foo', 'param2': 'bar'})
self.stack = stack.Stack(self.ctx, 'test',
template.Template(tmpl, env=env1))
cfg.CONF.set_override('encrypt_parameters_and_properties', True)
# Verify that hidden parameters are stored encrypted
self.stack.store()
db_tpl = db_api.raw_template_get(self.ctx, self.stack.t.id)
db_params = db_tpl.environment['parameters']
self.assertEqual('foo', db_params['param1'])
self.assertEqual('cryptography_decrypt_v1', db_params['param2'][0])
self.assertIsNotNone(db_params['param2'][1])
# Verify that loaded stack has decrypted paramters
loaded_stack = stack.Stack.load(self.ctx, stack_id=self.stack.id)
params = loaded_stack.t.env.params
self.assertEqual('foo', params.get('param1'))
self.assertEqual('bar', params.get('param2'))
# test update the param2
loaded_stack.state_set(self.stack.CREATE, self.stack.COMPLETE,
'for_update')
env2 = environment.Environment({'param1': 'foo', 'param2': 'new_bar'})
new_stack = stack.Stack(self.ctx, 'test_update',
template.Template(tmpl, env=env2))
loaded_stack.update(new_stack)
self.assertEqual((loaded_stack.UPDATE, loaded_stack.COMPLETE),
loaded_stack.state)
db_tpl = db_api.raw_template_get(self.ctx, loaded_stack.t.id)
db_params = db_tpl.environment['parameters']
self.assertEqual('foo', db_params['param1'])
self.assertEqual('cryptography_decrypt_v1', db_params['param2'][0])
self.assertIsNotNone(db_params['param2'][1])
loaded_stack1 = stack.Stack.load(self.ctx, stack_id=self.stack.id)
params = loaded_stack1.t.env.params
self.assertEqual('foo', params.get('param1'))
self.assertEqual('new_bar', params.get('param2'))
def test_parameters_created_encrypted_updated_decrypted(self):
"""Test stack loading with disabled parameter value validation."""
tmpl = template_format.parse('''
heat_template_version: 2013-05-23
parameters:
param1:
type: string
description: value1.
param2:
type: string
description: value2.
hidden: true
resources:
a_resource:
type: GenericResourceType
''')
# Create the stack with encryption enabled
cfg.CONF.set_override('encrypt_parameters_and_properties', True)
env1 = environment.Environment({'param1': 'foo', 'param2': 'bar'})
self.stack = stack.Stack(self.ctx, 'test',
template.Template(tmpl, env=env1))
self.stack.store()
# Update the stack with encryption disabled
cfg.CONF.set_override('encrypt_parameters_and_properties', False)
loaded_stack = stack.Stack.load(self.ctx, stack_id=self.stack.id)
loaded_stack.state_set(self.stack.CREATE, self.stack.COMPLETE,
'for_update')
env2 = environment.Environment({'param1': 'foo', 'param2': 'new_bar'})
new_stack = stack.Stack(self.ctx, 'test_update',
template.Template(tmpl, env=env2))
self.assertEqual(['param2'], loaded_stack.env.encrypted_param_names)
# Without the fix for bug #1572294, loaded_stack.update() will
# blow up with "ValueError: too many values to unpack"
loaded_stack.update(new_stack)
self.assertEqual([], loaded_stack.env.encrypted_param_names)
def test_parameters_inconsistent_encrypted_param_names(self):
tmpl = template_format.parse('''
heat_template_version: 2013-05-23
parameters:
param1:
type: string
description: value1.
param2:
type: string
description: value2.
hidden: true
resources:
a_resource:
type: GenericResourceType
''')
warning_logger = self.useFixture(
fixtures.FakeLogger(level=logging.WARNING,
format="%(levelname)8s [%(name)s] "
"%(message)s"))
cfg.CONF.set_override('encrypt_parameters_and_properties', False)
env1 = environment.Environment({'param1': 'foo', 'param2': 'bar'})
self.stack = stack.Stack(self.ctx, 'test',
template.Template(tmpl, env=env1))
self.stack.store()
loaded_stack = stack.Stack.load(self.ctx, stack_id=self.stack.id)
loaded_stack.state_set(self.stack.CREATE, self.stack.COMPLETE,
'for_update')
env2 = environment.Environment({'param1': 'foo', 'param2': 'new_bar'})
# Put inconsistent encrypted_param_names data in the environment
env2.encrypted_param_names = ['param1']
new_stack = stack.Stack(self.ctx, 'test_update',
template.Template(tmpl, env=env2))
self.assertIsNone(loaded_stack.update(new_stack))
self.assertIn('Encountered already-decrypted data',
warning_logger.output)
def test_parameters_stored_decrypted_successful_load(self):
"""Test stack loading with disabled parameter value validation."""
tmpl = template_format.parse('''
heat_template_version: 2013-05-23
parameters:
param1:
type: string
description: value1.
param2:
type: string
description: value2.
hidden: true
resources:
a_resource:
type: GenericResourceType
''')
env1 = environment.Environment({'param1': 'foo', 'param2': 'bar'})
self.stack = stack.Stack(self.ctx, 'test',
template.Template(tmpl, env=env1))
cfg.CONF.set_override('encrypt_parameters_and_properties', False)
# Verify that hidden parameters are stored decrypted
self.stack.store()
db_tpl = db_api.raw_template_get(self.ctx, self.stack.t.id)
db_params = db_tpl.environment['parameters']
self.assertEqual('foo', db_params['param1'])
self.assertEqual('bar', db_params['param2'])
# Verify that stack loads without error
loaded_stack = stack.Stack.load(self.ctx, stack_id=self.stack.id)
params = loaded_stack.t.env.params
self.assertEqual('foo', params.get('param1'))
self.assertEqual('bar', params.get('param2'))
def test_event_dispatch(self):
env = environment.Environment()
evt = eventlet.event.Event()
sink = fakes.FakeEventSink(evt)
env.register_event_sink('dummy', lambda: sink)
env.load({"event_sinks": [{"type": "dummy"}]})
stk = stack.Stack(self.ctx, 'test',
template.Template(empty_template, env=env))
stk.thread_group_mgr = service.ThreadGroupManager()
self.addCleanup(stk.thread_group_mgr.stop, stk.id)
stk.store()
stk._add_event('CREATE', 'IN_PROGRESS', '')
evt.wait()
expected = [{
'id': mock.ANY,
'timestamp': mock.ANY,
'type': 'os.heat.event',
'version': '0.1',
'payload': {
'physical_resource_id': stk.id,
'resource_action': 'CREATE',
'resource_name': 'test',
'resource_properties': {},
'resource_status': 'IN_PROGRESS',
'resource_status_reason': '',
'resource_type':
'OS::Heat::Stack',
'stack_id': stk.id,
'version': '0.1'}}]
self.assertEqual(expected, sink.events)
@mock.patch.object(stack_object.Stack, 'delete')
@mock.patch.object(raw_template_object.RawTemplate, 'delete')
def test_mark_complete_create(self, mock_tmpl_delete, mock_stack_delete):
tmpl = template.Template({
'HeatTemplateFormatVersion': '2012-12-12',
'Resources': {
'foo': {'Type': 'GenericResourceType'}
}
})
tmpl_stack = stack.Stack(self.ctx, 'test', tmpl, convergence=True)
tmpl_stack.store()
tmpl_stack.action = tmpl_stack.CREATE
tmpl_stack.status = tmpl_stack.IN_PROGRESS
tmpl_stack.current_traversal = 'some-traversal'
tmpl_stack.mark_complete()
self.assertEqual(tmpl_stack.prev_raw_template_id,
None)
self.assertFalse(mock_tmpl_delete.called)
self.assertFalse(mock_stack_delete.called)
self.assertEqual(tmpl_stack.status, tmpl_stack.COMPLETE)
@mock.patch.object(stack.Stack, 'purge_db')
def test_mark_complete_update(self, mock_purge_db):
tmpl = template.Template({
'HeatTemplateFormatVersion': '2012-12-12',
'Resources': {
'foo': {'Type': 'GenericResourceType'}
}
})
cfg.CONF.set_default('convergence_engine', True)
tmpl_stack = stack.Stack(self.ctx, 'test', tmpl, convergence=True)
tmpl_stack.prev_raw_template_id = 1
tmpl_stack.action = tmpl_stack.UPDATE
tmpl_stack.status = tmpl_stack.IN_PROGRESS
tmpl_stack.current_traversal = 'some-traversal'
tmpl_stack.store()
tmpl_stack.mark_complete()
self.assertTrue(mock_purge_db.called)
@mock.patch.object(stack.Stack, 'purge_db')
def test_mark_complete_update_delete(self, mock_purge_db):
tmpl = template.Template({
'HeatTemplateFormatVersion': '2012-12-12',
'Description': 'Empty Template'
})
cfg.CONF.set_default('convergence_engine', True)
tmpl_stack = stack.Stack(self.ctx, 'test', tmpl, convergence=True)
tmpl_stack.prev_raw_template_id = 1
tmpl_stack.action = tmpl_stack.DELETE
tmpl_stack.status = tmpl_stack.IN_PROGRESS
tmpl_stack.current_traversal = 'some-traversal'
tmpl_stack.store()
tmpl_stack.mark_complete()
self.assertTrue(mock_purge_db.called)
@mock.patch.object(stack.Stack, 'purge_db')
def test_mark_complete_stale_traversal(self, mock_purge_db):
tmpl = template.Template({
'HeatTemplateFormatVersion': '2012-12-12',
'Resources': {
'foo': {'Type': 'GenericResourceType'}
}
})
tmpl_stack = stack.Stack(self.ctx, 'test', tmpl)
tmpl_stack.store()
# emulate stale traversal
tmpl_stack.current_traversal = 'old-traversal'
tmpl_stack.mark_complete()
self.assertFalse(mock_purge_db.called)
@mock.patch.object(function, 'validate')
def test_validate_assertion_exception_rethrow(self, func_val):
expected_msg = 'Expected Assertion Error'
with mock.patch('heat.engine.stack.dependencies',
new_callable=mock.PropertyMock) as mock_dependencies:
mock_dependency = mock.MagicMock()
mock_dependency.name = 'res'
mock_dependency.external_id = None
mock_dependency.validate.side_effect = AssertionError(expected_msg)
mock_dependencies.Dependencies.return_value = [mock_dependency]
stc = stack.Stack(self.ctx, utils.random_name(), self.tmpl)
mock_res = mock.Mock()
mock_res.name = mock_dependency.name
mock_res.t = mock.Mock()
mock_res.t.name = mock_res.name
stc._resources = {mock_res.name: mock_res}
expected_exception = self.assertRaises(AssertionError,
stc.validate)
self.assertEqual(expected_msg, str(expected_exception))
mock_dependency.validate.assert_called_once_with()
tmpl = template_format.parse("""
HeatTemplateFormatVersion: '2012-12-12'
Outputs:
foo:
Value: bar
""")
stc = stack.Stack(self.ctx, utils.random_name(),
template.Template(tmpl))
func_val.side_effect = AssertionError(expected_msg)
expected_exception = self.assertRaises(AssertionError, stc.validate)
self.assertEqual(expected_msg, str(expected_exception))
@mock.patch.object(update, 'StackUpdate')
def test_update_task_exception(self, mock_stack_update):
class RandomException(Exception):
pass
tmpl1 = template.Template({
'HeatTemplateFormatVersion': '2012-12-12',
'Resources': {
'foo': {'Type': 'GenericResourceType'}
}
})
self.stack = stack.Stack(utils.dummy_context(), 'test_stack', tmpl1)
self.stack.store()
self.stack.create()
self.assertEqual((stack.Stack.CREATE, stack.Stack.COMPLETE),
self.stack.state)
tmpl2 = template.Template({
'HeatTemplateFormatVersion': '2012-12-12',
'Resources': {
'foo': {'Type': 'GenericResourceType'},
'bar': {'Type': 'GenericResourceType'}
}
})
updated_stack = stack.Stack(utils.dummy_context(), 'test_stack', tmpl2)
mock_stack_update.side_effect = RandomException()
self.assertRaises(RandomException, self.stack.update, updated_stack)
def update_exception_handler(self, exc, action=stack.Stack.UPDATE,
disable_rollback=False):
tmpl = template.Template({
'HeatTemplateFormatVersion': '2012-12-12',
'Resources': {
'foo': {'Type': 'GenericResourceType'}
}
})
self.stack = stack.Stack(utils.dummy_context(),
'test_stack',
tmpl,
disable_rollback=disable_rollback)
self.stack.store()
rb = self.stack._update_exception_handler(exc=exc, action=action)
return rb
def test_update_exception_handler_resource_failure_no_rollback(self):
reason = 'something strange happened'
exc = exception.ResourceFailure(reason, None, action='UPDATE')
rb = self.update_exception_handler(exc, disable_rollback=True)
self.assertFalse(rb)
def test_update_exception_handler_resource_failure_rollback(self):
reason = 'something strange happened'
exc = exception.ResourceFailure(reason, None, action='UPDATE')
rb = self.update_exception_handler(exc, disable_rollback=False)
self.assertTrue(rb)
def test_update_exception_handler_force_cancel_with_rollback(self):
exc = stack.ForcedCancel(with_rollback=True)
rb = self.update_exception_handler(exc, disable_rollback=False)
self.assertTrue(rb)
def test_update_exception_handler_force_cancel_with_rollback_off(self):
# stack-cancel-update from user *always* rolls back
exc = stack.ForcedCancel(with_rollback=True)
rb = self.update_exception_handler(exc, disable_rollback=True)
self.assertTrue(rb)
def test_update_exception_handler_force_cancel_nested(self):
exc = stack.ForcedCancel(with_rollback=False)
rb = self.update_exception_handler(exc, disable_rollback=True)
self.assertFalse(rb)
def test_store_generates_new_traversal_id_for_new_stack(self):
tmpl = template.Template({
'HeatTemplateFormatVersion': '2012-12-12',
'Resources': {
'foo': {'Type': 'GenericResourceType'}
}
})
self.stack = stack.Stack(utils.dummy_context(),
'test_stack', tmpl, convergence=True)
self.assertIsNone(self.stack.current_traversal)
self.stack.store()
self.assertIsNotNone(self.stack.current_traversal)
@mock.patch.object(stack_object.Stack, 'select_and_update')
def test_store_uses_traversal_id_for_updating_db(self, mock_sau):
tmpl = template.Template({
'HeatTemplateFormatVersion': '2012-12-12',
'Resources': {
'foo': {'Type': 'GenericResourceType'}
}
})
self.stack = stack.Stack(utils.dummy_context(),
'test_stack', tmpl, convergence=True)
mock_sau.return_value = True
self.stack.id = 1
self.stack.current_traversal = 1
stack_id = self.stack.store()
mock_sau.assert_called_once_with(mock.ANY, 1, mock.ANY, exp_trvsl=1)
self.assertEqual(1, stack_id)
# ensure store uses given expected traversal ID
stack_id = self.stack.store(exp_trvsl=2)
self.assertEqual(1, stack_id)
mock_sau.assert_called_with(mock.ANY, 1, mock.ANY, exp_trvsl=2)
@mock.patch.object(stack_object.Stack, 'select_and_update')
def test_store_db_update_failure(self, mock_sau):
tmpl = template.Template({
'HeatTemplateFormatVersion': '2012-12-12',
'Resources': {
'foo': {'Type': 'GenericResourceType'}
}
})
self.stack = stack.Stack(utils.dummy_context(),
'test_stack', tmpl, convergence=True)
mock_sau.return_value = False
self.stack.id = 1
stack_id = self.stack.store()
self.assertIsNone(stack_id)
@mock.patch.object(stack_object.Stack, 'select_and_update')
def test_state_set_uses_curr_traversal_for_updating_db(self, mock_sau):
tmpl = template.Template({
'HeatTemplateFormatVersion': '2012-12-12',
'Resources': {
'foo': {'Type': 'GenericResourceType'}
}
})
self.stack = stack.Stack(utils.dummy_context(),
'test_stack', tmpl, convergence=True)
self.stack.id = 1
self.stack.current_traversal = 'curr-traversal'
self.stack.store()
self.stack.state_set(self.stack.UPDATE, self.stack.IN_PROGRESS, '')
mock_sau.assert_called_once_with(mock.ANY, 1, mock.ANY,
exp_trvsl='curr-traversal')
class StackKwargsForCloningTest(common.HeatTestCase):
scenarios = [
('default', dict(keep_status=False, only_db=False, keep_tags=False,
not_included=['action', 'status', 'status_reason',
'tags'])),
('only_db', dict(keep_status=False, only_db=True, keep_tags=False,
not_included=['action', 'status', 'status_reason',
'strict_validate', 'tags'])),
('keep_status', dict(keep_status=True, only_db=False, keep_tags=False,
not_included=['tags'])),
('status_db', dict(keep_status=True, only_db=True, keep_tags=False,
not_included=['strict_validate', 'tags'])),
('keep_tags', dict(keep_status=False, only_db=False, keep_tags=True,
not_included=['action', 'status', 'status_reason']))
]
def test_kwargs(self):
tmpl = template.Template(copy.deepcopy(empty_template))
ctx = utils.dummy_context()
test_data = dict(action='x', status='y',
status_reason='z', timeout_mins=33,
disable_rollback=True, parent_resource='fred',
owner_id=32, stack_user_project_id=569,
user_creds_id=123, tenant_id='some-uuid',
username='jo', nested_depth=3,
strict_validate=True, convergence=False,
current_traversal=45,
tags=['tag1', 'tag2'])
db_map = {'parent_resource': 'parent_resource_name',
'tenant_id': 'tenant', 'timeout_mins': 'timeout'}
test_db_data = {}
for key in test_data:
dbkey = db_map.get(key, key)
test_db_data[dbkey] = test_data[key]
self.stack = stack.Stack(ctx, utils.random_name(), tmpl,
**test_data)
res = self.stack.get_kwargs_for_cloning(keep_status=self.keep_status,
only_db=self.only_db,
keep_tags=self.keep_tags)
for key in self.not_included:
self.assertNotIn(key, res)
for key in test_data:
if key not in self.not_included:
dbkey = db_map.get(key, key)
if self.only_db:
self.assertEqual(test_data[key], res[dbkey])
else:
self.assertEqual(test_data[key], res[key])
if not self.only_db:
# just make sure that the kwargs are valid
# (no exception should be raised)
stack.Stack(ctx, utils.random_name(), tmpl, **res)
class ResetStateOnErrorTest(common.HeatTestCase):
class DummyStack(object):
(COMPLETE, IN_PROGRESS, FAILED) = range(3)
action = 'something'
status = COMPLETE
def __init__(self):
self.mark_failed = mock.MagicMock()
self.convergence = False
@stack.reset_state_on_error
def raise_exception(self):
self.status = self.IN_PROGRESS
raise ValueError('oops')
@stack.reset_state_on_error
def raise_exit_exception(self):
self.status = self.IN_PROGRESS
raise BaseException('bye')
@stack.reset_state_on_error
def succeed(self):
return 'Hello world'
@stack.reset_state_on_error
def fail(self):
self.status = self.FAILED
return 'Hello world'
def test_success(self):
dummy = self.DummyStack()
self.assertEqual('Hello world', dummy.succeed())
self.assertFalse(dummy.mark_failed.called)
def test_failure(self):
dummy = self.DummyStack()
self.assertEqual('Hello world', dummy.fail())
self.assertFalse(dummy.mark_failed.called)
def test_reset_state_exception(self):
dummy = self.DummyStack()
exc = self.assertRaises(ValueError, dummy.raise_exception)
self.assertIn('oops', str(exc))
self.assertTrue(dummy.mark_failed.called)
def test_reset_state_exit_exception(self):
dummy = self.DummyStack()
exc = self.assertRaises(BaseException, dummy.raise_exit_exception)
self.assertIn('bye', str(exc))
self.assertTrue(dummy.mark_failed.called)
class StackStateSetTest(common.HeatTestCase):
scenarios = [
('in_progress', dict(action=stack.Stack.CREATE,
status=stack.Stack.IN_PROGRESS,
persist_count=1, error=False)),
('create_complete', dict(action=stack.Stack.CREATE,
status=stack.Stack.COMPLETE,
persist_count=0, error=False)),
('create_failed', dict(action=stack.Stack.CREATE,
status=stack.Stack.FAILED,
persist_count=0, error=False)),
('update_complete', dict(action=stack.Stack.UPDATE,
status=stack.Stack.COMPLETE,
persist_count=1, error=False)),
('update_failed', dict(action=stack.Stack.UPDATE,
status=stack.Stack.FAILED,
persist_count=1, error=False)),
('delete_complete', dict(action=stack.Stack.DELETE,
status=stack.Stack.COMPLETE,
persist_count=1, error=False)),
('delete_failed', dict(action=stack.Stack.DELETE,
status=stack.Stack.FAILED,
persist_count=1, error=False)),
('adopt_complete', dict(action=stack.Stack.ADOPT,
status=stack.Stack.COMPLETE,
persist_count=0, error=False)),
('adopt_failed', dict(action=stack.Stack.ADOPT,
status=stack.Stack.FAILED,
persist_count=0, error=False)),
('rollback_complete', dict(action=stack.Stack.ROLLBACK,
status=stack.Stack.COMPLETE,
persist_count=1, error=False)),
('rollback_failed', dict(action=stack.Stack.ROLLBACK,
status=stack.Stack.FAILED,
persist_count=1, error=False)),
('invalid_action', dict(action='action',
status=stack.Stack.FAILED,
persist_count=0, error=True)),
('invalid_status', dict(action=stack.Stack.CREATE,
status='status',
persist_count=0, error=True)),
]
def test_state(self):
self.tmpl = template.Template(copy.deepcopy(empty_template))
self.ctx = utils.dummy_context()
self.stack = stack.Stack(self.ctx, 'test_stack', self.tmpl,
action=stack.Stack.CREATE,
status=stack.Stack.IN_PROGRESS)
persist_state = self.patchobject(self.stack, '_persist_state')
self.assertEqual((stack.Stack.CREATE, stack.Stack.IN_PROGRESS),
self.stack.state)
if self.error:
self.assertRaises(ValueError, self.stack.state_set,
self.action, self.status, 'test')
else:
self.stack.state_set(self.action, self.status, 'test')
self.assertEqual((self.action, self.status), self.stack.state)
self.assertEqual('test', self.stack.status_reason)
self.assertEqual(self.persist_count, persist_state.call_count)
| 41.965109 | 79 | 0.577625 | [
"Apache-2.0"
] | openstack/heat | heat/tests/test_stack.py | 131,099 | Python |
import sys
import traceback
from socket import error
from gevent.pywsgi import WSGIServer
from socketio.handler import SocketIOHandler
from socketio.policyserver import FlashPolicyServer
from socketio.virtsocket import Socket
from geventwebsocket.handler import WebSocketHandler
__all__ = ['SocketIOServer']
class SocketIOServer(WSGIServer):
    """A WSGI Server with a resource that acts like an SocketIO."""

    def __init__(self, *args, **kwargs):
        """This is just like the standard WSGIServer __init__, except with a
        few additional ``kwargs``:

        :param resource: The URL which has to be identified as a
            socket.io request.  Defaults to the /socket.io/ URL.

        :param transports: Optional list of transports to allow. List of
            strings, each string should be one of
            handler.SocketIOHandler.handler_types.

        :param policy_server: Boolean describing whether or not to use the
            Flash policy server.  Default True.

        :param policy_listener: A tuple containing (host, port) for the
            policy server.  This is optional and used only if policy server
            is set to true.  The default value is 0.0.0.0:843

        :param heartbeat_interval: int The timeout for the server, we
            should receive a heartbeat from the client within this
            interval. This should be less than the
            ``heartbeat_timeout``.

        :param heartbeat_timeout: int The timeout for the client when
            it should send a new heartbeat to the server. This value
            is sent to the client after a successful handshake.

        :param close_timeout: int The timeout for the client, when it
            closes the connection it still X amounts of seconds to do
            re open of the connection. This value is sent to the
            client after a successful handshake.

        :param log_file: str The file in which you want the PyWSGI
            server to write its access log.  If not specified, it
            is sent to `stderr` (with gevent 0.13).
        """
        # Map of sessid -> virtsocket.Socket for every live virtual socket.
        self.sockets = {}
        if 'namespace' in kwargs:
            # 'namespace' is the deprecated spelling of 'resource'.
            print("DEPRECATION WARNING: use resource instead of namespace")
            self.resource = kwargs.pop('namespace', 'socket.io')
        else:
            self.resource = kwargs.pop('resource', 'socket.io')

        self.transports = kwargs.pop('transports', None)

        if kwargs.pop('policy_server', True):
            wsock = args[0]
            # args[0] may be a bound socket object, a (host, port) tuple,
            # or a gevent listener wrapper -- probe each shape in turn to
            # discover the address the policy server should listen on.
            try:
                address, port = wsock.getsockname()
            except AttributeError:
                try:
                    address = wsock[0]
                except TypeError:
                    try:
                        address = wsock.address[0]
                    except AttributeError:
                        address = wsock.cfg_addr[0]
            policylistener = kwargs.pop('policy_listener', (address, 10843))
            self.policy_server = FlashPolicyServer(policylistener)
        else:
            self.policy_server = None

        # Extract other config options (timeouts are passed on to each
        # virtual socket through self.config).
        self.config = {
            'heartbeat_timeout': 60,
            'close_timeout': 60,
            'heartbeat_interval': 25,
        }
        for f in ('heartbeat_timeout', 'heartbeat_interval', 'close_timeout'):
            if f in kwargs:
                self.config[f] = int(kwargs.pop(f))

        if 'handler_class' not in kwargs:
            kwargs['handler_class'] = SocketIOHandler

        if 'ws_handler_class' not in kwargs:
            self.ws_handler_class = WebSocketHandler
        else:
            self.ws_handler_class = kwargs.pop('ws_handler_class')

        log_file = kwargs.pop('log_file', None)
        if log_file:
            # NOTE(review): this file handle is intentionally left open for
            # the lifetime of the server; pywsgi writes its access log to it.
            kwargs['log'] = open(log_file, 'a')

        super(SocketIOServer, self).__init__(*args, **kwargs)

    def start_accepting(self):
        """Start the (optional) Flash policy server, then accept requests.

        A failure to start the policy server is reported on stderr but is
        not fatal: the WSGI server still comes up.
        """
        if self.policy_server is not None:
            try:
                if not self.policy_server.started:
                    self.policy_server.start()
            except error as ex:
                sys.stderr.write(
                    'FAILED to start flash policy server: %s\n' % (ex, ))
            except Exception:
                traceback.print_exc()
                sys.stderr.write('FAILED to start flash policy server.\n\n')
        super(SocketIOServer, self).start_accepting()

    def stop(self, timeout=None):
        """Stop the policy server (if any) and then the WSGI server."""
        if self.policy_server is not None:
            self.policy_server.stop()
        super(SocketIOServer, self).stop(timeout=timeout)

    def handle(self, socket, address):
        """Handle one incoming connection with our SocketIO-aware handler.

        The handler receives the timeout/heartbeat config so it can pass
        it on to the virtual socket.
        """
        handler = self.handler_class(self.config, socket, address, self)
        handler.handle()

    def get_socket(self, sessid=''):
        """Return an existing or new client Socket.

        :param sessid: session id of an existing virtual socket; empty
            string means "create a brand new socket".
        :returns: a Socket, or None when ``sessid`` is non-empty but
            unknown (the client asked for a session that doesn't exist).
        """
        socket = self.sockets.get(sessid)

        if sessid and not socket:
            return None  # you ask for a session that doesn't exist!
        if socket is None:
            socket = Socket(self, self.config)
            self.sockets[socket.sessid] = socket
        else:
            socket.incr_hits()

        return socket
def serve(app, **kw):
    """Run *app* forever on a SocketIOServer.

    Recognized keyword options (all optional) are popped from ``kw``; any
    remaining keywords are forwarded to SocketIOServer unchanged.
    """
    quiet = kw.pop('_quiet', False)
    resource = kw.pop('resource', 'socket.io')
    if not quiet:  # pragma: no cover
        # idempotent if logging has already been set up
        import logging
        logging.basicConfig()
    host = kw.pop('host', '127.0.0.1')
    port = int(kw.pop('port', 6543))
    transports = kw.pop('transports', None)
    if transports:
        transports = [name.strip() for name in transports.split(',')]
    policy_server = kw.pop('policy_server', False)
    if policy_server in (True, 'True', 'true', 'enable', 'yes', 'on', '1'):
        # Truthy string spellings from config files all enable the server.
        policy_server = True
        listener_host = kw.pop('policy_listener_host', host)
        listener_port = int(kw.pop('policy_listener_port', 10843))
        kw['policy_listener'] = (listener_host, listener_port)
    else:
        policy_server = False
    server = SocketIOServer((host, port),
                            app,
                            resource=resource,
                            transports=transports,
                            policy_server=policy_server,
                            **kw)
    if not quiet:
        print('serving on http://%s:%s' % (host, port))
    server.serve_forever()
def serve_paste(app, global_conf, **kw):
    """pserve / paster serve / waitress replacement / integration

    Entry point for paste-style deployment; ``global_conf`` is accepted for
    interface compatibility but ignored. You can pass as parameters:

    transports = websockets, xhr-multipart, xhr-longpolling, etc...
    policy_server = True
    """
    # Delegates to serve(), which blocks in serve_forever(); the return value
    # (0) is only reached if the server loop exits.
    serve(app, **kw)
    return 0
| 35.172775 | 78 | 0.601221 | [
"BSD-3-Clause"
] | jykim16/gevent-socketio | socketio/server.py | 6,718 | Python |
#!/usr/bin/env python
# _*_ coding:utf-8 _*_
# Created by vikey on 2018/2/17
from __future__ import print_function
from __future__ import unicode_literals
def main():
    # Intentionally empty entry point; this module defines no behavior yet.
    pass
if __name__ == "__main__":
    main()
| 14.6 | 39 | 0.707763 | [
"Apache-2.0"
] | chenyuanqi/python-training | src/day_16/src/models/__init__.py | 219 | Python |
import random
from time import sleep
def Guess():
    """Prompt until the user enters an integer in [0, 10], then return it.

    Increments the module-level ``attempts`` counter once per try; invalid
    entries count as attempts too, matching the game rules.
    """
    global attempts
    # Keep prompting until the input parses and is within range.
    while True:
        try:
            attempts += 1  # This will count every attempt made by the user
            user_number = int(input().replace(' ', ''))
        except ValueError:
            # BUG FIX: was a bare `except:`, which also swallowed
            # KeyboardInterrupt/SystemExit and trapped the user in the loop.
            # Only int() parse failures should be retried.
            print("You should put a number between 0 and 10 <3")
        else:
            if user_number > 10 or user_number < 0:
                print("I told you a number between 0 and 10 <3")
            else:
                break
    return user_number
def NextGame():
    """Ask whether the player wants another round; return their choice."""
    # If the user chooses anything but "[S]" or "[N]", they stay in the loop.
    # NOTE(review): `choice in "[S]"` is a *substring* test, so 'S', '[S',
    # 'S]' and '[S]' (and the analogous N spellings) are all accepted, while
    # '', '[' and ']' are rejected by the `not in "[]"` clause — confirm this
    # permissiveness is intended.
    while True:
        choice = input(
            "Do you want to play again? [S]/[N] ").upper().replace(' ', '')
        if (choice in "[S]" or choice in "[N]") and choice not in "[]":
            break
        else:
            print("I didn't understand your choice.", end=' ')
    return choice
# Introduction banner.
print("\033[1;36m=-"*20, "\033[m")
print(f'\033[1;36m {"Lets play Number Guesser!":^40}\033[m')
print("\033[1;36m=-"*20, "\033[m")
sleep(2)
# Main loop: the user picks a mode, or stays here until a valid one is given.
while True:
    mode = input(
        "\nFirst of all, choose a mode: \n[1] Normal mode \n[2] Hide the thimble\n").replace(' ', '')
    while True:
        if mode.isnumeric() == False or int(mode) != 1 and int(mode) != 2:
            mode = input("I said to you to choose 1 or 2.\n")
        else:
            break
    # Mode 1 ("normal"): a fresh secret number is drawn after every wrong guess.
    if int(mode) == 1:
        while True:
            # Reset the attempt counter every time the player starts a round.
            attempts = 0
            # The computer will choose a random number
            print("I chose a number between 0 and 10, try to guess it! ")
            while True:
                pc_number = random.randint(0, 10)
                # The user will type a number between 0 and 10 or will get stuck in a loop until they do so.
                user_number = Guess()
                if user_number != pc_number:
                    print(
                        "Oops! You are wrong, let me chose another number... Guess it!")
                # When the user wins, leave the guessing loop.
                else:
                    break
            print(f"Yes! You are right! You made it with {attempts} attempts!")
            # The user chooses whether they want to play again.
            choice = NextGame()
            break
        if choice not in "[S]":
            break
    elif int(mode) == 2:  # Mode 2: "Hide the thimble" — one fixed secret with hot/cold hints.
        # Reset the attempt counter every time the player starts a round.
        attempts = 0
        # The computer will choose a random number
        pc_number = random.randint(0, 10)
        print("I chose a number between 0 and 10, try to guess it!")
        # The user will choose a number between 0 and 10, otherwise they will get stuck in a loop.
        while True:
            user_number = Guess()
            if pc_number == user_number:  # If the user number is the same as the computer one, the user wins!
                break
            # If the user's choice is 2 numbers or less apart from the computer one, the user will know they are getting close.
            elif pc_number > user_number >= pc_number-2 or pc_number < user_number <= pc_number+2:
                print("Hot.")
            # Else, they know they aren't close to the computer's number.
            else:
                print("Cold.")
        # When the user wins:
        print(f"Yes! You are right! You made it with {attempts} attempts!")
        choice = NextGame()
        if choice not in "[S]":
            break
# Goodbye
print(f"\nBye, bye! I'll miss you <3")
print("\033[1;34;107mBy: Kaique Apolinário\033[m")
| 36.457944 | 127 | 0.5596 | [
"MIT"
] | Kaique-Apolinario/Python-projects | Python Projects/A Number Guesser(2 modes).py | 3,902 | Python |
# coding=utf-8
import sys
import traceback
import numpy as np
import os
import Putil.base.logger as plog
plog.PutilLogConfig.config_file_handler(filename='./test/data/_log_test_common_data_multiprocess.log', mode='w')
plog.PutilLogConfig.config_log_level(stream=plog.INFO, file=plog.DEBUG)
plog.PutilLogConfig.config_format(plog.FormatRecommend)
plog.PutilLogConfig.config_handler(plog.stream_method | plog.file_method)
logger = plog.PutilLogConfig('TesCommonData').logger()
logger.setLevel(plog.DEBUG)
MainLogger = logger.getChild('Main')
MainLogger.setLevel(plog.DEBUG)
import Putil.test.data.test_common_data_unit as tbase
import Putil.data.common_data as pcd
import multiprocessing
pcd.DataPutProcess.set_running_mode(pcd.DataPutProcess.RunningMode.Debug)
if __name__ == '__main__':
    # Setup: shared data object, worker pool, and the queue-filling process.
    manager_common_data = pcd.CommonDataManager()
    manager_common_data.start()
    data = manager_common_data.TestCommonData()
    manager = multiprocessing.Manager()
    pool = multiprocessing.Pool()
    dpq = pcd.DataPutProcess(data, manager, pool)
    pool.close()
    dq = dpq.DataQueue()
    restart_param = dict()
    restart_param['critical_process'] = 'random_fill'
    dpq.restart(**restart_param)
    # pool.join()
    # print(dpq.queue_process_ret.get())
    # Phase 1: default device batch — expect 100 items, each a 1-element
    # batch whose tensors have leading dimension 1.
    count = 0
    while dpq.has_next():
        data = dq.get()
        assert len(data) == 1
        for k, v in enumerate(data[0]):
            assert v.datas().shape[0] == 1
            pass
        count += 1
        pass
    assert count == 100
    # Phase 2: explicit device_batch=[1] — epoch length is still 100.
    restart_param['device_batch'] = [1]
    restart_param['critical_process'] = 'random_fill'
    dpq.restart(**restart_param)
    count = 0
    while dpq.has_next():
        dq.get()
        count += 1
        pass
    assert count == 100
    # Phase 3: pause/continue — drain exactly what was queued at pause time,
    # then resume and finish the remaining items of the 100-item epoch.
    restart_param['device_batch'] = [1]
    restart_param['critical_process'] = 'allow_low'
    dpq.restart(**restart_param)
    dpq.pause_queue()
    now_size = dpq.DataQueue().qsize()
    count = 0
    while dpq.paused_and_has_next():
        dq.get()
        count += 1
        pass
    assert count == now_size
    dpq.continue_queue()
    while dpq.has_next():
        dq.get()
        count += 1
        pass
    assert count == 100
    # Phase 4: inject_operation mid-epoch — switch recycle on with batch 2,
    # then back off with batch 1, and account for every item of the epoch.
    restart_param['device_batch'] = [1]
    restart_param['critical_process'] = 'allow_low'
    dpq.restart(**restart_param)
    count = 0
    while count < 50 and dpq.has_next():
        get = dq.get()
        assert len(get) == 1
        for k, v in enumerate(get[0]):
            assert v.datas().shape == (1, 1), print(v.datas().shape)
            pass
        count += 1
        pass
    dpq.inject_operation({'recycle': True}, device_batch=[2])
    while count < 60 and dpq.has_next():
        get = dq.get()
        assert len(get) == 1
        for k, v in enumerate(get[0]):
            assert v.datas().shape == (2, 1), print(v.datas().shape)
            pass
        count += 1
        pass
    # Items already queued with the old batch size drain out first.
    old_size = dpq.inject_operation({'recycle': False}, device_batch=[1])
    while count < 60 + old_size and dpq.has_next():
        get = dq.get()
        assert len(get) == 1
        for k, v in enumerate(get[0]):
            assert v.datas().shape == (2, 1), print(get[0].datas().shape)
        count += 1
        pass
    assert count == 60 + old_size, print(count)
    remain_count = 100 - (50 + (10 + old_size) * 2)
    truck_count = count
    while (count - truck_count) < remain_count and dpq.has_next():
        get = dq.get()
        assert len(get) == 1
        for k, v in enumerate(get[0]):
            assert v.datas().shape == (1, 1), print(get[0].datas().shape)
        count += 1
        pass
    assert count == old_size + remain_count + 60, print(count)
    # Teardown: stop the generator and reap the worker pool.
    dpq.stop_generation()
    pool.join()
    print(dpq.queue_process_ret().get())
    # while dq.empty() is False or dpq.EpochDoneFlag.value is False:
    #     print('get')
    #     print(dq.get())
    pass
| 29.225564 | 112 | 0.621045 | [
"Apache-2.0"
] | balde-soul/Putil | test/data/test_common_data_multiprocess.py | 3,887 | Python |
import numpy as np
import pandas as pd
import os
import matplotlib.pyplot as plt
import time
from tensorflow.keras.layers import Input, Dense, Flatten, Conv1D, MaxPooling1D, UpSampling1D, BatchNormalization, Reshape
from tensorflow.keras.models import Model, Sequential
from tensorflow.keras.callbacks import TensorBoard, History, EarlyStopping, ModelCheckpoint
from tensorflow.keras.optimizers import Adam, Nadam, RMSprop
from tensorflow.keras.callbacks import EarlyStopping
from kerastuner.engine.hyperparameters import HyperParameters
from kerastuner.tuners import RandomSearch
from sklearn.preprocessing import MinMaxScaler, StandardScaler
import seaborn as sns
from spectral_analysis.classifiers.neural_network.helper_functions import train_test_split
from spectral_analysis.plotify import Plotify
class AutoEncoder():
    """Convolutional 1-D autoencoder over SDSS spectra.

    The constructor prepares standardized train/validation/test tensors;
    ``train_model`` either fits a fresh model or loads weights from
    ``weights_path`` and inspects the bottleneck activations.
    """
    def __init__(self, df_source_info, df_fluxes, df_wavelengths, load_model, weights_path=''):
        """Split, standardize and reshape the flux data for Conv1D layers.

        Args:
            df_source_info: source metadata table; must contain 'objid'.
            df_fluxes: flux table whose first column is 'objid'.
            df_wavelengths: single-column table of wavelengths.
            load_model: when True, training is skipped and weights are loaded.
            weights_path: HDF5 weights file used when ``load_model`` is True.
        """
        self.load_model = load_model
        self.weights_path = weights_path
        X = self._prepare_data(df_source_info, df_fluxes, df_wavelengths)
        indeces = list(range(len(X)))
        X_train, X_test, self.i_train, self.i_test = train_test_split(X, 0.2, indeces=indeces)
        # NOTE(review): this second split passes the *original* `indeces`
        # list (not the survivors of the first split) and overwrites
        # self.i_train — verify against train_test_split's contract.
        X_train, X_val, self.i_train, self.i_val = train_test_split(X_train, 0.2, indeces=indeces)
        # Standardize with statistics fitted on the training set only.
        self.scaler = StandardScaler()
        X_train = self.scaler.fit_transform(X_train)
        X_test = self.scaler.transform(X_test)
        X_val = self.scaler.transform(X_val)
        # Conv1D expects a trailing channel axis: (samples, length, 1).
        self.X_train = np.expand_dims(X_train, axis=2)
        self.X_test = np.expand_dims(X_test, axis=2)
        self.X_val = np.expand_dims(X_val, axis=2)
    def _prepare_data(self, df_source_info, df_fluxes, df_wavelengths):
        """Downsample fluxes to 1792 columns and cache matching wavelengths."""
        # self.df_source_info = df_source_info.loc[df_source_info['class'] == 'QSO']
        self.df_source_info = df_source_info
        self.objids = self.df_source_info['objid'].to_numpy()
        fluxes = df_fluxes.loc[df_fluxes['objid'].isin(self.objids)]
        # Drop the leading objid column, then keep every 2nd flux value.
        X = np.delete(fluxes.values, 0, axis=1)
        X = X[:, 0::2]
        print(f'X.shape = {X.shape}')
        # Drop every 25th column, then truncate to a pooling-friendly length.
        X = X[:, np.mod(np.arange(X[0].size),25)!=0]
        X = X[:,:1792]
        print(f'X.shape = {X.shape}')
        wavelengths = df_wavelengths.to_numpy()
        wavelengths = wavelengths[::2]
        self.wavelengths = wavelengths[0:1792]
        # plot_spectrum(X[0], wavelengths)
        return X
    def build_model(self):
        """Build and compile the symmetric Conv1D encoder/decoder (MSE/Adam)."""
        # ================================================================================== #
        # ==================================== ENCODER ===================================== #
        # ================================================================================== #
        input_layer = Input(shape=(self.X_train.shape[1], 1))
        # encoder: progressively narrower filters with 4x/2x downsampling
        x = Conv1D(filters=256,
                   kernel_size=7,
                   activation='relu',
                   padding='same')(input_layer)
        x = MaxPooling1D(4)(x)
        x = Conv1D(filters=128,
                   kernel_size=5,
                   activation='relu',
                   padding='same')(x)
        x = MaxPooling1D(4)(x)
        x = Conv1D(filters=64,
                   kernel_size=5,
                   activation='relu',
                   padding='same')(x)
        x = MaxPooling1D(2)(x)
        x = Conv1D(filters=32,
                   kernel_size=3,
                   activation='relu',
                   padding='same')(x)
        x = MaxPooling1D(2)(x)
        x = Conv1D(filters=32,
                   kernel_size=3,
                   activation='relu',
                   padding='same')(x)
        x = MaxPooling1D(2)(x)
        x = Conv1D(filters=1,
                   kernel_size=3,
                   activation='relu',
                   padding='same')(x)
        encoded = MaxPooling1D(2, padding='same')(x)
        # ================================================================================== #
        # ==================================== DECODER ===================================== #
        # ================================================================================== #
        x = Conv1D(filters=1,
                   kernel_size=3,
                   activation='relu',
                   padding='same')(encoded)
        x = UpSampling1D(2)(x)
        x = Conv1D(filters=32,
                   kernel_size=3,
                   activation='relu',
                   padding='same')(x)
        x = UpSampling1D(2)(x)
        x = Conv1D(filters=32,
                   kernel_size=3,
                   activation='relu',
                   padding='same')(x)
        x = UpSampling1D(2)(x)
        x = Conv1D(filters=64,
                   kernel_size=5,
                   activation='relu',
                   padding='same')(x)
        x = UpSampling1D(2)(x)
        x = Conv1D(filters=128,
                   kernel_size=5,
                   activation='relu',
                   padding='same')(x)
        x = UpSampling1D(4)(x)
        x = Conv1D(filters=256,
                   kernel_size=7,
                   activation='relu',
                   padding='same')(x)
        x = UpSampling1D(4)(x)
        decoded = Conv1D(1, 1, activation='tanh', padding='same')(x)
        self.autoencoder = Model(input_layer, decoded)
        self.autoencoder.summary()
        self.autoencoder.compile(loss='mse', optimizer='adam')
        return self.autoencoder
    def train_model(self, epochs, batch_size=32):
        """Train (or load) the autoencoder and return the Keras model.

        Args:
            epochs: maximum number of training epochs.
            batch_size: minibatch size forwarded to ``model.fit``.
        """
        model = self.build_model()
        if not self.load_model:
            modelcheckpoint = ModelCheckpoint(filepath='logs/1-14_autoencoder.epoch{epoch:02d}.h5',
                                              monitor='val_loss',
                                              save_best_only=True)
            # BUG FIX: batch_size was previously hard-coded to 32 inside
            # model.fit(), silently ignoring the parameter (main() passes 64).
            model.fit(x=self.X_train,
                      y=self.X_train,
                      epochs=epochs,
                      batch_size=batch_size,
                      validation_data=(self.X_val, self.X_val),
                      callbacks=[EarlyStopping('val_loss', patience=8), modelcheckpoint])
            self.evaluate_model(model)
        else:
            model.load_weights(self.weights_path)
            print(f'model = {model}')
            # self.evaluate_model(model)
            self.get_bottleneck_values(model)
        return model
    def get_bottleneck_values(self, model):
        """Plot a pairplot of bottleneck activations, colored by source class."""
        # NOTE(review): 'conv1d_5' relies on Keras auto-naming; confirm it is
        # still the bottleneck layer if the architecture changes.
        bottleneck = model.get_layer('conv1d_5')
        extractor = Model(inputs=model.inputs, outputs=[bottleneck.output])
        features = extractor(self.X_test)
        features = np.squeeze(features, axis=2)
        df_source_info_test = pd.DataFrame({'class': self.df_source_info.iloc[self.i_test]['class'].values})
        print(f'df_source_info_test = {df_source_info_test}')
        df = pd.DataFrame(features)
        df = df.join(df_source_info_test)
        print(f'df = {df}')
        sns.set(style="ticks", color_codes=True)
        sns.pairplot(df, hue='class')
        plt.savefig('plots/autoencoder_pairplot', dpi=100)
    def evaluate_model(self, model):
        """Plot original vs. reconstructed spectra for the first 100 test rows.

        NOTE(review): mutates self.X_test in place (squeeze + inverse
        transform), so it is not safe to call twice on the same instance.
        """
        preds = model.predict(self.X_test)
        print(self.X_test.shape)
        self.X_test = np.squeeze(self.X_test, axis=2)
        preds = np.squeeze(preds, axis=2)
        print(self.X_test.shape)
        # Undo standardization so plots are in physical flux units.
        self.X_test = self.scaler.inverse_transform(self.X_test)
        preds = self.scaler.inverse_transform(preds)
        for i in range(100):
            qso_ra = self.df_source_info.iloc[self.i_test[i]]['ra']
            qso_dec = self.df_source_info.iloc[self.i_test[i]]['dec']
            qso_plate = self.df_source_info.iloc[self.i_test[i]]['plate']
            qso_z = self.df_source_info.iloc[self.i_test[i]]['z']
            qso_class = self.df_source_info.iloc[self.i_test[i]]['class']
            plotify = Plotify(theme='ugly')
            _, axs = plotify.get_figax(nrows=2, figsize=(5.8, 8))
            axs[0].plot(self.wavelengths, self.X_test[i], color=plotify.c_orange)
            axs[1].plot(self.wavelengths, preds[i], color=plotify.c_orange)
            axs[0].set_title(f'ra = {qso_ra}, dec = {qso_dec}, \n z = {qso_z}, plate = {qso_plate}, class = {qso_class} \n', fontsize=14)
            axs[1].set_title(f'Autoencoder recreation \n')
            axs[0].set_ylabel(r'$F_{\lambda[10^{-17} erg \: cm^{-2}s^{-1} Å^{-1}]}$', fontsize=14)
            axs[1].set_ylabel(r'$F_{\lambda[10^{-17} erg \: cm^{-2}s^{-1} Å^{-1}]}$', fontsize=14)
            axs[1].set_xlabel('Wavelength (Å)')
            plt.subplots_adjust(hspace=0.4)
            plt.savefig(f'plots/autoencoder/__all_sources/_autoencoder_{i}', dpi=160)
        return preds
def main():
    """Load the first 5000 rows of the balanced SDSS dataset and train the autoencoder."""
    df_fluxes = pd.read_hdf('data/sdss/preprocessed/balanced.h5', key='fluxes').head(5000)
    df_source_info = pd.read_hdf('data/sdss/preprocessed/balanced.h5', key='source_info').head(5000)
    df_wavelengths = pd.read_hdf('data/sdss/preprocessed/balanced.h5', key='wavelengths')
    # load_model=False: train from scratch; weights_path is only used when loading.
    ae = AutoEncoder(df_source_info, df_fluxes, df_wavelengths, load_model=False, weights_path='logs/colab-logs/_all_sources1-14_autoencoder.epoch30.h5')
    ae.train_model(epochs=12, batch_size=64)
if __name__ == "__main__":
main() | 38 | 153 | 0.544002 | [
"MIT"
] | csepreghy/spectral-analysis | spectral_analysis/unsupervised_learning/autoencoder/autoencoder_bestmodel.py | 9,389 | Python |
# -*- coding: utf-8 -*-
"""
Django settings for server project.
For more information on this file, see
https://docs.djangoproject.com/en/1.11/topics/settings/
For the full list of settings and their config, see
https://docs.djangoproject.com/en/1.11/ref/settings/
"""
from typing import Tuple
from server.settings.components import BASE_DIR, config
# Quick-start development settings - unsuitable for production
# See https://docs.djangoproject.com/en/1.11/howto/deployment/checklist/
SECRET_KEY = config('DJANGO_SECRET_KEY')
# Application definition:
INSTALLED_APPS: Tuple[str, ...] = (
    # Default django apps:
    'django.contrib.auth',
    'django.contrib.contenttypes',
    'django.contrib.sessions',
    'django.contrib.messages',
    'django.contrib.staticfiles',
    # django-admin:
    'django.contrib.admin',
    'django.contrib.admindocs',
    # Security (django-axes: login attempt tracking/lockout):
    'axes',
    # Your apps go here:
    'server.main_app',
)
MIDDLEWARE: Tuple[str, ...] = (
    # Content Security Policy:
    'csp.middleware.CSPMiddleware',
    # Django:
    'django.middleware.security.SecurityMiddleware',
    'django.contrib.sessions.middleware.SessionMiddleware',
    'django.middleware.locale.LocaleMiddleware',
    'django.middleware.common.CommonMiddleware',
    'django.middleware.csrf.CsrfViewMiddleware',
    'django.contrib.auth.middleware.AuthenticationMiddleware',
    # NOTE(review): SessionAuthenticationMiddleware is deprecated since
    # Django 1.10 and removed in 2.0 — confirm it is still needed here.
    'django.contrib.auth.middleware.SessionAuthenticationMiddleware',
    'django.contrib.messages.middleware.MessageMiddleware',
    'django.middleware.clickjacking.XFrameOptionsMiddleware',
)
ROOT_URLCONF = 'server.urls'
WSGI_APPLICATION = 'server.wsgi.application'
# Database
# https://docs.djangoproject.com/en/1.11/ref/settings/#databases
DATABASES = {
    'default': {
        # Choices are: postgresql_psycopg2, mysql, sqlite3, oracle
        'ENGINE': 'django.db.backends.postgresql_psycopg2',
        # Database name or filepath if using 'sqlite3':
        'NAME': config('POSTGRES_DB'),
        # You don't need these settings if using 'sqlite3':
        'USER': config('POSTGRES_USER'),
        'PASSWORD': config('POSTGRES_PASSWORD'),
        'HOST': config('DJANGO_DATABASE_HOST'),
        'PORT': config('DJANGO_DATABASE_PORT', cast=int),
        # Persistent connections: keep each DB connection open for 60s.
        'CONN_MAX_AGE': config('CONN_MAX_AGE', cast=int, default=60),
    },
}
# Internationalization
# https://docs.djangoproject.com/en/1.11/topics/i18n/
LANGUAGE_CODE = 'en-us'
USE_I18N = True
USE_L10N = True
LANGUAGES = (
    ('en', 'English'),
    ('ru', 'Russian'),
)
LOCALE_PATHS = (
    'locale/',
)
USE_TZ = True
TIME_ZONE = 'UTC'
# Static files (CSS, JavaScript, Images)
# https://docs.djangoproject.com/en/1.11/howto/static-files/
STATIC_URL = '/static/'
STATICFILES_FINDERS = (
    'django.contrib.staticfiles.finders.FileSystemFinder',
    'django.contrib.staticfiles.finders.AppDirectoriesFinder',
)
# Templates
# https://docs.djangoproject.com/en/1.11/ref/templates/api
TEMPLATES = [{
    'APP_DIRS': True,
    'BACKEND': 'django.template.backends.django.DjangoTemplates',
    'DIRS': [
        # Contains plain text templates, like `robots.txt`:
        BASE_DIR.joinpath('server', 'templates'),
    ],
    'OPTIONS': {
        'context_processors': [
            # default template context processors
            'django.contrib.auth.context_processors.auth',
            'django.template.context_processors.debug',
            'django.template.context_processors.i18n',
            'django.template.context_processors.media',
            'django.contrib.messages.context_processors.messages',
            'django.template.context_processors.request',
        ],
    },
}]
# Media files
# Media-root is commonly changed in production
# (see development.py and production.py).
MEDIA_URL = '/media/'
MEDIA_ROOT = BASE_DIR.joinpath('media')
# Django default authentication system.
# https://docs.djangoproject.com/en/1.11/topics/auth/
# AUTH_USER_MODEL = 'auth_app.User'
AUTHENTICATION_BACKENDS = (
    'django.contrib.auth.backends.ModelBackend',
)
# Strongest hashers first; entries after the first are kept for verifying
# (and upgrading) passwords stored with older algorithms.
PASSWORD_HASHERS = [
    'django.contrib.auth.hashers.BCryptSHA256PasswordHasher',
    'django.contrib.auth.hashers.BCryptPasswordHasher',
    'django.contrib.auth.hashers.PBKDF2PasswordHasher',
    'django.contrib.auth.hashers.PBKDF2SHA1PasswordHasher',
    'django.contrib.auth.hashers.Argon2PasswordHasher',
]
| 26.30303 | 72 | 0.698387 | [
"MIT"
] | FrolovIlya78/wemake-django-template | {{cookiecutter.project_name}}/server/settings/components/common.py | 4,340 | Python |
from setuptools import find_packages, setup
# Package metadata for the qualprep data-preparation library.
setup(
    name='qualprep',
    packages=find_packages(include=['qualprep']),
    version='0.1.1',
    description='Python library to prepare data',
    author='Lena Berger',
    license='MIT',
    install_requires=['pandas', 'numpy', 'tqdm'],
    setup_requires=['pytest-runner'],
    tests_require=['pytest==4.4.1'],
    test_suite='tests',
) | 28.714286 | 50 | 0.641791 | [
"BSD-2-Clause"
] | berglen/qualprep | setup.py | 402 | Python |
from puq import *
import numpy as np
# test case with just a single point
def run():
    """Build a Sweep of the Rosenbrock test program over fixed (x, y) points."""
    # Declare our parameters here. Both are uniform on [-2, 2].
    param_x = UniformParameter('x', 'x', min=-2, max=2)
    param_y = UniformParameter('y', 'y', min=-2, max=2)
    # Run the jobs on the local machine.
    host = InteractiveHost()
    # any of these should work
    # valarray = np.array([[1],[0]])
    points = [[-1, 0, 1], [0, 0, 0]]
    sweep_method = SimpleSweep([param_x, param_y], points)
    prog = TestProgram('./rosen_prog.py')
    return Sweep(sweep_method, host, prog)
| 23.590909 | 62 | 0.597303 | [
"MIT"
] | zoidy/puq | examples/simple_sweep/rosen_1.py | 519 | Python |
# import datetime, time
from typing import List
from datetime import datetime, timedelta
import pytz
import os
from puffmarker.domain.datapoint import DataPoint
from puffmarker.input.import_stream_processor_inputs import load_data, load_data_offset
data_dir = '/home/nsaleheen/data/rice_ema_puffmarker_activity_loc/'
# data_dir = '/home/nsaleheen/data/RICE_data/without_raw_data/'
smoking_self_report_file = 'SMOKING+SELF_REPORT+PHONE.csv'
activity_type_file = 'ACTIVITY_TYPE+PHONE.csv'
puffmarker_smoking_epi_cloud_file = 'PUFFMARKER_SMOKING_EPISODE+PHONE.csv'
# streamprocessor_puffmarker_smoking_epi_file = 'streamprocessor.puffMarker.smoking.episode.rip.wrist.combine.csv'
streamprocessor_puffmarker_smoking_epi_file = 'org.md2k.streamprocessor+PUFFMARKER_SMOKING_EPISODE+PHONE.csv'
# streamprocessor_puffmarker_smoking_epi_file = 'puffmarker_streamprocessor.csv'
ema_random_file = 'EMA+RANDOM_EMA+PHONE.csv'
ema_smoking_file = 'EMA+SMOKING_EMA+PHONE.csv'
ema_end_of_day_file = 'EMA+END_OF_DAY_EMA+PHONE.csv'
ema_stressed_file = 'EMA+STRESS_EMA+PHONE.csv'
tz = pytz.timezone('US/Central')
print(tz)
# unix time to '2017-11-01 15:52:00'
def unixtime_to_datetime_pre(timestamp):
    """Millisecond epoch -> 'MM-DD-YYYY HH:MM:SS' string in US/Central."""
    seconds = timestamp / 1000
    return datetime.fromtimestamp(seconds, tz).strftime('%m-%d-%Y %H:%M:%S')
def unixtime_to_datetime(timestamp):
    """Millisecond epoch -> 'MM/DD HH:MM:SS' string in US/Central."""
    seconds = timestamp / 1000
    return datetime.fromtimestamp(seconds, tz).strftime('%m/%d %H:%M:%S')
# unix time to '2017-11-01 15:52:00' -> '2017-11-01'
def unixtime_to_date(timestamp):
dt = unixtime_to_datetime(timestamp)
return dt.split(' ')[0]
# unix time to '2017-11-01 15:52:00' -> '15:52:00'
def unixtime_to_time(timestamp):
    """Millisecond epoch -> the 'HH:MM:SS' time part of the formatted timestamp."""
    return unixtime_to_datetime(timestamp).split(' ')[1]
# unix time to '15*52' in minutes
def unixtime_to_timeOfDay(timestamp):
    """Millisecond epoch -> minute-of-day (hours * 60 + minutes) in US/Central."""
    hh, mm, _ = unixtime_to_time(timestamp).split(':')
    return int(hh) * 60 + int(mm)
ut = 1512506705814 # 1386181800
print(unixtime_to_datetime(ut))
print(unixtime_to_date(ut))
print(unixtime_to_time(ut))
print(unixtime_to_timeOfDay(ut))
# timezone = datetime.timezone(datetime.timedelta(milliseconds=offset))
# ts = datetime.datetime.fromtimestamp(ts, timezone)
import json
def get_fileName(cur_dir, file_sufix):
    """Return the name of the first entry in *cur_dir* ending with *file_sufix*.

    Returns:
        str | None: a bare filename (not a full path), or None when nothing
        matches. "First" follows os.listdir() order, which is arbitrary and
        platform-dependent when several entries match.
    """
    # next() with a default stops at the first match instead of building the
    # full list of matches.
    return next(
        (name for name in os.listdir(cur_dir) if name.endswith(file_sufix)),
        None)
def get_EMA_data(cur_dir, filename):
    """Read one EMA export file into parsed rows.

    Each useful line looks like ``<ms_epoch>,<offset>,"<payload>"``; the
    payload keeps embedded commas because the line is split at most twice.

    Args:
        cur_dir: directory prefix the filename is appended to (callers pass
            it already ending with a separator).
        filename: bare file name, or None.

    Returns:
        list of ``[start_time, offset, sample]`` where start_time is a naive
        local-time datetime, offset is an int, and sample has its first and
        last characters (the surrounding quotes) stripped. Returns [] when
        *filename* is None.
    """
    if filename is None:
        return []
    # FIX: use a context manager so the handle is closed even if parsing raises
    # (the original open/read/close leaked the handle on errors).
    with open(cur_dir + filename) as fp:
        lines = fp.read().splitlines()
    data = []
    for line in lines:
        if len(line) > 1:
            ts, offset, sample = line.split(',', 2)
            # Millisecond epoch -> naive datetime in the machine's local zone.
            start_time = datetime.fromtimestamp(int(float(ts)) / 1000.0)
            data.append([start_time, int(offset), sample[1:-1]])
    return data
# random ema + stressed EMA
# sample = (#smoked, from_time, to_time); eg: "2 hrs - 4 hrs" one cig smoked (1, 2*60*60*1000, 4*60*60*1000)
def get_random_EMA(cur_dir, filename) -> List[DataPoint]:
    """Parse random/stressed EMA responses into smoking-report DataPoints.

    Each returned DataPoint's sample is ``[n_smoked, from_ms, to_ms, ...]``:
    the reported cigarette count followed by one (from, to) millisecond
    window per selected "how long ago" option, measured backwards from the
    EMA's timestamp. E.g. "2 hrs - 4 hrs" -> (2*60*60*1000, 4*60*60*1000).

    NOTE(review): the question indices 32-35 are hard-coded positions in the
    survey definition — verify against the deployed EMA instrument.
    """
    emas = get_EMA_data(cur_dir, filename)
    data = []
    for ema in emas:
        d = ema[2]
        jsn_file = json.loads(d)
        status = jsn_file['status']
        # print(jsn_file['status'])
        if status == 'COMPLETED':
            # Q32: "did you smoke?"; only 'Yes' responses carry counts.
            is_smoked = jsn_file['question_answers'][32]['response'][0]
            if is_smoked == 'Yes':
                # Q33: number of cigarettes; Q34/Q35 hold the "how long ago"
                # options for the single- vs multi-cigarette variants.
                nSmoked = jsn_file['question_answers'][33]['response'][0]
                if int(nSmoked) == 1:
                    nQI = 34
                else:
                    nQI = 35
                # options: ["0 - 2 hrs", "2 hrs - 4 hrs", "4 hrs - 6 hrs", "6 hrs - 8 hrs", "8 hrs - 10 hrs", "10 hrs - 12 hrs", "More than 12 hrs"]
                howlong_ago = jsn_file['question_answers'][nQI]['response']
                sample = [int(nSmoked)]
                for hla in howlong_ago:
                    hla = str(hla)
                    if hla in ["More than 12 hrs"]:
                        # Open-ended option: cap the window at 12-24 hours.
                        sample.extend([12 * 60 * 60 * 1000, 24 * 60 * 60 * 1000])
                        continue
                    # Parse "<a> hrs - <b> hrs" into millisecond bounds.
                    st = hla.split('-')[0]
                    et = hla.split('-')[1]
                    st = st.split(' ')[0]
                    st = int(st.strip()) * 60 * 60 * 1000
                    et = et.strip().split(' ')[0]
                    et = int(et.strip()) * 60 * 60 * 1000
                    sample.extend([st, et])
                # print([ema[0], ema[1], nSmoked, howlong_ago, sample])
                # data.append([ema[0], ema[1], int(nSmoked)])
                data.append(DataPoint(start_time=ema[0], offset=ema[1], sample=sample))
    return data
# Confirm refute
def get_smoking_EMA(cur_dir, filename) -> List[DataPoint]:
    """Parse confirm/refute smoking EMAs into DataPoints (sample 1=yes, 0=no).

    Only COMPLETED surveys contribute a DataPoint; the first question's
    answer prefix decides confirmation.
    """
    results = []
    for start_time, offset, raw in get_EMA_data(cur_dir, filename):
        answer_doc = json.loads(raw)
        if answer_doc['status'] != 'COMPLETED':
            continue
        # First three characters of the first answer ('Yes'/'No ').
        prefix = answer_doc['question_answers'][0]['question_answer'][0:3]
        smoked = 1 if prefix.lower() == 'yes' else 0
        results.append(DataPoint(start_time=start_time, offset=offset, sample=smoked))
    return results
def get_smoking_self_report(cur_dir, filename) -> List[DataPoint]:
    """Collect positive smoking self-reports as DataPoints (sample always 1).

    A report counts when its 'message' field contains the substring 'YES'.
    """
    reports = []
    for start_time, offset, raw in get_EMA_data(cur_dir, filename):
        message = json.loads(raw)['message']
        if 'YES' in message:
            reports.append(DataPoint(start_time=start_time, offset=offset, sample=1))
    return reports
cur_dir = data_dir + '2007/'
# emas = get_smoking_self_report(cur_dir, get_fileName(cur_dir, smoking_self_report_file))
# print(emas)
# emas = get_smoking_EMA(cur_dir, get_fileName(cur_dir, ema_smoking_file))
# print(emas)
# emas = get_random_EMA(cur_dir, get_fileName(cur_dir, ema_stressed_file))
# print(emas)
# emas = get_random_EMA(cur_dir, get_fileName(cur_dir, ema_random_file))
# print(emas)
def get_RICE_PILOT_EMAs(pid):
    """Report how many puffMarker smoking episodes are supported by EMAs.

    For participant *pid*, each detected episode is checked against three
    evidence sources: self-reports within +/-30 min, confirm/refute EMAs in a
    (-10 min, +30 min) window, and random/stressed EMA recall windows that
    cover the episode time. Prints summary counts; returns nothing.
    """
    cur_dir = data_dir + pid + '/'
    # smoking_epis = load_data(cur_dir + get_fileName(cur_dir, streamprocessor_puffmarker_smoking_epi_file))
    smoking_epis = load_data_offset(cur_dir + get_fileName(cur_dir, streamprocessor_puffmarker_smoking_epi_file))
    smoking_selfreport = get_smoking_self_report(cur_dir, get_fileName(cur_dir, smoking_self_report_file))
    smoking_emas = get_smoking_EMA(cur_dir, get_fileName(cur_dir, ema_smoking_file))
    random_emas = get_random_EMA(cur_dir, get_fileName(cur_dir, ema_random_file))
    stressed_emas = get_random_EMA(cur_dir, get_fileName(cur_dir, ema_stressed_file))
    # One support flag per episode and evidence source.
    sup_sr = [0] * len(smoking_epis)
    sup_cr = [0] * len(smoking_epis)
    sup_ema = [0] * len(smoking_epis)
    for i, epi in enumerate(smoking_epis):
        # Self-report within 30 minutes either side of the episode.
        for sr in smoking_selfreport:
            time_diff = (sr.start_time - epi.start_time).total_seconds()
            if (time_diff > -1800 and time_diff < 1800):
                sup_sr[i] = 1
                break
        # Confirm/refute EMA between 10 min before and 30 min after.
        for sr in smoking_emas:
            time_diff = (sr.start_time - epi.start_time).total_seconds()
            if (time_diff > -600 and time_diff < 1800):
                sup_cr[i] = 1
                break
        # Recall windows: sample[1]/sample[2] are "how long ago" bounds in ms,
        # measured backwards from the EMA timestamp.
        for re in random_emas:
            st = re.start_time - timedelta(milliseconds=re.sample[2])
            et = re.start_time - timedelta(milliseconds=re.sample[1])
            if (epi.start_time >= st and epi.start_time <= et):
                sup_ema[i] = 1
                break
        for re in stressed_emas:
            st = re.start_time - timedelta(milliseconds=re.sample[2])
            et = re.start_time - timedelta(milliseconds=re.sample[1])
            if (epi.start_time >= st and epi.start_time <= et):
                sup_ema[i] = 1
                break
    # Digit-encode the three flags (SR=hundreds, CR=tens, EMA=ones) per episode.
    sup = [sup_sr[i] * 100 + sup_cr[i] * 10 + sup_ema[i] for i in range(len(sup_ema))]
    print('se=' + str(len(smoking_epis)) + ' : sup sr = ' + str(sum(sup_sr)) + ' : sup cr = ' + str(
        sum(sup_cr)) + ' : sup ema = ' + str(sum(sup_ema)))
    non_sup = len([v for v in sup if v == 0])
    print('Supported : Not supported = ' + str(len(sup) - non_sup) + ' : ' + str(non_sup))
    # print(sup)
    # print(len(smoking_selfreport))
    # print(len(smoking_emas))
    # print(len(random_emas))
    # print(len(stressed_emas))
    # print(smoking_epis)
    # print(smoking_emas)
    # print(smoking_selfreport)
    # print(random_emas)
    # print(stressed_emas)
    #
# Ad-hoc driver: run the support analysis for a single pilot participant.
# , "2008", "2010", "2011", "2012"
pids = ["2006", "2007", "2009", "2013", "2014", "2015", "2016", "2017"]
# for pid in pids:
#     print('-----------' + pid + '---------------------------')
#     get_RICE_PILOT_EMAs(pid)
get_RICE_PILOT_EMAs('2006')
# -----------2006---------------------------
# se=25 : sup sr = 19 : sup cr = 18 : sup ema = 4
# Supported : Not supported = 21 : 4
# -----------2007---------------------------
# se=6 : sup sr = 5 : sup cr = 6 : sup ema = 0
# Supported : Not supported = 6 : 0
# -----------2009---------------------------
# se=32 : sup sr = 14 : sup cr = 30 : sup ema = 10
# Supported : Not supported = 30 : 2
# -----------2013---------------------------
# se=113 : sup sr = 72 : sup cr = 108 : sup ema = 49
# Supported : Not supported = 113 : 0
# -----------2014---------------------------
# se=44 : sup sr = 6 : sup cr = 43 : sup ema = 23
# Supported : Not supported = 44 : 0
# -----------2015---------------------------
# se=0 : sup sr = 0 : sup cr = 0 : sup ema = 0
# Supported : Not supported = 0 : 0
# -----------2016---------------------------
# se=0 : sup sr = 0 : sup cr = 0 : sup ema = 0
# Supported : Not supported = 0 : 0
# -----------2017---------------------------
# se=8 : sup sr = 0 : sup cr = 5 : sup ema = 2
# Supported : Not supported = 5 : 3 | 36.25784 | 148 | 0.591486 | [
"MIT"
] | nsaleheen/puffmarker_plus_plus | puffmarker/input/import_RICE_pilot_data.py | 10,406 | Python |
def get_min_max(ints):
    """
    Return a tuple (min, max) out of a list of unsorted integers.

    Args:
        ints(list): list of integers containing one or more integers

    Returns:
        tuple: (min_value, max_value), or (None, None) for non-list input
        or an empty list.
    """
    # Reject non-list input and the empty list in one guard; both produced
    # (None, None) before as well.
    if not isinstance(ints, list) or not ints:
        return None, None
    # Seed both extremes from the first element, then scan once (O(n)).
    min_value = ints[0]
    max_value = ints[0]
    for value in ints:
        if value < min_value:
            min_value = value
        elif value > max_value:
            max_value = value
    return min_value, max_value
# Example Test Case of Ten Integers
import random
# Test case 1: random int array
l = [i for i in range(0, 10)]  # a list containing 0 - 9
print(f"Test case 1 - random list of int: {l}")
random.shuffle(l)
# Should print "Pass" as the result should be (0, 9)
print ("Pass" if ((0, 9) == get_min_max(l)) else "Fail")
# Test case 2: empty array
print(f"Test case 2 - empty array")
# Should print "Pass" as the result should be (None, None)
print ("Pass" if ((None, None) == get_min_max([])) else "Fail")
# Test case 3: array with single item
print(f"Test case 3 - array with single item")
# Should print "Pass" as the result should be (1, 1)
print ("Pass" if ((1, 1) == get_min_max([1])) else "Fail")
# Test case 4: non array input
print(f"Test case 4 - non array input")
# Should print "Pass" as the result should be (None, None)
print ("Pass" if ((None, None) == get_min_max(10)) else "Fail")
| 26.644068 | 67 | 0.636768 | [
"MIT"
] | johangenis/problems_vs_algorithms | problem_6.py | 1,572 | Python |
from bravado_core.request import IncomingRequest, unmarshal_request
from bravado_core.swagger20_validator import ValidationError
from .support import OpenAPITest
class OpenAPIRequestsValidationTest(OpenAPITest):
    """Negative tests: malformed requests must fail OpenAPI validation.

    Each test builds an intentionally invalid ``IncomingRequest`` and
    asserts that ``unmarshal_request`` raises ``ValidationError`` for the
    targeted operation of the service's OpenAPI specification.
    """

    def setUp(self):
        super().setUp()
        # Start from a blank request; each test fills in the invalid part.
        self.request = IncomingRequest()
        self.request.path = {}
        self.request.headers = {}
        self.request.query = {}
        self.request._json = {}
        self.request.json = lambda: self.request._json

    def test_validate_bucket_path(self):
        # Empty path dict: the required bucket id is missing.
        self.assertRaises(
            ValidationError, unmarshal_request, self.request, self.resources["Buckets"].get_bucket
        )

    def test_validate_groups_path(self):
        self.assertRaises(
            ValidationError, unmarshal_request, self.request, self.resources["Groups"].get_groups
        )

    def test_validate_group_path(self):
        # Each path is missing at least one required component.
        paths = [{}, {"bucket_id": "b1"}, {"id": "g1"}]
        for path in paths:
            self.request.path = path
            self.assertRaises(
                ValidationError,
                unmarshal_request,
                self.request,
                self.resources["Groups"].get_group,
            )

    def test_validate_collections_path(self):
        self.assertRaises(
            ValidationError,
            unmarshal_request,
            self.request,
            self.resources["Collections"].get_collections,
        )

    def test_validate_collection_path(self):
        # Each path is missing at least one required component.
        paths = [{}, {"bucket_id": "b1"}, {"id": "c1"}]
        for path in paths:
            self.request.path = path
            self.assertRaises(
                ValidationError,
                unmarshal_request,
                self.request,
                self.resources["Collections"].get_collection,
            )

    def test_validate_records_path(self):
        paths = [{}, {"bucket_id": "b1"}, {"collection_id": "c1"}]
        for path in paths:
            self.request.path = path
            self.assertRaises(
                ValidationError,
                unmarshal_request,
                self.request,
                self.resources["Records"].get_records,
            )

    def test_validate_record_path(self):
        paths = [{}, {"bucket_id": "b1", "collection_id": "c1"}, {"id": "r1"}]
        for path in paths:
            self.request.path = path
            self.assertRaises(
                ValidationError,
                unmarshal_request,
                self.request,
                self.resources["Records"].get_record,
            )

    def test_validate_data(self):
        # "data" must be an object, not a bare string.
        bodies = [{"data": "aaa"}]
        for body in bodies:
            self.request._json = body
            self.assertRaises(
                ValidationError,
                unmarshal_request,
                self.request,
                self.resources["Buckets"].create_bucket,
            )

    def test_validate_permissions(self):
        # Permissions must map principal-list names to lists of strings.
        bodies = [
            {"permissions": "aaa"},
            {"permissions": {"read": "aaa"}},
            {"permissions": {"read": [111]}},
        ]
        for body in bodies:
            self.request._json = body
            self.assertRaises(
                ValidationError,
                unmarshal_request,
                self.request,
                self.resources["Buckets"].create_bucket,
            )

    def test_validate_queries(self):
        # Pagination/filtering query parameters with wrong types.
        queries = [{"_since": "aaa"}, {"_before": "aaa"}, {"_limit": "aaa"}, {"_token": {}}]
        for query in queries:
            self.request.query = query
            self.assertRaises(
                ValidationError,
                unmarshal_request,
                self.request,
                self.resources["Buckets"].get_buckets,
            )

    def test_validate_headers(self):
        # Concurrency-control headers with invalid values.
        headers = [{"If-None-Match": "123"}, {"If-Match": "123"}]
        for head in headers:
            self.request.headers = head
            self.assertRaises(
                ValidationError,
                unmarshal_request,
                self.request,
                self.resources["Buckets"].get_buckets,
            )

    def test_validate_batch_requests_method(self):
        self.request._json = {"requests": [{"method": "AAA", "path": "/buckets/b1"}]}
        self.assertRaises(
            ValidationError, unmarshal_request, self.request, self.resources["Batch"].batch
        )

    def test_validate_batch_requests_path(self):
        self.request._json = {"requests": [{"method": "GET", "path": 123}]}
        self.assertRaises(
            ValidationError, unmarshal_request, self.request, self.resources["Batch"].batch
        )

    def test_validate_batch_requests_body(self):
        self.request._json = {"requests": [{"method": "GET", "path": "/buckets/b1", "body": []}]}
        self.assertRaises(
            ValidationError, unmarshal_request, self.request, self.resources["Batch"].batch
        )

    def test_validate_batch_requests_header(self):
        self.request._json = {
            "requests": [{"method": "GET", "path": "/buckets/b1", "body": {}, "headers": []}]
        }
        self.assertRaises(
            ValidationError, unmarshal_request, self.request, self.resources["Batch"].batch
        )

    def test_validate_batch_defaults(self):
        # "defaults" must be an object, not a list.
        self.request._json = {
            "defaults": [],
            "requests": [{"method": "GET", "path": "/buckets/b1"}],
        }
        self.assertRaises(
            ValidationError, unmarshal_request, self.request, self.resources["Batch"].batch
        )

    def test_validate_batch_defaults_method(self):
        self.request._json = {"defaults": {"method": "AAA"}, "requests": [{"path": "/buckets/b1"}]}
        self.assertRaises(
            ValidationError, unmarshal_request, self.request, self.resources["Batch"].batch
        )

    def test_validate_batch_defaults_body(self):
        self.request._json = {
            "defaults": {"body": []},
            "requests": [{"method": "PUT", "path": "/buckets/b1"}],
        }
        self.assertRaises(
            ValidationError, unmarshal_request, self.request, self.resources["Batch"].batch
        )

    def test_validate_batch_defaults_headers(self):
        self.request._json = {
            "defaults": {"headers": []},
            "requests": [{"method": "GET", "path": "/buckets/b1"}],
        }
        self.assertRaises(
            ValidationError, unmarshal_request, self.request, self.resources["Batch"].batch
        )
| 34.870968 | 99 | 0.557046 | [
"Apache-2.0"
] | dreuse/kinto | tests/openapi/test_validation.py | 6,486 | Python |
def calculate_critical_value(size : int, alpha : float) -> float:
    """
    Return the two-sided Grubbs' test critical value for a sample.

    Args:
        size: number of observations in the sample (must be > 2).
        alpha: significance level of the test.

    Returns:
        The critical value G that the Grubbs statistic is compared against.
    """
    # Upper critical value of Student's t with size-2 degrees of freedom
    # at the Bonferroni-adjusted level alpha / (2 * size).
    t_dist = stats.t.ppf(1 - alpha / (2 * size), size - 2)
    # G = ((N - 1) / sqrt(N)) * sqrt(t^2 / (N - 2 + t^2)).
    # np.abs(t) replaces the original np.sqrt(np.square(t)) round-trip:
    # same value, clearer intent.
    numerator = (size - 1) * np.abs(t_dist)
    denominator = np.sqrt(size) * np.sqrt(size - 2 + np.square(t_dist))
    critical_value = numerator / denominator
    print("Grubbs Critical Value: {}".format(critical_value))
    return critical_value
"Apache-2.0"
] | bharathjinka09/docly-demo | script_2.py | 391 | Python |
"""
Visualizing bidirectionnal Dijkstra
using matplotlib
"""
import sys
from dataclasses import dataclass
from heapq import heappush, heappop
from itertools import permutations
from collections import defaultdict
import matplotlib
from matplotlib import pyplot as plt
import matplotlib.animation as animation
from dijkstra import (
Node,
generate_random_graph,
build_shortest_path,
dijkstra,
)
@dataclass
class Context:
    """Mutable snapshot of one Dijkstra iteration, re-yielded each step."""
    distances: dict       # node id -> best known cost (includes heuristic offset)
    previous: dict        # node id -> predecessor node id on the best path
    node: None            # node settled this iteration; None once the heap is empty
    visited_nodes: set    # ids of already-settled nodes
def dijkstra_iterator(nodes: list[Node], src_id: int, hf=lambda x: 0.0):
    """
    Internal loop of the Dijkstra algorithm as a step-by-step iterator.

    Yields the same mutable ``Context`` after each node is settled; a final
    Context with ``node=None`` signals exhaustion of the frontier.
    ``hf`` is an optional heuristic (makes this an A* iterator).
    """
    visited_nodes = set()
    h: list[tuple[float, Node]] = []
    previous = dict()
    # Unknown distances default to "infinity" (sys.maxsize).
    distances = defaultdict(lambda: sys.maxsize)
    distances[src_id] = hf(nodes[src_id])
    ctx: Context = Context(
        previous=previous,
        distances=distances,
        node=None,
        visited_nodes=visited_nodes,
    )
    heappush(h, (0.0, nodes[src_id]))
    while h:
        _, node = heappop(h)
        # Lazy deletion: stale heap entries for settled nodes are skipped.
        if node.id in visited_nodes:
            continue
        dist = distances[node.id]
        for n, d in (
            (nodes[k], v)
            for k, v in node.neighbours.items()
            if k not in visited_nodes
        ):
            new_dist = dist + d
            # A*-style cost: stored distances carry the heuristic offset.
            cost = new_dist + hf(n) - hf(node)
            # NOTE: '<=' (not '<') also refreshes the predecessor on ties.
            if cost <= distances[n.id]:
                distances[n.id] = cost
                previous[n.id] = node.id
                heappush(h, (cost, n))
        visited_nodes.add(node.id)
        ctx.node = node
        yield ctx
    # Frontier exhausted: yield once more with node=None as a sentinel.
    ctx.node = None
    yield ctx
def dijkstra_forward(
    nodes: list[Node], src_id: int, dst_id: int, hf=lambda x: 0.0
) -> list[int]:
    """
    'classical' forward Dijkstra but based on our iterator.

    Returns a 2-tuple (distance, path) when dst is reached, or ([], [])
    when no path exists (the iterator yields node=None when exhausted).
    """
    coro = dijkstra_iterator(nodes, src_id, hf=hf)
    for ctx in coro:
        if ctx.node is None:
            return [], []
        elif ctx.node.id == dst_id:
            return ctx.distances[dst_id], list(
                build_shortest_path(ctx.previous, dst_id, src_id)
            )
def bidir_dijkstra(
    nodes: list[Node],
    src_id: int,
    dst_id: int,
    hff=lambda _: 0.0,
    hfb=lambda _: 0.0,
    consistent: bool = True,
) -> list[int]:
    """
    bidirectionnal dijkstra, we search from both start => end
    and end => start using two iterators.
    hff and hfb are optional heuristics
    for respectively the forward and backward iterators
    (for later bidir A*)

    Returns (shortest_distance, path, (forward_nodes, backward_nodes)),
    or ([], [], (forward_nodes, backward_nodes)) when no path exists.
    """
    forward = dijkstra_iterator(nodes, src_id, hf=hff)
    backward = dijkstra_iterator(nodes, dst_id, hf=hfb)
    shortest = sys.maxsize
    # Contact nodes where the best-known meeting of the two searches occurs.
    forward_node = backward_node = None
    f = []
    b = []
    for idx, (ctx_forward, ctx_backward) in enumerate(zip(forward, backward)):
        if any(x.node is None for x in (ctx_forward, ctx_backward)):
            # no path between the two nodes
            return [], [], (f, b)
        f.append(ctx_forward.node)
        b.append(ctx_backward.node)
        # Termination: once a contact exists and (for consistent heuristics)
        # the sum of both frontiers' settled costs reaches the best meeting
        # distance, the shortest path can no longer improve.
        if forward_node and (
            not consistent
            or sum(
                x.distances[x.node.id] - hf(x.node)
                for x, hf in ((ctx_forward, hff), (ctx_backward, hfb))
            )
            >= shortest
        ):
            forward_path = build_shortest_path(
                ctx_forward.previous, forward_node.id, src_id
            )
            # Backward half is reversed so the joined path runs src -> dst.
            backward_path = build_shortest_path(
                ctx_backward.previous, backward_node.id, dst_id
            )[::-1]
            path = forward_path + backward_path
            return (
                shortest,
                path,
                (f, b),
            )
        else:
            # Check both directions symmetrically for a frontier contact:
            # a neighbour of this side's settled node already settled by
            # the other side.
            for (ctx, hf), (ctx2, hf2) in permutations(
                ((ctx_forward, hff), (ctx_backward, hfb)), 2
            ):
                for n, d in ctx.node.neighbours.items():
                    if n in ctx2.visited_nodes:
                        # True path length: strip the heuristic offsets
                        # baked into each side's stored distances.
                        distance = (
                            ctx.distances[ctx.node.id]
                            + ctx2.distances[n]
                            + d
                            - hf(ctx.node)
                            - hf2(nodes[n])
                        )
                        if distance < shortest:
                            shortest = distance
                            forward_node = (
                                ctx.node if ctx is ctx_forward else nodes[n]
                            )
                            backward_node = (
                                ctx.node if ctx is ctx_backward else nodes[n]
                            )
                            print(
                                f'Iter_{idx}: contact between {forward_node}->{backward_node} with d={shortest}'
                            )
class Animator:
    """
    Builds an animation from
    a bidir shortest path finder.

    Node colour codes in the scatter plot: 0=grey (unvisited),
    1=springgreen (forward frontier), 2=red (backward frontier),
    3=white (isolated node with no neighbours).
    """
    def __init__(self, nodes: list[Node], title='', draw_edges=True) -> None:
        self.fig, self.ax = plt.subplots()
        plt.title(title)
        plt.tight_layout()
        self.ax.set_aspect('equal')
        # Toggled on each shortest-path frame to make the path "blink".
        self.i = True
        if draw_edges:
            # Deduplicate undirected edges by sorting each id pair.
            edges = {
                tuple(sorted((n.id, x))) for n in nodes for x in n.neighbours
            }
            for edge in edges:
                from_node, to_node = [nodes[x] for x in edge]
                x = [n.x for n in (from_node, to_node)]
                y = [n.y for n in (from_node, to_node)]
                plt.plot(x, y, color='gray', linewidth=0.5)
        x, y = [n.x for n in nodes], [n.y for n in nodes]
        # NOTE(review): this assigns an attribute named 'scatter' onto the
        # axes object, shadowing Axes.scatter after the call — intentional?
        self.ax.scatter = plt.scatter(
            x,
            y,
            c=[0 for _ in range(len(x))],
            s=[30] + [10] * (len(nodes) - 2) + [30],
            vmin=0,
            vmax=3,
            cmap=matplotlib.colors.ListedColormap(
                ['grey', 'springgreen', 'red', 'white']
            ),
        )
        self._colors = self.ax.scatter.get_array()
        for n in nodes:
            if not n.neighbours:
                self._colors[n.id] = 3
    def update(self, nodes: tuple[Node, Node, list[Node]]):
        """
        Updates the plot with a tuple of nodes (forward, backward, shortest_path)
        """
        f, b, s = nodes
        if not s:
            # Search still running: recolour the two frontier nodes.
            self._colors[f.id] = 1
            self._colors[b.id] = 2
            self.ax.scatter.set_array(self._colors)
            return (self.ax.scatter,)
        else:
            # Final frames: draw the shortest path, alternating colours.
            x = [n.x for n in s]
            y = [n.y for n in s]
            if self.i:
                c = 'green'
            else:
                c = 'orange'
            ap = self.ax.plot(x, y, color=c, linewidth=2)
            self.i = not (self.i)
            return ap
def make_animated_gif(
    title: str,
    g: list[Node],
    dst_file: str,
    fs: list[Node],
    bs: list[Node],
    shortest: list[Node],
    draw_edges: bool = True,
    writer: str = 'ffmpeg',
    interval: int = 250,
    blinking_ratio=0.5,
):
    """
    Makes an animated gif out of two sequences of forward (fs) and backward (bs)
    path-finding algorithm. The final shortest path will be blinked.

    The output is written to imgs/<dst_file>; the 'imgs' directory must
    already exist. `shortest` is a list of node ids into `g`.
    """
    anim = Animator(g, title=title, draw_edges=draw_edges)
    def node_gen():
        # One frame per settled (forward, backward) node pair...
        for fn, bn in zip(fs, bs):
            yield fn, bn, []
        # ...then extra frames showing the final path (blinked by Animator).
        res = [g[i] for i in shortest]
        for _ in range(int(len(fs) * blinking_ratio)):
            yield _, _, res
    ani = animation.FuncAnimation(
        anim.fig,
        anim.update,
        node_gen(),
        interval=interval,
        blit=True,
        repeat_delay=500,
        save_count=len(fs) * 2,
    )
    ani.save(f'imgs/{dst_file}', writer=writer)
if __name__ == '__main__':
    # sanity check on the iterator versus 'simple' implementation
    g = generate_random_graph(100, connect_probability=0.1)
    cost, sp = dijkstra_forward(g, 0, len(g) - 1)
    cost2, sp2 = dijkstra(g, 0, len(g) - 1)
    # we also compare our bidir version against the other two ^^
    cost3, sp3, (f, b) = bidir_dijkstra(g, 0, len(g) - 1)
    # and against a backward run only (path reversed to compare src->dst)
    cost4, sp4 = dijkstra_forward(g, len(g) - 1, 0)
    sp4 = sp4[::-1]
    # Costs are only printed for manual inspection; paths are asserted.
    print(cost, cost2, cost3, cost4)
    for p in (sp, sp2, sp4, sp3):
        print(' -> '.join(str(p) for p in p))
    assert sp == sp2 == sp3 == sp4
    make_animated_gif(
        f'Bidir Dijkstra n={len(f)}', g, 'bidir_100.gif', f, b, sp3
    )
| 28.38796 | 112 | 0.521206 | [
"MIT"
] | colon3ltocard/pythonalgorithms | bidir_dijkstra.py | 8,488 | Python |
'''Deoxyribonucleic acid (DNA) is a chemical found in the nucleus
of cells and carries the "instructions" for the development and functioning of living organisms.
If you want to know more http://en.wikipedia.org/wiki/DNA
In DNA strings, symbols "A" and "T" are complements of each other,
as "C" and "G". You have function with one side of the DNA
(string, except for Haskell); you need to get the other complementary side.
DNA strand is never empty or there is no DNA at all (again, except for Haskell).'''
#ATTGC >>>> TAACG
def DNA_strand(dna):
    """Return the complementary DNA strand: A<->T, C<->G.

    Characters outside the four bases are silently dropped, matching the
    original behaviour. (str.maketrans/translate would keep unknown
    characters, so it is deliberately not used here.)
    """
    complements = {'A': 'T', 'T': 'A', 'G': 'C', 'C': 'G'}
    strand = []
    for base in dna:
        if base in complements:
            strand.append(complements[base])
    return ''.join(strand)
| 41 | 96 | 0.698331 | [
"MIT"
] | Thearakim/warcode | DNA.py | 779 | Python |
import os
import time
import sys

# Run the TerrainCrossing visualizer/judge over many seeds and report the
# mean score. Each case is killed (whole run aborted) past 10 seconds.
total = 0.0
N = 10001 # loop bound: seeds 1..N-1 are tested (10000 cases)
for seed in range(1,N):
    #vis_command = "java TerrainCrossingVis -exec \"/home/dawid/TopCoder/TerrainCrossing/./TerrainCrossing\" -novis -seed "
    vis_command = "java TerrainCrossingVis -exec \"$PWD/./TerrainCrossing\" -novis -seed "
    vis_command = vis_command + str(seed)
    start_time = time.time()
    # Run the judge and capture its stdout lines.
    output = os.popen(vis_command).readlines()
    finish_time = time.time()
    time_elapsed = finish_time - start_time
    # Abort the whole benchmark if a single case exceeds the 10 s budget.
    if(time_elapsed > 10.0):
        print("Exiting...")
        sys.exit()
    # First stdout line (minus its newline) is taken as the score —
    # presumably the judge prints it first; confirm against the tool.
    print("Case " + str(seed-1) + " time: " + str(time_elapsed) + " score: " + str(float(output[0][:-1])), end="\n")
    total = total + float(output[0][:-1])
    #total = total + float(output[-1])
mean = total/(N-1)
print("Mean score: " + str(mean))
"MIT"
] | diwadd/TerrainCrossing | testing_solution.py | 861 | Python |
from django.urls import path
from .views import Index, language_switch

# Namespace for URL reversing, e.g. reverse('portfolio:index').
app_name = 'portfolio'
urlpatterns = [
    path('', Index.as_view(), name='index'),
    # <lang> is passed to language_switch — presumably a language code;
    # verify against the view implementation.
    path('switch-lang/<str:lang>/', language_switch, name='switch-lang'),
]
| 25.777778 | 73 | 0.698276 | [
"MIT"
] | mahdimehrabi/django-portfolio-app | project/apps/portfolio/urls.py | 232 | Python |
import keras.backend as K
from keras.models import Model
from keras.layers import Dense, MaxPooling2D, Conv2D, Activation, \
Dropout, Reshape, Input, BatchNormalization, GRU, Bidirectional, Permute, TimeDistributed
from keras.optimizers import Adam, SGD, Adadelta, RMSprop
import src.model.mfom as mfom
import src.model.objectives as obj
from src.base.model import BaseModel
class SEDOgitsModel(BaseModel):
    """
    The Sound Event Detection model.
    It has time distributed output layer
    # Arguments
        input shape: [batch_sz; band; frame_wnd; channel]
    """
    def __init__(self, config, input_shape, nclass):
        super(SEDOgitsModel, self).__init__(config)
        self.input_shape = input_shape  # (batch, bands, frames, channels)
        self.nclass = nclass            # number of event classes
        self.build()
    def build(self):
        """
        Construct the main structure of the network:
        CNN (freq pooling) -> bidirectional GRU -> time-distributed FC.
        """
        print('DNN input shape', self.input_shape)
        if K.image_dim_ordering() == 'tf':
            batch_sz, bands, frames, channels = self.input_shape
            assert channels >= 1
            channel_axis = 3
            freq_axis = 1
            # Keras layer shape excludes the batch dimension.
            nn_shape = (bands, frames, channels)
        else:
            raise NotImplementedError('[ERROR] Only for TensorFlow background.')
        nb_filters = self.config['feature_maps']
        dropout_rate = self.config['dropout']
        pool_sz = [5, 2, 2] # max-pooling across frequency only
        gru_nb = [32] # [32, 32]
        fc_nb = [32]
        # Input block
        feat_input = Input(shape=nn_shape, name='input')
        x = BatchNormalization(axis=freq_axis, name='bn_0_freq')(feat_input)
        # CNN block: conv/BN/activation + frequency-only max-pooling
        for sz in pool_sz:
            x = Conv2D(filters=nb_filters, kernel_size=(3, 3), padding='same')(x)
            x = BatchNormalization(axis=channel_axis)(x)
            x = Activation(self.config['activation'])(x)
            x = MaxPooling2D(pool_size=(sz, 1))(x)
            x = Dropout(dropout_rate)(x)
        # Reorder to time-major and flatten (freq, channel) per frame.
        x = Permute((2, 1, 3))(x)
        x = Reshape((frames, -1))(x)
        # GRU block
        for n in gru_nb:
            x = Bidirectional(
                GRU(n, activation='tanh', dropout=dropout_rate,
                    recurrent_dropout=dropout_rate, return_sequences=True),
                merge_mode='mul')(x)
        # Fully connected
        for n in fc_nb:
            x = TimeDistributed(Dense(n))(x)
            x = Dropout(dropout_rate)(x)
        x = TimeDistributed(Dense(self.nclass))(x)
        # out dim: [batch, frames, nclass]
        y_pred = Activation(activation=self.config['out_score'], name='output')(x)
        self._compile_model(input=feat_input, output=y_pred, params=self.config)
    def rebuild(self, new_config):
        """
        Recompile the model with the new hyper parameters.
        NOTE: network topology is changing according to the 'new_config'
        """
        self.config.update(new_config)
        batch_sz, bands, frames, channels = self.input_shape
        self.input_shape = (self.config['batch'], bands, self.config['context_wnd'], channels)
        self.build()
    def chage_optimizer(self, new_config, change_out_unit=False):
        """
        Recompile the model with the new loss and optimizer.
        NOTE: network topology is not changing.
        (Method name keeps its historical typo; callers depend on it.)
        """
        if new_config['freeze_wt']:
            # train only the top layers,
            # i.e. freeze all lower layers
            for layer in self.model.layers[:-4]:
                layer.trainable = False
        # cut MFoM layers: use only output prediction scores
        input = self.model.get_layer(name='input').output
        output = self.model.get_layer(name='output').output
        if change_out_unit:
            # Replace the output non-linearity with the new config's one.
            la = self.model.layers[-2].output
            output = Activation(activation=new_config['out_score'], name='output')(la)
            print('[INFO] output scores has been changed: %s to %s' % (self.config['out_score'], new_config['out_score']))
        self._compile_model(input=input, output=output, params=new_config)
    def forward(self, x):
        # When trained with an MFoM loss, predict from the pre-MFoM
        # 'output' layer scores instead of the full training graph.
        out_model = self.model
        if self.model.loss in obj.MFOM_OBJECTIVES:
            input = self.model.get_layer(name='input').output
            preact = self.model.get_layer(name='output').output
            out_model = Model(input=input, output=preact)
        return out_model.predict(x)
    def _compile_model(self, input, output, params):
        """
        Compile network structure with particular loss and optimizer
        """
        # ===
        # choose loss
        # ===
        if params['loss'] in obj.MFOM_OBJECTIVES:
            # add 2 layers for Maximal Figure-of-Merit
            _, _, frames, _ = self.input_shape
            y_true = Input(shape=(frames, self.nclass), name='y_true')
            psi = mfom.UvZMisclassification(name='uvz_misclass')([y_true, output])
            y_pred = mfom.SmoothErrorCounter(name='smooth_error_counter')(psi)
            # MFoM need labels info during training
            input = [y_true, input]
            output = y_pred
            loss = obj.MFOM_OBJECTIVES[params['loss']]
        elif params['loss'] == obj.mfom_eer_embed.__name__:
            loss = obj.mfom_eer_embed
        else:
            loss = params['loss']
        # ===
        # choose optimizer
        # ===
        if params['optimizer'] == 'adam':
            optimizer = Adam(lr=params['learn_rate'], beta_1=0.9, beta_2=0.999, epsilon=1e-08)
        elif params['optimizer'] == 'sgd':
            optimizer = SGD(lr=params['learn_rate'], decay=1e-6, momentum=0.9, nesterov=True)
        elif params['optimizer'] == 'adadelta':
            optimizer = Adadelta(lr=params['learn_rate'])
        elif params['optimizer'] == 'rmsprop':
            optimizer = RMSprop(lr=params['learn_rate'])
        else:
            # Fall through: assume a ready-made Keras optimizer instance.
            optimizer = params['optimizer']
        self.model = Model(input=input, output=output)
        self.model.compile(loss=loss, optimizer=optimizer)
        self.model.summary()
| 39.352941 | 122 | 0.603222 | [
"MIT"
] | Vanova/mfom_attribute_detection | src/model/sed_ogits.py | 6,021 | Python |
from core import Symbol
# Populate this module with one Symbol per Latin letter and per Greek
# letter name, so users can do `from abc import x, y, alpha`.
_latin = list('abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ')
# COSINEQ should not be imported as they clash
_greek = 'alpha beta gamma delta epsilon zeta eta theta iota kappa '\
    'mu nu xi omicron pi rho sigma tau upsilon phi chi psi omega'.split(' ')

for _s in _latin + _greek:
    # Assign directly into the module namespace instead of the old
    # Python-2-only `exec "..."` statement (a syntax error on Python 3).
    globals()[_s] = Symbol(_s)

# Keep the module namespace clean of loop temporaries.
del _latin, _greek, _s
| 31.583333 | 74 | 0.728232 | [
"BSD-3-Clause"
] | fperez/sympy | sympy/abc.py | 379 | Python |
# Generated by Django 3.1.5 on 2021-02-25 11:16
from django.db import migrations, models
class Migration(migrations.Migration):
    """Turn Director.cluster into a many-to-many link to LogicalCluster."""

    dependencies = [
        ('cluster', '0001_initial'),
        ('manager', '0001_initial'),
    ]

    operations = [
        migrations.AlterField(
            model_name='director',
            name='cluster',
            field=models.ManyToManyField(related_name='directory', to='cluster.LogicalCluster'),
        ),
    ]
| 23.05 | 96 | 0.607375 | [
"Apache-2.0"
] | allegro/vaas | vaas-app/src/vaas/manager/migrations/0002_auto_20210225_1216.py | 461 | Python |
# Copyright (c) 2020 PaddlePaddle Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import unittest
import paddle
import paddle.fluid as fluid
import os
from fleet_meta_optimizer_base import TestFleetMetaOptimizer
from paddle.distributed.fleet.meta_optimizers import RecomputeOptimizer
paddle.enable_static()
class TestFleetRecomputeMetaOptimizer(TestFleetMetaOptimizer):
    """Unit tests for the fleet RecomputeOptimizer meta-optimizer.

    Each test builds a small network, enables the 'recompute' strategy
    (optionally combined with lars/lamb/offload) and checks that the
    resulting program contains the recompute artifacts ('subprog'
    outputs) and the expected optimizer ops.

    NOTE: the original file defined ``test_recompute_optimizer_backward``
    three times with identical bodies; Python keeps only the last
    definition, so the earlier two were dead code. The duplicates have
    been removed — one definition remains, behaviour is unchanged.
    """

    def test_recompute_optimizer_backward(self):
        """ test recompute optimizer backward """
        train_prog, startup_prog = fluid.Program(), fluid.Program()
        avg_cost, strategy = self.net(train_prog, startup_prog)
        self.set_strategy(strategy, 'recompute')
        opt = fluid.optimizer.MomentumOptimizer(
            learning_rate=0.001, momentum=0.9)
        opt = RecomputeOptimizer(opt)
        opt.user_defined_strategy = strategy
        params_grads = opt.backward(avg_cost, startup_prog)

        # Recompute rewrites forward outputs into checkpointed sub-programs.
        outs = [
            op.output('Out')[0] for op in avg_cost.block.ops if op.type == 'mul'
        ]
        self.assertIn('subprog', ''.join(outs))

    def test_recompute_optimizer_backward_gradients(self):
        """ test recompute optimizer backward + gradients """
        train_prog, startup_prog = fluid.Program(), fluid.Program()
        avg_cost, strategy = self.net(train_prog, startup_prog)
        self.set_strategy(strategy, 'recompute')
        opt = fluid.optimizer.MomentumOptimizer(
            learning_rate=0.001, momentum=0.9)
        opt = RecomputeOptimizer(opt)
        opt.user_defined_strategy = strategy
        params_grads = opt.backward(avg_cost, startup_prog)
        with fluid.program_guard(train_prog, startup_prog):
            opt.apply_gradients(params_grads)

        outs = [
            op.output('Out')[0] for op in avg_cost.block.ops if op.type == 'mul'
        ]
        self.assertIn('subprog', ''.join(outs))

    def test_recompute_optimizer_backward_optimize(self):
        """ test recompute optimizer backward + optimize """
        train_prog, startup_prog = fluid.Program(), fluid.Program()
        avg_cost, strategy = self.net(train_prog, startup_prog)
        self.set_strategy(strategy, 'recompute')
        opt = fluid.optimizer.MomentumOptimizer(
            learning_rate=0.001, momentum=0.9)
        opt = RecomputeOptimizer(opt)
        opt.user_defined_strategy = strategy
        params_grads = opt.backward(avg_cost, startup_prog)
        opt.apply_optimize(avg_cost, startup_prog, params_grads)

        outs = [
            op.output('Out')[0] for op in avg_cost.block.ops if op.type == 'mul'
        ]
        self.assertIn('subprog', ''.join(outs))

    def test_recompute_optimizer(self):
        """ test the full minimize() path with recompute enabled """
        train_prog, startup_prog = fluid.Program(), fluid.Program()
        avg_cost, strategy = self.net(train_prog, startup_prog)
        self.set_strategy(strategy, 'recompute')
        self.optimizer(avg_cost, strategy, train_prog, startup_prog)

        outs = [
            op.output('Out')[0] for op in avg_cost.block.ops if op.type == 'mul'
        ]
        self.assertIn('subprog', ''.join(outs))

    def test_recompute_lars_optimizer(self):
        """ recompute combined with the LARS strategy """
        train_prog, startup_prog = fluid.Program(), fluid.Program()
        avg_cost, strategy = self.net(train_prog, startup_prog)
        self.set_strategy(strategy, 'recompute')
        self.set_strategy(strategy, 'lars')
        self.optimizer(avg_cost, strategy, train_prog, startup_prog)

        ops = [op.type for op in avg_cost.block.ops]
        outs = [
            op.output('Out')[0] for op in avg_cost.block.ops if op.type == 'mul'
        ]
        self.assertIn('subprog', ''.join(outs))
        self.assertIn('lars_momentum', ops)

    def test_recompute_lamb_optimizer(self):
        """ recompute combined with the LAMB strategy (adam base opt) """
        train_prog, startup_prog = fluid.Program(), fluid.Program()
        avg_cost, strategy = self.net(train_prog, startup_prog)
        self.set_strategy(strategy, 'recompute')
        self.set_strategy(strategy, 'lamb')
        self.optimizer(avg_cost, strategy, train_prog, startup_prog, 'adam')

        ops = [op.type for op in avg_cost.block.ops]
        outs = [
            op.output('Out')[0] for op in avg_cost.block.ops if op.type == 'mul'
        ]
        self.assertIn('subprog', ''.join(outs))
        self.assertIn('lamb', ops)

    def test_recompute_offload(self):
        """ recompute with checkpoint offloading to pinned memory """
        train_prog, startup_prog = fluid.Program(), fluid.Program()
        avg_cost, strategy = self.net(train_prog, startup_prog)
        self.set_strategy(strategy, 'recompute-offload')
        self.optimizer(avg_cost, strategy, train_prog, startup_prog)

        ops = [op.type for op in avg_cost.block.ops]
        outs = [
            op.output('Out')[0] for op in avg_cost.block.ops
            if op.type == 'memcpy'
        ]
        # Offloading inserts memcpy ops moving checkpoints to pinned memory.
        self.assertIn('memcpy', ops)
        self.assertIn('@Pinned', ''.join(outs))
        self.assertIn('@Fetch', ''.join(outs))
# Allow running this test module directly.
if __name__ == "__main__":
    unittest.main()
| 39.283237 | 80 | 0.657004 | [
"Apache-2.0"
] | 0x45f/Paddle | python/paddle/fluid/tests/unittests/test_fleet_recompute_meta_optimizer.py | 6,796 | Python |
__author__ = 'noam'
| 10 | 19 | 0.7 | [
"BSD-3-Clause"
] | noamkatzir/palm-hand-reading | tests/__init__.py | 20 | Python |
# Read two whitespace-separated tokens from stdin; print the first token,
# then the second token repeated once per character of its length — except
# that a 5-character second token is repeated only 4 times (rule taken
# from the Kattis "Noble Houses of Apaxia" task — confirm against it).
name = input().split()
print(name[0], name[1]*(len(name[1]) if len(name[1])!=5 else 4))
"BSD-3-Clause"
] | nithinmanne/kattis | The Noble Houses of Apaxia/noble.py | 88 | Python |
import re
import pandas as pd
import requests
from lxml import html as lhtml
from fake_useragent import UserAgent
import logging
# Column indexes into each entry of WebParse.websource.
WS_TO_STR = 0   # human-readable metric label
WS_SRC = 1      # source site key ("ycharts", "cml", ... or "NA" for derived)
WS_PATH = 2     # site-specific URL path fragment
WS_CACHE = 3    # 1 = the fetched page may be cached and reused
class WebParse:
    """Scrapes per-stock financial metrics (market cap, revenues, income,
    price/sales ratios) from web sources, caching fetched pages and
    previously parsed values.

    NOTE(review): websource/cached_web/pdata are *class* attributes, so
    the mutable dicts are shared across all instances — confirm that a
    single WebParse instance is used at a time.
    """
    websource = {
        #             Readable       Source       unique path      caching
        "mkt_cap"   : ['Mkt Cap'    , "ycharts"  , "market_cap", 0],
        "inc_qtr"   : ['Inc Qtr'    , "ycharts"  , "net_income", 1],
        "inc_ttm"   : ['Inc TTM'    , "ycharts"  , "net_income_ttm", 1],
        "rev_qtr"   : ['Rev Qtr'    , "ycharts"  , "revenues", 1],
        "rev_ttm"   : ['Rev TTM'    , "ycharts"  , "revenues_ttm", 1],
        "p_rev_ttm" : ['Prv Rev TTM', "ycharts"  , "revenues_ttm", 1],
        "rev_fy"    : ['Rev FY'     , "cml"      , "analysts", 1],
        "ref_1fy"   : ['Rev 1FY'    , "cml"      , "analysts", 1],
        "ref_2fy"   : ['Rev 2FY'    , "cml"      , "analysts", 1],
        # All PS depends on MktCap and Rev
        "ps_fy"     : ['PS FY'      , "NA"],
        "ps_1fy"    : ['PS 1FY'     , "NA"],
        "ps_2fy"    : ['PS 2FY'     , "NA"],
        "ps_ttm"    : ['PS TTM'     , "NA"],
        "ps_nxt"    : ['PS Nxt'     , "NA"],
        # upside and growth are just ratios between 2 numbers in different times
        "upside"    : ['Upside'     , "NA"],
        "rev_grow"  : ['Rev Grow'   , "NA"],
        "inc_grow"  : ['Inc Grow'   , "NA"],
        'revgw_fy'  : ['RevGw FY'   , 'NA'],
        'revgw_1fy' : ['RevGw 1FY'  , 'NA'],
        'revgw_2fy' : ['RevGw_2FY'  , 'NA'],
    }
    # cache the entire http response
    cached_web = {}
    # handle to portfolio extracted data
    pdata = {}
    # state to specify whether the latest date is the same
    # if so, skip the parses
    skip_metric_parse = 0
    # fy_idx is for indexing the fiscal year calculation for revenue
    fy_idx = 0
    # logger
    def __init__(self):
        self.logger = logging.getLogger('root.' + __name__)
    def clear_webcache(self):
        # Drop all cached HTTP responses; next get_xml refetches.
        self.cached_web = {}
def val_toB(self, istr):
# return value in billion
if istr == 'NA':
val = -1
elif istr[-1] == 'B':
val = float(istr[0:-1].replace(',', ''))
elif istr[-1] == 'T':
val = float(istr[0:-1].replace(',', ''))*1000.0
else: # observed value is in Mill
val = float(istr[0:-1].replace(',', ''))/1000.0
return val
def val_toM(self, istr):
if istr == 'NA':
val = -1
elif istr[-1] == 'B':
val = float(istr[0:-1].replace(',', ''))*1000.0
else:
val = float(istr[0:-1].replace(',', ''))
return val
    # Return the parsed lxml tree for kwargs['url'], considering caching.
    # If caching is enabled for the metric and the (stock, path) entry is
    # present, no need to query the website again.
    def get_xml(self, **kwargs):
        s = kwargs['stock']
        m = kwargs['metric']
        u = kwargs['url']
        # Cache key: metrics sharing a path (e.g. both TTM revenue
        # metrics) reuse the same fetched page.
        key = (s,self.websource[m][WS_PATH])
        # check for caching enable
        if self.websource[m][WS_CACHE]:
            if key in self.cached_web.keys():
                self.logger.debug('get cached url = %s' % u)
                return self.cached_web[key]
        # here, either caching is not enabled, or cache entry is not present
        self.logger.debug('get url = %s' % u)
        # Randomized User-Agent to avoid trivial bot blocking.
        ua = UserAgent()
        hdr = {"User-Agent": ua.random}
        req = requests.get(u, headers=hdr)
        root = lhtml.fromstring(req.content)
        # cache if enabled
        if self.websource[m][WS_CACHE]:
            self.cached_web[key] = root
        return root
    def check_skip_metric(self, **kwargs):
        """Return (1, cached_value) when metric parsing is being skipped
        for this refresh cycle, else (0, 0)."""
        s = kwargs['stock']
        m = kwargs['metric']
        if self.skip_metric_parse:
            self.logger.debug('{0} - {1} - skipped'.format(s, m))
            return 1, self.pdata[s][self.websource[m][WS_TO_STR]]
        else:
            return 0, 0
    def check_gph_skip_metric(self, **kwargs):
        """Graph variant of check_skip_metric: returns
        (1, cached_dates, cached_values) when skipping, else (0, 0, 0)."""
        s = kwargs['stock']
        m = kwargs['metric']
        if self.skip_metric_parse:
            self.logger.debug('{0} - {1} - skipped'.format(s, m))
            return 1, self.pdata[s][self.websource[m][WS_TO_STR] + ' date'], \
                   self.pdata[s][self.websource[m][WS_TO_STR]]
        else:
            return 0, 0, 0
    def parse_ycharts_pgNameVal(self, **kwargs):
        """Parse a ycharts 'page-name-date' span of the form
        '<value> for <date>'; return the value in billions, or -1 when
        the span is missing or not unique.

        Side effect: if the parsed date equals the stock's stored
        'latest' date, sets skip_metric_parse so subsequent metrics for
        this cycle reuse cached values; otherwise (for non-mkt_cap
        metrics) resets the stock's cached data.
        """
        root = self.get_xml(**kwargs)
        res = root.xpath("//span[@class='page-name-date']")
        stk = kwargs['stock']
        metric = kwargs['metric']
        if len(res) != 1:
            self.logger.error("ERROR: stock %s, %s list not unique, or not available" %
                              (kwargs['stock'], kwargs['metric']))
            return -1
        res = res[0].text
        [val, date] = res.split(" for ")
        val = self.val_toB(val)
        try:
            if date == self.pdata[stk]['latest']:
                self.skip_metric_parse = 1
                self.logger.debug('%s latest data matches (%s).. skipping ycharts metric parse' % (stk, date))
            # if date is not the same and this is not market cap, that means this is new data..
            # empty out the stocks data (keeping only Mkt Cap)
            elif metric != 'mkt_cap':
                self.pdata[stk] = {'Mkt Cap' : self.pdata[stk]['Mkt Cap'], 'latest' : ''}
        except KeyError:
            # First time seeing this stock: no cached entry yet.
            pass
        return val
    def parse_mkt_cap(self, **kwargs):
        """Parse market cap (billions). Always parsed first: resets the
        skip flag and fiscal-year index for a new refresh cycle."""
        self.skip_metric_parse = 0
        self.fy_idx = 0
        retval = self.parse_ycharts_pgNameVal(**kwargs)
        return float("{0:.3f}".format(retval))
    def parse_rev_ttm(self, **kwargs):
        """Parse trailing-twelve-month revenue (billions), honouring the
        per-cycle skip flag."""
        skip, retval = self.check_skip_metric(**kwargs)
        if skip:
            return retval
        retval = self.parse_ycharts_pgNameVal(**kwargs)
        return float("{0:.3f}".format(retval))
'''
def parse_inc_qtr(self, **kwargs):
if self.skip_metric_parse:
return self.pdata[kwargs['stock']][kwargs['metric']]
retval = self.parse_ycharts_pgNameVal(**kwargs)
return float("{0:.3f}".format(retval))
def parse_inc_ttm(self, **kwargs):
if self.skip_metric_parse:
return self.pdata[kwargs['stock']][kwargs['metric']]
retval = self.parse_ycharts_pgNameVal(**kwargs)
return float("{0:.3f}".format(retval))
'''
    def parse_p_rev_ttm(self, **kwargs):
        """Parse the year-ago TTM revenue (billions) from the page's data
        table; -1 when the table row is absent."""
        root = self.get_xml(**kwargs)
        td = root.xpath("//td")
        # prev ttm is located at TD[8] and TD[9]
        # [0][1] is for current quarter
        # [2][3] is for prev quarter
        # [8][9] is for a year ago
        try:
            retval = td[9].text.strip()
            # return value in billion
            retval = self.val_toB(retval)
        except IndexError:
            retval = -1
        return float("{0:.4f}".format(retval))
    def parse_rev_nxt_zacks(self, root):
        """Extract next-year revenue (billions) from a zacks detailed
        earnings-estimates table, reading the 'Zacks Consensus Estimate'
        row of the 'Next Year' column."""
        tb = root.xpath("//section[@id='detailed_earnings_estimates']")[0]
        # Header cells like 'Next Year (12/2020)' -> keep only the label.
        hdr = [th.text_content().split('(')[0].strip() for th in tb.xpath('.//th')]
        row = [[td.text_content() for td in tr.xpath('.//td')] for tr in tb.xpath('.//tbody/tr')]
        # create indexes and proper row: first column is the row label.
        hdr = hdr[1:]
        idx = [r[0] for r in row]
        row = [r[1:] for r in row]
        df = pd.DataFrame(data = row, columns = hdr, index = idx)
        val = df['Next Year']['Zacks Consensus Estimate']
        retval = self.val_toB(val)
        return float("{0:.3f}".format(retval))
def parse_rev_nxt(self, **kwargs):
skip, retval = self.check_skip_metric(**kwargs)
if skip:
return retval
root = self.get_xml(**kwargs)
if self.websource[kwargs['metric']][WS_SRC] == 'yahoo':
retval = self.parse_rev_nxt_yahoo(root)
elif self.websource[kwargs['metric']][WS_SRC] == 'zacks':
retval =self.parse_rev_nxt_zacks(root)
return float("{0:.3f}".format(retval))
'''
parsing from CML
'''
def parse_rev_fy(self, **kwargs):
root = self.get_xml(**kwargs)
# current FY = 7, next = 8, onward
xpath = "//table[@class='responsive']/tbody/tr[{}]/td[@class='mean']".format(self.fy_idx + 7)
res = root.xpath(xpath)[0].text
# returned value is in millions
return self.val_toB(res)
'''
# parsing that requires ratio
# ps = market_cap / rev_ttm
# ps_nxt = market_cap / rev_nxt
# rev_growth = rev_ttm / p_rev_ttm
# upside = rev_nxt / rev_ttm
'''
# helper function to get ratio
def get_two_metrics(self, stk, a, b):
if stk not in self.pdata.keys():
aval = self.parse(stk, a)
bval = self.parse(stk, b)
else:
try:
aval = self.pdata[stk][self.websource[a][WS_TO_STR]]
except KeyError:
aval = self.parse(stk, a)
try:
bval = self.pdata[stk][self.websource[b][WS_TO_STR]]
except KeyError:
bval = self.parse(stk, b)
return aval, bval
# PS TTM is basically mkt_cap/rev_ttm
# if the required data is not present, parse them first
def parse_ps_ttm(self, **kwargs):
skip, retval = self.check_skip_metric(**kwargs)
if skip:
return retval
mkt_cap, rev_ttm = self.get_two_metrics(kwargs['stock'], 'mkt_cap', 'rev_ttm')
retval = mkt_cap / rev_ttm
return float("{0:.3f}".format(retval))
# this is basically market_cap/rev_nxt
def parse_ps_nxt(self, **kwargs):
skip, retval = self.check_skip_metric(**kwargs)
if skip:
return retval
mkt_cap, rev_nxt = self.get_two_metrics(kwargs['stock'], 'mkt_cap', 'rev_nxt')
retval = mkt_cap / rev_nxt
return float("{0:.3f}".format(retval))
# rev growth need the rev_ttm and prev year's rev_ttm
def parse_rev_grow(self, **kwargs):
skip, retval = self.check_skip_metric(**kwargs)
if skip:
return retval
crev_ttm, prev_ttm = self.get_two_metrics(kwargs['stock'], 'rev_ttm', 'p_rev_ttm')
retval = crev_ttm * 100.0 / prev_ttm - 100
return "{0:.0f}%".format(retval)
# upside = rev_nxt / rev_ttm
def parse_upside(self, **kwargs):
skip, retval = self.check_skip_metric(**kwargs)
if skip:
return retval
rev_nxt, rev_ttm = self.get_two_metrics(kwargs['stock'], 'rev_nxt', 'rev_ttm')
retval = rev_nxt * 100.0 / rev_ttm - 100
return "{0:.0f}%".format(retval)
'''
Parse PS that depends on CML website
'''
# ps_fy = market_cap / rev_fy
# rev_fy is not part of the JSON valuation, so we'll always parse it again (from cached web)
def parse_ps_fy(self, **kwargs):
mkt_cap, rev_fy = self.get_two_metrics(kwargs['stock'], 'mkt_cap', 'rev_fy')
retval = mkt_cap / rev_fy
return float("{0:.2f}".format(retval))
def parse_ps_1fy(self, **kwargs):
self.fy_idx = 1
return self.parse_ps_fy(**kwargs)
def parse_ps_2fy(self, **kwargs):
self.fy_idx = 2
return self.parse_ps_fy(**kwargs)
def parse_revgw_fy(self, **kwargs):
curr, nxt = self.get_two_metrics(kwargs['stock'], 'ps_ttm', 'ps_fy')
return '{0:.0f}%'.format((curr-nxt)*100.0 / nxt)
def parse_revgw_1fy(self, **kwargs):
curr, nxt = self.get_two_metrics(kwargs['stock'], 'ps_fy', 'ps_1fy')
return '{0:.0f}%'.format((curr-nxt)*100.0 / nxt)
def parse_revgw_2fy(self, **kwargs):
curr, nxt = self.get_two_metrics(kwargs['stock'], 'ps_1fy', 'ps_2fy')
return '{0:.0f}%'.format((curr-nxt)*100.0 / nxt)
def parse_ycharts_td(self, **kwargs):
"""
Parse ycharts.com, indexing into the 'dataTableBox' id.
Each <tr> will have a pair of <td>: date and value.
Data from ycharts.com is most recent first, so new entry is prepended to the list
to create chronological order.
list[0] = oldest data
list[-1] = newest data
:param kwargs: Passed on to get_xml (contains stock, metric, url)
:return: date: list of dates (string)
:return: val: list of values converted to million
"""
root = self.get_xml(**kwargs)
td = root.xpath("//table[@class='table']")[0].xpath('.//td')
tdlen = len(td)
date, val = [], []
for i in range(0, tdlen, 2):
# if content is 0, skip
if td[i].text_content() == '': continue
if td[i+1].text_content().strip() == '': continue
date = [td[i].text_content()] + date
val = [self.val_toM(td[i+1].text_content().strip())] + val
return date, val
def parse_gph_inc_qtr(self, **kwargs):
skip, date_ls, val_ls = self.check_gph_skip_metric(**kwargs)
if skip:
return date_ls, val_ls
date, val = self.parse_ycharts_td(**kwargs)
return date, val
def parse_gph_inc_ttm(self, **kwargs):
skip, date_ls, val_ls = self.check_gph_skip_metric(**kwargs)
if skip:
return date_ls, val_ls
date, val = self.parse_ycharts_td(**kwargs)
return date, val
def parse_gph_rev_qtr(self, **kwargs):
skip, date_ls, val_ls = self.check_gph_skip_metric(**kwargs)
if skip:
return date_ls, val_ls
date, val = self.parse_ycharts_td(**kwargs)
return date, val
def parse_gph_rev_ttm(self, **kwargs):
skip, date_ls, val_ls = self.check_gph_skip_metric(**kwargs)
if skip:
return date_ls, val_ls
date, val = self.parse_ycharts_td(**kwargs)
return date, val
def parse_gph_metric(self, stk, m):
"""
Parse graph metric
:param stk:
:param m:
:return:
"""
if stk not in self.pdata.keys():
date, val = self.parse(stk, m, fn_type="graph")
else:
try:
date = self.pdata[stk][self.websource[m][WS_TO_STR] + ' date']
val = self.pdata[stk][self.websource[m][WS_TO_STR]]
except KeyError:
date, val = self.parse(stk, m, fn_type='graph')
return date, val
def parse_gph_grow(self, **kwargs):
metric = re.sub("grow", "ttm", kwargs['metric']).lower()
date, val = self.parse_gph_metric(kwargs['stock'], metric)
# can't compute YoY growth if only 4 quarters or less
if len(val) <= 4:
return [], []
retval = [float("{0:.2f}".format(val[i] * 100.0 / val[i-4] - 100)) for i in range(4, len(val))]
retdate = date[4:]
return retdate, retval
    def parse_gph_inc_grow(self, **kwargs):
        # Income growth graphing is not implemented: always returns empty
        # date/value series so callers can plot nothing without special cases.
        return [], []
    def parse_gph_rev_grow(self, **kwargs):
        # Revenue growth graph: delegate to the generic growth parser.
        return self.parse_gph_grow(**kwargs)
'''
parser main entry point and helper functions
'''
# pre_parse takes in the metric and give the correct URL to go to
# input : stock, metric
# output : stock, modified metric, proper URL
def pre_parse(self, stock, metric):
wp_metric = re.sub(" ", "_", metric).lower()
try:
mainurl = self.websource[wp_metric][WS_SRC]
if mainurl == 'ycharts':
url = "https://ycharts.com/companies/{}/{}".format(
stock, self.websource[wp_metric][WS_PATH])
elif mainurl == "yahoo":
url = "https://www.finance.yahoo.com/quote/{}/{}".format(
stock, self.websource[wp_metric][WS_PATH])
elif mainurl == "zacks":
url = "https://zacks.com/stock/quote/{}/{}".format(
stock, self.websource[wp_metric][WS_PATH])
elif mainurl == 'cml':
url = 'https://www.cmlviz.com/inc/{1}.php?ticker={0}'.format(
stock, self.websource[wp_metric][WS_PATH])
elif mainurl == 'NA':
url = "NA"
else:
url = None
except KeyError:
url = None
return stock, wp_metric, url
def parse(self, stock, metric, **kwargs):
stock, metric, url = self.pre_parse(stock, metric)
if url == None:
msg = """
ERROR: url returned None from pre_parse
stock: %s; metric: %s
""" % (stock, metric)
print(msg)
return -1
try:
if kwargs['fn_type'] == 'graph':
fn_prefix = "parse_gph_"
else:
raise KeyError
except KeyError:
fn_prefix = "parse_"
try:
func = getattr(self, fn_prefix + metric)
except AttributeError:
print("ERROR: no function: %s" % (fn_prefix + metric))
return -1
return func(stock=stock, metric=metric, url=url)
| 34.945525 | 111 | 0.512137 | [
"MIT"
] | neilc7/ValDashboard | src/webparse.py | 17,962 | Python |
def identity(x):
    """Return the argument unchanged."""
    return x
def always_false(x):
    """Predicate that ignores its argument and always returns False."""
    return False
def always_true(x):
    """Predicate that ignores its argument and always returns True."""
    return True
def add(x, y):
    """Return x + y (works for any operands supporting the + operator)."""
    total = x + y
    return total
| 9.466667 | 20 | 0.612676 | [
"MIT"
] | apragacz/functoolsplus | tests/functions.py | 142 | Python |
from typing import Optional
from pyspark.sql import Column, DataFrame
from pyspark.sql.functions import from_unixtime, to_timestamp
from spark_auto_mapper.data_types.data_type_base import AutoMapperDataTypeBase
from spark_auto_mapper.helpers.value_parser import AutoMapperValueParser
from spark_auto_mapper.type_definitions.defined_types import AutoMapperNumberInputType
class AutoMapperUnixTimestampType(AutoMapperDataTypeBase):
    """Auto-mapper data type that converts a unix-epoch value into a Spark
    timestamp column."""

    def __init__(self, value: AutoMapperNumberInputType) -> None:
        """
        Converts the value to a timestamp type in Spark
        :param value: unix timestamp value, or an AutoMapper expression that
            yields one
        """
        super().__init__()
        # Wrap raw literals so every value uniformly exposes get_column_spec().
        self.value: AutoMapperDataTypeBase = (
            value
            if isinstance(value, AutoMapperDataTypeBase)
            else AutoMapperValueParser.parse_value(value)
        )

    def get_column_spec(
        self, source_df: Optional[DataFrame], current_column: Optional[Column]
    ) -> Column:
        """Return the Spark Column expression for this value.

        :param source_df: source DataFrame; may be None
        :param current_column: current column context passed through to the
            wrapped value's get_column_spec
        :return: Column converting the unix timestamp into a timestamp type
        """
        # Convert from unix timestamp
        column_spec: Column = to_timestamp(
            from_unixtime(
                self.value.get_column_spec(
                    source_df=source_df, current_column=current_column
                ),
                format="yyyy-MM-dd HH:mm:ss",
            ),
            format="yyyy-MM-dd HH:mm:ss",
        )
        if source_df is not None:
            return column_spec
        else:
            # NOTE(review): when no source_df is given, the raw (unconverted)
            # column spec is returned instead of the timestamp conversion
            # built above — confirm this asymmetry is intended.
            column_spec = self.value.get_column_spec(
                source_df=source_df, current_column=current_column
            )
            return column_spec
| 33.918367 | 109 | 0.661252 | [
"Apache-2.0"
] | icanbwell/SparkAutoMapper | spark_auto_mapper/data_types/unix_timestamp.py | 1,662 | Python |
import numpy as np
import pickle
def unique(seq):
    """Return a list of *seq*'s elements with duplicates removed,
    preserving first-seen order."""
    seen = set()
    ordered = []
    for item in seq:
        if item not in seen:
            seen.add(item)
            ordered.append(item)
    return ordered
def sigmoid(x):
    """Elementwise logistic function: 1 / (1 + e^-x)."""
    return 1.0 / (np.exp(-x) + 1)
def softmax(x, temperature=1.0):
    """Temperature-scaled softmax over all elements of *x*."""
    scaled = np.exp(x / temperature)
    return scaled / scaled.sum()
class TextRNN(object):
def __init__(self, hiddenLayers=300, sequenceLength=100):
# Hidden Layers
self.hiddenLayers = hiddenLayers
# Learning Rate
self.learningRate = 2e-3
# Hidden State
self.h = {}
# Internal cursor
self.cursor = 0
# Sequence Length
self.sequenceLength = sequenceLength
def train(self, text, ngrams=7, delimiter=" "):
# Setup delimiter
self.delimiter = delimiter
# Split by delimiter
grams = text.split(delimiter) if delimiter != "" else list(text)
# Setup Data by Ngrams
self.data = [delimiter.join(grams[i:i+ngrams]) for i in range(len(grams))[::ngrams]]
# Get Unique Data
self.uniqueData = unique(self.data)
# Get Vocab Maps
self.indexToGram = {i:gram for i, gram in enumerate(self.uniqueData)}
self.gramToIndex = {gram:i for i, gram in enumerate(self.uniqueData)}
# Get vocab size
self.vocabSize = len(self.uniqueData)
# Setup Inputs
inputs = []
outputs = []
inputGrams = [self.gramToIndex[gram] for gram in self.data]
outputGrams = [self.gramToIndex[gram] for gram in self.data[1:]]
for i, inputGram in enumerate(inputGrams[0:-1]):
X = np.zeros((self.vocabSize, 1))
X[inputGram, 0] = 1
y = np.zeros((self.vocabSize, 1))
y[outputGrams[i], 0] = 1
inputs.append(X)
outputs.append(y)
self.inputs = inputs
self.outputs = outputs
# Input Weights
self.WXZ = np.random.randn(self.hiddenLayers, self.vocabSize) * 0.1 # Update Gate
self.WXR = np.random.randn(self.hiddenLayers, self.vocabSize) * 0.1 # Reset Gate
self.WXC = np.random.randn(self.hiddenLayers, self.vocabSize) * 0.1 # Candidate
# Hidden Layer Weights
self.WHZ = np.random.randn(self.hiddenLayers, self.hiddenLayers) * 0.1 # Update Gate
self.WHR = np.random.randn(self.hiddenLayers, self.hiddenLayers) * 0.1 # Reset Gate
self.WHC = np.random.randn(self.hiddenLayers, self.hiddenLayers) * 0.1 # Candidate Gate
# Biases
self.bC = np.zeros((self.hiddenLayers, 1)) # Candidate Gate
self.bR = np.zeros((self.hiddenLayers, 1)) # Reset Gate
self.bZ = np.zeros((self.hiddenLayers, 1)) # Update Gate
self.bY = np.zeros((self.vocabSize, 1)) # Output
# Output Layer Weights
self.WY = np.random.randn(self.vocabSize, self.hiddenLayers) * 0.1
# Cache for Update
self.dXZM = np.zeros_like(self.WXZ)
self.dXRM = np.zeros_like(self.WXR)
self.dXCM = np.zeros_like(self.WXC)
self.dHZM = np.zeros_like(self.WHZ)
self.dHRM = np.zeros_like(self.WHR)
self.dHCM = np.zeros_like(self.WHC)
self.dbZM = np.zeros_like(self.bZ)
self.dbRM = np.zeros_like(self.bR)
self.dbCM = np.zeros_like(self.bC)
self.dYM = np.zeros_like(self.WY)
self.dXZV = np.zeros_like(self.WXZ)
self.dXRV = np.zeros_like(self.WXR)
self.dXCV = np.zeros_like(self.WXC)
self.dHZV = np.zeros_like(self.WHZ)
self.dHRV = np.zeros_like(self.WHR)
self.dHCV = np.zeros_like(self.WHC)
self.dbZV = np.zeros_like(self.bZ)
self.dbRV = np.zeros_like(self.bR)
self.dbCV = np.zeros_like(self.bC)
self.dYV = np.zeros_like(self.WY)
def forward(self, X, hPrev, temperature=1.0):
# Update Gate
zbar = np.dot(self.WXZ, X) + np.dot(self.WHZ, hPrev) + self.bZ
z = sigmoid(zbar)
# Reset Gate
rbar = np.dot(self.WXR, X) + np.dot(self.WHR, hPrev) + self.bR
r = sigmoid(rbar)
# Candidate
cbar = np.dot(self.WXC, X) + np.dot(self.WHC, np.multiply(r, hPrev)) + self.bC
c = np.tanh(cbar)
# Hidden State
h = np.multiply(c, z) + np.multiply(hPrev, 1 - z)
# h = np.multiply(z, hPrev) + np.multiply((1 - z), c)
# Output
o = softmax(np.dot(self.WY, h) + self.bY, temperature)
return z, zbar, r, rbar, c, cbar, h, o
def step(self):
# Hidden State
self.h = {}
self.h[-1] = np.zeros((self.hiddenLayers, 1))
# Update Gates
z = {}
zbars = {}
# Reset Gates
r = {}
rbars = {}
# Candidates
c = {}
cbars = {}
# Inputs
x = {}
# Outputs
o = {}
# Target Indexes
targets = {}
# Timesteps to Unroll
totalLen = len(self.inputs)
if self.cursor + self.sequenceLength > totalLen:
self.cursor = 0
# Total Loss
loss = 0
for i in xrange(self.sequenceLength):
# Get inputs and outputs
X = self.inputs[self.cursor + i]
y = self.outputs[self.cursor + i]
# Move inputs forward through network
z[i], zbars[i], r[i], rbars[i], c[i], cbars[i], self.h[i], o[i] = self.forward(X, self.h[i - 1])
# Calculate loss
target = np.argmax(y)
loss += -np.log(o[i][target, 0])
x[i] = X
targets[i] = target
# Back Propagation
dXZ = np.zeros_like(self.WXZ)
dXR = np.zeros_like(self.WXR)
dXC = np.zeros_like(self.WXC)
dHZ = np.zeros_like(self.WHZ)
dHR = np.zeros_like(self.WHR)
dHC = np.zeros_like(self.WHC)
dbZ = np.zeros_like(self.bZ)
dbR = np.zeros_like(self.bR)
dbC = np.zeros_like(self.bC)
dbY = np.zeros_like(self.bY)
dY = np.zeros_like(self.WY)
dhnext = np.zeros_like(self.h[0])
dzbarnext = np.zeros_like(zbars[0])
drbarnext = np.zeros_like(rbars[0])
dcbarnext = np.zeros_like(cbars[0])
z[self.sequenceLength] = np.zeros_like(z[0])
r[self.sequenceLength] = np.zeros_like(r[0])
for i in reversed(xrange(self.sequenceLength)):
# Back Propagate Through Y
dSY = np.copy(o[i])
dSY[targets[i]] -= 1
dY += np.dot(dSY, self.h[i].T)
dbY += dSY
# Back Propagate Through H and X
dha = np.multiply(dhnext, 1 - z[i + 1]) # Through Update Gate
dhb = np.dot(self.WHR.T, drbarnext) # Weights into rbar
dhc = np.dot(self.WHZ.T, dzbarnext) # Weights into zbar
dhd = np.multiply(r[i + 1], np.dot(self.WHC.T, dcbarnext)) # Weights into cbar
dhe = np.dot(self.WY.T, dSY) # Weights at output
dh = dha + dhb + dhc + dhd + dhe
dcbar = np.multiply(np.multiply(dh, z[i]) , 1 - np.square(c[i]))
drbar = np.multiply(np.multiply(self.h[i - 1], np.dot(self.WHC.T, dcbar)), np.multiply(r[i] , (1 - r[i])))
dzbar = np.multiply(np.multiply(dh, (c[i] - self.h[i - 1])), np.multiply(z[i], (1 - z[i])))
dXZ += np.dot(dzbar, x[i].T)
dXR += np.dot(drbar, x[i].T)
dXC += np.dot(dcbar, x[i].T)
dHZ += np.dot(dzbar, self.h[i - 1].T)
dHR += np.dot(drbar, self.h[i - 1].T)
dHC += np.dot(dcbar, np.multiply(r[i], self.h[i - 1]).T)
dbZ += dzbar
dbR += drbar
dbC += dcbar
dhnext = dh
drbarnext = drbar
dzbarnext = dzbar
dcbarnext = dcbar
# Parameter Update (Adam)
for param, delta, m, v in zip([self.WXZ, self.WXR, self.WXC, self.WHZ, self.WHR, self.WHC, self.WY, self.bZ, self.bR, self.bC],
[dXZ, dXR, dXC, dHZ, dHR, dHC, dY, dbZ, dbR, dbC],
[self.dXZM, self.dXRM, self.dXCM, self.dHZM, self.dHRM, self.dHCM, self.dYM, self.dbZM, self.dbRM, self.dbCM],
[self.dXZV, self.dXRV, self.dXCV, self.dHZV, self.dHRV, self.dHCV, self.dYV, self.dbZV, self.dbRV, self.dbCV]):
m = 0.9 * m + 0.1 * delta
v = 0.99 * v + 0.01 * (delta ** 2)
param += -self.learningRate * m / (np.sqrt(v) + 1e-8)
# Update cursor
self.cursor += self.sequenceLength
return loss
def sample(self, num=100, temperature=1.0, start=False):
# Output
output = ""
# Sample hidden state
h = {}
h[-1] = np.zeros((self.hiddenLayers, 1))
# Sample Update Gate
z = {}
zbar = {}
# Sample Reset Gate
r = {}
rbar = {}
# Sample Candidate Gate
c = {}
cbar = {}
# Make inputs from seed
if start == False:
lastCursor = self.cursor - self.sequenceLength
seedIdx = lastCursor if lastCursor >= 0 else 0
seed = self.data[seedIdx]
else:
seedIdx = self.gramToIndex[start]
seed = start
X = np.zeros((self.vocabSize, 1))
X[self.gramToIndex[seed], 0] = 1
# Add seed to output
output += seed
# Generate sample
for i in xrange(num - 1):
# Move through network
z[i], zbar[i], r[i], rbar[i], c[i], cbar[i], h[i], prediction = self.forward(X, h[i - 1], temperature)
# Pick ngram using probabilities
idx = np.random.choice(range(self.vocabSize), p=prediction.ravel())
# Add to output
output += self.delimiter + self.indexToGram[idx]
# Update input to feed back in
X = np.zeros((self.vocabSize, 1))
X[idx, 0] = 1
return output
def run(self, iterations=1000, size=100, temperatures=[1.0], sampleFile=False, printSample=5, seed=False):
if sampleFile != False:
sampleFile = open(sampleFile, 'w')
for i in xrange(iterations):
loss = bot.step()
if i % printSample == 0:
for temperature in temperatures:
print '======= Temperature: ' + str(temperature) + ' ======='
sample = bot.sample(size, temperature, seed)
print sample
if(sampleFile != False):
sampleFile.write(sample + '\n\n\n')
print '\n'
print '======= Iteration ' + str(i + 1) + ' ======='
print '======= Samples Seen: ' + str(self.cursor) + ' ======='
print '======= Loss: ' + str(loss) + ' ======='
if sampleFile != False:
sampleFile.close()
def save(self, small=True):
savedObj = {item:value for item, value in self.__dict__.iteritems()}
if small == True:
for param in ["data", "uniqueData", "indexToGram", "gramToIndex", "inputs", "outputs"]:
del savedObj[param]
pickle.dump(savedObj, open("TEXT_RNN_DUMP3", "w+"))
def load(self, dump):
newSelf = pickle.load(dump)
for item, value in newSelf.iteritems():
setattr(self, item, value)
data = open('data.txt').read().lower()
bot = TextRNN()
bot.train(data, 1, '')
bot.run()
bot.save(True)
| 31.807163 | 150 | 0.529188 | [
"MIT"
] | KingPixil/gram-rnn | gru.py | 11,546 | Python |
#
# PySNMP MIB module HUAWEI-RSVPTE-MIB (http://snmplabs.com/pysmi)
# ASN.1 source file:///Users/davwang4/Dev/mibs.snmplabs.com/asn1/HUAWEI-RSVPTE-MIB
# Produced by pysmi-0.3.4 at Mon Apr 29 19:36:34 2019
# On host DAVWANG4-M-1475 platform Darwin version 18.5.0 by user davwang4
# Using Python version 3.7.3 (default, Mar 27 2019, 09:23:15)
#
ObjectIdentifier, OctetString, Integer = mibBuilder.importSymbols("ASN1", "ObjectIdentifier", "OctetString", "Integer")
NamedValues, = mibBuilder.importSymbols("ASN1-ENUMERATION", "NamedValues")
ValueRangeConstraint, ConstraintsIntersection, SingleValueConstraint, ValueSizeConstraint, ConstraintsUnion = mibBuilder.importSymbols("ASN1-REFINEMENT", "ValueRangeConstraint", "ConstraintsIntersection", "SingleValueConstraint", "ValueSizeConstraint", "ConstraintsUnion")
hwDatacomm, = mibBuilder.importSymbols("HUAWEI-MIB", "hwDatacomm")
ifIndex, = mibBuilder.importSymbols("IF-MIB", "ifIndex")
BitRate, MessageSize, QosService, BurstSize, SessionType = mibBuilder.importSymbols("INTEGRATED-SERVICES-MIB", "BitRate", "MessageSize", "QosService", "BurstSize", "SessionType")
ModuleCompliance, ObjectGroup, NotificationGroup = mibBuilder.importSymbols("SNMPv2-CONF", "ModuleCompliance", "ObjectGroup", "NotificationGroup")
Integer32, Unsigned32, MibIdentifier, Counter64, Counter32, TimeTicks, IpAddress, ModuleIdentity, iso, Gauge32, MibScalar, MibTable, MibTableRow, MibTableColumn, ObjectIdentity, NotificationType, Bits = mibBuilder.importSymbols("SNMPv2-SMI", "Integer32", "Unsigned32", "MibIdentifier", "Counter64", "Counter32", "TimeTicks", "IpAddress", "ModuleIdentity", "iso", "Gauge32", "MibScalar", "MibTable", "MibTableRow", "MibTableColumn", "ObjectIdentity", "NotificationType", "Bits")
DisplayString, TruthValue, TimeStamp, TimeInterval, TextualConvention, RowStatus = mibBuilder.importSymbols("SNMPv2-TC", "DisplayString", "TruthValue", "TimeStamp", "TimeInterval", "TextualConvention", "RowStatus")
# Module identity for HUAWEI-RSVPTE-MIB, rooted under hwDatacomm
# (enterprise OID 1.3.6.1.4.1.2011.5.25.148). All tables below hang off
# hwRsvpTeObjects (.1). Generated by pysmi — do not hand-edit OID values.
hwRsvpTe = ModuleIdentity((1, 3, 6, 1, 4, 1, 2011, 5, 25, 148))
hwRsvpTe.setRevisions(('2014-10-25 17:36', '2014-06-16 14:55', '2013-08-28 17:55',))
if mibBuilder.loadTexts: hwRsvpTe.setLastUpdated('201410251736Z')
if mibBuilder.loadTexts: hwRsvpTe.setOrganization('Huawei Technologies Co.,Ltd.')
hwRsvpTeObjects = MibIdentifier((1, 3, 6, 1, 4, 1, 2011, 5, 25, 148, 1))
# --- hwRsvpTeSession table (.1.1) ----------------------------------------
# One row per RSVP-TE session, indexed by hwRsvpTeSessionNumber. All data
# columns are read-only. Modeled after the rsvpSessionTable of RFC 2206,
# extended with tunnel-id/ext-id, LSP count and reservation style.
hwRsvpTeSessionTable = MibTable((1, 3, 6, 1, 4, 1, 2011, 5, 25, 148, 1, 1), )
if mibBuilder.loadTexts: hwRsvpTeSessionTable.setStatus('current')
hwRsvpTeSessionEntry = MibTableRow((1, 3, 6, 1, 4, 1, 2011, 5, 25, 148, 1, 1, 1), ).setIndexNames((0, "HUAWEI-RSVPTE-MIB", "hwRsvpTeSessionNumber"))
if mibBuilder.loadTexts: hwRsvpTeSessionEntry.setStatus('current')
# Index column: not-accessible by convention (no setMaxAccess call).
hwRsvpTeSessionNumber = MibTableColumn((1, 3, 6, 1, 4, 1, 2011, 5, 25, 148, 1, 1, 1, 1), Gauge32())
if mibBuilder.loadTexts: hwRsvpTeSessionNumber.setStatus('current')
hwRsvpTeSessionType = MibTableColumn((1, 3, 6, 1, 4, 1, 2011, 5, 25, 148, 1, 1, 1, 2), SessionType()).setMaxAccess("readonly")
if mibBuilder.loadTexts: hwRsvpTeSessionType.setStatus('current')
# Destination address is an OCTET STRING of 0..16 bytes (fits IPv4 or IPv6),
# with an explicit length column following it.
hwRsvpTeSessionDestAddr = MibTableColumn((1, 3, 6, 1, 4, 1, 2011, 5, 25, 148, 1, 1, 1, 3), OctetString().subtype(subtypeSpec=ValueSizeConstraint(0, 16))).setMaxAccess("readonly")
if mibBuilder.loadTexts: hwRsvpTeSessionDestAddr.setStatus('current')
hwRsvpTeSessionDestAddrLength = MibTableColumn((1, 3, 6, 1, 4, 1, 2011, 5, 25, 148, 1, 1, 1, 4), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: hwRsvpTeSessionDestAddrLength.setStatus('current')
hwRsvpTeSessionSenders = MibTableColumn((1, 3, 6, 1, 4, 1, 2011, 5, 25, 148, 1, 1, 1, 5), Gauge32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: hwRsvpTeSessionSenders.setStatus('current')
hwRsvpTeSessionReceivers = MibTableColumn((1, 3, 6, 1, 4, 1, 2011, 5, 25, 148, 1, 1, 1, 6), Gauge32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: hwRsvpTeSessionReceivers.setStatus('current')
hwRsvpTeSessionRequests = MibTableColumn((1, 3, 6, 1, 4, 1, 2011, 5, 25, 148, 1, 1, 1, 7), Gauge32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: hwRsvpTeSessionRequests.setStatus('current')
hwRsvpTeSessionTunnelId = MibTableColumn((1, 3, 6, 1, 4, 1, 2011, 5, 25, 148, 1, 1, 1, 8), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: hwRsvpTeSessionTunnelId.setStatus('current')
hwRsvpTeSessionTunnelExtId = MibTableColumn((1, 3, 6, 1, 4, 1, 2011, 5, 25, 148, 1, 1, 1, 9), IpAddress()).setMaxAccess("readonly")
if mibBuilder.loadTexts: hwRsvpTeSessionTunnelExtId.setStatus('current')
hwRsvpTeSessionLspsNumber = MibTableColumn((1, 3, 6, 1, 4, 1, 2011, 5, 25, 148, 1, 1, 1, 10), Gauge32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: hwRsvpTeSessionLspsNumber.setStatus('current')
# Reservation style: the enum values are the RSVP style bit patterns
# (FF=10, WF=17, SE=18) rather than 1..3.
hwRsvpTeSessionStyle = MibTableColumn((1, 3, 6, 1, 4, 1, 2011, 5, 25, 148, 1, 1, 1, 11), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(10, 17, 18))).clone(namedValues=NamedValues(("ff", 10), ("wf", 17), ("se", 18)))).setMaxAccess("readonly")
if mibBuilder.loadTexts: hwRsvpTeSessionStyle.setStatus('current')
# --- hwRsvpTeSender table (.1.2) ------------------------------------------
# One row per PATH-state sender within a session, indexed by
# (hwRsvpTeSessionNumber, hwRsvpTeSenderNumber). Covers the sender TSpec,
# the received ADSPEC (general / guaranteed / controlled-load parts),
# LABEL_REQUEST, SESSION_ATTRIBUTE, FRR (FAST_REROUTE) and message-id state.
# Modeled after rsvpSenderTable of RFC 2206 with MPLS-TE extensions.
hwRsvpTeSenderTable = MibTable((1, 3, 6, 1, 4, 1, 2011, 5, 25, 148, 1, 2), )
if mibBuilder.loadTexts: hwRsvpTeSenderTable.setStatus('current')
hwRsvpTeSenderEntry = MibTableRow((1, 3, 6, 1, 4, 1, 2011, 5, 25, 148, 1, 2, 1), ).setIndexNames((0, "HUAWEI-RSVPTE-MIB", "hwRsvpTeSessionNumber"), (0, "HUAWEI-RSVPTE-MIB", "hwRsvpTeSenderNumber"))
if mibBuilder.loadTexts: hwRsvpTeSenderEntry.setStatus('current')
# Secondary index column (no max-access set, i.e. not-accessible).
hwRsvpTeSenderNumber = MibTableColumn((1, 3, 6, 1, 4, 1, 2011, 5, 25, 148, 1, 2, 1, 1), Gauge32())
if mibBuilder.loadTexts: hwRsvpTeSenderNumber.setStatus('current')
hwRsvpTeSenderType = MibTableColumn((1, 3, 6, 1, 4, 1, 2011, 5, 25, 148, 1, 2, 1, 2), SessionType()).setMaxAccess("readonly")
if mibBuilder.loadTexts: hwRsvpTeSenderType.setStatus('current')
hwRsvpTeSenderDestAddr = MibTableColumn((1, 3, 6, 1, 4, 1, 2011, 5, 25, 148, 1, 2, 1, 3), OctetString().subtype(subtypeSpec=ValueSizeConstraint(0, 16))).setMaxAccess("readonly")
if mibBuilder.loadTexts: hwRsvpTeSenderDestAddr.setStatus('current')
hwRsvpTeSenderAddr = MibTableColumn((1, 3, 6, 1, 4, 1, 2011, 5, 25, 148, 1, 2, 1, 4), OctetString().subtype(subtypeSpec=ValueSizeConstraint(0, 16))).setMaxAccess("readonly")
if mibBuilder.loadTexts: hwRsvpTeSenderAddr.setStatus('current')
hwRsvpTeSenderDestAddrLength = MibTableColumn((1, 3, 6, 1, 4, 1, 2011, 5, 25, 148, 1, 2, 1, 5), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: hwRsvpTeSenderDestAddrLength.setStatus('current')
hwRsvpTeSenderAddrLength = MibTableColumn((1, 3, 6, 1, 4, 1, 2011, 5, 25, 148, 1, 2, 1, 6), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: hwRsvpTeSenderAddrLength.setStatus('current')
# Previous-hop (RSVP_HOP object) address and logical interface handle.
hwRsvpTeSenderHopAddr = MibTableColumn((1, 3, 6, 1, 4, 1, 2011, 5, 25, 148, 1, 2, 1, 7), OctetString().subtype(subtypeSpec=ValueSizeConstraint(0, 16))).setMaxAccess("readonly")
if mibBuilder.loadTexts: hwRsvpTeSenderHopAddr.setStatus('current')
hwRsvpTeSenderHopLih = MibTableColumn((1, 3, 6, 1, 4, 1, 2011, 5, 25, 148, 1, 2, 1, 8), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: hwRsvpTeSenderHopLih.setStatus('current')
hwRsvpTeSenderInterface = MibTableColumn((1, 3, 6, 1, 4, 1, 2011, 5, 25, 148, 1, 2, 1, 9), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: hwRsvpTeSenderInterface.setStatus('current')
# Sender TSpec (token bucket): rate, peak rate, burst, min/max packet sizes.
hwRsvpTeSenderTSpecRate = MibTableColumn((1, 3, 6, 1, 4, 1, 2011, 5, 25, 148, 1, 2, 1, 10), BitRate()).setUnits('bits per second').setMaxAccess("readonly")
if mibBuilder.loadTexts: hwRsvpTeSenderTSpecRate.setStatus('current')
hwRsvpTeSenderTSpecPeakRate = MibTableColumn((1, 3, 6, 1, 4, 1, 2011, 5, 25, 148, 1, 2, 1, 11), BitRate()).setUnits('bits per second').setMaxAccess("readonly")
if mibBuilder.loadTexts: hwRsvpTeSenderTSpecPeakRate.setStatus('current')
hwRsvpTeSenderTSpecBurst = MibTableColumn((1, 3, 6, 1, 4, 1, 2011, 5, 25, 148, 1, 2, 1, 12), BurstSize()).setUnits('bytes').setMaxAccess("readonly")
if mibBuilder.loadTexts: hwRsvpTeSenderTSpecBurst.setStatus('current')
hwRsvpTeSenderTSpecMinTu = MibTableColumn((1, 3, 6, 1, 4, 1, 2011, 5, 25, 148, 1, 2, 1, 13), MessageSize()).setMaxAccess("readonly")
if mibBuilder.loadTexts: hwRsvpTeSenderTSpecMinTu.setStatus('current')
hwRsvpTeSenderTSpecMaxTu = MibTableColumn((1, 3, 6, 1, 4, 1, 2011, 5, 25, 148, 1, 2, 1, 14), MessageSize()).setMaxAccess("readonly")
if mibBuilder.loadTexts: hwRsvpTeSenderTSpecMaxTu.setStatus('current')
hwRsvpTeSenderInterval = MibTableColumn((1, 3, 6, 1, 4, 1, 2011, 5, 25, 148, 1, 2, 1, 15), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: hwRsvpTeSenderInterval.setStatus('current')
hwRsvpTeSenderRsvpHop = MibTableColumn((1, 3, 6, 1, 4, 1, 2011, 5, 25, 148, 1, 2, 1, 16), TruthValue()).setMaxAccess("readonly")
if mibBuilder.loadTexts: hwRsvpTeSenderRsvpHop.setStatus('current')
hwRsvpTeSenderPolicy = MibTableColumn((1, 3, 6, 1, 4, 1, 2011, 5, 25, 148, 1, 2, 1, 17), OctetString().subtype(subtypeSpec=ValueSizeConstraint(0, 65532))).setMaxAccess("readonly")
if mibBuilder.loadTexts: hwRsvpTeSenderPolicy.setStatus('current')
# ADSPEC general parameters.
hwRsvpTeSenderAdspecBreak = MibTableColumn((1, 3, 6, 1, 4, 1, 2011, 5, 25, 148, 1, 2, 1, 18), TruthValue()).setMaxAccess("readonly")
if mibBuilder.loadTexts: hwRsvpTeSenderAdspecBreak.setStatus('current')
hwRsvpTeSenderAdspecHopCount = MibTableColumn((1, 3, 6, 1, 4, 1, 2011, 5, 25, 148, 1, 2, 1, 19), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: hwRsvpTeSenderAdspecHopCount.setStatus('current')
hwRsvpTeSenderAdspecPathBw = MibTableColumn((1, 3, 6, 1, 4, 1, 2011, 5, 25, 148, 1, 2, 1, 20), BitRate()).setUnits('bits per second').setMaxAccess("readonly")
if mibBuilder.loadTexts: hwRsvpTeSenderAdspecPathBw.setStatus('current')
hwRsvpTeSenderAdspecMinLatency = MibTableColumn((1, 3, 6, 1, 4, 1, 2011, 5, 25, 148, 1, 2, 1, 21), Integer32()).setUnits('microseconds').setMaxAccess("readonly")
if mibBuilder.loadTexts: hwRsvpTeSenderAdspecMinLatency.setStatus('current')
hwRsvpTeSenderAdspecMtu = MibTableColumn((1, 3, 6, 1, 4, 1, 2011, 5, 25, 148, 1, 2, 1, 22), Integer32()).setUnits('bytes').setMaxAccess("readonly")
if mibBuilder.loadTexts: hwRsvpTeSenderAdspecMtu.setStatus('current')
# ADSPEC guaranteed-service fragment (Ctot/Dtot/Csum/Dsum error terms etc.).
hwRsvpTeSenderAdspecGuaranteedSvc = MibTableColumn((1, 3, 6, 1, 4, 1, 2011, 5, 25, 148, 1, 2, 1, 23), TruthValue()).setMaxAccess("readonly")
if mibBuilder.loadTexts: hwRsvpTeSenderAdspecGuaranteedSvc.setStatus('current')
hwRsvpTeSenderAdspecGuaranteedBreak = MibTableColumn((1, 3, 6, 1, 4, 1, 2011, 5, 25, 148, 1, 2, 1, 24), TruthValue()).setMaxAccess("readonly")
if mibBuilder.loadTexts: hwRsvpTeSenderAdspecGuaranteedBreak.setStatus('current')
hwRsvpTeSenderAdspecGuaranteedCtot = MibTableColumn((1, 3, 6, 1, 4, 1, 2011, 5, 25, 148, 1, 2, 1, 25), Integer32()).setUnits('bytes').setMaxAccess("readonly")
if mibBuilder.loadTexts: hwRsvpTeSenderAdspecGuaranteedCtot.setStatus('current')
hwRsvpTeSenderAdspecGuaranteedDtot = MibTableColumn((1, 3, 6, 1, 4, 1, 2011, 5, 25, 148, 1, 2, 1, 26), Integer32()).setUnits('microseconds').setMaxAccess("readonly")
if mibBuilder.loadTexts: hwRsvpTeSenderAdspecGuaranteedDtot.setStatus('current')
hwRsvpTeSenderAdspecGuaranteedCsum = MibTableColumn((1, 3, 6, 1, 4, 1, 2011, 5, 25, 148, 1, 2, 1, 27), Integer32()).setUnits('bytes').setMaxAccess("readonly")
if mibBuilder.loadTexts: hwRsvpTeSenderAdspecGuaranteedCsum.setStatus('current')
hwRsvpTeSenderAdspecGuaranteedDsum = MibTableColumn((1, 3, 6, 1, 4, 1, 2011, 5, 25, 148, 1, 2, 1, 28), Integer32()).setUnits('microseconds').setMaxAccess("readonly")
if mibBuilder.loadTexts: hwRsvpTeSenderAdspecGuaranteedDsum.setStatus('current')
hwRsvpTeSenderAdspecGuaranteedHopCount = MibTableColumn((1, 3, 6, 1, 4, 1, 2011, 5, 25, 148, 1, 2, 1, 29), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: hwRsvpTeSenderAdspecGuaranteedHopCount.setStatus('current')
hwRsvpTeSenderAdspecGuaranteedPathBw = MibTableColumn((1, 3, 6, 1, 4, 1, 2011, 5, 25, 148, 1, 2, 1, 30), BitRate()).setUnits('bits per second').setMaxAccess("readonly")
if mibBuilder.loadTexts: hwRsvpTeSenderAdspecGuaranteedPathBw.setStatus('current')
hwRsvpTeSenderAdspecGuaranteedMinLatency = MibTableColumn((1, 3, 6, 1, 4, 1, 2011, 5, 25, 148, 1, 2, 1, 31), Integer32()).setUnits('microseconds').setMaxAccess("readonly")
if mibBuilder.loadTexts: hwRsvpTeSenderAdspecGuaranteedMinLatency.setStatus('current')
hwRsvpTeSenderAdspecGuaranteedMtu = MibTableColumn((1, 3, 6, 1, 4, 1, 2011, 5, 25, 148, 1, 2, 1, 32), Integer32()).setUnits('bytes').setMaxAccess("readonly")
if mibBuilder.loadTexts: hwRsvpTeSenderAdspecGuaranteedMtu.setStatus('current')
# ADSPEC controlled-load-service fragment.
hwRsvpTeSenderAdspecCtrlLoadSvc = MibTableColumn((1, 3, 6, 1, 4, 1, 2011, 5, 25, 148, 1, 2, 1, 33), TruthValue()).setMaxAccess("readonly")
if mibBuilder.loadTexts: hwRsvpTeSenderAdspecCtrlLoadSvc.setStatus('current')
hwRsvpTeSenderAdspecCtrlLoadBreak = MibTableColumn((1, 3, 6, 1, 4, 1, 2011, 5, 25, 148, 1, 2, 1, 34), TruthValue()).setMaxAccess("readonly")
if mibBuilder.loadTexts: hwRsvpTeSenderAdspecCtrlLoadBreak.setStatus('current')
hwRsvpTeSenderAdspecCtrlLoadHopCount = MibTableColumn((1, 3, 6, 1, 4, 1, 2011, 5, 25, 148, 1, 2, 1, 35), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: hwRsvpTeSenderAdspecCtrlLoadHopCount.setStatus('current')
hwRsvpTeSenderAdspecCtrlLoadPathBw = MibTableColumn((1, 3, 6, 1, 4, 1, 2011, 5, 25, 148, 1, 2, 1, 36), BitRate()).setUnits('bits per second').setMaxAccess("readonly")
if mibBuilder.loadTexts: hwRsvpTeSenderAdspecCtrlLoadPathBw.setStatus('current')
hwRsvpTeSenderAdspecCtrlLoadMinLatency = MibTableColumn((1, 3, 6, 1, 4, 1, 2011, 5, 25, 148, 1, 2, 1, 37), Integer32()).setUnits('microseconds').setMaxAccess("readonly")
if mibBuilder.loadTexts: hwRsvpTeSenderAdspecCtrlLoadMinLatency.setStatus('current')
hwRsvpTeSenderAdspecCtrlLoadMtu = MibTableColumn((1, 3, 6, 1, 4, 1, 2011, 5, 25, 148, 1, 2, 1, 38), Integer32()).setUnits('bytes').setMaxAccess("readonly")
if mibBuilder.loadTexts: hwRsvpTeSenderAdspecCtrlLoadMtu.setStatus('current')
hwRsvpTeSenderTtl = MibTableColumn((1, 3, 6, 1, 4, 1, 2011, 5, 25, 148, 1, 2, 1, 39), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: hwRsvpTeSenderTtl.setStatus('current')
hwRsvpTeLspId = MibTableColumn((1, 3, 6, 1, 4, 1, 2011, 5, 25, 148, 1, 2, 1, 40), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: hwRsvpTeLspId.setStatus('current')
# MESSAGE_ID state for refresh reduction (send and receive directions).
hwRsvpTeSenderMsgIdSndFlag = MibTableColumn((1, 3, 6, 1, 4, 1, 2011, 5, 25, 148, 1, 2, 1, 41), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: hwRsvpTeSenderMsgIdSndFlag.setStatus('current')
hwRsvpTeSenderMsgIdSndEpoch = MibTableColumn((1, 3, 6, 1, 4, 1, 2011, 5, 25, 148, 1, 2, 1, 42), Gauge32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: hwRsvpTeSenderMsgIdSndEpoch.setStatus('current')
hwRsvpTeSenderMsgIdSndNumber = MibTableColumn((1, 3, 6, 1, 4, 1, 2011, 5, 25, 148, 1, 2, 1, 43), Gauge32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: hwRsvpTeSenderMsgIdSndNumber.setStatus('current')
hwRsvpTeSenderMsgIdRcvFlag = MibTableColumn((1, 3, 6, 1, 4, 1, 2011, 5, 25, 148, 1, 2, 1, 44), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: hwRsvpTeSenderMsgIdRcvFlag.setStatus('current')
hwRsvpTeSenderMsgIdRcvEpoch = MibTableColumn((1, 3, 6, 1, 4, 1, 2011, 5, 25, 148, 1, 2, 1, 45), Gauge32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: hwRsvpTeSenderMsgIdRcvEpoch.setStatus('current')
hwRsvpTeSenderMsgIdRcvNumber = MibTableColumn((1, 3, 6, 1, 4, 1, 2011, 5, 25, 148, 1, 2, 1, 46), Gauge32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: hwRsvpTeSenderMsgIdRcvNumber.setStatus('current')
hwRsvpTeSenderClassType = MibTableColumn((1, 3, 6, 1, 4, 1, 2011, 5, 25, 148, 1, 2, 1, 47), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: hwRsvpTeSenderClassType.setStatus('current')
# LABEL_REQUEST object: C-Type selects plain / ATM-range / FR-range variants;
# the ATM/FR range columns below are only meaningful for the matching C-Type.
hwRsvpTeSenderLabelRequestCtype = MibTableColumn((1, 3, 6, 1, 4, 1, 2011, 5, 25, 148, 1, 2, 1, 48), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3))).clone(namedValues=NamedValues(("withoutLabelRange", 1), ("withAtmLabelRange", 2), ("withFrameRelayLabelRange", 3)))).setMaxAccess("readonly")
if mibBuilder.loadTexts: hwRsvpTeSenderLabelRequestCtype.setStatus('current')
hwRsvpTeSenderLabelRequestL3pid = MibTableColumn((1, 3, 6, 1, 4, 1, 2011, 5, 25, 148, 1, 2, 1, 49), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: hwRsvpTeSenderLabelRequestL3pid.setStatus('current')
hwRsvpTeSenderLabelRequestAtmMinVpi = MibTableColumn((1, 3, 6, 1, 4, 1, 2011, 5, 25, 148, 1, 2, 1, 50), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: hwRsvpTeSenderLabelRequestAtmMinVpi.setStatus('current')
hwRsvpTeSenderLabelRequestAtmMinVci = MibTableColumn((1, 3, 6, 1, 4, 1, 2011, 5, 25, 148, 1, 2, 1, 51), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: hwRsvpTeSenderLabelRequestAtmMinVci.setStatus('current')
hwRsvpTeSenderLabelRequestAtmMaxVpi = MibTableColumn((1, 3, 6, 1, 4, 1, 2011, 5, 25, 148, 1, 2, 1, 52), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: hwRsvpTeSenderLabelRequestAtmMaxVpi.setStatus('current')
hwRsvpTeSenderLabelRequestAtmMaxVci = MibTableColumn((1, 3, 6, 1, 4, 1, 2011, 5, 25, 148, 1, 2, 1, 53), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: hwRsvpTeSenderLabelRequestAtmMaxVci.setStatus('current')
hwRsvpTeSenderLabelRequestFrMinDlci = MibTableColumn((1, 3, 6, 1, 4, 1, 2011, 5, 25, 148, 1, 2, 1, 54), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: hwRsvpTeSenderLabelRequestFrMinDlci.setStatus('current')
hwRsvpTeSenderLabelRequestFrMaxDlci = MibTableColumn((1, 3, 6, 1, 4, 1, 2011, 5, 25, 148, 1, 2, 1, 55), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: hwRsvpTeSenderLabelRequestFrMaxDlci.setStatus('current')
# SESSION_ATTRIBUTE object (C-Type 1 = with resource affinities, 7 = without).
hwRsvpTeSenderSessionAttrType = MibTableColumn((1, 3, 6, 1, 4, 1, 2011, 5, 25, 148, 1, 2, 1, 56), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 7))).clone(namedValues=NamedValues(("withRa", 1), ("withoutRa", 7)))).setMaxAccess("readonly")
if mibBuilder.loadTexts: hwRsvpTeSenderSessionAttrType.setStatus('current')
hwRsvpTeSenderSessionAttrSetupPrio = MibTableColumn((1, 3, 6, 1, 4, 1, 2011, 5, 25, 148, 1, 2, 1, 57), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: hwRsvpTeSenderSessionAttrSetupPrio.setStatus('current')
hwRsvpTeSenderSessionAttrHoldPrio = MibTableColumn((1, 3, 6, 1, 4, 1, 2011, 5, 25, 148, 1, 2, 1, 58), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: hwRsvpTeSenderSessionAttrHoldPrio.setStatus('current')
hwRsvpTeSenderSessionAttrFlag = MibTableColumn((1, 3, 6, 1, 4, 1, 2011, 5, 25, 148, 1, 2, 1, 59), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: hwRsvpTeSenderSessionAttrFlag.setStatus('current')
hwRsvpTeSenderSessionAttrName = MibTableColumn((1, 3, 6, 1, 4, 1, 2011, 5, 25, 148, 1, 2, 1, 60), OctetString().subtype(subtypeSpec=ValueSizeConstraint(0, 64))).setMaxAccess("readonly")
if mibBuilder.loadTexts: hwRsvpTeSenderSessionAttrName.setStatus('current')
hwRsvpTeSenderSessionAttrExcludeAny = MibTableColumn((1, 3, 6, 1, 4, 1, 2011, 5, 25, 148, 1, 2, 1, 61), Gauge32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: hwRsvpTeSenderSessionAttrExcludeAny.setStatus('current')
hwRsvpTeSenderSessionAttrIncludeAny = MibTableColumn((1, 3, 6, 1, 4, 1, 2011, 5, 25, 148, 1, 2, 1, 62), Gauge32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: hwRsvpTeSenderSessionAttrIncludeAny.setStatus('current')
hwRsvpTeSenderSessionAttrIncludeAll = MibTableColumn((1, 3, 6, 1, 4, 1, 2011, 5, 25, 148, 1, 2, 1, 63), Gauge32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: hwRsvpTeSenderSessionAttrIncludeAll.setStatus('current')
# FAST_REROUTE object plus the locally observed FRR in-use state.
hwRsvpTeSenderFrrSetupPrio = MibTableColumn((1, 3, 6, 1, 4, 1, 2011, 5, 25, 148, 1, 2, 1, 64), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: hwRsvpTeSenderFrrSetupPrio.setStatus('current')
hwRsvpTeSenderFrrHoldPrio = MibTableColumn((1, 3, 6, 1, 4, 1, 2011, 5, 25, 148, 1, 2, 1, 65), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: hwRsvpTeSenderFrrHoldPrio.setStatus('current')
hwRsvpTeSenderFrrHopLimit = MibTableColumn((1, 3, 6, 1, 4, 1, 2011, 5, 25, 148, 1, 2, 1, 66), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: hwRsvpTeSenderFrrHopLimit.setStatus('current')
hwRsvpTeSenderFrrFlag = MibTableColumn((1, 3, 6, 1, 4, 1, 2011, 5, 25, 148, 1, 2, 1, 67), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3))).clone(namedValues=NamedValues(("oneToOneDesired", 1), ("facilityDesired", 2), ("noBackupDesired", 3)))).setMaxAccess("readonly")
if mibBuilder.loadTexts: hwRsvpTeSenderFrrFlag.setStatus('current')
hwRsvpTeSenderFrrBandwidth = MibTableColumn((1, 3, 6, 1, 4, 1, 2011, 5, 25, 148, 1, 2, 1, 68), BitRate()).setUnits('bits per second').setMaxAccess("readonly")
if mibBuilder.loadTexts: hwRsvpTeSenderFrrBandwidth.setStatus('current')
hwRsvpTeSenderFrrExcludeAny = MibTableColumn((1, 3, 6, 1, 4, 1, 2011, 5, 25, 148, 1, 2, 1, 69), Gauge32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: hwRsvpTeSenderFrrExcludeAny.setStatus('current')
hwRsvpTeSenderFrrIncludeAny = MibTableColumn((1, 3, 6, 1, 4, 1, 2011, 5, 25, 148, 1, 2, 1, 70), Gauge32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: hwRsvpTeSenderFrrIncludeAny.setStatus('current')
hwRsvpTeSenderFrrIncludeAll = MibTableColumn((1, 3, 6, 1, 4, 1, 2011, 5, 25, 148, 1, 2, 1, 71), Gauge32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: hwRsvpTeSenderFrrIncludeAll.setStatus('current')
hwRsvpTeSenderFrrInuseFlag = MibTableColumn((1, 3, 6, 1, 4, 1, 2011, 5, 25, 148, 1, 2, 1, 72), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3, 4, 5))).clone(namedValues=NamedValues(("normal", 1), ("plrInUse", 2), ("mpInUse", 3), ("plrAndMpInUse", 4), ("underProtection", 5)))).setMaxAccess("readonly")
if mibBuilder.loadTexts: hwRsvpTeSenderFrrInuseFlag.setStatus('current')
hwRsvpTeSenderDiffServPsc = MibTableColumn((1, 3, 6, 1, 4, 1, 2011, 5, 25, 148, 1, 2, 1, 73), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: hwRsvpTeSenderDiffServPsc.setStatus('current')
# --- hwRsvpTeResv table (.1.3) --------------------------------------------
# One row per RESV-state reservation request received by this router, indexed
# by (hwRsvpTeSessionNumber, hwRsvpTeResvNumber). Carries the FLOWSPEC
# (TSpec + RSpec), style flags and RSVP_HOP details. Modeled after
# rsvpResvTable of RFC 2206.
hwRsvpTeResvTable = MibTable((1, 3, 6, 1, 4, 1, 2011, 5, 25, 148, 1, 3), )
if mibBuilder.loadTexts: hwRsvpTeResvTable.setStatus('current')
hwRsvpTeResvEntry = MibTableRow((1, 3, 6, 1, 4, 1, 2011, 5, 25, 148, 1, 3, 1), ).setIndexNames((0, "HUAWEI-RSVPTE-MIB", "hwRsvpTeSessionNumber"), (0, "HUAWEI-RSVPTE-MIB", "hwRsvpTeResvNumber"))
if mibBuilder.loadTexts: hwRsvpTeResvEntry.setStatus('current')
# Secondary index column (no max-access set, i.e. not-accessible).
hwRsvpTeResvNumber = MibTableColumn((1, 3, 6, 1, 4, 1, 2011, 5, 25, 148, 1, 3, 1, 1), Gauge32())
if mibBuilder.loadTexts: hwRsvpTeResvNumber.setStatus('current')
hwRsvpTeResvType = MibTableColumn((1, 3, 6, 1, 4, 1, 2011, 5, 25, 148, 1, 3, 1, 2), SessionType()).setMaxAccess("readonly")
if mibBuilder.loadTexts: hwRsvpTeResvType.setStatus('current')
hwRsvpTeResvDestAddr = MibTableColumn((1, 3, 6, 1, 4, 1, 2011, 5, 25, 148, 1, 3, 1, 3), OctetString().subtype(subtypeSpec=ValueSizeConstraint(0, 16))).setMaxAccess("readonly")
if mibBuilder.loadTexts: hwRsvpTeResvDestAddr.setStatus('current')
hwRsvpTeResvSenderAddr = MibTableColumn((1, 3, 6, 1, 4, 1, 2011, 5, 25, 148, 1, 3, 1, 4), OctetString().subtype(subtypeSpec=ValueSizeConstraint(0, 16))).setMaxAccess("readonly")
if mibBuilder.loadTexts: hwRsvpTeResvSenderAddr.setStatus('current')
hwRsvpTeResvDestAddrLength = MibTableColumn((1, 3, 6, 1, 4, 1, 2011, 5, 25, 148, 1, 3, 1, 5), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: hwRsvpTeResvDestAddrLength.setStatus('current')
hwRsvpTeResvSenderAddrLength = MibTableColumn((1, 3, 6, 1, 4, 1, 2011, 5, 25, 148, 1, 3, 1, 6), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: hwRsvpTeResvSenderAddrLength.setStatus('current')
hwRsvpTeResvHopAddr = MibTableColumn((1, 3, 6, 1, 4, 1, 2011, 5, 25, 148, 1, 3, 1, 7), OctetString().subtype(subtypeSpec=ValueSizeConstraint(0, 16))).setMaxAccess("readonly")
if mibBuilder.loadTexts: hwRsvpTeResvHopAddr.setStatus('current')
hwRsvpTeResvHopLih = MibTableColumn((1, 3, 6, 1, 4, 1, 2011, 5, 25, 148, 1, 3, 1, 8), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: hwRsvpTeResvHopLih.setStatus('current')
hwRsvpTeResvInterface = MibTableColumn((1, 3, 6, 1, 4, 1, 2011, 5, 25, 148, 1, 3, 1, 9), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: hwRsvpTeResvInterface.setStatus('current')
hwRsvpTeResvService = MibTableColumn((1, 3, 6, 1, 4, 1, 2011, 5, 25, 148, 1, 3, 1, 10), QosService()).setMaxAccess("readonly")
if mibBuilder.loadTexts: hwRsvpTeResvService.setStatus('current')
# FLOWSPEC TSpec (token bucket) followed by the guaranteed-service RSpec.
hwRsvpTeResvTSpecRate = MibTableColumn((1, 3, 6, 1, 4, 1, 2011, 5, 25, 148, 1, 3, 1, 11), BitRate()).setUnits('bits per second').setMaxAccess("readonly")
if mibBuilder.loadTexts: hwRsvpTeResvTSpecRate.setStatus('current')
hwRsvpTeResvTSpecPeakRate = MibTableColumn((1, 3, 6, 1, 4, 1, 2011, 5, 25, 148, 1, 3, 1, 12), BitRate()).setUnits('bits per second').setMaxAccess("readonly")
if mibBuilder.loadTexts: hwRsvpTeResvTSpecPeakRate.setStatus('current')
hwRsvpTeResvTSpecBurst = MibTableColumn((1, 3, 6, 1, 4, 1, 2011, 5, 25, 148, 1, 3, 1, 13), BurstSize()).setUnits('bytes').setMaxAccess("readonly")
if mibBuilder.loadTexts: hwRsvpTeResvTSpecBurst.setStatus('current')
hwRsvpTeResvTSpecMinTu = MibTableColumn((1, 3, 6, 1, 4, 1, 2011, 5, 25, 148, 1, 3, 1, 14), MessageSize()).setMaxAccess("readonly")
if mibBuilder.loadTexts: hwRsvpTeResvTSpecMinTu.setStatus('current')
hwRsvpTeResvTSpecMaxTu = MibTableColumn((1, 3, 6, 1, 4, 1, 2011, 5, 25, 148, 1, 3, 1, 15), MessageSize()).setMaxAccess("readonly")
if mibBuilder.loadTexts: hwRsvpTeResvTSpecMaxTu.setStatus('current')
hwRsvpTeResvRSpecRate = MibTableColumn((1, 3, 6, 1, 4, 1, 2011, 5, 25, 148, 1, 3, 1, 16), BitRate()).setUnits('bits per second').setMaxAccess("readonly")
if mibBuilder.loadTexts: hwRsvpTeResvRSpecRate.setStatus('current')
hwRsvpTeResvRSpecSlack = MibTableColumn((1, 3, 6, 1, 4, 1, 2011, 5, 25, 148, 1, 3, 1, 17), Integer32()).setUnits('microseconds').setMaxAccess("readonly")
if mibBuilder.loadTexts: hwRsvpTeResvRSpecSlack.setStatus('current')
hwRsvpTeResvInterval = MibTableColumn((1, 3, 6, 1, 4, 1, 2011, 5, 25, 148, 1, 3, 1, 18), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: hwRsvpTeResvInterval.setStatus('current')
hwRsvpTeResvScope = MibTableColumn((1, 3, 6, 1, 4, 1, 2011, 5, 25, 148, 1, 3, 1, 19), OctetString().subtype(subtypeSpec=ValueSizeConstraint(0, 65535))).setMaxAccess("readonly")
if mibBuilder.loadTexts: hwRsvpTeResvScope.setStatus('current')
hwRsvpTeResvShared = MibTableColumn((1, 3, 6, 1, 4, 1, 2011, 5, 25, 148, 1, 3, 1, 20), TruthValue()).setMaxAccess("readonly")
if mibBuilder.loadTexts: hwRsvpTeResvShared.setStatus('current')
hwRsvpTeResvExplicit = MibTableColumn((1, 3, 6, 1, 4, 1, 2011, 5, 25, 148, 1, 3, 1, 21), TruthValue()).setMaxAccess("readonly")
if mibBuilder.loadTexts: hwRsvpTeResvExplicit.setStatus('current')
hwRsvpTeResvRsvpHop = MibTableColumn((1, 3, 6, 1, 4, 1, 2011, 5, 25, 148, 1, 3, 1, 22), TruthValue()).setMaxAccess("readonly")
if mibBuilder.loadTexts: hwRsvpTeResvRsvpHop.setStatus('current')
hwRsvpTeResvPolicy = MibTableColumn((1, 3, 6, 1, 4, 1, 2011, 5, 25, 148, 1, 3, 1, 23), OctetString().subtype(subtypeSpec=ValueSizeConstraint(0, 65535))).setMaxAccess("readonly")
if mibBuilder.loadTexts: hwRsvpTeResvPolicy.setStatus('current')
hwRsvpTeResvTtl = MibTableColumn((1, 3, 6, 1, 4, 1, 2011, 5, 25, 148, 1, 3, 1, 24), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: hwRsvpTeResvTtl.setStatus('current')
hwRsvpTeResvConfirm = MibTableColumn((1, 3, 6, 1, 4, 1, 2011, 5, 25, 148, 1, 3, 1, 25), OctetString().subtype(subtypeSpec=ValueSizeConstraint(0, 16))).setMaxAccess("readonly")
if mibBuilder.loadTexts: hwRsvpTeResvConfirm.setStatus('current')
# --- hwRsvpTeResvFwd table (.1.4) -----------------------------------------
# One row per reservation request FORWARDED upstream by this router, indexed
# by (hwRsvpTeSessionNumber, hwRsvpTeResvFwdNumber). Column layout parallels
# hwRsvpTeResvTable, plus MESSAGE_ID columns at the end. Modeled after
# rsvpResvFwdTable of RFC 2206.
# NOTE: the table continues past this chunk (the setStatus companion of the
# last column below is outside the visible range).
hwRsvpTeResvFwdTable = MibTable((1, 3, 6, 1, 4, 1, 2011, 5, 25, 148, 1, 4), )
if mibBuilder.loadTexts: hwRsvpTeResvFwdTable.setStatus('current')
hwRsvpTeResvFwdEntry = MibTableRow((1, 3, 6, 1, 4, 1, 2011, 5, 25, 148, 1, 4, 1), ).setIndexNames((0, "HUAWEI-RSVPTE-MIB", "hwRsvpTeSessionNumber"), (0, "HUAWEI-RSVPTE-MIB", "hwRsvpTeResvFwdNumber"))
if mibBuilder.loadTexts: hwRsvpTeResvFwdEntry.setStatus('current')
# Secondary index column (no max-access set, i.e. not-accessible).
hwRsvpTeResvFwdNumber = MibTableColumn((1, 3, 6, 1, 4, 1, 2011, 5, 25, 148, 1, 4, 1, 1), Gauge32())
if mibBuilder.loadTexts: hwRsvpTeResvFwdNumber.setStatus('current')
hwRsvpTeResvFwdType = MibTableColumn((1, 3, 6, 1, 4, 1, 2011, 5, 25, 148, 1, 4, 1, 2), SessionType()).setMaxAccess("readonly")
if mibBuilder.loadTexts: hwRsvpTeResvFwdType.setStatus('current')
hwRsvpTeResvFwdDestAddr = MibTableColumn((1, 3, 6, 1, 4, 1, 2011, 5, 25, 148, 1, 4, 1, 3), OctetString().subtype(subtypeSpec=ValueSizeConstraint(0, 16))).setMaxAccess("readonly")
if mibBuilder.loadTexts: hwRsvpTeResvFwdDestAddr.setStatus('current')
hwRsvpTeResvFwdSenderAddr = MibTableColumn((1, 3, 6, 1, 4, 1, 2011, 5, 25, 148, 1, 4, 1, 4), OctetString().subtype(subtypeSpec=ValueSizeConstraint(0, 16))).setMaxAccess("readonly")
if mibBuilder.loadTexts: hwRsvpTeResvFwdSenderAddr.setStatus('current')
hwRsvpTeResvFwdDestAddrLength = MibTableColumn((1, 3, 6, 1, 4, 1, 2011, 5, 25, 148, 1, 4, 1, 5), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: hwRsvpTeResvFwdDestAddrLength.setStatus('current')
hwRsvpTeResvFwdSenderAddrLength = MibTableColumn((1, 3, 6, 1, 4, 1, 2011, 5, 25, 148, 1, 4, 1, 6), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: hwRsvpTeResvFwdSenderAddrLength.setStatus('current')
hwRsvpTeResvFwdHopAddr = MibTableColumn((1, 3, 6, 1, 4, 1, 2011, 5, 25, 148, 1, 4, 1, 7), OctetString().subtype(subtypeSpec=ValueSizeConstraint(0, 16))).setMaxAccess("readonly")
if mibBuilder.loadTexts: hwRsvpTeResvFwdHopAddr.setStatus('current')
hwRsvpTeResvFwdHopLih = MibTableColumn((1, 3, 6, 1, 4, 1, 2011, 5, 25, 148, 1, 4, 1, 8), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: hwRsvpTeResvFwdHopLih.setStatus('current')
hwRsvpTeResvFwdInterface = MibTableColumn((1, 3, 6, 1, 4, 1, 2011, 5, 25, 148, 1, 4, 1, 9), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: hwRsvpTeResvFwdInterface.setStatus('current')
hwRsvpTeResvFwdService = MibTableColumn((1, 3, 6, 1, 4, 1, 2011, 5, 25, 148, 1, 4, 1, 10), QosService()).setMaxAccess("readonly")
if mibBuilder.loadTexts: hwRsvpTeResvFwdService.setStatus('current')
hwRsvpTeResvFwdTSpecRate = MibTableColumn((1, 3, 6, 1, 4, 1, 2011, 5, 25, 148, 1, 4, 1, 11), BitRate()).setUnits('bits per second').setMaxAccess("readonly")
if mibBuilder.loadTexts: hwRsvpTeResvFwdTSpecRate.setStatus('current')
hwRsvpTeResvFwdTSpecPeakRate = MibTableColumn((1, 3, 6, 1, 4, 1, 2011, 5, 25, 148, 1, 4, 1, 12), BitRate()).setUnits('bits per second').setMaxAccess("readonly")
if mibBuilder.loadTexts: hwRsvpTeResvFwdTSpecPeakRate.setStatus('current')
hwRsvpTeResvFwdTSpecBurst = MibTableColumn((1, 3, 6, 1, 4, 1, 2011, 5, 25, 148, 1, 4, 1, 13), BurstSize()).setUnits('bytes').setMaxAccess("readonly")
if mibBuilder.loadTexts: hwRsvpTeResvFwdTSpecBurst.setStatus('current')
hwRsvpTeResvFwdTSpecMinTu = MibTableColumn((1, 3, 6, 1, 4, 1, 2011, 5, 25, 148, 1, 4, 1, 14), MessageSize()).setMaxAccess("readonly")
if mibBuilder.loadTexts: hwRsvpTeResvFwdTSpecMinTu.setStatus('current')
hwRsvpTeResvFwdTSpecMaxTu = MibTableColumn((1, 3, 6, 1, 4, 1, 2011, 5, 25, 148, 1, 4, 1, 15), MessageSize()).setMaxAccess("readonly")
if mibBuilder.loadTexts: hwRsvpTeResvFwdTSpecMaxTu.setStatus('current')
# NOTE(review): units here say 'bytes per second' while the corresponding
# hwRsvpTeResvRSpecRate column uses 'bits per second'. This appears to mirror
# the published MIB text (cf. RFC 2206 rsvpResvFwdRSpecRate), so it is left
# unchanged — confirm against the vendor MIB before "fixing".
hwRsvpTeResvFwdRSpecRate = MibTableColumn((1, 3, 6, 1, 4, 1, 2011, 5, 25, 148, 1, 4, 1, 16), BitRate()).setUnits('bytes per second').setMaxAccess("readonly")
if mibBuilder.loadTexts: hwRsvpTeResvFwdRSpecRate.setStatus('current')
hwRsvpTeResvFwdRSpecSlack = MibTableColumn((1, 3, 6, 1, 4, 1, 2011, 5, 25, 148, 1, 4, 1, 17), Integer32()).setUnits('microseconds').setMaxAccess("readonly")
if mibBuilder.loadTexts: hwRsvpTeResvFwdRSpecSlack.setStatus('current')
hwRsvpTeResvFwdInterval = MibTableColumn((1, 3, 6, 1, 4, 1, 2011, 5, 25, 148, 1, 4, 1, 18), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: hwRsvpTeResvFwdInterval.setStatus('current')
hwRsvpTeResvFwdScope = MibTableColumn((1, 3, 6, 1, 4, 1, 2011, 5, 25, 148, 1, 4, 1, 19), OctetString().subtype(subtypeSpec=ValueSizeConstraint(0, 65535))).setMaxAccess("readonly")
if mibBuilder.loadTexts: hwRsvpTeResvFwdScope.setStatus('current')
hwRsvpTeResvFwdShared = MibTableColumn((1, 3, 6, 1, 4, 1, 2011, 5, 25, 148, 1, 4, 1, 20), TruthValue()).setMaxAccess("readonly")
if mibBuilder.loadTexts: hwRsvpTeResvFwdShared.setStatus('current')
hwRsvpTeResvFwdExplicit = MibTableColumn((1, 3, 6, 1, 4, 1, 2011, 5, 25, 148, 1, 4, 1, 21), TruthValue()).setMaxAccess("readonly")
if mibBuilder.loadTexts: hwRsvpTeResvFwdExplicit.setStatus('current')
hwRsvpTeResvFwdRsvpHop = MibTableColumn((1, 3, 6, 1, 4, 1, 2011, 5, 25, 148, 1, 4, 1, 22), TruthValue()).setMaxAccess("readonly")
if mibBuilder.loadTexts: hwRsvpTeResvFwdRsvpHop.setStatus('current')
hwRsvpTeResvFwdPolicy = MibTableColumn((1, 3, 6, 1, 4, 1, 2011, 5, 25, 148, 1, 4, 1, 23), OctetString().subtype(subtypeSpec=ValueSizeConstraint(0, 65535))).setMaxAccess("readonly")
if mibBuilder.loadTexts: hwRsvpTeResvFwdPolicy.setStatus('current')
hwRsvpTeResvFwdTtl = MibTableColumn((1, 3, 6, 1, 4, 1, 2011, 5, 25, 148, 1, 4, 1, 24), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: hwRsvpTeResvFwdTtl.setStatus('current')
# MESSAGE_ID state for refresh reduction on the forwarded reservation.
hwRsvpTeResvFwdMsgIdFlag = MibTableColumn((1, 3, 6, 1, 4, 1, 2011, 5, 25, 148, 1, 4, 1, 25), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: hwRsvpTeResvFwdMsgIdFlag.setStatus('current')
hwRsvpTeResvFwdMsgIdEpoch = MibTableColumn((1, 3, 6, 1, 4, 1, 2011, 5, 25, 148, 1, 4, 1, 26), Gauge32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: hwRsvpTeResvFwdMsgIdEpoch.setStatus('current')
hwRsvpTeResvFwdMsgIdNumber = MibTableColumn((1, 3, 6, 1, 4, 1, 2011, 5, 25, 148, 1, 4, 1, 27), Gauge32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: hwRsvpTeResvFwdMsgIdNumber.setStatus('current')
# --- hwRsvpTeIfTable (…148.1.5): per-interface RSVP-TE configuration/state ---
# Indexed by ifIndex (imported from IF-MIB).  All columns read-only except
# hwRsvpTeIfStatus, which is a read-create RowStatus.
hwRsvpTeIfTable = MibTable((1, 3, 6, 1, 4, 1, 2011, 5, 25, 148, 1, 5), )
if mibBuilder.loadTexts: hwRsvpTeIfTable.setStatus('current')
hwRsvpTeIfEntry = MibTableRow((1, 3, 6, 1, 4, 1, 2011, 5, 25, 148, 1, 5, 1), ).setIndexNames((0, "IF-MIB", "ifIndex"))
if mibBuilder.loadTexts: hwRsvpTeIfEntry.setStatus('current')
# Neighbor counters by transport (UDP / raw IP / total).
hwRsvpTeIfUdpNbrs = MibTableColumn((1, 3, 6, 1, 4, 1, 2011, 5, 25, 148, 1, 5, 1, 1), Gauge32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: hwRsvpTeIfUdpNbrs.setStatus('current')
hwRsvpTeIfIpNbrs = MibTableColumn((1, 3, 6, 1, 4, 1, 2011, 5, 25, 148, 1, 5, 1, 2), Gauge32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: hwRsvpTeIfIpNbrs.setStatus('current')
hwRsvpTeIfNbrs = MibTableColumn((1, 3, 6, 1, 4, 1, 2011, 5, 25, 148, 1, 5, 1, 3), Gauge32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: hwRsvpTeIfNbrs.setStatus('current')
# Refresh / timer parameters.
hwRsvpTeIfRefreshBlockadeMultiple = MibTableColumn((1, 3, 6, 1, 4, 1, 2011, 5, 25, 148, 1, 5, 1, 4), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: hwRsvpTeIfRefreshBlockadeMultiple.setStatus('current')
hwRsvpTeIfRefreshMultiple = MibTableColumn((1, 3, 6, 1, 4, 1, 2011, 5, 25, 148, 1, 5, 1, 5), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: hwRsvpTeIfRefreshMultiple.setStatus('current')
hwRsvpTeIfTtl = MibTableColumn((1, 3, 6, 1, 4, 1, 2011, 5, 25, 148, 1, 5, 1, 6), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: hwRsvpTeIfTtl.setStatus('current')
hwRsvpTeIfRefreshInterval = MibTableColumn((1, 3, 6, 1, 4, 1, 2011, 5, 25, 148, 1, 5, 1, 7), TimeInterval()).setUnits('milliseconds').setMaxAccess("readonly")
if mibBuilder.loadTexts: hwRsvpTeIfRefreshInterval.setStatus('current')
hwRsvpTeIfRouteDelay = MibTableColumn((1, 3, 6, 1, 4, 1, 2011, 5, 25, 148, 1, 5, 1, 8), TimeInterval()).setUnits('hundredths of a second').setMaxAccess("readonly")
if mibBuilder.loadTexts: hwRsvpTeIfRouteDelay.setStatus('current')
hwRsvpTeIfEnabled = MibTableColumn((1, 3, 6, 1, 4, 1, 2011, 5, 25, 148, 1, 5, 1, 9), TruthValue()).setMaxAccess("readonly")
if mibBuilder.loadTexts: hwRsvpTeIfEnabled.setStatus('current')
hwRsvpTeIfUdpRequired = MibTableColumn((1, 3, 6, 1, 4, 1, 2011, 5, 25, 148, 1, 5, 1, 10), TruthValue()).setMaxAccess("readonly")
if mibBuilder.loadTexts: hwRsvpTeIfUdpRequired.setStatus('current')
# Row creation/deletion handle for this table.
hwRsvpTeIfStatus = MibTableColumn((1, 3, 6, 1, 4, 1, 2011, 5, 25, 148, 1, 5, 1, 11), RowStatus()).setMaxAccess("readcreate")
if mibBuilder.loadTexts: hwRsvpTeIfStatus.setStatus('current')
# Hello / summary-refresh (refresh reduction) feature state.
hwRsvpTeIfHelloEnabled = MibTableColumn((1, 3, 6, 1, 4, 1, 2011, 5, 25, 148, 1, 5, 1, 12), TruthValue()).setMaxAccess("readonly")
if mibBuilder.loadTexts: hwRsvpTeIfHelloEnabled.setStatus('current')
hwRsvpTeIfSrefreshEnabled = MibTableColumn((1, 3, 6, 1, 4, 1, 2011, 5, 25, 148, 1, 5, 1, 13), TruthValue()).setMaxAccess("readonly")
if mibBuilder.loadTexts: hwRsvpTeIfSrefreshEnabled.setStatus('current')
hwRsvpTeIfSrefreshInterval = MibTableColumn((1, 3, 6, 1, 4, 1, 2011, 5, 25, 148, 1, 5, 1, 14), TimeInterval()).setUnits('milliseconds').setMaxAccess("readonly")
if mibBuilder.loadTexts: hwRsvpTeIfSrefreshInterval.setStatus('current')
hwRsvpTeIfRetranIncDelta = MibTableColumn((1, 3, 6, 1, 4, 1, 2011, 5, 25, 148, 1, 5, 1, 15), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: hwRsvpTeIfRetranIncDelta.setStatus('current')
hwRsvpTeIfRetranInterval = MibTableColumn((1, 3, 6, 1, 4, 1, 2011, 5, 25, 148, 1, 5, 1, 16), TimeInterval()).setUnits('milliseconds').setMaxAccess("readonly")
if mibBuilder.loadTexts: hwRsvpTeIfRetranInterval.setStatus('current')
# Authentication configuration; key is an opaque octet string up to 392 bytes.
hwRsvpTeIfAuthEnabled = MibTableColumn((1, 3, 6, 1, 4, 1, 2011, 5, 25, 148, 1, 5, 1, 17), TruthValue()).setMaxAccess("readonly")
if mibBuilder.loadTexts: hwRsvpTeIfAuthEnabled.setStatus('current')
hwRsvpTeIfAuthEncrypted = MibTableColumn((1, 3, 6, 1, 4, 1, 2011, 5, 25, 148, 1, 5, 1, 18), TruthValue()).setMaxAccess("readonly")
if mibBuilder.loadTexts: hwRsvpTeIfAuthEncrypted.setStatus('current')
hwRsvpTeIfAuthHandshake = MibTableColumn((1, 3, 6, 1, 4, 1, 2011, 5, 25, 148, 1, 5, 1, 19), TruthValue()).setMaxAccess("readonly")
if mibBuilder.loadTexts: hwRsvpTeIfAuthHandshake.setStatus('current')
hwRsvpTeIfAuthLifeTime = MibTableColumn((1, 3, 6, 1, 4, 1, 2011, 5, 25, 148, 1, 5, 1, 20), TimeInterval()).setUnits('milliseconds').setMaxAccess("readonly")
if mibBuilder.loadTexts: hwRsvpTeIfAuthLifeTime.setStatus('current')
hwRsvpTeIfAuthKey = MibTableColumn((1, 3, 6, 1, 4, 1, 2011, 5, 25, 148, 1, 5, 1, 21), OctetString().subtype(subtypeSpec=ValueSizeConstraint(0, 392))).setMaxAccess("readonly")
if mibBuilder.loadTexts: hwRsvpTeIfAuthKey.setStatus('current')
hwRsvpTeIfWindowSize = MibTableColumn((1, 3, 6, 1, 4, 1, 2011, 5, 25, 148, 1, 5, 1, 22), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: hwRsvpTeIfWindowSize.setStatus('current')
# --- hwRsvpTeNbrTable (…148.1.6): per-neighbor RSVP-TE state ---
# Indexed by (ifIndex, hwRsvpTeNbrAddress).  The address index column itself
# carries no max-access (not-accessible index, per SMIv2 convention).
hwRsvpTeNbrTable = MibTable((1, 3, 6, 1, 4, 1, 2011, 5, 25, 148, 1, 6), )
if mibBuilder.loadTexts: hwRsvpTeNbrTable.setStatus('current')
hwRsvpTeNbrEntry = MibTableRow((1, 3, 6, 1, 4, 1, 2011, 5, 25, 148, 1, 6, 1), ).setIndexNames((0, "IF-MIB", "ifIndex"), (0, "HUAWEI-RSVPTE-MIB", "hwRsvpTeNbrAddress"))
if mibBuilder.loadTexts: hwRsvpTeNbrEntry.setStatus('current')
hwRsvpTeNbrAddress = MibTableColumn((1, 3, 6, 1, 4, 1, 2011, 5, 25, 148, 1, 6, 1, 1), OctetString().subtype(subtypeSpec=ValueSizeConstraint(0, 16)))
if mibBuilder.loadTexts: hwRsvpTeNbrAddress.setStatus('current')
# Transport used with this neighbor: ip(1), udp(2), both(3).
hwRsvpTeNbrProtocol = MibTableColumn((1, 3, 6, 1, 4, 1, 2011, 5, 25, 148, 1, 6, 1, 2), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3))).clone(namedValues=NamedValues(("ip", 1), ("udp", 2), ("both", 3)))).setMaxAccess("readonly")
if mibBuilder.loadTexts: hwRsvpTeNbrProtocol.setStatus('current')
# Row creation/deletion handle (read-create).
hwRsvpTeNbrStatus = MibTableColumn((1, 3, 6, 1, 4, 1, 2011, 5, 25, 148, 1, 6, 1, 3), RowStatus()).setMaxAccess("readcreate")
if mibBuilder.loadTexts: hwRsvpTeNbrStatus.setStatus('current')
hwRsvpTeNbrSendersNumber = MibTableColumn((1, 3, 6, 1, 4, 1, 2011, 5, 25, 148, 1, 6, 1, 4), Gauge32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: hwRsvpTeNbrSendersNumber.setStatus('current')
hwRsvpTeNbrReceiversNumber = MibTableColumn((1, 3, 6, 1, 4, 1, 2011, 5, 25, 148, 1, 6, 1, 5), Gauge32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: hwRsvpTeNbrReceiversNumber.setStatus('current')
# RSVP Hello session state with this neighbor.
hwRsvpTeNbrHelloEnabled = MibTableColumn((1, 3, 6, 1, 4, 1, 2011, 5, 25, 148, 1, 6, 1, 6), TruthValue()).setMaxAccess("readonly")
if mibBuilder.loadTexts: hwRsvpTeNbrHelloEnabled.setStatus('current')
hwRsvpTeNbrHelloSrcInstance = MibTableColumn((1, 3, 6, 1, 4, 1, 2011, 5, 25, 148, 1, 6, 1, 7), Gauge32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: hwRsvpTeNbrHelloSrcInstance.setStatus('current')
hwRsvpTeNbrHelloDstInstance = MibTableColumn((1, 3, 6, 1, 4, 1, 2011, 5, 25, 148, 1, 6, 1, 8), Gauge32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: hwRsvpTeNbrHelloDstInstance.setStatus('current')
hwRsvpTeNbrHelloLostCounter = MibTableColumn((1, 3, 6, 1, 4, 1, 2011, 5, 25, 148, 1, 6, 1, 9), Gauge32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: hwRsvpTeNbrHelloLostCounter.setStatus('current')
hwRsvpTeNbrHelloType = MibTableColumn((1, 3, 6, 1, 4, 1, 2011, 5, 25, 148, 1, 6, 1, 10), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3))).clone(namedValues=NamedValues(("request", 1), ("ack", 2), ("none", 3)))).setMaxAccess("readonly")
if mibBuilder.loadTexts: hwRsvpTeNbrHelloType.setStatus('current')
# Graceful-restart (GR) capability and timers for this neighbor.
hwRsvpTeNbrGrCapability = MibTableColumn((1, 3, 6, 1, 4, 1, 2011, 5, 25, 148, 1, 6, 1, 11), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: hwRsvpTeNbrGrCapability.setStatus('current')
hwRsvpTeNbrGrRestartTime = MibTableColumn((1, 3, 6, 1, 4, 1, 2011, 5, 25, 148, 1, 6, 1, 12), TimeStamp()).setUnits('milliseconds').setMaxAccess("readonly")
if mibBuilder.loadTexts: hwRsvpTeNbrGrRestartTime.setStatus('current')
hwRsvpTeNbrGrRecoveryTime = MibTableColumn((1, 3, 6, 1, 4, 1, 2011, 5, 25, 148, 1, 6, 1, 13), TimeStamp()).setUnits('milliseconds').setMaxAccess("readonly")
if mibBuilder.loadTexts: hwRsvpTeNbrGrRecoveryTime.setStatus('current')
hwRsvpTeNbrGrStatus = MibTableColumn((1, 3, 6, 1, 4, 1, 2011, 5, 25, 148, 1, 6, 1, 14), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3, 4, 5, 6))).clone(namedValues=NamedValues(("normal", 1), ("supporting", 2), ("restarting", 3), ("restartTimerRunning", 4), ("recoveryTimerRunning", 5), ("grEnd", 6)))).setMaxAccess("readonly")
if mibBuilder.loadTexts: hwRsvpTeNbrGrStatus.setStatus('current')
# Key id is either empty (0 bytes) or exactly 6 bytes long.
hwRsvpTeNbrAuthKeyId = MibTableColumn((1, 3, 6, 1, 4, 1, 2011, 5, 25, 148, 1, 6, 1, 15), OctetString().subtype(subtypeSpec=ConstraintsUnion(ValueSizeConstraint(0, 0), ValueSizeConstraint(6, 6), ))).setMaxAccess("readonly")
if mibBuilder.loadTexts: hwRsvpTeNbrAuthKeyId.setStatus('current')
hwRsvpTeNbrReductionEnabled = MibTableColumn((1, 3, 6, 1, 4, 1, 2011, 5, 25, 148, 1, 6, 1, 16), TruthValue()).setMaxAccess("readonly")
if mibBuilder.loadTexts: hwRsvpTeNbrReductionEnabled.setStatus('current')
hwRsvpTeNbrReliabilityEnabled = MibTableColumn((1, 3, 6, 1, 4, 1, 2011, 5, 25, 148, 1, 6, 1, 17), TruthValue()).setMaxAccess("readonly")
if mibBuilder.loadTexts: hwRsvpTeNbrReliabilityEnabled.setStatus('current')
# --- hwRsvpTeMessageIdTable (…148.1.7): message-ID bookkeeping per neighbor ---
# Indexed by (ifIndex, hwRsvpTeNbrAddress, epoch, number); the two local index
# columns have no max-access (not-accessible, per SMIv2 index convention).
hwRsvpTeMessageIdTable = MibTable((1, 3, 6, 1, 4, 1, 2011, 5, 25, 148, 1, 7), )
if mibBuilder.loadTexts: hwRsvpTeMessageIdTable.setStatus('current')
hwRsvpTeMessageIdEntry = MibTableRow((1, 3, 6, 1, 4, 1, 2011, 5, 25, 148, 1, 7, 1), ).setIndexNames((0, "IF-MIB", "ifIndex"), (0, "HUAWEI-RSVPTE-MIB", "hwRsvpTeNbrAddress"), (0, "HUAWEI-RSVPTE-MIB", "hwRsvpTeMessageIdEpoch"), (0, "HUAWEI-RSVPTE-MIB", "hwRsvpTeMessageIdNumber"))
if mibBuilder.loadTexts: hwRsvpTeMessageIdEntry.setStatus('current')
hwRsvpTeMessageIdEpoch = MibTableColumn((1, 3, 6, 1, 4, 1, 2011, 5, 25, 148, 1, 7, 1, 1), Gauge32())
if mibBuilder.loadTexts: hwRsvpTeMessageIdEpoch.setStatus('current')
hwRsvpTeMessageIdNumber = MibTableColumn((1, 3, 6, 1, 4, 1, 2011, 5, 25, 148, 1, 7, 1, 2), Gauge32())
if mibBuilder.loadTexts: hwRsvpTeMessageIdNumber.setStatus('current')
# Direction/usage of the message id within the protocol machinery.
hwRsvpTeMessageIdFlag = MibTableColumn((1, 3, 6, 1, 4, 1, 2011, 5, 25, 148, 1, 7, 1, 3), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3, 4, 5))).clone(namedValues=NamedValues(("senderIncoming", 1), ("senderOutgoing", 2), ("resv", 3), ("resvFwd", 4), ("rtBuff", 5)))).setMaxAccess("readonly")
if mibBuilder.loadTexts: hwRsvpTeMessageIdFlag.setStatus('current')
# --- hwRsvpTeFilterSpecTable (…148.1.8): FILTER_SPEC objects per reservation ---
# Indexed by (hwRsvpTeSessionNumber, hwRsvpTeResvNumber, hwRsvpTeFilterSpecNumber);
# the index columns are defined earlier in the file / here without max-access.
hwRsvpTeFilterSpecTable = MibTable((1, 3, 6, 1, 4, 1, 2011, 5, 25, 148, 1, 8), )
if mibBuilder.loadTexts: hwRsvpTeFilterSpecTable.setStatus('current')
hwRsvpTeFilterSpecEntry = MibTableRow((1, 3, 6, 1, 4, 1, 2011, 5, 25, 148, 1, 8, 1), ).setIndexNames((0, "HUAWEI-RSVPTE-MIB", "hwRsvpTeSessionNumber"), (0, "HUAWEI-RSVPTE-MIB", "hwRsvpTeResvNumber"), (0, "HUAWEI-RSVPTE-MIB", "hwRsvpTeFilterSpecNumber"))
if mibBuilder.loadTexts: hwRsvpTeFilterSpecEntry.setStatus('current')
hwRsvpTeFilterSpecNumber = MibTableColumn((1, 3, 6, 1, 4, 1, 2011, 5, 25, 148, 1, 8, 1, 1), Gauge32())
if mibBuilder.loadTexts: hwRsvpTeFilterSpecNumber.setStatus('current')
hwRsvpTeFilterSpecLspId = MibTableColumn((1, 3, 6, 1, 4, 1, 2011, 5, 25, 148, 1, 8, 1, 2), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: hwRsvpTeFilterSpecLspId.setStatus('current')
hwRsvpTeFilterSpecIngressLsrId = MibTableColumn((1, 3, 6, 1, 4, 1, 2011, 5, 25, 148, 1, 8, 1, 3), OctetString()).setMaxAccess("readonly")
if mibBuilder.loadTexts: hwRsvpTeFilterSpecIngressLsrId.setStatus('current')
hwRsvpTeFilterSpecLabel = MibTableColumn((1, 3, 6, 1, 4, 1, 2011, 5, 25, 148, 1, 8, 1, 4), Gauge32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: hwRsvpTeFilterSpecLabel.setStatus('current')
# --- hwRsvpTeRroTable (…148.1.9): Record Route Object (RRO) sub-objects ---
# Indexed by (hwRsvpTeSessionNumber, hwRsvpTeSenderNumber, hwRsvpTeRroNumber);
# each row is one RRO hop, which is an IPv4/IPv6 address or a label.
hwRsvpTeRroTable = MibTable((1, 3, 6, 1, 4, 1, 2011, 5, 25, 148, 1, 9), )
if mibBuilder.loadTexts: hwRsvpTeRroTable.setStatus('current')
hwRsvpTeRroEntry = MibTableRow((1, 3, 6, 1, 4, 1, 2011, 5, 25, 148, 1, 9, 1), ).setIndexNames((0, "HUAWEI-RSVPTE-MIB", "hwRsvpTeSessionNumber"), (0, "HUAWEI-RSVPTE-MIB", "hwRsvpTeSenderNumber"), (0, "HUAWEI-RSVPTE-MIB", "hwRsvpTeRroNumber"))
if mibBuilder.loadTexts: hwRsvpTeRroEntry.setStatus('current')
hwRsvpTeRroNumber = MibTableColumn((1, 3, 6, 1, 4, 1, 2011, 5, 25, 148, 1, 9, 1, 1), Gauge32())
if mibBuilder.loadTexts: hwRsvpTeRroNumber.setStatus('current')
# Sub-object discriminator: ipv4(1), ipv6(2), label(3).
hwRsvpTeRroType = MibTableColumn((1, 3, 6, 1, 4, 1, 2011, 5, 25, 148, 1, 9, 1, 2), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3))).clone(namedValues=NamedValues(("ipv4", 1), ("ipv6", 2), ("label", 3)))).setMaxAccess("readonly")
if mibBuilder.loadTexts: hwRsvpTeRroType.setStatus('current')
hwRsvpTeRroIpAddr = MibTableColumn((1, 3, 6, 1, 4, 1, 2011, 5, 25, 148, 1, 9, 1, 3), OctetString()).setMaxAccess("readonly")
if mibBuilder.loadTexts: hwRsvpTeRroIpAddr.setStatus('current')
hwRsvpTeRroIpPrefixLen = MibTableColumn((1, 3, 6, 1, 4, 1, 2011, 5, 25, 148, 1, 9, 1, 4), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: hwRsvpTeRroIpPrefixLen.setStatus('current')
hwRsvpTeRroLabel = MibTableColumn((1, 3, 6, 1, 4, 1, 2011, 5, 25, 148, 1, 9, 1, 5), Gauge32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: hwRsvpTeRroLabel.setStatus('current')
hwRsvpTeRroFlag = MibTableColumn((1, 3, 6, 1, 4, 1, 2011, 5, 25, 148, 1, 9, 1, 6), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: hwRsvpTeRroFlag.setStatus('current')
# --- hwRsvpTeEroTable (…148.1.10): Explicit Route Object (ERO) sub-objects ---
# Indexed by (hwRsvpTeSessionNumber, hwRsvpTeSenderNumber, hwRsvpTeEroNumber);
# unlike the RRO table above, ERO hops are address-only (no label type).
hwRsvpTeEroTable = MibTable((1, 3, 6, 1, 4, 1, 2011, 5, 25, 148, 1, 10), )
if mibBuilder.loadTexts: hwRsvpTeEroTable.setStatus('current')
hwRsvpTeEroEntry = MibTableRow((1, 3, 6, 1, 4, 1, 2011, 5, 25, 148, 1, 10, 1), ).setIndexNames((0, "HUAWEI-RSVPTE-MIB", "hwRsvpTeSessionNumber"), (0, "HUAWEI-RSVPTE-MIB", "hwRsvpTeSenderNumber"), (0, "HUAWEI-RSVPTE-MIB", "hwRsvpTeEroNumber"))
if mibBuilder.loadTexts: hwRsvpTeEroEntry.setStatus('current')
hwRsvpTeEroNumber = MibTableColumn((1, 3, 6, 1, 4, 1, 2011, 5, 25, 148, 1, 10, 1, 1), Gauge32())
if mibBuilder.loadTexts: hwRsvpTeEroNumber.setStatus('current')
hwRsvpTeEroType = MibTableColumn((1, 3, 6, 1, 4, 1, 2011, 5, 25, 148, 1, 10, 1, 2), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2))).clone(namedValues=NamedValues(("ipv4", 1), ("ipv6", 2)))).setMaxAccess("readonly")
if mibBuilder.loadTexts: hwRsvpTeEroType.setStatus('current')
hwRsvpTeEroIpAddr = MibTableColumn((1, 3, 6, 1, 4, 1, 2011, 5, 25, 148, 1, 10, 1, 3), OctetString()).setMaxAccess("readonly")
if mibBuilder.loadTexts: hwRsvpTeEroIpAddr.setStatus('current')
hwRsvpTeEroIpPrefixLen = MibTableColumn((1, 3, 6, 1, 4, 1, 2011, 5, 25, 148, 1, 10, 1, 4), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: hwRsvpTeEroIpPrefixLen.setStatus('current')
# --- Trap payload objects (…148.1.12.1) ---
# Scalars marked accessible-for-notify: they exist only to be carried as
# varbinds inside the notifications defined below, not for direct polling.
hwRsvpTeExtendObjects = MibIdentifier((1, 3, 6, 1, 4, 1, 2011, 5, 25, 148, 1, 11))
hwRsvpTeExtendTrap = MibIdentifier((1, 3, 6, 1, 4, 1, 2011, 5, 25, 148, 1, 12))
hwRsvpTeTrapObjects = MibIdentifier((1, 3, 6, 1, 4, 1, 2011, 5, 25, 148, 1, 12, 1))
hwRsvpTeNbr = MibScalar((1, 3, 6, 1, 4, 1, 2011, 5, 25, 148, 1, 12, 1, 1), IpAddress()).setMaxAccess("accessiblefornotify")
if mibBuilder.loadTexts: hwRsvpTeNbr.setStatus('current')
hwRsvpTeIfNbrCurrentCount = MibScalar((1, 3, 6, 1, 4, 1, 2011, 5, 25, 148, 1, 12, 1, 2), Integer32()).setMaxAccess("accessiblefornotify")
if mibBuilder.loadTexts: hwRsvpTeIfNbrCurrentCount.setStatus('current')
hwRsvpTeIfNbrThreshold = MibScalar((1, 3, 6, 1, 4, 1, 2011, 5, 25, 148, 1, 12, 1, 3), Integer32()).setMaxAccess("accessiblefornotify")
if mibBuilder.loadTexts: hwRsvpTeIfNbrThreshold.setStatus('current')
hwRsvpTeIfNbrTotalCount = MibScalar((1, 3, 6, 1, 4, 1, 2011, 5, 25, 148, 1, 12, 1, 4), Integer32()).setMaxAccess("accessiblefornotify")
if mibBuilder.loadTexts: hwRsvpTeIfNbrTotalCount.setStatus('current')
hwRsvpTeIfName = MibScalar((1, 3, 6, 1, 4, 1, 2011, 5, 25, 148, 1, 12, 1, 5), DisplayString()).setMaxAccess("accessiblefornotify")
if mibBuilder.loadTexts: hwRsvpTeIfName.setStatus('current')
# --- Notifications (…148.1.12.2) ---
# Hello loss/recovery and auth failure/success carry the neighbor address;
# the threshold/total-count exceed traps carry interface name and counters,
# and each has a matching "...Clear" notification carrying only the name.
hwRsvpTeTrap = MibIdentifier((1, 3, 6, 1, 4, 1, 2011, 5, 25, 148, 1, 12, 2))
hwRsvpTeHelloLost = NotificationType((1, 3, 6, 1, 4, 1, 2011, 5, 25, 148, 1, 12, 2, 1)).setObjects(("HUAWEI-RSVPTE-MIB", "hwRsvpTeNbr"))
if mibBuilder.loadTexts: hwRsvpTeHelloLost.setStatus('current')
hwRsvpTeHelloLostRecovery = NotificationType((1, 3, 6, 1, 4, 1, 2011, 5, 25, 148, 1, 12, 2, 2)).setObjects(("HUAWEI-RSVPTE-MIB", "hwRsvpTeNbr"))
if mibBuilder.loadTexts: hwRsvpTeHelloLostRecovery.setStatus('current')
hwRsvpTeAuthFail = NotificationType((1, 3, 6, 1, 4, 1, 2011, 5, 25, 148, 1, 12, 2, 3)).setObjects(("HUAWEI-RSVPTE-MIB", "hwRsvpTeNbr"))
if mibBuilder.loadTexts: hwRsvpTeAuthFail.setStatus('current')
hwRsvpTeAuthSuccess = NotificationType((1, 3, 6, 1, 4, 1, 2011, 5, 25, 148, 1, 12, 2, 4)).setObjects(("HUAWEI-RSVPTE-MIB", "hwRsvpTeNbr"))
if mibBuilder.loadTexts: hwRsvpTeAuthSuccess.setStatus('current')
hwRsvpTeIfNbrThresholdExceed = NotificationType((1, 3, 6, 1, 4, 1, 2011, 5, 25, 148, 1, 12, 2, 5)).setObjects(("HUAWEI-RSVPTE-MIB", "hwRsvpTeIfName"), ("HUAWEI-RSVPTE-MIB", "hwRsvpTeIfNbrCurrentCount"), ("HUAWEI-RSVPTE-MIB", "hwRsvpTeIfNbrThreshold"), ("HUAWEI-RSVPTE-MIB", "hwRsvpTeIfNbrTotalCount"))
if mibBuilder.loadTexts: hwRsvpTeIfNbrThresholdExceed.setStatus('current')
hwRsvpTeIfNbrThresholdExceedClear = NotificationType((1, 3, 6, 1, 4, 1, 2011, 5, 25, 148, 1, 12, 2, 6)).setObjects(("HUAWEI-RSVPTE-MIB", "hwRsvpTeIfName"))
if mibBuilder.loadTexts: hwRsvpTeIfNbrThresholdExceedClear.setStatus('current')
hwRsvpTeIfNbrTotalCountExceed = NotificationType((1, 3, 6, 1, 4, 1, 2011, 5, 25, 148, 1, 12, 2, 7)).setObjects(("HUAWEI-RSVPTE-MIB", "hwRsvpTeIfName"), ("HUAWEI-RSVPTE-MIB", "hwRsvpTeIfNbrTotalCount"))
if mibBuilder.loadTexts: hwRsvpTeIfNbrTotalCountExceed.setStatus('current')
hwRsvpTeIfNbrTotalCountExceedClear = NotificationType((1, 3, 6, 1, 4, 1, 2011, 5, 25, 148, 1, 12, 2, 8)).setObjects(("HUAWEI-RSVPTE-MIB", "hwRsvpTeIfName"))
if mibBuilder.loadTexts: hwRsvpTeIfNbrTotalCountExceedClear.setStatus('current')
# --- Conformance section (…148.2): object groups and module compliance ---
# The repeated `if getattr(mibBuilder, 'version', (0, 0, 0)) > (4, 4, 0):`
# guard is the standard pysmi-generated compatibility shim: on pysnmp builds
# newer than 4.4.0 setStatus() returns the object and the rebinding is kept;
# older builds skip the call entirely.
hwRsvpTeConformance = MibIdentifier((1, 3, 6, 1, 4, 1, 2011, 5, 25, 148, 2))
hwRsvpTeGroups = MibIdentifier((1, 3, 6, 1, 4, 1, 2011, 5, 25, 148, 2, 1))
hwRsvpTeSessionGroup = ObjectGroup((1, 3, 6, 1, 4, 1, 2011, 5, 25, 148, 2, 1, 1)).setObjects(("HUAWEI-RSVPTE-MIB", "hwRsvpTeSessionType"), ("HUAWEI-RSVPTE-MIB", "hwRsvpTeSessionDestAddr"), ("HUAWEI-RSVPTE-MIB", "hwRsvpTeSessionDestAddrLength"), ("HUAWEI-RSVPTE-MIB", "hwRsvpTeSessionSenders"), ("HUAWEI-RSVPTE-MIB", "hwRsvpTeSessionReceivers"), ("HUAWEI-RSVPTE-MIB", "hwRsvpTeSessionRequests"), ("HUAWEI-RSVPTE-MIB", "hwRsvpTeSessionTunnelId"), ("HUAWEI-RSVPTE-MIB", "hwRsvpTeSessionTunnelExtId"), ("HUAWEI-RSVPTE-MIB", "hwRsvpTeSessionLspsNumber"), ("HUAWEI-RSVPTE-MIB", "hwRsvpTeSessionStyle"))
if getattr(mibBuilder, 'version', (0, 0, 0)) > (4, 4, 0):
    hwRsvpTeSessionGroup = hwRsvpTeSessionGroup.setStatus('current')
# hwRsvpTeSenderGroup: one ObjectGroup statement continued across three
# physical lines (a single parenthesized call).
hwRsvpTeSenderGroup = ObjectGroup((1, 3, 6, 1, 4, 1, 2011, 5, 25, 148, 2, 1, 2)).setObjects(("HUAWEI-RSVPTE-MIB", "hwRsvpTeSenderType"), ("HUAWEI-RSVPTE-MIB", "hwRsvpTeSenderDestAddr"), ("HUAWEI-RSVPTE-MIB", "hwRsvpTeSenderAddr"), ("HUAWEI-RSVPTE-MIB", "hwRsvpTeSenderDestAddrLength"), ("HUAWEI-RSVPTE-MIB", "hwRsvpTeSenderAddrLength"), ("HUAWEI-RSVPTE-MIB", "hwRsvpTeSenderHopAddr"), ("HUAWEI-RSVPTE-MIB", "hwRsvpTeSenderHopLih"), ("HUAWEI-RSVPTE-MIB", "hwRsvpTeSenderInterface"), ("HUAWEI-RSVPTE-MIB", "hwRsvpTeSenderTSpecRate"), ("HUAWEI-RSVPTE-MIB", "hwRsvpTeSenderTSpecPeakRate"), ("HUAWEI-RSVPTE-MIB", "hwRsvpTeSenderTSpecBurst"), ("HUAWEI-RSVPTE-MIB", "hwRsvpTeSenderTSpecMinTu"), ("HUAWEI-RSVPTE-MIB", "hwRsvpTeSenderTSpecMaxTu"), ("HUAWEI-RSVPTE-MIB", "hwRsvpTeSenderInterval"), ("HUAWEI-RSVPTE-MIB", "hwRsvpTeSenderRsvpHop"), ("HUAWEI-RSVPTE-MIB", "hwRsvpTeSenderPolicy"), ("HUAWEI-RSVPTE-MIB", "hwRsvpTeSenderAdspecBreak"), ("HUAWEI-RSVPTE-MIB", "hwRsvpTeSenderAdspecHopCount"), ("HUAWEI-RSVPTE-MIB", "hwRsvpTeSenderAdspecPathBw"), ("HUAWEI-RSVPTE-MIB", "hwRsvpTeSenderAdspecMinLatency"), ("HUAWEI-RSVPTE-MIB", "hwRsvpTeSenderAdspecMtu"), ("HUAWEI-RSVPTE-MIB", "hwRsvpTeSenderAdspecGuaranteedSvc"), ("HUAWEI-RSVPTE-MIB", "hwRsvpTeSenderAdspecGuaranteedBreak"), ("HUAWEI-RSVPTE-MIB", "hwRsvpTeSenderAdspecGuaranteedCtot"), ("HUAWEI-RSVPTE-MIB", "hwRsvpTeSenderAdspecGuaranteedDtot"), ("HUAWEI-RSVPTE-MIB", "hwRsvpTeSenderAdspecGuaranteedCsum"), ("HUAWEI-RSVPTE-MIB", "hwRsvpTeSenderAdspecGuaranteedDsum"), ("HUAWEI-RSVPTE-MIB", "hwRsvpTeSenderAdspecGuaranteedHopCount"), ("HUAWEI-RSVPTE-MIB", "hwRsvpTeSenderAdspecGuaranteedPathBw"), ("HUAWEI-RSVPTE-MIB", "hwRsvpTeSenderAdspecGuaranteedMinLatency"), ("HUAWEI-RSVPTE-MIB", "hwRsvpTeSenderAdspecGuaranteedMtu"), ("HUAWEI-RSVPTE-MIB", "hwRsvpTeSenderAdspecCtrlLoadSvc"), ("HUAWEI-RSVPTE-MIB", "hwRsvpTeSenderAdspecCtrlLoadBreak"), ("HUAWEI-RSVPTE-MIB", "hwRsvpTeSenderAdspecCtrlLoadHopCount"), ("HUAWEI-RSVPTE-MIB", 
"hwRsvpTeSenderAdspecCtrlLoadPathBw"), ("HUAWEI-RSVPTE-MIB", "hwRsvpTeSenderAdspecCtrlLoadMinLatency"), ("HUAWEI-RSVPTE-MIB", "hwRsvpTeSenderAdspecCtrlLoadMtu"), ("HUAWEI-RSVPTE-MIB", "hwRsvpTeSenderTtl"), ("HUAWEI-RSVPTE-MIB", "hwRsvpTeLspId"), ("HUAWEI-RSVPTE-MIB", "hwRsvpTeSenderMsgIdSndFlag"), ("HUAWEI-RSVPTE-MIB", "hwRsvpTeSenderMsgIdSndEpoch"), ("HUAWEI-RSVPTE-MIB", "hwRsvpTeSenderMsgIdSndNumber"), ("HUAWEI-RSVPTE-MIB", "hwRsvpTeSenderMsgIdRcvFlag"), ("HUAWEI-RSVPTE-MIB", "hwRsvpTeSenderMsgIdRcvEpoch"), ("HUAWEI-RSVPTE-MIB", "hwRsvpTeSenderMsgIdRcvNumber"), ("HUAWEI-RSVPTE-MIB", "hwRsvpTeSenderClassType"), ("HUAWEI-RSVPTE-MIB", "hwRsvpTeSenderLabelRequestCtype"), ("HUAWEI-RSVPTE-MIB", "hwRsvpTeSenderLabelRequestL3pid"), ("HUAWEI-RSVPTE-MIB", "hwRsvpTeSenderLabelRequestAtmMinVpi"), ("HUAWEI-RSVPTE-MIB", "hwRsvpTeSenderLabelRequestAtmMinVci"), ("HUAWEI-RSVPTE-MIB", "hwRsvpTeSenderLabelRequestAtmMaxVpi"), ("HUAWEI-RSVPTE-MIB", "hwRsvpTeSenderLabelRequestAtmMaxVci"), ("HUAWEI-RSVPTE-MIB", "hwRsvpTeSenderLabelRequestFrMinDlci"), ("HUAWEI-RSVPTE-MIB", "hwRsvpTeSenderLabelRequestFrMaxDlci"), ("HUAWEI-RSVPTE-MIB", "hwRsvpTeSenderSessionAttrType"), ("HUAWEI-RSVPTE-MIB", "hwRsvpTeSenderSessionAttrSetupPrio"), ("HUAWEI-RSVPTE-MIB", "hwRsvpTeSenderSessionAttrHoldPrio"), ("HUAWEI-RSVPTE-MIB", "hwRsvpTeSenderSessionAttrFlag"), ("HUAWEI-RSVPTE-MIB", "hwRsvpTeSenderSessionAttrName"), ("HUAWEI-RSVPTE-MIB", "hwRsvpTeSenderSessionAttrExcludeAny"), ("HUAWEI-RSVPTE-MIB", "hwRsvpTeSenderSessionAttrIncludeAny"), ("HUAWEI-RSVPTE-MIB", "hwRsvpTeSenderSessionAttrIncludeAll"), ("HUAWEI-RSVPTE-MIB", "hwRsvpTeSenderFrrSetupPrio"), ("HUAWEI-RSVPTE-MIB", "hwRsvpTeSenderFrrHoldPrio"), ("HUAWEI-RSVPTE-MIB", "hwRsvpTeSenderFrrHopLimit"), ("HUAWEI-RSVPTE-MIB", "hwRsvpTeSenderFrrFlag"), ("HUAWEI-RSVPTE-MIB", "hwRsvpTeSenderFrrBandwidth"), ("HUAWEI-RSVPTE-MIB", "hwRsvpTeSenderFrrExcludeAny"), ("HUAWEI-RSVPTE-MIB", "hwRsvpTeSenderFrrIncludeAny"), ("HUAWEI-RSVPTE-MIB", 
"hwRsvpTeSenderFrrIncludeAll"), ("HUAWEI-RSVPTE-MIB", "hwRsvpTeSenderFrrInuseFlag"), ("HUAWEI-RSVPTE-MIB", "hwRsvpTeSenderDiffServPsc"))
if getattr(mibBuilder, 'version', (0, 0, 0)) > (4, 4, 0):
    hwRsvpTeSenderGroup = hwRsvpTeSenderGroup.setStatus('current')
hwRsvpTeResvGroup = ObjectGroup((1, 3, 6, 1, 4, 1, 2011, 5, 25, 148, 2, 1, 3)).setObjects(("HUAWEI-RSVPTE-MIB", "hwRsvpTeResvType"), ("HUAWEI-RSVPTE-MIB", "hwRsvpTeResvDestAddr"), ("HUAWEI-RSVPTE-MIB", "hwRsvpTeResvSenderAddr"), ("HUAWEI-RSVPTE-MIB", "hwRsvpTeResvDestAddrLength"), ("HUAWEI-RSVPTE-MIB", "hwRsvpTeResvSenderAddrLength"), ("HUAWEI-RSVPTE-MIB", "hwRsvpTeResvHopAddr"), ("HUAWEI-RSVPTE-MIB", "hwRsvpTeResvHopLih"), ("HUAWEI-RSVPTE-MIB", "hwRsvpTeResvInterface"), ("HUAWEI-RSVPTE-MIB", "hwRsvpTeResvService"), ("HUAWEI-RSVPTE-MIB", "hwRsvpTeResvTSpecRate"), ("HUAWEI-RSVPTE-MIB", "hwRsvpTeResvTSpecPeakRate"), ("HUAWEI-RSVPTE-MIB", "hwRsvpTeResvTSpecBurst"), ("HUAWEI-RSVPTE-MIB", "hwRsvpTeResvTSpecMinTu"), ("HUAWEI-RSVPTE-MIB", "hwRsvpTeResvTSpecMaxTu"), ("HUAWEI-RSVPTE-MIB", "hwRsvpTeResvRSpecRate"), ("HUAWEI-RSVPTE-MIB", "hwRsvpTeResvRSpecSlack"), ("HUAWEI-RSVPTE-MIB", "hwRsvpTeResvInterval"), ("HUAWEI-RSVPTE-MIB", "hwRsvpTeResvScope"), ("HUAWEI-RSVPTE-MIB", "hwRsvpTeResvShared"), ("HUAWEI-RSVPTE-MIB", "hwRsvpTeResvExplicit"), ("HUAWEI-RSVPTE-MIB", "hwRsvpTeResvRsvpHop"), ("HUAWEI-RSVPTE-MIB", "hwRsvpTeResvPolicy"), ("HUAWEI-RSVPTE-MIB", "hwRsvpTeResvTtl"), ("HUAWEI-RSVPTE-MIB", "hwRsvpTeResvConfirm"))
if getattr(mibBuilder, 'version', (0, 0, 0)) > (4, 4, 0):
    hwRsvpTeResvGroup = hwRsvpTeResvGroup.setStatus('current')
hwRsvpTeResvFwdGroup = ObjectGroup((1, 3, 6, 1, 4, 1, 2011, 5, 25, 148, 2, 1, 4)).setObjects(("HUAWEI-RSVPTE-MIB", "hwRsvpTeResvFwdType"), ("HUAWEI-RSVPTE-MIB", "hwRsvpTeResvFwdDestAddr"), ("HUAWEI-RSVPTE-MIB", "hwRsvpTeResvFwdSenderAddr"), ("HUAWEI-RSVPTE-MIB", "hwRsvpTeResvFwdDestAddrLength"), ("HUAWEI-RSVPTE-MIB", "hwRsvpTeResvFwdSenderAddrLength"), ("HUAWEI-RSVPTE-MIB", "hwRsvpTeResvFwdHopAddr"), ("HUAWEI-RSVPTE-MIB", "hwRsvpTeResvFwdHopLih"), ("HUAWEI-RSVPTE-MIB", "hwRsvpTeResvFwdInterface"), ("HUAWEI-RSVPTE-MIB", "hwRsvpTeResvFwdService"), ("HUAWEI-RSVPTE-MIB", "hwRsvpTeResvFwdTSpecRate"), ("HUAWEI-RSVPTE-MIB", "hwRsvpTeResvFwdTSpecPeakRate"), ("HUAWEI-RSVPTE-MIB", "hwRsvpTeResvFwdTSpecBurst"), ("HUAWEI-RSVPTE-MIB", "hwRsvpTeResvFwdTSpecMinTu"), ("HUAWEI-RSVPTE-MIB", "hwRsvpTeResvFwdTSpecMaxTu"), ("HUAWEI-RSVPTE-MIB", "hwRsvpTeResvFwdRSpecRate"), ("HUAWEI-RSVPTE-MIB", "hwRsvpTeResvFwdRSpecSlack"), ("HUAWEI-RSVPTE-MIB", "hwRsvpTeResvFwdInterval"), ("HUAWEI-RSVPTE-MIB", "hwRsvpTeResvFwdScope"), ("HUAWEI-RSVPTE-MIB", "hwRsvpTeResvFwdShared"), ("HUAWEI-RSVPTE-MIB", "hwRsvpTeResvFwdExplicit"), ("HUAWEI-RSVPTE-MIB", "hwRsvpTeResvFwdPolicy"), ("HUAWEI-RSVPTE-MIB", "hwRsvpTeResvFwdTtl"), ("HUAWEI-RSVPTE-MIB", "hwRsvpTeResvFwdMsgIdFlag"), ("HUAWEI-RSVPTE-MIB", "hwRsvpTeResvFwdMsgIdEpoch"), ("HUAWEI-RSVPTE-MIB", "hwRsvpTeResvFwdMsgIdNumber"), ("HUAWEI-RSVPTE-MIB", "hwRsvpTeResvFwdRsvpHop"))
if getattr(mibBuilder, 'version', (0, 0, 0)) > (4, 4, 0):
    hwRsvpTeResvFwdGroup = hwRsvpTeResvFwdGroup.setStatus('current')
hwRsvpTeIfGroup = ObjectGroup((1, 3, 6, 1, 4, 1, 2011, 5, 25, 148, 2, 1, 5)).setObjects(("HUAWEI-RSVPTE-MIB", "hwRsvpTeIfUdpNbrs"), ("HUAWEI-RSVPTE-MIB", "hwRsvpTeIfIpNbrs"), ("HUAWEI-RSVPTE-MIB", "hwRsvpTeIfNbrs"), ("HUAWEI-RSVPTE-MIB", "hwRsvpTeIfRefreshBlockadeMultiple"), ("HUAWEI-RSVPTE-MIB", "hwRsvpTeIfRefreshMultiple"), ("HUAWEI-RSVPTE-MIB", "hwRsvpTeIfTtl"), ("HUAWEI-RSVPTE-MIB", "hwRsvpTeIfRefreshInterval"), ("HUAWEI-RSVPTE-MIB", "hwRsvpTeIfRouteDelay"), ("HUAWEI-RSVPTE-MIB", "hwRsvpTeIfEnabled"), ("HUAWEI-RSVPTE-MIB", "hwRsvpTeIfUdpRequired"), ("HUAWEI-RSVPTE-MIB", "hwRsvpTeIfStatus"), ("HUAWEI-RSVPTE-MIB", "hwRsvpTeIfHelloEnabled"), ("HUAWEI-RSVPTE-MIB", "hwRsvpTeIfSrefreshEnabled"), ("HUAWEI-RSVPTE-MIB", "hwRsvpTeIfSrefreshInterval"), ("HUAWEI-RSVPTE-MIB", "hwRsvpTeIfRetranIncDelta"), ("HUAWEI-RSVPTE-MIB", "hwRsvpTeIfRetranInterval"), ("HUAWEI-RSVPTE-MIB", "hwRsvpTeIfAuthEnabled"), ("HUAWEI-RSVPTE-MIB", "hwRsvpTeIfAuthEncrypted"), ("HUAWEI-RSVPTE-MIB", "hwRsvpTeIfAuthHandshake"), ("HUAWEI-RSVPTE-MIB", "hwRsvpTeIfAuthKey"), ("HUAWEI-RSVPTE-MIB", "hwRsvpTeIfWindowSize"), ("HUAWEI-RSVPTE-MIB", "hwRsvpTeIfAuthLifeTime"))
if getattr(mibBuilder, 'version', (0, 0, 0)) > (4, 4, 0):
    hwRsvpTeIfGroup = hwRsvpTeIfGroup.setStatus('current')
hwRsvpTeNbrGroup = ObjectGroup((1, 3, 6, 1, 4, 1, 2011, 5, 25, 148, 2, 1, 6)).setObjects(("HUAWEI-RSVPTE-MIB", "hwRsvpTeNbrHelloSrcInstance"), ("HUAWEI-RSVPTE-MIB", "hwRsvpTeNbrHelloDstInstance"), ("HUAWEI-RSVPTE-MIB", "hwRsvpTeNbrHelloLostCounter"), ("HUAWEI-RSVPTE-MIB", "hwRsvpTeNbrHelloType"), ("HUAWEI-RSVPTE-MIB", "hwRsvpTeNbrHelloEnabled"), ("HUAWEI-RSVPTE-MIB", "hwRsvpTeNbrSendersNumber"), ("HUAWEI-RSVPTE-MIB", "hwRsvpTeNbrReceiversNumber"), ("HUAWEI-RSVPTE-MIB", "hwRsvpTeNbrGrCapability"), ("HUAWEI-RSVPTE-MIB", "hwRsvpTeNbrGrRestartTime"), ("HUAWEI-RSVPTE-MIB", "hwRsvpTeNbrGrRecoveryTime"), ("HUAWEI-RSVPTE-MIB", "hwRsvpTeNbrGrStatus"), ("HUAWEI-RSVPTE-MIB", "hwRsvpTeNbrAuthKeyId"), ("HUAWEI-RSVPTE-MIB", "hwRsvpTeNbrReductionEnabled"), ("HUAWEI-RSVPTE-MIB", "hwRsvpTeNbrReliabilityEnabled"), ("HUAWEI-RSVPTE-MIB", "hwRsvpTeNbrProtocol"), ("HUAWEI-RSVPTE-MIB", "hwRsvpTeNbrStatus"))
if getattr(mibBuilder, 'version', (0, 0, 0)) > (4, 4, 0):
    hwRsvpTeNbrGroup = hwRsvpTeNbrGroup.setStatus('current')
hwRsvpTeMessageIdGroup = ObjectGroup((1, 3, 6, 1, 4, 1, 2011, 5, 25, 148, 2, 1, 7)).setObjects(("HUAWEI-RSVPTE-MIB", "hwRsvpTeMessageIdFlag"))
if getattr(mibBuilder, 'version', (0, 0, 0)) > (4, 4, 0):
    hwRsvpTeMessageIdGroup = hwRsvpTeMessageIdGroup.setStatus('current')
hwRsvpTeFilterSpecGroup = ObjectGroup((1, 3, 6, 1, 4, 1, 2011, 5, 25, 148, 2, 1, 8)).setObjects(("HUAWEI-RSVPTE-MIB", "hwRsvpTeFilterSpecLspId"), ("HUAWEI-RSVPTE-MIB", "hwRsvpTeFilterSpecIngressLsrId"), ("HUAWEI-RSVPTE-MIB", "hwRsvpTeFilterSpecLabel"))
if getattr(mibBuilder, 'version', (0, 0, 0)) > (4, 4, 0):
    hwRsvpTeFilterSpecGroup = hwRsvpTeFilterSpecGroup.setStatus('current')
hwRsvpTeRroGroup = ObjectGroup((1, 3, 6, 1, 4, 1, 2011, 5, 25, 148, 2, 1, 9)).setObjects(("HUAWEI-RSVPTE-MIB", "hwRsvpTeRroType"), ("HUAWEI-RSVPTE-MIB", "hwRsvpTeRroIpAddr"), ("HUAWEI-RSVPTE-MIB", "hwRsvpTeRroIpPrefixLen"), ("HUAWEI-RSVPTE-MIB", "hwRsvpTeRroLabel"), ("HUAWEI-RSVPTE-MIB", "hwRsvpTeRroFlag"))
if getattr(mibBuilder, 'version', (0, 0, 0)) > (4, 4, 0):
    hwRsvpTeRroGroup = hwRsvpTeRroGroup.setStatus('current')
hwRsvpTeEroGroup = ObjectGroup((1, 3, 6, 1, 4, 1, 2011, 5, 25, 148, 2, 1, 10)).setObjects(("HUAWEI-RSVPTE-MIB", "hwRsvpTeEroType"), ("HUAWEI-RSVPTE-MIB", "hwRsvpTeEroIpAddr"), ("HUAWEI-RSVPTE-MIB", "hwRsvpTeEroIpPrefixLen"))
if getattr(mibBuilder, 'version', (0, 0, 0)) > (4, 4, 0):
    hwRsvpTeEroGroup = hwRsvpTeEroGroup.setStatus('current')
hwRsvpTeTrapObjectsGroup = ObjectGroup((1, 3, 6, 1, 4, 1, 2011, 5, 25, 148, 2, 1, 11)).setObjects(("HUAWEI-RSVPTE-MIB", "hwRsvpTeNbr"), ("HUAWEI-RSVPTE-MIB", "hwRsvpTeIfNbrCurrentCount"), ("HUAWEI-RSVPTE-MIB", "hwRsvpTeIfNbrThreshold"), ("HUAWEI-RSVPTE-MIB", "hwRsvpTeIfNbrTotalCount"), ("HUAWEI-RSVPTE-MIB", "hwRsvpTeIfName"))
if getattr(mibBuilder, 'version', (0, 0, 0)) > (4, 4, 0):
    hwRsvpTeTrapObjectsGroup = hwRsvpTeTrapObjectsGroup.setStatus('current')
hwRsvpTeTrapGroup = NotificationGroup((1, 3, 6, 1, 4, 1, 2011, 5, 25, 148, 2, 1, 12)).setObjects(("HUAWEI-RSVPTE-MIB", "hwRsvpTeHelloLost"), ("HUAWEI-RSVPTE-MIB", "hwRsvpTeHelloLostRecovery"), ("HUAWEI-RSVPTE-MIB", "hwRsvpTeAuthFail"), ("HUAWEI-RSVPTE-MIB", "hwRsvpTeAuthSuccess"), ("HUAWEI-RSVPTE-MIB", "hwRsvpTeIfNbrThresholdExceed"), ("HUAWEI-RSVPTE-MIB", "hwRsvpTeIfNbrThresholdExceedClear"), ("HUAWEI-RSVPTE-MIB", "hwRsvpTeIfNbrTotalCountExceed"), ("HUAWEI-RSVPTE-MIB", "hwRsvpTeIfNbrTotalCountExceedClear"))
if getattr(mibBuilder, 'version', (0, 0, 0)) > (4, 4, 0):
    hwRsvpTeTrapGroup = hwRsvpTeTrapGroup.setStatus('current')
# MODULE-COMPLIANCE statement tying all groups above together.
hwRsvpTeCompliances = MibIdentifier((1, 3, 6, 1, 4, 1, 2011, 5, 25, 148, 2, 2))
hwRsvpTeCompliance = ModuleCompliance((1, 3, 6, 1, 4, 1, 2011, 5, 25, 148, 2, 2, 1)).setObjects(("HUAWEI-RSVPTE-MIB", "hwRsvpTeSessionGroup"), ("HUAWEI-RSVPTE-MIB", "hwRsvpTeSenderGroup"), ("HUAWEI-RSVPTE-MIB", "hwRsvpTeResvGroup"), ("HUAWEI-RSVPTE-MIB", "hwRsvpTeIfGroup"), ("HUAWEI-RSVPTE-MIB", "hwRsvpTeNbrGroup"), ("HUAWEI-RSVPTE-MIB", "hwRsvpTeFilterSpecGroup"), ("HUAWEI-RSVPTE-MIB", "hwRsvpTeRroGroup"), ("HUAWEI-RSVPTE-MIB", "hwRsvpTeEroGroup"), ("HUAWEI-RSVPTE-MIB", "hwRsvpTeTrapObjectsGroup"), ("HUAWEI-RSVPTE-MIB", "hwRsvpTeTrapGroup"), ("HUAWEI-RSVPTE-MIB", "hwRsvpTeResvFwdGroup"), ("HUAWEI-RSVPTE-MIB", "hwRsvpTeMessageIdGroup"))
if getattr(mibBuilder, 'version', (0, 0, 0)) > (4, 4, 0):
    hwRsvpTeCompliance = hwRsvpTeCompliance.setStatus('current')
mibBuilder.exportSymbols("HUAWEI-RSVPTE-MIB", hwRsvpTeIfIpNbrs=hwRsvpTeIfIpNbrs, hwRsvpTeResvFwdTSpecMaxTu=hwRsvpTeResvFwdTSpecMaxTu, hwRsvpTeSenderAdspecGuaranteedHopCount=hwRsvpTeSenderAdspecGuaranteedHopCount, hwRsvpTeMessageIdFlag=hwRsvpTeMessageIdFlag, hwRsvpTeResvFwdSenderAddr=hwRsvpTeResvFwdSenderAddr, hwRsvpTeCompliances=hwRsvpTeCompliances, hwRsvpTeIfRefreshInterval=hwRsvpTeIfRefreshInterval, hwRsvpTeIfGroup=hwRsvpTeIfGroup, hwRsvpTeSenderSessionAttrIncludeAny=hwRsvpTeSenderSessionAttrIncludeAny, hwRsvpTeResvFwdTable=hwRsvpTeResvFwdTable, hwRsvpTeGroups=hwRsvpTeGroups, hwRsvpTeRroIpPrefixLen=hwRsvpTeRroIpPrefixLen, hwRsvpTeResvFwdSenderAddrLength=hwRsvpTeResvFwdSenderAddrLength, hwRsvpTeEroTable=hwRsvpTeEroTable, hwRsvpTeIfRefreshBlockadeMultiple=hwRsvpTeIfRefreshBlockadeMultiple, hwRsvpTeNbrGrRecoveryTime=hwRsvpTeNbrGrRecoveryTime, hwRsvpTeEroIpAddr=hwRsvpTeEroIpAddr, hwRsvpTeIfRouteDelay=hwRsvpTeIfRouteDelay, hwRsvpTeSenderAdspecCtrlLoadMtu=hwRsvpTeSenderAdspecCtrlLoadMtu, hwRsvpTeSessionRequests=hwRsvpTeSessionRequests, hwRsvpTeSessionSenders=hwRsvpTeSessionSenders, hwRsvpTeSenderEntry=hwRsvpTeSenderEntry, hwRsvpTeSenderRsvpHop=hwRsvpTeSenderRsvpHop, hwRsvpTeTrapGroup=hwRsvpTeTrapGroup, hwRsvpTeIfNbrCurrentCount=hwRsvpTeIfNbrCurrentCount, hwRsvpTeNbrProtocol=hwRsvpTeNbrProtocol, hwRsvpTeMessageIdTable=hwRsvpTeMessageIdTable, hwRsvpTeRroNumber=hwRsvpTeRroNumber, hwRsvpTeSenderLabelRequestFrMinDlci=hwRsvpTeSenderLabelRequestFrMinDlci, hwRsvpTeResvFwdDestAddr=hwRsvpTeResvFwdDestAddr, hwRsvpTeIfStatus=hwRsvpTeIfStatus, hwRsvpTeResvType=hwRsvpTeResvType, hwRsvpTeSessionDestAddr=hwRsvpTeSessionDestAddr, hwRsvpTeResvEntry=hwRsvpTeResvEntry, hwRsvpTeIfAuthEncrypted=hwRsvpTeIfAuthEncrypted, hwRsvpTeRroGroup=hwRsvpTeRroGroup, hwRsvpTeSenderType=hwRsvpTeSenderType, hwRsvpTeSenderFrrIncludeAny=hwRsvpTeSenderFrrIncludeAny, hwRsvpTeSenderSessionAttrType=hwRsvpTeSenderSessionAttrType, hwRsvpTeMessageIdNumber=hwRsvpTeMessageIdNumber, 
hwRsvpTeSenderLabelRequestAtmMaxVpi=hwRsvpTeSenderLabelRequestAtmMaxVpi, hwRsvpTeFilterSpecIngressLsrId=hwRsvpTeFilterSpecIngressLsrId, hwRsvpTeRroEntry=hwRsvpTeRroEntry, hwRsvpTeResvFwdRSpecRate=hwRsvpTeResvFwdRSpecRate, hwRsvpTe=hwRsvpTe, hwRsvpTeResvFwdHopLih=hwRsvpTeResvFwdHopLih, hwRsvpTeNbrHelloDstInstance=hwRsvpTeNbrHelloDstInstance, hwRsvpTeSessionNumber=hwRsvpTeSessionNumber, hwRsvpTeSessionEntry=hwRsvpTeSessionEntry, hwRsvpTeSenderMsgIdSndNumber=hwRsvpTeSenderMsgIdSndNumber, hwRsvpTeIfUdpNbrs=hwRsvpTeIfUdpNbrs, hwRsvpTeResvShared=hwRsvpTeResvShared, hwRsvpTeSenderAdspecPathBw=hwRsvpTeSenderAdspecPathBw, hwRsvpTeIfRetranInterval=hwRsvpTeIfRetranInterval, hwRsvpTeFilterSpecTable=hwRsvpTeFilterSpecTable, hwRsvpTeResvScope=hwRsvpTeResvScope, hwRsvpTeNbrGroup=hwRsvpTeNbrGroup, hwRsvpTeCompliance=hwRsvpTeCompliance, hwRsvpTeSessionTable=hwRsvpTeSessionTable, hwRsvpTeNbrHelloSrcInstance=hwRsvpTeNbrHelloSrcInstance, hwRsvpTeEroType=hwRsvpTeEroType, hwRsvpTeSenderAdspecGuaranteedMinLatency=hwRsvpTeSenderAdspecGuaranteedMinLatency, hwRsvpTeAuthFail=hwRsvpTeAuthFail, hwRsvpTeSenderFrrInuseFlag=hwRsvpTeSenderFrrInuseFlag, hwRsvpTeSenderMsgIdRcvFlag=hwRsvpTeSenderMsgIdRcvFlag, hwRsvpTeResvFwdTSpecPeakRate=hwRsvpTeResvFwdTSpecPeakRate, hwRsvpTeResvService=hwRsvpTeResvService, hwRsvpTeResvPolicy=hwRsvpTeResvPolicy, hwRsvpTeNbrAuthKeyId=hwRsvpTeNbrAuthKeyId, hwRsvpTeRroLabel=hwRsvpTeRroLabel, hwRsvpTeSenderFrrIncludeAll=hwRsvpTeSenderFrrIncludeAll, hwRsvpTeSenderClassType=hwRsvpTeSenderClassType, hwRsvpTeSenderSessionAttrExcludeAny=hwRsvpTeSenderSessionAttrExcludeAny, hwRsvpTeIfAuthKey=hwRsvpTeIfAuthKey, hwRsvpTeSenderTSpecBurst=hwRsvpTeSenderTSpecBurst, hwRsvpTeIfNbrTotalCount=hwRsvpTeIfNbrTotalCount, hwRsvpTeIfNbrTotalCountExceedClear=hwRsvpTeIfNbrTotalCountExceedClear, hwRsvpTeSenderFrrExcludeAny=hwRsvpTeSenderFrrExcludeAny, hwRsvpTeResvConfirm=hwRsvpTeResvConfirm, hwRsvpTeResvDestAddr=hwRsvpTeResvDestAddr, hwRsvpTeResvFwdShared=hwRsvpTeResvFwdShared, 
hwRsvpTeHelloLostRecovery=hwRsvpTeHelloLostRecovery, hwRsvpTeResvTSpecRate=hwRsvpTeResvTSpecRate, hwRsvpTeSenderNumber=hwRsvpTeSenderNumber, hwRsvpTeSenderAdspecHopCount=hwRsvpTeSenderAdspecHopCount, hwRsvpTeSessionDestAddrLength=hwRsvpTeSessionDestAddrLength, hwRsvpTeSenderTable=hwRsvpTeSenderTable, hwRsvpTeSenderPolicy=hwRsvpTeSenderPolicy, hwRsvpTeSenderAdspecGuaranteedCtot=hwRsvpTeSenderAdspecGuaranteedCtot, hwRsvpTeResvFwdType=hwRsvpTeResvFwdType, hwRsvpTeNbrEntry=hwRsvpTeNbrEntry, hwRsvpTeSenderHopAddr=hwRsvpTeSenderHopAddr, hwRsvpTeSenderMsgIdSndEpoch=hwRsvpTeSenderMsgIdSndEpoch, hwRsvpTeSenderFrrBandwidth=hwRsvpTeSenderFrrBandwidth, hwRsvpTeSenderTSpecPeakRate=hwRsvpTeSenderTSpecPeakRate, hwRsvpTeSenderAddr=hwRsvpTeSenderAddr, hwRsvpTeSenderFrrHopLimit=hwRsvpTeSenderFrrHopLimit, hwRsvpTeSenderSessionAttrName=hwRsvpTeSenderSessionAttrName, hwRsvpTeResvSenderAddrLength=hwRsvpTeResvSenderAddrLength, hwRsvpTeResvInterface=hwRsvpTeResvInterface, hwRsvpTeResvRsvpHop=hwRsvpTeResvRsvpHop, hwRsvpTeResvFwdExplicit=hwRsvpTeResvFwdExplicit, hwRsvpTeIfTtl=hwRsvpTeIfTtl, hwRsvpTeResvFwdDestAddrLength=hwRsvpTeResvFwdDestAddrLength, hwRsvpTeResvTSpecBurst=hwRsvpTeResvTSpecBurst, hwRsvpTeRroIpAddr=hwRsvpTeRroIpAddr, hwRsvpTeNbrGrRestartTime=hwRsvpTeNbrGrRestartTime, hwRsvpTeResvTSpecMaxTu=hwRsvpTeResvTSpecMaxTu, hwRsvpTeNbr=hwRsvpTeNbr, hwRsvpTeSessionType=hwRsvpTeSessionType, hwRsvpTeIfAuthEnabled=hwRsvpTeIfAuthEnabled, hwRsvpTeFilterSpecLabel=hwRsvpTeFilterSpecLabel, PYSNMP_MODULE_ID=hwRsvpTe, hwRsvpTeResvFwdNumber=hwRsvpTeResvFwdNumber, hwRsvpTeExtendObjects=hwRsvpTeExtendObjects, hwRsvpTeIfName=hwRsvpTeIfName, hwRsvpTeIfSrefreshInterval=hwRsvpTeIfSrefreshInterval, hwRsvpTeSessionLspsNumber=hwRsvpTeSessionLspsNumber, hwRsvpTeSenderAdspecGuaranteedDsum=hwRsvpTeSenderAdspecGuaranteedDsum, hwRsvpTeSenderSessionAttrSetupPrio=hwRsvpTeSenderSessionAttrSetupPrio, hwRsvpTeSenderTSpecRate=hwRsvpTeSenderTSpecRate, 
hwRsvpTeSenderAdspecGuaranteedDtot=hwRsvpTeSenderAdspecGuaranteedDtot, hwRsvpTeSenderAdspecCtrlLoadSvc=hwRsvpTeSenderAdspecCtrlLoadSvc, hwRsvpTeResvGroup=hwRsvpTeResvGroup, hwRsvpTeSessionGroup=hwRsvpTeSessionGroup, hwRsvpTeRroFlag=hwRsvpTeRroFlag, hwRsvpTeResvExplicit=hwRsvpTeResvExplicit, hwRsvpTeIfNbrThreshold=hwRsvpTeIfNbrThreshold, hwRsvpTeRroTable=hwRsvpTeRroTable, hwRsvpTeRroType=hwRsvpTeRroType, hwRsvpTeSenderDestAddr=hwRsvpTeSenderDestAddr, hwRsvpTeEroEntry=hwRsvpTeEroEntry, hwRsvpTeSenderAdspecCtrlLoadPathBw=hwRsvpTeSenderAdspecCtrlLoadPathBw, hwRsvpTeResvFwdGroup=hwRsvpTeResvFwdGroup, hwRsvpTeTrapObjectsGroup=hwRsvpTeTrapObjectsGroup, hwRsvpTeResvTable=hwRsvpTeResvTable, hwRsvpTeIfRefreshMultiple=hwRsvpTeIfRefreshMultiple, hwRsvpTeSenderGroup=hwRsvpTeSenderGroup, hwRsvpTeFilterSpecGroup=hwRsvpTeFilterSpecGroup, hwRsvpTeEroGroup=hwRsvpTeEroGroup, hwRsvpTeResvSenderAddr=hwRsvpTeResvSenderAddr, hwRsvpTeNbrReceiversNumber=hwRsvpTeNbrReceiversNumber, hwRsvpTeNbrReliabilityEnabled=hwRsvpTeNbrReliabilityEnabled, hwRsvpTeNbrHelloEnabled=hwRsvpTeNbrHelloEnabled, hwRsvpTeNbrGrCapability=hwRsvpTeNbrGrCapability, hwRsvpTeResvTtl=hwRsvpTeResvTtl, hwRsvpTeSenderSessionAttrFlag=hwRsvpTeSenderSessionAttrFlag, hwRsvpTeResvTSpecMinTu=hwRsvpTeResvTSpecMinTu, hwRsvpTeSenderMsgIdRcvEpoch=hwRsvpTeSenderMsgIdRcvEpoch, hwRsvpTeIfWindowSize=hwRsvpTeIfWindowSize, hwRsvpTeSenderDiffServPsc=hwRsvpTeSenderDiffServPsc, hwRsvpTeMessageIdEpoch=hwRsvpTeMessageIdEpoch, hwRsvpTeNbrTable=hwRsvpTeNbrTable, hwRsvpTeNbrGrStatus=hwRsvpTeNbrGrStatus, hwRsvpTeSenderLabelRequestFrMaxDlci=hwRsvpTeSenderLabelRequestFrMaxDlci, hwRsvpTeSessionReceivers=hwRsvpTeSessionReceivers, hwRsvpTeResvFwdScope=hwRsvpTeResvFwdScope, hwRsvpTeSenderAdspecMtu=hwRsvpTeSenderAdspecMtu, hwRsvpTeSenderMsgIdSndFlag=hwRsvpTeSenderMsgIdSndFlag, hwRsvpTeSenderAdspecGuaranteedBreak=hwRsvpTeSenderAdspecGuaranteedBreak, hwRsvpTeResvTSpecPeakRate=hwRsvpTeResvTSpecPeakRate, hwRsvpTeIfRetranIncDelta=hwRsvpTeIfRetranIncDelta, 
hwRsvpTeSenderFrrFlag=hwRsvpTeSenderFrrFlag, hwRsvpTeResvFwdInterface=hwRsvpTeResvFwdInterface, hwRsvpTeSenderTtl=hwRsvpTeSenderTtl, hwRsvpTeSenderAdspecMinLatency=hwRsvpTeSenderAdspecMinLatency, hwRsvpTeResvFwdTtl=hwRsvpTeResvFwdTtl, hwRsvpTeSenderLabelRequestAtmMinVci=hwRsvpTeSenderLabelRequestAtmMinVci, hwRsvpTeResvFwdService=hwRsvpTeResvFwdService, hwRsvpTeSenderInterface=hwRsvpTeSenderInterface, hwRsvpTeSenderInterval=hwRsvpTeSenderInterval, hwRsvpTeResvFwdRsvpHop=hwRsvpTeResvFwdRsvpHop, hwRsvpTeEroIpPrefixLen=hwRsvpTeEroIpPrefixLen, hwRsvpTeResvFwdEntry=hwRsvpTeResvFwdEntry, hwRsvpTeLspId=hwRsvpTeLspId, hwRsvpTeResvFwdRSpecSlack=hwRsvpTeResvFwdRSpecSlack, hwRsvpTeResvRSpecSlack=hwRsvpTeResvRSpecSlack, hwRsvpTeResvFwdInterval=hwRsvpTeResvFwdInterval, hwRsvpTeResvFwdHopAddr=hwRsvpTeResvFwdHopAddr, hwRsvpTeSenderAdspecCtrlLoadBreak=hwRsvpTeSenderAdspecCtrlLoadBreak, hwRsvpTeResvFwdPolicy=hwRsvpTeResvFwdPolicy, hwRsvpTeConformance=hwRsvpTeConformance, hwRsvpTeSenderAdspecBreak=hwRsvpTeSenderAdspecBreak, hwRsvpTeResvFwdTSpecBurst=hwRsvpTeResvFwdTSpecBurst, hwRsvpTeResvFwdMsgIdNumber=hwRsvpTeResvFwdMsgIdNumber, hwRsvpTeExtendTrap=hwRsvpTeExtendTrap, hwRsvpTeAuthSuccess=hwRsvpTeAuthSuccess, hwRsvpTeFilterSpecNumber=hwRsvpTeFilterSpecNumber, hwRsvpTeIfNbrTotalCountExceed=hwRsvpTeIfNbrTotalCountExceed, hwRsvpTeSenderFrrSetupPrio=hwRsvpTeSenderFrrSetupPrio, hwRsvpTeResvHopLih=hwRsvpTeResvHopLih, hwRsvpTeIfEnabled=hwRsvpTeIfEnabled, hwRsvpTeIfTable=hwRsvpTeIfTable, hwRsvpTeIfHelloEnabled=hwRsvpTeIfHelloEnabled, hwRsvpTeIfAuthLifeTime=hwRsvpTeIfAuthLifeTime, hwRsvpTeSenderMsgIdRcvNumber=hwRsvpTeSenderMsgIdRcvNumber, hwRsvpTeResvFwdTSpecRate=hwRsvpTeResvFwdTSpecRate, hwRsvpTeSenderAdspecGuaranteedPathBw=hwRsvpTeSenderAdspecGuaranteedPathBw, hwRsvpTeResvDestAddrLength=hwRsvpTeResvDestAddrLength, hwRsvpTeNbrHelloLostCounter=hwRsvpTeNbrHelloLostCounter, hwRsvpTeSenderAdspecCtrlLoadHopCount=hwRsvpTeSenderAdspecCtrlLoadHopCount, hwRsvpTeHelloLost=hwRsvpTeHelloLost, 
hwRsvpTeIfUdpRequired=hwRsvpTeIfUdpRequired, hwRsvpTeNbrReductionEnabled=hwRsvpTeNbrReductionEnabled, hwRsvpTeSessionStyle=hwRsvpTeSessionStyle, hwRsvpTeNbrAddress=hwRsvpTeNbrAddress, hwRsvpTeNbrHelloType=hwRsvpTeNbrHelloType, hwRsvpTeSessionTunnelId=hwRsvpTeSessionTunnelId, hwRsvpTeIfSrefreshEnabled=hwRsvpTeIfSrefreshEnabled, hwRsvpTeEroNumber=hwRsvpTeEroNumber, hwRsvpTeSenderAdspecGuaranteedCsum=hwRsvpTeSenderAdspecGuaranteedCsum, hwRsvpTeSenderSessionAttrHoldPrio=hwRsvpTeSenderSessionAttrHoldPrio, hwRsvpTeSenderLabelRequestAtmMaxVci=hwRsvpTeSenderLabelRequestAtmMaxVci, hwRsvpTeSenderHopLih=hwRsvpTeSenderHopLih, hwRsvpTeFilterSpecLspId=hwRsvpTeFilterSpecLspId, hwRsvpTeSenderSessionAttrIncludeAll=hwRsvpTeSenderSessionAttrIncludeAll, hwRsvpTeSenderLabelRequestL3pid=hwRsvpTeSenderLabelRequestL3pid, hwRsvpTeSenderAdspecGuaranteedMtu=hwRsvpTeSenderAdspecGuaranteedMtu, hwRsvpTeResvNumber=hwRsvpTeResvNumber, hwRsvpTeTrapObjects=hwRsvpTeTrapObjects, hwRsvpTeResvFwdMsgIdEpoch=hwRsvpTeResvFwdMsgIdEpoch, hwRsvpTeSenderDestAddrLength=hwRsvpTeSenderDestAddrLength, hwRsvpTeIfAuthHandshake=hwRsvpTeIfAuthHandshake, hwRsvpTeSenderTSpecMaxTu=hwRsvpTeSenderTSpecMaxTu, hwRsvpTeSenderLabelRequestCtype=hwRsvpTeSenderLabelRequestCtype, hwRsvpTeObjects=hwRsvpTeObjects, hwRsvpTeIfNbrThresholdExceed=hwRsvpTeIfNbrThresholdExceed, hwRsvpTeResvFwdMsgIdFlag=hwRsvpTeResvFwdMsgIdFlag, hwRsvpTeResvInterval=hwRsvpTeResvInterval, hwRsvpTeSessionTunnelExtId=hwRsvpTeSessionTunnelExtId, hwRsvpTeMessageIdGroup=hwRsvpTeMessageIdGroup, hwRsvpTeSenderTSpecMinTu=hwRsvpTeSenderTSpecMinTu, hwRsvpTeResvRSpecRate=hwRsvpTeResvRSpecRate, hwRsvpTeSenderFrrHoldPrio=hwRsvpTeSenderFrrHoldPrio, hwRsvpTeResvFwdTSpecMinTu=hwRsvpTeResvFwdTSpecMinTu, hwRsvpTeNbrSendersNumber=hwRsvpTeNbrSendersNumber, hwRsvpTeIfEntry=hwRsvpTeIfEntry, hwRsvpTeSenderAdspecGuaranteedSvc=hwRsvpTeSenderAdspecGuaranteedSvc, hwRsvpTeMessageIdEntry=hwRsvpTeMessageIdEntry, hwRsvpTeFilterSpecEntry=hwRsvpTeFilterSpecEntry, 
hwRsvpTeTrap=hwRsvpTeTrap, hwRsvpTeNbrStatus=hwRsvpTeNbrStatus, hwRsvpTeSenderAdspecCtrlLoadMinLatency=hwRsvpTeSenderAdspecCtrlLoadMinLatency, hwRsvpTeIfNbrs=hwRsvpTeIfNbrs, hwRsvpTeIfNbrThresholdExceedClear=hwRsvpTeIfNbrThresholdExceedClear, hwRsvpTeResvHopAddr=hwRsvpTeResvHopAddr, hwRsvpTeSenderLabelRequestAtmMinVpi=hwRsvpTeSenderLabelRequestAtmMinVpi, hwRsvpTeSenderAddrLength=hwRsvpTeSenderAddrLength)
| 149.703846 | 12,243 | 0.770675 | [
"Apache-2.0"
] | agustinhenze/mibs.snmplabs.com | pysnmp/HUAWEI-RSVPTE-MIB.py | 77,846 | Python |
#!/usr/bin/env python
# Packaging script for BnW, a microblogging service.
from setuptools import setup

setup(name='BnW',
      version='0.1',
      description='Microblogging service',
      author='Stiletto',
      author_email='[email protected]',
      url='http://github.com/stiletto/bnw',
      packages=['bnw', 'bnw.core', 'bnw.formatting', 'bnw.handlers', 'bnw.scripts', 'bnw.search', 'bnw.web', 'bnw.xmpp'],
      # NOTE(review): dependency_links is ignored by modern pip; kept here for
      # legacy installers that still honour it.
      dependency_links=['http://github.com/mongodb/motor/tarball/master#egg=motor-0.1.2',
                        'http://github.com/mongodb/mongo-python-driver/tarball/master#egg=pymongo-2.6',
                        'https://github.com/stiletto/linkshit/archive/refs/tags/0.2.tar.gz#egg=linkshit-0.2'],
      install_requires=['tornado>=2.0,<6.0', 'twisted<16.3.0', 'Pillow<7', 'PyRSS2Gen', 'python-dateutil', 'misaka<2.0.0', 'motor==0.7', 'linkshit', 'libthumbor', 'singledispatch<3.6'],
      package_data={'bnw.web': ['templates/*.html','static/*.*', 'static/flot/*', 'static/web-socket-js/*']},
      # Console scripts: main daemon, search worker, and admin utility.
      entry_points = {
          'console_scripts': [
              'bnw = bnw.scripts.entry:instance',
              'bnw-search = bnw.scripts.entry:search',
              'bnw-admin = bnw.scripts.admin:main',
          ],
      }
      )
| 44.807692 | 183 | 0.615451 | [
"BSD-2-Clause"
] | stiletto/bnw | setup.py | 1,165 | Python |
import logging
from robot.api import logger
from robot.libraries.BuiltIn import BuiltIn
from simple_salesforce import Salesforce
from cumulusci.cli.config import CliRuntime
from cumulusci.core.config import TaskConfig
from cumulusci.core.exceptions import TaskOptionsError
from cumulusci.core.tasks import CURRENT_TASK
from cumulusci.core.utils import import_global
from cumulusci.robotframework.utils import set_pdb_trace
from cumulusci.tasks.robotframework.robotframework import Robot
class CumulusCI(object):
    """ Library for accessing CumulusCI for the local git project

    This library allows Robot Framework tests to access credentials to a
    Salesforce org created by CumulusCI, including Scratch Orgs. It also
    exposes the core logic of CumulusCI including interactions with the
    Salesforce API's and project specific configuration including custom
    and customized tasks and flows.

    Initialization requires a single argument, the org name for the target
    CumulusCI org. If running your tests via cci's robot task (recommended),
    you can initialize the library in your tests taking advantage of the
    variable set by the robot task:

    | ``*** Settings ***``
    |
    | Library  cumulusci.robotframework.CumulusCI  ${ORG}
    """

    # One shared library instance for the whole Robot Framework run.
    ROBOT_LIBRARY_SCOPE = "GLOBAL"

    def __init__(self, org_name=None):
        # Default to the conventional "dev" org when no name is supplied.
        if not org_name:
            org_name = "dev"
        self.org_name = org_name
        # Lazily-initialized caches; see the matching properties below.
        self._project_config = None
        self._org = None
        self._sf = None
        self._tooling = None
        # Turn off info logging of all http requests
        logging.getLogger("requests.packages.urllib3.connectionpool").setLevel(
            logging.WARN
        )

    @property
    def project_config(self):
        # Prefer the config of a CumulusCI `robot` task that is currently
        # running us; otherwise build (and cache) one from the CLI runtime.
        if self._project_config is None:
            if CURRENT_TASK.stack and isinstance(CURRENT_TASK.stack[0], Robot):
                # If CumulusCI is running a task, use that task's config
                return CURRENT_TASK.stack[0].project_config
            else:
                logger.console("Initializing CumulusCI config\n")
                self._project_config = CliRuntime().project_config
        return self._project_config

    def set_project_config(self, project_config):
        # Allow callers (e.g. tests) to inject a pre-built project config.
        logger.console("\n")
        self._project_config = project_config

    @property
    def keychain(self):
        return self.project_config.keychain

    @property
    def org(self):
        # Same pattern as project_config: reuse the running task's org when
        # available, otherwise resolve self.org_name through the keychain.
        if self._org is None:
            if CURRENT_TASK.stack and isinstance(CURRENT_TASK.stack[0], Robot):
                # If CumulusCI is running a task, use that task's org
                return CURRENT_TASK.stack[0].org_config
            else:
                self._org = self.keychain.get_org(self.org_name)
        return self._org

    @property
    def sf(self):
        # Cached simple-salesforce client for the REST API.
        if self._sf is None:
            self._sf = self._init_api()
        return self._sf

    @property
    def tooling(self):
        # Cached simple-salesforce client pointed at the Tooling API.
        if self._tooling is None:
            self._tooling = self._init_api("tooling/")
        return self._tooling

    def set_login_url(self):
        """ Sets the LOGIN_URL variable in the suite scope which will
        automatically log into the target Salesforce org.

        Typically, this is run during Suite Setup
        """
        BuiltIn().set_suite_variable("${LOGIN_URL}", self.org.start_url)

    def get_org_info(self):
        """ Returns a dictionary of the org information for the current target
        Salesforce org
        """
        return self.org.config

    def login_url(self, org=None):
        """ Returns the login url which will automatically log into the target
        Salesforce org. By default, the org_name passed to the library
        constructor is used but this can be overridden with the org option
        to log into a different org.
        """
        if org is None:
            org = self.org
        else:
            org = self.keychain.get_org(org)
        return org.start_url

    def get_namespace_prefix(self, package=None):
        """ Returns the namespace prefix (including __) for the specified package name.
        (Defaults to project__package__name_managed from the current project config.)

        Returns an empty string if the package is not installed as a managed package.
        """
        result = ""
        if package is None:
            package = self.project_config.project__package__name_managed
        # Query installed packages via the Tooling API and match by name.
        packages = self.tooling.query(
            "SELECT SubscriberPackage.NamespacePrefix, SubscriberPackage.Name "
            "FROM InstalledSubscriberPackage"
        )
        match = [
            p for p in packages["records"] if p["SubscriberPackage"]["Name"] == package
        ]
        if match:
            result = match[0]["SubscriberPackage"]["NamespacePrefix"] + "__"
        return result

    def run_task(self, task_name, **options):
        """ Runs a named CumulusCI task for the current project with optional
        support for overriding task options via kwargs.

        Examples:
        | =Keyword= | =task_name= | =task_options=             | =comment= |
        | Run Task  | deploy      |                            | Run deploy with standard options |
        | Run Task  | deploy      | path=path/to/some/metadata | Run deploy with custom path |
        """
        task_config = self.project_config.get_task(task_name)
        class_path = task_config.class_path
        logger.console("\n")
        task_class, task_config = self._init_task(class_path, options, task_config)
        return self._run_task(task_class, task_config)

    def run_task_class(self, class_path, **options):
        """ Runs a CumulusCI task class with task options via kwargs.

        Use this keyword to run logic from CumulusCI tasks which have not
        been configured in the project's cumulusci.yml file. This is
        most useful in cases where a test needs to use task logic for
        logic unique to the test and thus not worth making into a named
        task for the project

        Examples:
        | =Keyword=      | =task_class=                     | =task_options= |
        | Run Task Class | cumulusci.task.utils.DownloadZip | url=http://test.com/test.zip dir=test_zip |
        """
        logger.console("\n")
        task_class, task_config = self._init_task(class_path, options, TaskConfig())
        return self._run_task(task_class, task_config)

    def _init_api(self, base_url=None):
        # Build a simple_salesforce client from the org's token/instance;
        # `base_url` ("tooling/") switches the endpoint to the Tooling API.
        api_version = self.project_config.project__package__api_version
        rv = Salesforce(
            instance=self.org.instance_url.replace("https://", ""),
            session_id=self.org.access_token,
            version=api_version,
        )
        if base_url is not None:
            rv.base_url += base_url
        return rv

    def _init_task(self, class_path, options, task_config):
        # Resolve the task class by dotted path and merge kwarg options in.
        task_class = import_global(class_path)
        task_config = self._parse_task_options(options, task_class, task_config)
        return task_class, task_config

    def _parse_task_options(self, options, task_class, task_config):
        # Fold keyword-argument overrides into task_config, rejecting any
        # option name the task class does not declare.
        if "options" not in task_config.config:
            task_config.config["options"] = {}
        # Parse options and add to task config
        if options:
            for name, value in options.items():
                # Validate the option
                if name not in task_class.task_options:
                    raise TaskOptionsError(
                        'Option "{}" is not available for task {}'.format(
                            name, task_class
                        )
                    )

                # Override the option in the task config
                task_config.config["options"][name] = value

        return task_config

    def _run_task(self, task_class, task_config):
        # Instantiate and invoke the task, returning its declared results.
        task = task_class(self.project_config, task_config, org_config=self.org)
        task()
        return task.return_values

    def debug(self):
        """Pauses execution and enters the Python debugger."""
        set_pdb_trace()
| 38.509346 | 109 | 0.62638 | [
"BSD-3-Clause"
] | jdominiczak/CumulusCI | cumulusci/robotframework/CumulusCI.py | 8,241 | Python |
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
#
# http://www.apache.org/licenses/LICENSE-2.0
#
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
from aliyunsdkcore.request import RpcRequest
from aliyunsdkvpc.endpoint import endpoint_data
class ModifyEipAddressAttributeRequest(RpcRequest):
    """RPC request for the Alibaba Cloud VPC API `ModifyEipAddressAttribute`
    (product 'Vpc', version 2016-04-28), sent as an HTTP POST.

    Each get_*/set_* pair below proxies one query parameter on the
    underlying RpcRequest — generated SDK boilerplate; the getter reads the
    parameter back from the query dict, the setter adds it.
    """

    def __init__(self):
        RpcRequest.__init__(self, 'Vpc', '2016-04-28', 'ModifyEipAddressAttribute','vpc')
        self.set_method('POST')
        # Wire up per-region endpoint resolution when the base class
        # exposes the endpoint attributes.
        if hasattr(self, "endpoint_map"):
            setattr(self, "endpoint_map", endpoint_data.getEndpointMap())
        if hasattr(self, "endpoint_regional"):
            setattr(self, "endpoint_regional", endpoint_data.getEndpointRegional())

    def get_ResourceOwnerId(self):
        return self.get_query_params().get('ResourceOwnerId')

    def set_ResourceOwnerId(self,ResourceOwnerId):
        self.add_query_param('ResourceOwnerId',ResourceOwnerId)

    def get_Description(self):
        return self.get_query_params().get('Description')

    def set_Description(self,Description):
        self.add_query_param('Description',Description)

    # AllocationId identifies which EIP the request modifies.
    def get_AllocationId(self):
        return self.get_query_params().get('AllocationId')

    def set_AllocationId(self,AllocationId):
        self.add_query_param('AllocationId',AllocationId)

    def get_ResourceOwnerAccount(self):
        return self.get_query_params().get('ResourceOwnerAccount')

    def set_ResourceOwnerAccount(self,ResourceOwnerAccount):
        self.add_query_param('ResourceOwnerAccount',ResourceOwnerAccount)

    def get_Bandwidth(self):
        return self.get_query_params().get('Bandwidth')

    def set_Bandwidth(self,Bandwidth):
        self.add_query_param('Bandwidth',Bandwidth)

    def get_OwnerAccount(self):
        return self.get_query_params().get('OwnerAccount')

    def set_OwnerAccount(self,OwnerAccount):
        self.add_query_param('OwnerAccount',OwnerAccount)

    def get_OwnerId(self):
        return self.get_query_params().get('OwnerId')

    def set_OwnerId(self,OwnerId):
        self.add_query_param('OwnerId',OwnerId)

    def get_Name(self):
        return self.get_query_params().get('Name')

    def set_Name(self,Name):
        self.add_query_param('Name',Name)
"Apache-2.0"
] | Explorer1092/aliyun-openapi-python-sdk | aliyun-python-sdk-vpc/aliyunsdkvpc/request/v20160428/ModifyEipAddressAttributeRequest.py | 2,724 | Python |
import os

#
# path and dataset parameter
#
# This cfg module is consumed via `import`, not through command-line
# arguments parsed in main.

DATA_PATH = 'data'

PASCAL_PATH = os.path.join(DATA_PATH, 'pascal_voc')

CACHE_PATH = os.path.join(PASCAL_PATH, 'cache')

OUTPUT_DIR = os.path.join(PASCAL_PATH, 'output')

WEIGHTS_DIR = os.path.join(PASCAL_PATH, 'weights')

WEIGHTS_FILE = None
# WEIGHTS_FILE = os.path.join(DATA_PATH, 'weights', 'YOLO_small.ckpt')

# The 20 PASCAL VOC object classes, in label-index order.
CLASSES = ['aeroplane', 'bicycle', 'bird', 'boat', 'bottle', 'bus',
           'car', 'cat', 'chair', 'cow', 'diningtable', 'dog', 'horse',
           'motorbike', 'person', 'pottedplant', 'sheep', 'sofa',
           'train', 'tvmonitor']

# Whether to augment the dataset with horizontally flipped images.
FLIPPED = True

#
# model parameter
#

IMAGE_SIZE = 448       # network input resolution (pixels)
CELL_SIZE = 7          # output grid is CELL_SIZE x CELL_SIZE
BOXES_PER_CELL = 2     # bounding boxes predicted per grid cell
ALPHA = 0.1            # leaky-ReLU negative slope
DISP_CONSOLE = False

# Loss-term weights.
OBJECT_SCALE = 1.0
NOOBJECT_SCALE = 1.0
CLASS_SCALE = 2.0
COORD_SCALE = 5.0

#
# solver parameter
#

GPU = ''
LEARNING_RATE = 0.0001
DECAY_STEPS = 30000
DECAY_RATE = 0.1
STAIRCASE = True
BATCH_SIZE = 45
MAX_ITER = 15000
SUMMARY_ITER = 10
SAVE_ITER = 1000

#
# test parameter
#

THRESHOLD = 0.2        # minimum class-confidence score to keep a detection
IOU_THRESHOLD = 0.5    # IoU threshold used for non-maximum suppression
"MIT"
] | banayoyo/yolo | yolo/config.py | 1,137 | Python |
import os

import matplotlib as mpl
import torch

from data_management import Jitter, load_dataset
from networks import IterativeNet, UNet
from operators import TVAnalysis, get_tikhonov_matrix

# --- load configuration -----
import config  # isort:skip

# ----- general setup -----
mpl.use("agg")
device = torch.device("cuda:0")

# ----- operators -----
# Measurement operator and TV analysis operator for the inverse problem.
OpA = config.meas_op(config.m, config.n, device=device, **config.meas_params)
OpTV = TVAnalysis(config.n, device=device)

# ----- build linear inverter ------
# Tikhonov-regularized linear reconstruction used to initialize the network;
# its (fixed, non-trainable) weight matrix is precomputed.
reg_fac = 2e-2

inverter = torch.nn.Linear(OpA.m, OpA.n, bias=False)
inverter.weight.requires_grad = False
inverter.weight.data = get_tikhonov_matrix(OpA, OpTV, reg_fac)

# ----- network configuration -----
subnet_params = {
    "in_channels": 1,
    "out_channels": 1,
    "drop_factor": 0.0,
    "base_features": 64,
}
subnet = UNet

# Unrolled iterative net: 8 iterations, one learnable lambda per iteration,
# with a final data-consistency step.
it_net_params = {
    "operator": OpA,
    "inverter": inverter,
    "num_iter": 8,
    "lam": 8 * [0.1],
    "lam_learnable": True,
    "final_dc": True,
}

# ----- training setup ------
mse_loss = torch.nn.MSELoss(reduction="sum")


def loss_func(pred, tar):
    # Sum-of-squares error normalized by batch size.
    return mse_loss(pred, tar) / pred.shape[0]


# Two training phases; per-phase values below are indexed by phase.
train_phases = 2
train_params = {
    "num_epochs": [100, 5],
    "batch_size": [40, 40],
    "loss_func": loss_func,
    "save_path": [
        os.path.join(
            config.RESULTS_PATH,
            "unet_it_tikh_jitter_"
            "train_phase_{}".format((i + 1) % (train_phases + 1)),
        )
        for i in range(train_phases + 1)
    ],
    "save_epochs": 1,
    "optimizer": torch.optim.Adam,
    "optimizer_params": [
        {"lr": 5e-5, "eps": 1e-5, "weight_decay": 5e-4},
        {"lr": 2e-5, "eps": 1e-5, "weight_decay": 5e-4},
    ],
    "scheduler": torch.optim.lr_scheduler.StepLR,
    "scheduler_params": {"step_size": 1, "gamma": 1.0},
    "acc_steps": [1, 200],
    # Phase 1 augments inputs with jitter noise; validation is unperturbed.
    "train_transform": Jitter(2e0, 0.0, 1.0),
    "val_transform": None,
}

# -----data prep -----
# Tensors gain a channel dimension (unsqueeze(-2)) and move to the GPU.
X_train, C_train, Y_train = [
    tmp.unsqueeze(-2).to(device)
    for tmp in load_dataset(config.set_params["path"], subset="train")
]
X_val, C_val, Y_val = [
    tmp.unsqueeze(-2).to(device)
    for tmp in load_dataset(config.set_params["path"], subset="val")
]

# ------ save hyperparameters -------
# Dump all configuration dicts to a text file next to the final checkpoints.
os.makedirs(train_params["save_path"][-1], exist_ok=True)
with open(
    os.path.join(train_params["save_path"][-1], "hyperparameters.txt"), "w"
) as file:
    for key, value in subnet_params.items():
        file.write(key + ": " + str(value) + "\n")
    for key, value in it_net_params.items():
        file.write(key + ": " + str(value) + "\n")
    for key, value in train_params.items():
        file.write(key + ": " + str(value) + "\n")
    file.write("train_phases" + ": " + str(train_phases) + "\n")

# ------ construct network and train -----
subnet = subnet(**subnet_params).to(device)
it_net = IterativeNet(subnet, **it_net_params).to(device)
for i in range(train_phases):
    # Select this phase's value from any per-phase list/tuple entries.
    train_params_cur = {}
    for key, value in train_params.items():
        train_params_cur[key] = (
            value[i] if isinstance(value, (tuple, list)) else value
        )

    print("Phase {}:".format(i + 1))
    for key, value in train_params_cur.items():
        print(key + ": " + str(value))

    it_net.train_on((Y_train, X_train), (Y_val, X_val), **train_params_cur)
"MIT"
] | jmaces/robust-nets | tvsynth/script_train_unet_it_tikh_jitter.py | 3,325 | Python |
def solve(heights):
    """Simulate rounds of removing "valleys" and return the weighted total.

    A valley is an index whose value is <= both neighbors, where removed
    elements and the array boundary count as larger than anything.  Each
    round r (1-based) removes all current valleys at once and adds
    r * (number removed) to the total.  The global minimum of the remaining
    elements is always a valley, so the loop terminates after <= len(heights)
    rounds.

    Fixes over the original script: uses a removed-flag array instead of the
    sentinel value 999999 (which miscounted inputs >= 999999), and returns 0
    for empty input instead of raising ValueError on min([]).
    """
    values = list(heights)
    n = len(values)
    removed = [False] * n
    total = 0
    round_no = 1
    while not all(removed):
        # Collect this round's valleys first, then remove them together,
        # so removals within a round do not influence each other.
        valleys = []
        for a in range(n):
            if removed[a]:
                continue
            left_ok = a == 0 or removed[a - 1] or values[a] <= values[a - 1]
            right_ok = a == n - 1 or removed[a + 1] or values[a] <= values[a + 1]
            if left_ok and right_ok:
                valleys.append(a)
        for a in valleys:
            removed[a] = True
        total += round_no * len(valleys)
        round_no += 1
    return total


if __name__ == "__main__":
    # stdin format: first line N, then N lines of integer heights.
    count = int(input())
    print(solve([int(input()) for _ in range(count)]))
"MIT"
] | MatiwsxD/ayed-2019-1 | labinfo13/Candies.py | 439 | Python |
"""
Tests meant to be run with pytest
"""
import sys
import os
import pytest
from moviepy.editor import *
from moviepy.video.tools.interpolators import Trajectory
import sys
sys.path.append("tests")
import download_media
from test_helper import PYTHON_VERSION, TMP_DIR, TRAVIS
def test_download_media(capsys):
    # Fetch the media fixtures the other tests rely on; pytest's output
    # capture is disabled so download progress stays visible on the console.
    with capsys.disabled():
        download_media.download()
def test_PR_306():
    """Regression test for moviepy PR #306: TextClip.list returns non-empty
    font/color lists and raises for an unknown argument."""
    if TRAVIS:
        return

    # put this back in once we get ImageMagick working on travis-ci
    assert TextClip.list('font') != []
    assert TextClip.list('color') != []

    # The `message=` keyword of pytest.raises was deprecated in pytest 3.8
    # and removed in 4.0; use the context-manager form instead.
    with pytest.raises(Exception):
        TextClip.list('blah')
def test_PR_339():
    """Regression test for moviepy PR #339: TextClip construction in both
    'caption' and 'label' modes should succeed."""
    if TRAVIS:
        return

    # in caption mode
    overlay = TextClip(txt='foo',
                       color='white', font="Liberation-Mono",
                       size=(640, 480),
                       method='caption',
                       align='center',
                       fontsize=25)

    # in label mode
    overlay = TextClip(txt='foo', font="Liberation-Mono", method='label')
def test_PR_373():
    """Regression test for moviepy PR #373: Trajectory lists must survive a
    save/load round trip with tt/xx/yy arrays intact."""
    result = Trajectory.load_list("media/traj.txt")

    Trajectory.save_list(result, os.path.join(TMP_DIR, "traj1.txt"))

    result1 = Trajectory.load_list(os.path.join(TMP_DIR,"traj1.txt"))

    # Compare the first trajectory's arrays element by element.
    assert len(result[0].tt) == len(result1[0].tt)
    for i in range(len(result[0].tt)):
        assert result[0].tt[i] == result1[0].tt[i]

    assert len(result[0].xx) == len(result1[0].xx)
    for i in range(len(result[0].xx)):
        assert result[0].xx[i] == result1[0].xx[i]

    assert len(result[0].yy) == len(result1[0].yy)
    for i in range(len(result[0].yy)):
        assert result[0].yy[i] == result1[0].yy[i]
def test_PR_424():
    """Regression test for moviepy PR #424: ColorClip accepts `color`, and
    the deprecated `col` argument still works (with warnings)."""
    # Recommended use
    clip = ColorClip([1000, 600], color=(60, 60, 60), duration=10)

    # Uses `col` so should work the same as above, but give warning
    clip = ColorClip([1000, 600], col=(60, 60, 60), duration=10)

    # Should give 2 warnings and use `color`, not `col`
    clip = ColorClip([1000, 600], color=(60, 60, 60), duration=10, col=(2,2,2))
def test_PR_458():
    """Regression test for moviepy PR #458: writing a ColorClip to an mp4
    file should succeed."""
    clip = ColorClip([1000, 600], color=(60, 60, 60), duration=10)
    clip.write_videofile(os.path.join(TMP_DIR, "test.mp4"),
                         progress_bar=False, fps=30)
def test_PR_515():
    """Regression test for moviepy PR #515: the `fps_source` argument selects
    whether the fps comes from the container's tbr or fps metadata."""
    # Won't actually work until video is in download_media
    clip = VideoFileClip("media/fire2.mp4", fps_source='tbr')
    assert clip.fps == 90000
    clip = VideoFileClip("media/fire2.mp4", fps_source='fps')
    assert clip.fps == 10.51
def test_PR_529():
    """Regression test for moviepy PR #529: rotation metadata is read from
    the video file."""
    video_clip = VideoFileClip("media/fire2.mp4")
    assert video_clip.rotation == 180
# Allow running this test module directly (outside a pytest invocation).
if __name__ == '__main__':
    pytest.main()
| 27.77551 | 79 | 0.630051 | [
"Apache-2.0"
] | L0dz/auto-post | env/Lib/site-packages/tests/test_PR.py | 2,722 | Python |
import cv2
import matplotlib.pyplot as plt
import numpy as np
# Load the source image; OpenCV reads BGR, so convert to RGB for matplotlib.
img = cv2.imread("img.png")
img = cv2.cvtColor(img, cv2.COLOR_BGR2RGB)
plt.axis('off')
# show the original image
plt.imshow(img)
plt.show()
# get the image shape (the original unpacked img.shape twice; once is enough)
rows, cols, dim = img.shape
# transformation matrix for shearing applied to the x-axis
M1 = np.float32([[1, 0.5, 0],
                 [0, 1,   0],
                 [0, 0,   1]])
# transformation matrix for shearing applied to the y-axis
M2 = np.float32([[1,   0, 0],
                 [0.5, 1, 0],
                 [0,   0, 1]])
# apply a perspective transformation to the image; the output canvas is
# enlarged by 1.5x so the sheared image is not clipped
sheared_img_in_x = cv2.warpPerspective(img, M1, (int(cols * 1.5), int(rows * 1.5)))
sheared_img_in_y = cv2.warpPerspective(img, M2, (int(cols * 1.5), int(rows * 1.5)))
# disable x & y axis
plt.axis('off')
# show the two sheared results side by side
plt.subplot(121)
plt.imshow(sheared_img_in_x)
plt.subplot(122)
plt.imshow(sheared_img_in_y)
plt.show()
"MIT"
] | Payal197bhadra/ComputerVision | OpenCV-Computer-Vision-Examples-with-Python-A-Complete-Guide-for-Dummies-master/Source Code/opencv/Affine Transformation/shearing.py | 905 | Python |
from dataclasses import dataclass, field
from typing import Dict, Optional
@dataclass
class FooType:
    """Complex type "fooType": two required elements, one optional boolean
    element, and a wildcard attribute map."""

    class Meta:
        name = "fooType"

    foo_ele1: Optional[str] = field(
        default=None,
        metadata=dict(
            name="fooEle1",
            type="Element",
            namespace="",
            required=True,
        ),
    )
    foo_ele2: Optional[int] = field(
        default=None,
        metadata=dict(
            name="fooEle2",
            type="Element",
            namespace="",
            required=True,
        ),
    )
    foo_ele3: Optional[bool] = field(
        default=None,
        metadata=dict(
            name="fooEle3",
            type="Element",
            namespace="",
        ),
    )
    other_attributes: Dict[str, str] = field(
        default_factory=dict,
        metadata=dict(
            type="Attributes",
            namespace="##other",
        ),
    )
@dataclass
class FooTest(FooType):
    """Element "fooTest": inherits the full fooType content model unchanged."""
    class Meta:
        name = "fooTest"
@dataclass
class MyType(FooType):
    """Complex type "myType": inherits the full fooType content model unchanged."""
    class Meta:
        name = "myType"
@dataclass
class Root:
    """Document root element wrapping a single required fooTest child."""

    class Meta:
        name = "root"

    foo_test: Optional[FooTest] = field(
        default=None,
        metadata=dict(
            name="fooTest",
            type="Element",
            required=True,
        ),
    )
| 19.285714 | 45 | 0.479259 | [
"MIT"
] | tefra/xsdata-w3c-tests | output/models/ms_data/complex_type/ct_i028_xsd/ct_i028.py | 1,350 | Python |
__author__ = 'Gobin'
from redditcli.api import base
class Account(base.Resource):
    """Resource wrapper representing a reddit account."""
    resource_name = 'Account'
class AccountManager(base.ResourceManager):
    """Manager exposing the authenticated user's /api/v1/me endpoints."""
    resource_class = Account
    def me(self):
        """Fetch /api/v1/me (the authenticated user's identity)."""
        return self._get('/api/v1/me')
    def getkarma(self):
        """Fetch /api/v1/me/karma."""
        return self._get('/api/v1/me/karma')
    def getfriends(self):
        """Fetch /api/v1/me/friends; the 'data' argument presumably selects
        the response envelope — confirm in base.ResourceManager._get."""
        return self._get('/api/v1/me/friends', 'data')
    def getprefs(self):
        """Fetch /api/v1/me/prefs."""
        return self._get('/api/v1/me/prefs')
    def gettrophies(self):
        """Fetch /api/v1/me/trophies."""
        return self._get('/api/v1/me/trophies')
"Apache-2.0"
] | gobins/python-oauth2 | redditcli/api/account.py | 545 | Python |
import os
import numpy as np
import warnings
#import SimpleITK as sitk
import cv2
from scipy import misc
from scipy import ndimage
def load_image_from_folder(folder_path, new_size, HE=False, Truc=False, Aug=False):
    """Load CT/SPECT/label triplets from .npz files in *folder_path*.

    Each file must contain key 'a' holding an array of shape (n, 3n):
    the CT image, SPECT image and label laid out side by side.

    :param folder_path: directory path (must end with a path separator,
        since file names are appended by string concatenation).
    :param new_size: unused; kept for signature compatibility (resizing
        was disabled in the original implementation).
    :param HE: apply histogram equalization to CT and SPECT.
    :param Truc: apply CLAHE contrast limiting instead (only if HE is False).
    :param Aug: emit 8 rotation/flip variants per sample instead of 1.
    :returns: (image_array, label_array); images are CT|SPECT concatenated
        along axis 1.
    """
    image_list = []
    label_list = []
    for image_name in os.listdir(folder_path):
        image_original = np.load(folder_path + image_name)['a']
        n = len(image_original)  # images are n x n, stored side by side
        image_ct = image_original[:, 0:n]
        image_spect = image_original[:, n:n * 2]
        label = image_original[:, n * 2:n * 3]
        if HE:
            image_ct = cv2.equalizeHist(image_ct)
            image_spect = cv2.equalizeHist(image_spect)
        elif Truc:
            clahe = cv2.createCLAHE(clipLimit=0.1, tileGridSize=(8, 8))
            image_spect = clahe.apply(image_spect)
            image_ct = clahe.apply(image_ct)
        if Aug:
            ct_variants = _eight_fold_variants(image_ct)
            spect_variants = _eight_fold_variants(image_spect)
            label_variants = _eight_fold_variants(label)
            # match original behaviour: the un-augmented label keeps its
            # native dtype; only the augmented variants are cast to int
            label_variants = [label_variants[0]] + \
                [v.astype(int) for v in label_variants[1:]]
            for ct_v, spect_v, label_v in zip(ct_variants, spect_variants,
                                              label_variants):
                image_list.append(np.concatenate((ct_v, spect_v), axis=1))
                label_list.append(label_v)
        else:
            image_list.append(np.concatenate((image_ct, image_spect), axis=1))
            label_list.append(label)
    return np.asarray(image_list), np.asarray(label_list)


def _eight_fold_variants(image):
    """Return the 8 augmentation variants of *image*, in the fixed order
    [orig, rot90, flip(rot90), rot180, flip(rot180), rot270, flip(rot270),
    flip(orig)] used by the FusionNet paper augmentation."""
    variants = [image]
    for angle in (-90, -180, -270):
        rotated = ndimage.rotate(image, angle)
        variants.append(rotated)
        variants.append(np.flipud(rotated))
    variants.append(np.flipud(image))
    return variants
def load_test_from_folder(folder_path, new_size, HE=False, Truc=False, Aug=False):
    """Load CT|SPECT test images (no labels) from .npz files in *folder_path*.

    Each file must contain key 'a' holding an (n, >=2n) array with the CT
    and SPECT halves side by side.  new_size, HE, Truc and Aug are accepted
    for signature compatibility but unused, matching the original behaviour.
    """
    collected = []
    for file_name in os.listdir(folder_path):
        combined = np.load(folder_path + file_name)['a']
        width = len(combined)  # square images: row count == half-width
        ct_half = combined[:, 0:width]
        spect_half = combined[:, width:width * 2]
        collected.append(np.concatenate((ct_half, spect_half), axis=1))
    return np.asarray(collected)
"MIT"
] | junyuchen245/NM-Img-Denoising-DIP-Keras | DIPDenoising/image_reading.py | 5,714 | Python |
#MIT License
#Copyright (c) 2021 OXYOP
#Permission is hereby granted, free of charge, to any person obtaining a copy
#of this software and associated documentation files (the "Software"), to deal
#in the Software without restriction, including without limitation the rights
#to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
#copies of the Software, and to permit persons to whom the Software is
#furnished to do so, subject to the following conditions:
#The above copyright notice and this permission notice shall be included in all
#copies or substantial portions of the Software.
#THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
#IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
#FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
#AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
#LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
#OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
#SOFTWARE.
from pyrogram.handlers import InlineQueryHandler
from youtubesearchpython import VideosSearch
from utils import USERNAME
from pyrogram.types import InlineQueryResultArticle, InputTextMessageContent, InlineKeyboardButton, InlineKeyboardMarkup
from pyrogram import Client, errors
from config import Config
REPLY_MESSAGE=Config.REPLY_MESSAGE
buttons = [
[
InlineKeyboardButton('⚡️Make Own Bot', url='https://heroku.com/deploy?template=https://github.com/OxyNotOp/OxyPlayer'),
InlineKeyboardButton('🧩 Source Code', url='https://github.com/OxyNotOp/OxyPlayer'),
],
[
InlineKeyboardButton('🎧Play Music', url=f'https://t.me/{USERNAME}'),
InlineKeyboardButton('👨🏼🦯 Help', callback_data='help')
]
]
@Client.on_inline_query()
async def search(client, query):
    """Answer inline queries.

    Three paths: a sentinel query ("ORU_MANDAN_PM_VANNU") gets a canned
    deploy-info article; an empty query gets a "search a video" prompt;
    anything else is searched on YouTube and each result, when chosen,
    sends a /play command for that video.
    """
    answers = []
    if query.query == "ORU_MANDAN_PM_VANNU":
        # sentinel sent by the bot itself: reply with deploy instructions
        answers.append(
            InlineQueryResultArticle(
                title="Deploy",
                input_message_content=InputTextMessageContent(f"{REPLY_MESSAGE}\n\n<b>You can't use this bot in your group, for that you have to make your own bot from the [SOURCE CODE](https://github.com/OxyNotOp/OxyPlayer) below.</b>", disable_web_page_preview=True),
                reply_markup=InlineKeyboardMarkup(buttons)
            )
        )
        await query.answer(results=answers, cache_time=0)
        return
    # NOTE(review): strip() already removes trailing whitespace, so the
    # extra rstrip() is redundant but harmless
    string = query.query.lower().strip().rstrip()
    if string == "":
        # empty query: show a prompt instead of results
        await client.answer_inline_query(
            query.id,
            results=answers,
            switch_pm_text=("Search a youtube video"),
            switch_pm_parameter="help",
            cache_time=0
        )
    else:
        # string is already lower-cased above; the second .lower() is redundant
        videosSearch = VideosSearch(string.lower(), limit=50)
        for v in videosSearch.result()["result"]:
            answers.append(
                InlineQueryResultArticle(
                    title=v["title"],
                    description=("Duration: {} Views: {}").format(
                        v["duration"],
                        v["viewCount"]["short"]
                    ),
                    input_message_content=InputTextMessageContent(
                        "/play https://www.youtube.com/watch?v={}".format(
                            v["id"]
                        )
                    ),
                    thumb_url=v["thumbnails"][0]["url"]
                )
            )
        try:
            await query.answer(
                results=answers,
                cache_time=0
            )
        except errors.QueryIdInvalid:
            # the inline query expired before we answered; retry with a
            # "nothing found" prompt attached
            await query.answer(
                results=answers,
                cache_time=0,
                switch_pm_text=("Nothing found"),
                switch_pm_parameter="",
            )
# Handler registrations picked up by the bot's plugin loader.
__handlers__ = [
    [
        InlineQueryHandler(
            search
        )
    ]
]
| 39.42 | 269 | 0.624556 | [
"MIT"
] | OxyNotOp/OxyPlayer | plugins/inline.py | 3,963 | Python |
# coding=utf-8
# *** WARNING: this file was generated by the Pulumi Terraform Bridge (tfgen) Tool. ***
# *** Do not edit by hand unless you're certain you know what you are doing! ***
import warnings
import pulumi
import pulumi.runtime
from typing import Any, Mapping, Optional, Sequence, Union, overload
from .. import _utilities
__all__ = [
'SigningJobDestinationArgs',
'SigningJobDestinationS3Args',
'SigningJobRevocationRecordArgs',
'SigningJobSignedObjectArgs',
'SigningJobSignedObjectS3Args',
'SigningJobSourceArgs',
'SigningJobSourceS3Args',
'SigningProfileRevocationRecordArgs',
'SigningProfileSignatureValidityPeriodArgs',
]
@pulumi.input_type
class SigningJobDestinationArgs:
    """Input properties for a signing job's output destination (auto-generated)."""
    def __init__(__self__, *,
                 s3: pulumi.Input['SigningJobDestinationS3Args']):
        """
        :param pulumi.Input['SigningJobDestinationS3Args'] s3: A configuration block describing the S3 Destination object: See S3 Destination below for details.
        """
        pulumi.set(__self__, "s3", s3)
    @property
    @pulumi.getter
    def s3(self) -> pulumi.Input['SigningJobDestinationS3Args']:
        """
        A configuration block describing the S3 Destination object: See S3 Destination below for details.
        """
        return pulumi.get(self, "s3")
    @s3.setter
    def s3(self, value: pulumi.Input['SigningJobDestinationS3Args']):
        pulumi.set(self, "s3", value)
@pulumi.input_type
class SigningJobDestinationS3Args:
    """Input properties for the S3 destination of a signing job (auto-generated)."""
    def __init__(__self__, *,
                 bucket: pulumi.Input[str],
                 prefix: Optional[pulumi.Input[str]] = None):
        """
        :param pulumi.Input[str] bucket: Name of the S3 bucket.
        :param pulumi.Input[str] prefix: An Amazon S3 object key prefix that you can use to limit signed objects keys to begin with the specified prefix.
        """
        pulumi.set(__self__, "bucket", bucket)
        if prefix is not None:
            pulumi.set(__self__, "prefix", prefix)
    @property
    @pulumi.getter
    def bucket(self) -> pulumi.Input[str]:
        """
        Name of the S3 bucket.
        """
        return pulumi.get(self, "bucket")
    @bucket.setter
    def bucket(self, value: pulumi.Input[str]):
        pulumi.set(self, "bucket", value)
    @property
    @pulumi.getter
    def prefix(self) -> Optional[pulumi.Input[str]]:
        """
        An Amazon S3 object key prefix that you can use to limit signed objects keys to begin with the specified prefix.
        """
        return pulumi.get(self, "prefix")
    @prefix.setter
    def prefix(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "prefix", value)
@pulumi.input_type
class SigningJobRevocationRecordArgs:
    """Revocation metadata of a signing job: reason, timestamp and principal
    (auto-generated; all fields optional/output-like)."""
    def __init__(__self__, *,
                 reason: Optional[pulumi.Input[str]] = None,
                 revoked_at: Optional[pulumi.Input[str]] = None,
                 revoked_by: Optional[pulumi.Input[str]] = None):
        if reason is not None:
            pulumi.set(__self__, "reason", reason)
        if revoked_at is not None:
            pulumi.set(__self__, "revoked_at", revoked_at)
        if revoked_by is not None:
            pulumi.set(__self__, "revoked_by", revoked_by)
    @property
    @pulumi.getter
    def reason(self) -> Optional[pulumi.Input[str]]:
        return pulumi.get(self, "reason")
    @reason.setter
    def reason(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "reason", value)
    @property
    @pulumi.getter(name="revokedAt")
    def revoked_at(self) -> Optional[pulumi.Input[str]]:
        return pulumi.get(self, "revoked_at")
    @revoked_at.setter
    def revoked_at(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "revoked_at", value)
    @property
    @pulumi.getter(name="revokedBy")
    def revoked_by(self) -> Optional[pulumi.Input[str]]:
        return pulumi.get(self, "revoked_by")
    @revoked_by.setter
    def revoked_by(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "revoked_by", value)
@pulumi.input_type
class SigningJobSignedObjectArgs:
    """Location of a signing job's signed output objects (auto-generated)."""
    def __init__(__self__, *,
                 s3s: Optional[pulumi.Input[Sequence[pulumi.Input['SigningJobSignedObjectS3Args']]]] = None):
        """
        :param pulumi.Input[Sequence[pulumi.Input['SigningJobSignedObjectS3Args']]] s3s: A configuration block describing the S3 Destination object: See S3 Destination below for details.
        """
        if s3s is not None:
            pulumi.set(__self__, "s3s", s3s)
    @property
    @pulumi.getter
    def s3s(self) -> Optional[pulumi.Input[Sequence[pulumi.Input['SigningJobSignedObjectS3Args']]]]:
        """
        A configuration block describing the S3 Destination object: See S3 Destination below for details.
        """
        return pulumi.get(self, "s3s")
    @s3s.setter
    def s3s(self, value: Optional[pulumi.Input[Sequence[pulumi.Input['SigningJobSignedObjectS3Args']]]]):
        pulumi.set(self, "s3s", value)
@pulumi.input_type
class SigningJobSignedObjectS3Args:
    """S3 location (bucket/key) of a signed object (auto-generated)."""
    def __init__(__self__, *,
                 bucket: Optional[pulumi.Input[str]] = None,
                 key: Optional[pulumi.Input[str]] = None):
        """
        :param pulumi.Input[str] bucket: Name of the S3 bucket.
        :param pulumi.Input[str] key: Key name of the object that contains your unsigned code.
        """
        if bucket is not None:
            pulumi.set(__self__, "bucket", bucket)
        if key is not None:
            pulumi.set(__self__, "key", key)
    @property
    @pulumi.getter
    def bucket(self) -> Optional[pulumi.Input[str]]:
        """
        Name of the S3 bucket.
        """
        return pulumi.get(self, "bucket")
    @bucket.setter
    def bucket(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "bucket", value)
    @property
    @pulumi.getter
    def key(self) -> Optional[pulumi.Input[str]]:
        """
        Key name of the object that contains your unsigned code.
        """
        return pulumi.get(self, "key")
    @key.setter
    def key(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "key", value)
@pulumi.input_type
class SigningJobSourceArgs:
    """Input properties for a signing job's source object (auto-generated)."""
    def __init__(__self__, *,
                 s3: pulumi.Input['SigningJobSourceS3Args']):
        """
        :param pulumi.Input['SigningJobSourceS3Args'] s3: A configuration block describing the S3 Destination object: See S3 Destination below for details.
        """
        pulumi.set(__self__, "s3", s3)
    @property
    @pulumi.getter
    def s3(self) -> pulumi.Input['SigningJobSourceS3Args']:
        """
        A configuration block describing the S3 Destination object: See S3 Destination below for details.
        """
        return pulumi.get(self, "s3")
    @s3.setter
    def s3(self, value: pulumi.Input['SigningJobSourceS3Args']):
        pulumi.set(self, "s3", value)
@pulumi.input_type
class SigningJobSourceS3Args:
    """S3 location (bucket/key/version) of the unsigned source object
    (auto-generated; all three fields are required)."""
    def __init__(__self__, *,
                 bucket: pulumi.Input[str],
                 key: pulumi.Input[str],
                 version: pulumi.Input[str]):
        """
        :param pulumi.Input[str] bucket: Name of the S3 bucket.
        :param pulumi.Input[str] key: Key name of the object that contains your unsigned code.
        :param pulumi.Input[str] version: Version of your source image in your version enabled S3 bucket.
        """
        pulumi.set(__self__, "bucket", bucket)
        pulumi.set(__self__, "key", key)
        pulumi.set(__self__, "version", version)
    @property
    @pulumi.getter
    def bucket(self) -> pulumi.Input[str]:
        """
        Name of the S3 bucket.
        """
        return pulumi.get(self, "bucket")
    @bucket.setter
    def bucket(self, value: pulumi.Input[str]):
        pulumi.set(self, "bucket", value)
    @property
    @pulumi.getter
    def key(self) -> pulumi.Input[str]:
        """
        Key name of the object that contains your unsigned code.
        """
        return pulumi.get(self, "key")
    @key.setter
    def key(self, value: pulumi.Input[str]):
        pulumi.set(self, "key", value)
    @property
    @pulumi.getter
    def version(self) -> pulumi.Input[str]:
        """
        Version of your source image in your version enabled S3 bucket.
        """
        return pulumi.get(self, "version")
    @version.setter
    def version(self, value: pulumi.Input[str]):
        pulumi.set(self, "version", value)
@pulumi.input_type
class SigningProfileRevocationRecordArgs:
    """Revocation metadata of a signing profile: effective date, timestamp
    and principal (auto-generated; all fields optional/output-like)."""
    def __init__(__self__, *,
                 revocation_effective_from: Optional[pulumi.Input[str]] = None,
                 revoked_at: Optional[pulumi.Input[str]] = None,
                 revoked_by: Optional[pulumi.Input[str]] = None):
        if revocation_effective_from is not None:
            pulumi.set(__self__, "revocation_effective_from", revocation_effective_from)
        if revoked_at is not None:
            pulumi.set(__self__, "revoked_at", revoked_at)
        if revoked_by is not None:
            pulumi.set(__self__, "revoked_by", revoked_by)
    @property
    @pulumi.getter(name="revocationEffectiveFrom")
    def revocation_effective_from(self) -> Optional[pulumi.Input[str]]:
        return pulumi.get(self, "revocation_effective_from")
    @revocation_effective_from.setter
    def revocation_effective_from(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "revocation_effective_from", value)
    @property
    @pulumi.getter(name="revokedAt")
    def revoked_at(self) -> Optional[pulumi.Input[str]]:
        return pulumi.get(self, "revoked_at")
    @revoked_at.setter
    def revoked_at(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "revoked_at", value)
    @property
    @pulumi.getter(name="revokedBy")
    def revoked_by(self) -> Optional[pulumi.Input[str]]:
        return pulumi.get(self, "revoked_by")
    @revoked_by.setter
    def revoked_by(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "revoked_by", value)
@pulumi.input_type
class SigningProfileSignatureValidityPeriodArgs:
    """Signature validity period of a signing profile as a (type, value)
    pair, e.g. a unit name and a count (auto-generated)."""
    def __init__(__self__, *,
                 type: pulumi.Input[str],
                 value: pulumi.Input[int]):
        pulumi.set(__self__, "type", type)
        pulumi.set(__self__, "value", value)
    @property
    @pulumi.getter
    def type(self) -> pulumi.Input[str]:
        return pulumi.get(self, "type")
    @type.setter
    def type(self, value: pulumi.Input[str]):
        pulumi.set(self, "type", value)
    @property
    @pulumi.getter
    def value(self) -> pulumi.Input[int]:
        return pulumi.get(self, "value")
    @value.setter
    def value(self, value: pulumi.Input[int]):
        pulumi.set(self, "value", value)
| 32.820122 | 186 | 0.636693 | [
"ECL-2.0",
"Apache-2.0"
] | chivandikwa/pulumi-aws | sdk/python/pulumi_aws/signer/_inputs.py | 10,765 | Python |
import discord
async def get_role_based_on_reputation(self, guild, ranked_amount):
    """Pick the mapper role tier matching the user's ranked-map count:
    10+ experienced, 1+ ranked, otherwise plain mapper."""
    if ranked_amount >= 10:
        setting = "experienced_mapper"
    elif ranked_amount >= 1:
        setting = "ranked_mapper"
    else:
        setting = "mapper"
    return await get_role_from_db(self, setting, guild)
async def get_role_from_db(self, setting, guild):
    """Look up the role id stored for (setting, guild) and resolve it to a
    role object from guild.roles; returns None when the guild has no row
    for this setting."""
    async with self.bot.db.execute("SELECT role_id FROM roles WHERE setting = ? AND guild_id = ?",
                                   [setting, int(guild.id)]) as cursor:
        role_id = await cursor.fetchone()
    if role_id is None:
        # previously this raised TypeError (subscripting None) when no role
        # was configured for the guild
        return None
    return discord.utils.get(guild.roles, id=int(role_id[0]))
| 37.833333 | 98 | 0.675477 | [
"MIT"
] | Kyuunex/Seija | seija/reusables/verification.py | 681 | Python |
from .station import consistant_typical_range_stations
def stations_level_over_threshold(stations: list, tol: float) -> list:
    """Return (station, relative water level) tuples for every station whose
    relative water level is known and exceeds tol."""
    consistent = consistant_typical_range_stations(stations)
    over_threshold = []
    for station in consistent:
        level = station.relative_water_level()
        # stations with no usable level data are skipped
        if level is not None and level > tol:
            over_threshold.append((station, level))
    return over_threshold
def stations_highest_rel_level(stations, N):
    """Return the N stations at greatest risk, ordered from highest to
    lowest relative water level (unknown levels rank as 0)."""
    consistent = consistant_typical_range_stations(stations)
    def risk(station):
        level = station.relative_water_level()
        return level if level is not None else float(0)
    return sorted(consistent, key=risk, reverse=True)[:N]
| 33 | 105 | 0.710438 | [
"MIT"
] | LakeeSiv/Flood | floodsystem/flood.py | 1,188 | Python |
import pytest
import sys
import os
# Point the sentinel code at the test configuration file.
os.environ['SENTINEL_CONFIG'] = os.path.normpath(os.path.join(os.path.dirname(__file__), '../test_sentinel.conf'))
# Make the project's lib/ directory importable from these tests.
sys.path.append(os.path.normpath(os.path.join(os.path.dirname(__file__), '../../lib')))
@pytest.fixture
def valid_parking_address(network='mainnet'):
    """A known-good parking address for the requested network."""
    if network == 'testnet':
        return 'yYe8KwyaUu5YswSYmB3q3ryx8XTUu9y7Ui'
    return 'XpjStRH8SgA6PjgebtPZqCa9y7hLXP767n'
@pytest.fixture
def invalid_parking_address(network='mainnet'):
    """A malformed parking address (bad checksum) for the requested network."""
    if network == 'testnet':
        return 'yYe8KwyaUu5YswSYmB3q3ryx8XTUu9y7Uj'
    return 'XpjStRH8SgA6PjgebtPZqCa9y7hLXP767m'
@pytest.fixture
def current_block_hash():
    """A fixed block hash used to make the election tests deterministic."""
    return '000001c9ba1df5a1c58a4e458fb6febfe9329b1947802cd60a4ae90dd754b534'
@pytest.fixture
def mn_list():
    """Three Masternode objects built from captured `masternodelist full`
    output (vin -> status string)."""
    from masternode import Masternode
    masternodelist_full = {
        u'701854b26809343704ab31d1c45abc08f9f83c5c2bd503a9d5716ef3c0cda857-1': u' ENABLED 70201 yjaFS6dudxUTxYPTDB9BYd1Nv4vMJXm3vK 1474157572 82842 1474152618 71111 52.90.74.124:19999',
        u'f68a2e5d64f4a9be7ff8d0fbd9059dcd3ce98ad7a19a9260d1d6709127ffac56-1': u' ENABLED 70201 yUuAsYCnG5XrjgsGvRwcDqPhgLUnzNfe8L 1474157732 1590425 1474155175 71122 [2604:a880:800:a1::9b:0]:19999',
        u'656695ed867e193490261bea74783f0a39329ff634a10a9fb6f131807eeca744-1': u' ENABLED 70201 yepN97UoBLoP2hzWnwWGRVTcWtw1niKwcB 1474157704 824622 1474152571 71110 178.62.203.249:19999',
    }
    mnlist = [Masternode(vin, mnstring) for (vin, mnstring) in masternodelist_full.items()]
    return mnlist
@pytest.fixture
def mn_status_good():
    """Status payload of a healthy masternode (enabled and running)."""
    return {
        "vin": "CTxIn(COutPoint(f68a2e5d64f4a9be7ff8d0fbd9059dcd3ce98ad7a19a9260d1d6709127ffac56, 1), scriptSig=)",
        "service": "[2604:a880:800:a1::9b:0]:19999",
        "pubkey": "yUuAsYCnG5XrjgsGvRwcDqPhgLUnzNfe8L",
        "status": "Masternode successfully started"
    }
@pytest.fixture
def mn_status_bad():
    """Status payload of a valid masternode that is not yet activated."""
    return {
        "vin": "CTxIn(COutPoint(0000000000000000000000000000000000000000000000000000000000000000, 4294967295), coinbase )",
        "service": "[::]:0",
        "status": "Node just started, not yet activated"
    }
# ========================================================================
def test_valid_parking_address():
    """A good address validates only against its own network."""
    from parkinglib import is_valid_parking_address
    mainnet_addr = valid_parking_address()
    testnet_addr = valid_parking_address('testnet')
    assert is_valid_parking_address(mainnet_addr) is True
    assert is_valid_parking_address(mainnet_addr, 'mainnet') is True
    assert is_valid_parking_address(mainnet_addr, 'testnet') is False
    assert is_valid_parking_address(testnet_addr) is False
    assert is_valid_parking_address(testnet_addr, 'mainnet') is False
    assert is_valid_parking_address(testnet_addr, 'testnet') is True
def test_invalid_parking_address():
    """A malformed address never validates, on any network."""
    from parkinglib import is_valid_parking_address
    mainnet_addr = invalid_parking_address()
    testnet_addr = invalid_parking_address('testnet')
    assert is_valid_parking_address(mainnet_addr) is False
    assert is_valid_parking_address(mainnet_addr, 'mainnet') is False
    assert is_valid_parking_address(mainnet_addr, 'testnet') is False
    assert is_valid_parking_address(testnet_addr) is False
    assert is_valid_parking_address(testnet_addr, 'mainnet') is False
    assert is_valid_parking_address(testnet_addr, 'testnet') is False
def test_deterministic_masternode_elections(current_block_hash, mn_list):
    # NOTE(review): this definition is shadowed by the identical redefinition
    # below and never runs; it also lacked the elect_mn import, so it would
    # have raised NameError if executed. Import added; consider deleting one
    # of the two copies.
    from parkinglib import elect_mn
    winner = elect_mn(block_hash=current_block_hash, mnlist=mn_list)
    assert winner == 'f68a2e5d64f4a9be7ff8d0fbd9059dcd3ce98ad7a19a9260d1d6709127ffac56-1'
    winner = elect_mn(block_hash='00000056bcd579fa3dc9a1ee41e8124a4891dcf2661aa3c07cc582bfb63b52b9', mnlist=mn_list)
    assert winner == '656695ed867e193490261bea74783f0a39329ff634a10a9fb6f131807eeca744-1'
def test_deterministic_masternode_elections(current_block_hash, mn_list):
    """The election winner is a pure function of block hash and mn list."""
    from parkinglib import elect_mn
    first_winner = elect_mn(block_hash=current_block_hash, mnlist=mn_list)
    assert first_winner == 'f68a2e5d64f4a9be7ff8d0fbd9059dcd3ce98ad7a19a9260d1d6709127ffac56-1'
    second_winner = elect_mn(block_hash='00000056bcd579fa3dc9a1ee41e8124a4891dcf2661aa3c07cc582bfb63b52b9', mnlist=mn_list)
    assert second_winner == '656695ed867e193490261bea74783f0a39329ff634a10a9fb6f131807eeca744-1'
def test_parse_masternode_status_vin():
    """parse_masternode_status_vin extracts 'txid-index' from a running
    node's status and yields None for the coinbase placeholder."""
    from parkinglib import parse_masternode_status_vin
    good_status = mn_status_good()
    assert parse_masternode_status_vin(good_status['vin']) == \
        'f68a2e5d64f4a9be7ff8d0fbd9059dcd3ce98ad7a19a9260d1d6709127ffac56-1'
    bad_status = mn_status_bad()
    assert parse_masternode_status_vin(bad_status['vin']) is None
def test_hash_function():
    """hashit() reproduces the known hash of a captured superblock payload."""
    import parkinglib
    sb_data_hex = '7b226576656e745f626c6f636b5f686569676874223a2037323639362c20227061796d656e745f616464726573736573223a2022795965384b77796155753559737753596d42337133727978385854557539793755697c795965384b77796155753559737753596d4233713372797838585455753979375569222c20227061796d656e745f616d6f756e7473223a202232352e37353030303030307c32352e3735303030303030222c202274797065223a20327d'
    expected_hash = '7ae8b02730113382ea75cbb1eecc497c3aa1fdd9e76e875e38617e07fb2cb21a'
    assert "%x" % parkinglib.hashit(sb_data_hex) == expected_hash
def test_blocks_to_seconds():
    """blocks_to_seconds scales linearly with the block count."""
    import parkinglib
    from decimal import Decimal
    precision = Decimal('0.001')
    assert Decimal(parkinglib.blocks_to_seconds(0)) == Decimal(0.0)
    two_blocks = Decimal(parkinglib.blocks_to_seconds(2)).quantize(precision)
    assert two_blocks == Decimal(314.4).quantize(precision)
    assert int(parkinglib.blocks_to_seconds(16616)) == 2612035
| 40.070922 | 380 | 0.777699 | [
"MIT"
] | hexter2018/sentinel | test/unit/test_parkingy_things.py | 5,650 | Python |
# Fetches and displays a basic candlestick app.
import dash
import plotly.graph_objects as go
import plotly.express as px
import dash_core_components as dcc
import dash_html_components as html
from dash_table import DataTable, FormatTemplate
from utils import *
from datetime import date, timedelta
from math import ceil
from backtest import *
from bloomberg_functions import req_historical_data
import numpy as np
from sklearn import linear_model
from statistics import mean
# Instantiate the Dash application (single-page trading-strategy dashboard).
app = dash.Dash(__name__)
# Create the page layout
app.layout = html.Div([
html.H1(
'Trading Strategy Example Template',
style={'display': 'block', 'text-align': 'center'}
),
html.Div([
html.H2('Strategy'),
html.P('This app explores a simple strategy that works as follows:'),
html.Ol([
html.Li([
"While the market is not open, retrieve the past N days' " + \
"worth of data for:",
html.Ul([
html.Li("IVV: daily open, high, low, & close prices"),
html.Li(
"US Treasury CMT Rates for 1 mo, 2 mo, 3 mo, 6 mo, " + \
"1 yr and 2 yr maturities."
)
])
]),
html.Li([
'Fit a linear trend line through the yield curve defined ' + \
'by the CMT rates and record in a dataframe:',
html.Ul([
html.Li('the y-intercept ("a")'),
html.Li('the slope ("b")'),
html.Li('the coefficient of determination ("R^2")')
]),
'...for the fitted line.'
]),
html.Li(
'Repeat 2. for past CMT data to create a FEATURES ' + \
'dataframe containing historical values of a, b, and R^2 '
),
html.Li(
'Add volatility of day-over-day log returns of IVV ' + \
'closing prices -- observed over the past N days -- to ' + \
'each historical data row in the FEATURES dataframe.'
),
html.Li(
'Add RESPONSE data to the historical FEATURES dataframe.' + \
'The RESPONSE data includes information that communicates ' + \
'whether when, and how a limit order to SELL IVV at a ' + \
'price equal to (IVV Open Price of Next Trading Day) * ' + \
'(1 + alpha) would have filled over the next n trading days.'
),
html.Li(
'Using the features a, b, R^2, and IVV vol alongside the ' + \
'RESPONSE data for the past N observed trading days, ' + \
'train a logistic regression. Use it to predict whether a ' + \
'limit order to SELL IVV at a price equal to (IVV Open ' + \
'Price of Next Trading Day) * (1 + alpha) would have ' + \
'filled over the next n trading days.'
),
html.Li(
'If the regression in 6. predicts TRUE, submit two trades:'),
html.Ul([
html.Li(
'A market order to BUY lot_size shares of IVV, which ' + \
'fills at open price the next trading day.'
),
html.Li(
'A limit order to SELL lot_size shares of IVV at ' + \
'(next day\'s opening price * (1+alpha)'
)
]),
html.Li(
'If the limit order does not fill after n days, issue a ' + \
'market order to sell lot_size shares of IVV at close of ' + \
'the nth day.'
)
])
],
style={'display': 'inline-block', 'width': '50%'}
),
html.Div([
html.H2('Data Note & Disclaimer'),
html.P(
'This Dash app makes use of Bloomberg\'s Python API to append ' + \
'the latest historical data to what\'s already provided in the ' + \
'.csv files in the directory \'bbg_data\'. These initial data ' + \
'files were compiled using publicly available information on ' + \
'the Internet and do not contain historical stock market data ' + \
'from Bloomberg. This app does NOT need a Bloomberg ' + \
'subscription to work -- only to update data. Always know and ' + \
'obey your data stewardship obligations!'
),
html.H2('Parameters'),
html.Ol([
html.Li(
"n: number of days a limit order to exit a position is " + \
"kept open"
),
html.Li(
"N: number of observed historical trading days to use in " + \
"training the logistic regression model."
),
html.Li(
'alpha: a percentage in numeric form ' + \
'(e.g., "0.02" == "2%") that defines the profit sought by ' + \
'entering a trade; for example, if IVV is bought at ' + \
'price X, then a limit order to sell the shares will be put' + \
' in place at a price = X*(1+alpha)'
),
html.Li(
'lot_size: number of shares traded in each round-trip ' + \
'trade. Kept constant for simplicity.'
),
html.Li(
'date_range: Date range over which to perform the backtest.'
)
]),
html.Div(
[
html.Div(
[
html.Button(
"RUN BACKTEST", id='run-backtest', n_clicks=0
),
html.Table(
[html.Tr([
html.Th('Alpha'), html.Th('Beta'),
html.Th('Geometric Mean Return'),
html.Th('Average Trades per Year'),
html.Th('Volatility'), html.Th('Sharpe')
])] + [html.Tr([
html.Td(html.Div(id='strategy-alpha')),
html.Td(html.Div(id='strategy-beta')),
html.Td(html.Div(id='strategy-gmrr')),
html.Td(html.Div(id='strategy-trades-per-yr')),
html.Td(html.Div(id='strategy-vol')),
html.Td(html.Div(id='strategy-sharpe'))
])],
className='main-summary-table'
),
html.Table(
# Header
[html.Tr([
html.Th('Date Range'),
html.Th('Bloomberg Identifier'),
html.Th('n'), html.Th('N'), html.Th('alpha'),
html.Th('Lot Size'),
html.Th('Starting Cash')
])] +
# Body
[html.Tr([
html.Td(
dcc.DatePickerRange(
id='hist-data-range',
min_date_allowed=date(2015, 1, 1),
max_date_allowed=date.today(),
initial_visible_month=date.today(),
start_date=date(2019, 3, 16),
end_date=date(2021, 4, 12)
)
),
html.Td(dcc.Input(
id='bbg-identifier-1', type="text",
value="IVV US Equity",
style={'text-align': 'center'}
)),
html.Td(
dcc.Input(
id='lil-n', type="number", value=5,
style={'text-align': 'center',
'width': '30px'}
)
),
html.Td(
dcc.Input(
id='big-N', type="number", value=10,
style={'text-align': 'center',
'width': '50px'}
)
),
html.Td(
dcc.Input(
id="alpha", type="number", value=0.02,
style={'text-align': 'center',
'width': '50px'}
)
),
html.Td(
dcc.Input(
id="lot-size", type="number", value=100,
style={'text-align': 'center',
'width': '50px'}
)
),
html.Td(
dcc.Input(
id="starting-cash", type="number",
value=50000,
style={'text-align': 'center',
'width': '100px'}
)
)
])]
)
],
style={'display': 'inline-block', 'width': '50%'}
)
],
style={'display': 'block'}
)
],
style={
'display': 'inline-block', 'width': '50%', 'vertical-align': 'top'
}
),
##### Intermediate Variables (hidden in divs as JSON) ######################
############################################################################
# Hidden div inside the app that stores IVV historical data
html.Div(id='ivv-hist', style={'display': 'none'}),
# Hidden div inside the app that stores bonds historical data
html.Div(id='bonds-hist', style={'display': 'none'}),
############################################################################
############################################################################
html.Div(
[dcc.Graph(id='alpha-beta')],
style={'display': 'inline-block', 'width': '50%'}
),
# Display the current selected date range
html.Div(id='date-range-output'),
html.Div([
html.H2(
'Trade Ledger',
style={
'display': 'inline-block', 'text-align': 'center',
'width': '100%'
}
),
DataTable(
id='trade-ledger',
fixed_rows={'headers': True},
style_cell={'textAlign': 'center'},
style_table={'height': '300px', 'overflowY': 'auto'}
)
]),
html.Div([
html.Div([
html.H2(
'Calendar Ledger',
style={
'display': 'inline-block', 'width': '45%',
'text-align': 'center'
}
),
html.H2(
'Trade Blotter',
style={
'display': 'inline-block', 'width': '55%',
'text-align': 'center'
}
)
]),
html.Div(
DataTable(
id='calendar-ledger',
fixed_rows={'headers': True},
style_cell={'textAlign': 'center'},
style_table={'height': '300px', 'overflowY': 'auto'}
),
style={'display': 'inline-block', 'width': '45%'}
),
html.Div(
DataTable(
id='blotter',
fixed_rows={'headers': True},
style_cell={'textAlign': 'center'},
style_table={'height': '300px', 'overflowY': 'auto'}
),
style={'display': 'inline-block', 'width': '55%'}
)
]),
html.Div([
html.H2(
'Features and Responses',
style={
'display': 'inline-block', 'text-align': 'center',
'width': '100%'
}
),
DataTable(
id='features-and-responses',
fixed_rows={'headers': True},
style_cell={'textAlign': 'center'},
style_table={'height': '300px', 'overflowY': 'auto'}
)
]),
html.Div([
html.Div(
dcc.Graph(id='bonds-3d-graph', style={'display': 'none'}),
style={'display': 'inline-block', 'width': '50%'}
),
html.Div(
dcc.Graph(id='candlestick', style={'display': 'none'}),
style={'display': 'inline-block', 'width': '50%'}
)
]),
html.Div(id='proposed-trade'),
############################################################################
############################################################################
])
@app.callback(
    #### Update Historical Bloomberg Data
    [dash.dependencies.Output('ivv-hist', 'children'),
     dash.dependencies.Output('date-range-output', 'children'),
     dash.dependencies.Output('candlestick', 'figure'),
     dash.dependencies.Output('candlestick', 'style')],
    dash.dependencies.Input("run-backtest", 'n_clicks'),
    [dash.dependencies.State("bbg-identifier-1", "value"),
     dash.dependencies.State("big-N", "value"),
     dash.dependencies.State("lil-n", "value"),
     dash.dependencies.State('hist-data-range', 'start_date'),
     dash.dependencies.State('hist-data-range', 'end_date')],
    prevent_initial_call=True
)
def update_bbg_data(nclicks, bbg_id_1, N, n, start_date, end_date):
    """Fetch historical OHLC data for the backtest and build the candlestick chart.

    Returns a 4-tuple for the Dash outputs: (historical data as JSON, the
    human-readable date-range message, the candlestick figure, and a style
    dict that un-hides the chart).
    """
    # Pad the query window backwards so that N + n *trading* days of history
    # exist before the first backtest date (365/252 converts trading days to
    # calendar days).
    start_date = pd.to_datetime(start_date).date() - timedelta(
        days=ceil((N + n) * (365 / 252))
    )
    start_date = start_date.strftime("%Y-%m-%d")
    historical_data = req_historical_data(bbg_id_1, start_date, end_date)
    # NOTE(review): the message below shows the padded query start, not the
    # user's originally selected start date -- confirm that is intended.
    date_output_msg = 'Backtesting from '
    dates_shown = False
    if start_date is not None:
        start_date_object = date.fromisoformat(start_date)
        start_date_string = start_date_object.strftime('%B %d, %Y')
        date_output_msg = date_output_msg + 'Start Date: ' + \
                          start_date_string + ' to '
        dates_shown = True
    if end_date is not None:
        end_date_object = date.fromisoformat(end_date)
        end_date_string = end_date_object.strftime('%B %d, %Y')
        date_output_msg = date_output_msg + 'End Date: ' + end_date_string
        dates_shown = True
    # BUG FIX: the fallback previously compared against
    # len('You have selected: '), a stale sentinel that no longer matches the
    # 'Backtesting from ' prefix, so it could never fire.  Track explicitly
    # whether any date was appended instead.
    if not dates_shown:
        date_output_msg = 'Select a date to see it displayed here'
    fig = go.Figure(
        data=[
            go.Candlestick(
                x=historical_data['Date'],
                open=historical_data['Open'],
                high=historical_data['High'],
                low=historical_data['Low'],
                close=historical_data['Close']
            )
        ]
    )
    return historical_data.to_json(), date_output_msg, fig, {'display': 'block'}
@app.callback(
    [dash.dependencies.Output('bonds-hist', 'children'),
     dash.dependencies.Output('bonds-3d-graph', 'figure'),
     dash.dependencies.Output('bonds-3d-graph', 'style')],
    dash.dependencies.Input("run-backtest", 'n_clicks'),
    [dash.dependencies.State('hist-data-range', 'start_date'),
     dash.dependencies.State('hist-data-range', 'end_date'),
     dash.dependencies.State('big-N', 'value'),
     dash.dependencies.State('lil-n', 'value')
     ],
    prevent_initial_call=True
)
def update_bonds_hist(n_clicks, startDate, endDate, N, n):
    """Load US Treasury CMT rates for the backtest window and plot a 3-D surface.

    Returns (bond data as JSON, the surface figure, a style dict that
    un-hides the graph).
    """
    # Pad the window back by enough calendar days to cover N + n trading days
    # of history before the first backtest date.
    padded_start = pd.to_datetime(startDate).date() - timedelta(
        days=ceil((N + n) * (365 / 252))
    )
    startDate = padded_start.strftime("%Y-%m-%d")
    # Fetch one frame per calendar year spanned by the (padded) window.
    first_year = pd.to_datetime(startDate).date().year
    last_year = pd.to_datetime(endDate).date().year
    yearly_frames = [fetch_usdt_rates(yr)
                     for yr in range(first_year, last_year + 1)]
    bonds_data = yearly_frames[0]
    if len(yearly_frames) > 1:
        bonds_data = pd.concat(yearly_frames, axis=0, ignore_index=True)
    # Keep only rows inside the requested date window.
    bonds_data = bonds_data.loc[bonds_data.Date >= pd.to_datetime(startDate)]
    bonds_data = bonds_data.loc[bonds_data.Date <= pd.to_datetime(endDate)]
    # Maturity columns are the ones containing a space (e.g. "3 Mo", "10 Yr").
    maturities = [to_years(col) for col in bonds_data.columns.values
                  if ' ' in col]
    fig = go.Figure(
        data=[
            go.Surface(
                z=bonds_data,
                y=bonds_data.Date,
                x=maturities
            )
        ]
    )
    fig.update_layout(
        scene=dict(
            xaxis_title='Maturity (years)',
            yaxis_title='Date',
            zaxis_title='APR (%)',
            zaxis=dict(ticksuffix='%')
        )
    )
    bonds_data.reset_index(drop=True, inplace=True)
    return bonds_data.to_json(), fig, {'display': 'block'}
@app.callback(
    [
        dash.dependencies.Output('features-and-responses', 'data'),
        dash.dependencies.Output('features-and-responses', 'columns'),
        dash.dependencies.Output('blotter', 'data'),
        dash.dependencies.Output('blotter', 'columns'),
        dash.dependencies.Output('calendar-ledger', 'data'),
        dash.dependencies.Output('calendar-ledger', 'columns'),
        dash.dependencies.Output('trade-ledger', 'data'),
        dash.dependencies.Output('trade-ledger', 'columns')
    ],
    [dash.dependencies.Input('ivv-hist', 'children'),
     dash.dependencies.Input('bonds-hist', 'children'),
     dash.dependencies.Input('lil-n', 'value'),
     dash.dependencies.Input('big-N', 'value'),
     dash.dependencies.Input('alpha', 'value'),
     dash.dependencies.Input('lot-size', 'value'),
     dash.dependencies.Input('starting-cash', 'value'),
     dash.dependencies.State('hist-data-range', 'start_date'),
     dash.dependencies.State('hist-data-range', 'end_date')],
    prevent_initial_call=True
)
def calculate_backtest(ivv_hist, bonds_hist, n, N, alpha, lot_size,
                       starting_cash, start_date, end_date):
    """Run the backtest and format its four result tables for the DataTables.

    Returns (data, columns) pairs, in order, for: features-and-responses,
    blotter, calendar ledger, and trade ledger.
    """
    def _money(col_id, label):
        # Numeric column rendered as currency with 2 decimal places.
        return dict(id=col_id, name=label, type='numeric',
                    format=FormatTemplate.money(2))

    def _pct(col_id, label):
        # Numeric column rendered as a percentage with 3 decimal places.
        return dict(id=col_id, name=label, type='numeric',
                    format=FormatTemplate.percentage(3))

    features_and_responses, blotter, calendar_ledger, trade_ledger = backtest(
        ivv_hist, bonds_hist, n, N, alpha, lot_size, start_date, end_date,
        starting_cash
    )
    features_and_responses_columns = [
        {"name": i, "id": i} for i in features_and_responses.columns
    ]
    blotter_columns = [
        dict(id='ID', name='ID'),
        dict(id='ls', name='long/short'),
        dict(id='submitted', name='Created'),
        dict(id='action', name='Action'),
        dict(id='size', name='Size'),
        dict(id='symbol', name='Symb'),
        _money('price', 'Order Price'),
        dict(id='type', name='Type'),
        dict(id='status', name='Status'),
        _money('fill_price', 'Fill Price'),
        dict(id='filled_or_cancelled', name='Filled/Cancelled')
    ]
    calendar_ledger_columns = [
        dict(id='Date', name='Date'),
        dict(id='position', name='position'),
        _money('ivv_close', 'IVV Close'),
        _money('cash', 'Cash'),
        _money('stock_value', 'Stock Value'),
        _money('total_value', 'Total Value')
    ]
    trade_ledger_columns = [
        dict(id='trade_id', name="ID"),
        dict(id='open_dt', name='Trade Opened'),
        dict(id='close_dt', name='Trade Closed'),
        dict(id='trading_days_open', name='Trading Days Open'),
        _money('buy_price', 'Entry Price'),
        _money('sell_price', 'Exit Price'),
        _money('benchmark_buy_price', 'Benchmark Buy Price'),
        _money('benchmark_sell_price', 'Benchmark sell Price'),
        _pct('trade_rtn', 'Return on Trade'),
        _pct('benchmark_rtn', 'Benchmark Return'),
        _pct('trade_rtn_per_trading_day', 'Trade Rtn / trd day'),
        _pct('benchmark_rtn_per_trading_day', 'Benchmark Rtn / trd day')
    ]
    return (features_and_responses.to_dict('records'),
            features_and_responses_columns,
            blotter.to_dict('records'), blotter_columns,
            calendar_ledger.to_dict('records'), calendar_ledger_columns,
            trade_ledger.to_dict('records'), trade_ledger_columns)
@app.callback(
    [
        dash.dependencies.Output('alpha-beta', 'figure'),
        dash.dependencies.Output('strategy-alpha', 'children'),
        dash.dependencies.Output('strategy-beta', 'children'),
        dash.dependencies.Output('strategy-gmrr', 'children'),
        dash.dependencies.Output('strategy-trades-per-yr', 'children'),
        dash.dependencies.Output('strategy-vol', 'children'),
        dash.dependencies.Output('strategy-sharpe', 'children')
    ],
    dash.dependencies.Input('trade-ledger', 'data'),
    prevent_initial_call=True
)
def update_performance_metrics(trade_ledger):
    """Compute strategy performance statistics from the trade ledger.

    Regresses per-trading-day strategy returns on benchmark returns to get
    alpha/beta, then derives the geometric mean return (GMRR), average
    trades per year, volatility and Sharpe ratio.
    """
    trade_ledger = pd.DataFrame(trade_ledger)
    # NOTE(review): the first row is dropped here -- presumably a
    # seed/placeholder trade; confirm against how backtest() builds it.
    trade_ledger = trade_ledger[1:]
    X = trade_ledger['benchmark_rtn_per_trading_day'].values.reshape(-1, 1)
    linreg_model = linear_model.LinearRegression()
    linreg_model.fit(X, trade_ledger['trade_rtn_per_trading_day'])
    # Line of best fit, for overlay on the scatter plot.
    x_range = np.linspace(X.min(), X.max(), 100)
    y_range = linreg_model.predict(x_range.reshape(-1, 1))
    fig = px.scatter(
        trade_ledger,
        title="Performance against Benchmark",
        x='benchmark_rtn_per_trading_day',
        y='trade_rtn_per_trading_day'
    )
    fig.add_traces(go.Scatter(x=x_range, y=y_range, name='OLS Fit'))
    alpha = str(round(linreg_model.intercept_ * 100, 3)) + "% / trade"
    beta = round(linreg_model.coef_[0], 3)
    # Geometric mean return per trading day (a fraction, not a percent).
    gmrr = (trade_ledger['trade_rtn_per_trading_day'] + 1).product() ** (
        1 / len(
        trade_ledger)) - 1
    avg_trades_per_yr = round(
        trade_ledger['open_dt'].groupby(
            pd.DatetimeIndex(trade_ledger['open_dt']).year
        ).agg('count').mean(),
        0
    )
    vol = stdev(trade_ledger['trade_rtn_per_trading_day'])
    sharpe = round(gmrr / vol, 3)
    # BUG FIX: gmrr and vol are fractions; scale by 100 before appending the
    # "%" suffix, consistent with how alpha is formatted above.
    gmrr_str = str(round(gmrr * 100, 3)) + "% / trade"
    vol_str = str(round(vol * 100, 3)) + "% / trade"
    return fig, alpha, beta, gmrr_str, avg_trades_per_yr, vol_str, sharpe
# Run it!
# Start the Dash development server; debug=True enables hot reload and the
# in-browser error console (not intended for production serving).
if __name__ == '__main__':
    app.run_server(debug=True)
| 41.332231 | 81 | 0.477765 | [
"MIT"
] | SantiLJ/strategy-template | app.py | 25,006 | Python |
from autokeras.generator import DefaultClassifierGenerator
from autokeras.graph import *
from autokeras.net_transformer import legal_graph
from tests.common import get_conv_data, get_add_skip_model, get_conv_dense_model, get_pooling_model, \
get_concat_skip_model
def test_conv_deeper_stub():
    """Deepening at a conv layer must add exactly 4 layers to the graph."""
    g = get_conv_dense_model()
    before = g.n_layers
    g.to_conv_deeper_model(5, 3)
    assert g.n_layers - before == 4
def test_conv_deeper():
    """Conv deepening must approximately preserve the network function."""
    base = get_conv_dense_model()
    original = base.produce_model()
    mutated = deepcopy(base)
    mutated.to_conv_deeper_model(5, 3)
    deepened = mutated.produce_model()
    batch = torch.Tensor(get_conv_data())
    original.eval()
    deepened.eval()
    gap = (original(batch) - deepened(batch)).abs().sum()
    assert gap < 1e-1
def test_dense_deeper_stub():
    """Deepening at a dense layer (unweighted graph) must add 3 layers."""
    g = get_conv_dense_model()
    g.weighted = False
    before = g.n_layers
    g.to_dense_deeper_model(10)
    assert g.n_layers - before == 3
def test_dense_deeper():
    """Dense deepening must preserve the network function almost exactly."""
    base = get_conv_dense_model()
    original = base.produce_model()
    mutated = deepcopy(base)
    mutated.to_dense_deeper_model(10)
    deepened = mutated.produce_model()
    batch = torch.Tensor(get_conv_data())
    original.eval()
    deepened.eval()
    gap = (original(batch) - deepened(batch)).abs().sum()
    assert gap < 1e-4
def test_conv_wider_stub():
    """Widening a conv layer must not change the layer count."""
    g = get_add_skip_model()
    g.weighted = False
    before = g.n_layers
    g.to_wider_model(9, 3)
    assert g.n_layers == before
def test_conv_wider():
    """Widening a conv layer must approximately preserve the network function."""
    base = get_concat_skip_model()
    original = base.produce_model()
    mutated = deepcopy(base)
    mutated.to_wider_model(5, 3)
    widened = mutated.produce_model()
    batch = torch.Tensor(get_conv_data())
    original.eval()
    widened.eval()
    gap = (original(batch) - widened(batch)).abs().sum()
    assert gap < 1e-1
def test_dense_wider_stub():
    """Widening a dense layer (unweighted graph) must not change the layer count."""
    g = get_add_skip_model()
    g.weighted = False
    before = g.n_layers
    g.to_wider_model(32, 3)
    assert g.n_layers == before
def test_dense_wider():
    """Widening a dense layer must preserve the network function almost exactly."""
    base = get_add_skip_model()
    original = base.produce_model()
    mutated = deepcopy(base)
    mutated.to_wider_model(32, 3)
    widened = mutated.produce_model()
    batch = torch.Tensor(get_conv_data())
    original.eval()
    widened.eval()
    gap = (original(batch) - widened(batch)).abs().sum()
    assert gap < 1e-4
def test_skip_add_over_pooling_stub():
    """An additive skip across pooling must add exactly 6 layers."""
    g = get_pooling_model()
    g.weighted = False
    before = g.n_layers
    g.to_add_skip_model(1, 10)
    assert g.n_layers - before == 6
def test_skip_add_over_pooling():
    """An additive skip across pooling must preserve the network function."""
    base = get_pooling_model()
    original = base.produce_model()
    mutated = deepcopy(base)
    mutated.to_add_skip_model(1, 10)
    skipped = mutated.produce_model()
    batch = torch.Tensor(get_conv_data())
    original.eval()
    skipped.eval()
    gap = (original(batch) - skipped(batch)).abs().sum()
    assert gap < 1e-4
def test_skip_concat_over_pooling_stub():
    """A concat skip across pooling must add exactly 6 layers."""
    g = get_pooling_model()
    g.weighted = False
    before = g.n_layers
    g.to_concat_skip_model(1, 14)
    assert g.n_layers - before == 6
def test_skip_concat_over_pooling():
    """Two stacked concat skips across pooling must preserve the function."""
    base = get_pooling_model()
    original = base.produce_model()
    mutated = deepcopy(base)
    # The same skip is applied twice on purpose: the second call exercises
    # adding a skip where one already exists.
    mutated.to_concat_skip_model(5, 10)
    mutated.to_concat_skip_model(5, 10)
    skipped = mutated.produce_model()
    batch = torch.Tensor(get_conv_data())
    original.eval()
    skipped.eval()
    gap = (original(batch) - skipped(batch)).abs().sum()
    assert gap < 1e-4
def test_extract_descriptor_add():
    """The descriptor of the add-skip model reports its structure correctly."""
    desc = get_add_skip_model().extract_descriptor()
    assert desc.n_conv == 5
    assert desc.n_dense == 2
    expected = [(2, 3, NetworkDescriptor.ADD_CONNECT),
                (3, 4, NetworkDescriptor.ADD_CONNECT)]
    assert desc.skip_connections == expected
def test_extract_descriptor_concat():
    """The descriptor of the concat-skip model reports its structure correctly."""
    desc = get_concat_skip_model().extract_descriptor()
    assert desc.n_conv == 5
    assert desc.n_dense == 2
    expected = [(2, 3, NetworkDescriptor.CONCAT_CONNECT),
                (3, 4, NetworkDescriptor.CONCAT_CONNECT)]
    assert desc.skip_connections == expected
def test_deep_layer_ids():
    """The conv-dense model exposes exactly 3 deepening candidate layers."""
    assert len(get_conv_dense_model().deep_layer_ids()) == 3
def test_wide_layer_ids():
    """The conv-dense model exposes exactly 2 widening candidate layers."""
    assert len(get_conv_dense_model().wide_layer_ids()) == 2
def test_skip_connection_layer_ids():
    """The conv-dense model exposes exactly 1 skip-connection candidate layer."""
    assert len(get_conv_dense_model().skip_connection_layer_ids()) == 1
def test_long_transform():
    """A chain of widen/deepen/skip transforms still yields a legal graph."""
    graph = DefaultClassifierGenerator(10, (32, 32, 3)).generate()
    history = [('to_wider_model', 1, 256), ('to_conv_deeper_model', 1, 3),
               ('to_concat_skip_model', 6, 11)]
    for name, *call_args in history:
        getattr(graph, name)(*call_args)
    graph.produce_model()
    assert legal_graph(graph)
def test_node_consistency():
    """Every transform keeps each layer's actual output shape in sync."""
    graph = DefaultClassifierGenerator(10, (32, 32, 3)).generate()

    def assert_shapes_consistent():
        # Every layer's live output must match its declared output_shape.
        for layer in graph.layer_list:
            assert layer.output.shape == layer.output_shape

    assert graph.layer_list[6].output.shape == (16, 16, 64)
    assert_shapes_consistent()
    graph.to_wider_model(6, 64)
    assert graph.layer_list[6].output.shape == (16, 16, 128)
    assert_shapes_consistent()
    graph.to_conv_deeper_model(6, 3)
    assert graph.layer_list[19].output.shape == (16, 16, 128)
    assert_shapes_consistent()
    graph.to_add_skip_model(6, 19)
    assert graph.layer_list[23].output.shape == (16, 16, 128)
    assert_shapes_consistent()
    graph.to_concat_skip_model(6, 19)
    assert graph.layer_list[25].output.shape == (16, 16, 128)
    assert_shapes_consistent()
| 26.99569 | 120 | 0.695513 | [
"MIT"
] | MenEnger/autokeras | tests/test_graph.py | 6,263 | Python |
'''
Revealer
Do you have something to hide?
Secret backup plug-in for the electrum wallet.
Tiago Romagnani Silveira, 2017
'''
import os
import random
import traceback
from decimal import Decimal
from functools import partial
import sys
import qrcode
from PyQt5.QtPrintSupport import QPrinter
from PyQt5.QtCore import Qt, QRectF, QRect, QSizeF, QUrl, QPoint, QSize
from PyQt5.QtGui import (QPixmap, QImage, QBitmap, QPainter, QFontDatabase, QPen, QFont,
QColor, QDesktopServices, qRgba, QPainterPath)
from PyQt5.QtWidgets import (QGridLayout, QVBoxLayout, QHBoxLayout, QLabel,
QPushButton, QLineEdit)
from electrum_vestx.plugin import hook
from electrum_vestx.i18n import _
from electrum_vestx.util import make_dir, InvalidPassword, UserCancelled
from electrum_vestx.gui.qt.util import (read_QIcon, EnterButton, WWLabel, icon_path,
WindowModalDialog, Buttons, CloseButton, OkButton)
from electrum_vestx.gui.qt.qrtextedit import ScanQRTextEdit
from electrum_vestx.gui.qt.main_window import StatusBarButton
from .revealer import RevealerPlugin
class Plugin(RevealerPlugin):
MAX_PLAINTEXT_LEN = 189 # chars
    def __init__(self, parent, config, name):
        """Initialise plugin state and ensure the revealer output dir exists."""
        RevealerPlugin.__init__(self, parent, config, name)
        self.base_dir = os.path.join(config.electrum_path(), 'revealer')
        # Seed printer-calibration offsets in the config on first run.
        if self.config.get('calibration_h') is None:
            self.config.set_key('calibration_h', 0)
        if self.config.get('calibration_v') is None:
            self.config.set_key('calibration_v', 0)
        self.calibration_h = self.config.get('calibration_h')
        self.calibration_v = self.config.get('calibration_v')
        self.f_size = QSize(1014*2, 642*2)  # final rendered size in pixels
        self.abstand_h = 21  # margin divisors ("Abstand" is German for gap)
        self.abstand_v = 34
        self.calibration_noise = int('10' * 128)  # fixed PRNG seed for the calibration sheet
        self.rawnoise = False  # replaced by a QImage once noise is generated
        make_dir(self.base_dir)
        self.extension = False  # True when the seed has a passphrase extension
@hook
def create_status_bar(self, parent):
b = StatusBarButton(read_QIcon('revealer.png'), "Revealer "+_("secret backup utility"),
partial(self.setup_dialog, parent))
parent.addPermanentWidget(b)
    def requires_settings(self):
        """Tell the plugins dialog that this plugin offers a settings widget."""
        return True
    def settings_widget(self, window):
        """Return the settings button (opens the printer-calibration dialog)."""
        return EnterButton(_('Printer Calibration'), partial(self.calibration_dialog, window))
def password_dialog(self, msg=None, parent=None):
from electrum_vestx.gui.qt.password_dialog import PasswordDialog
parent = parent or self
d = PasswordDialog(parent, msg)
return d.run()
def get_seed(self):
password = None
if self.wallet.has_keystore_encryption():
password = self.password_dialog(parent=self.d.parent())
if not password:
raise UserCancelled()
keystore = self.wallet.get_keystore()
if not keystore or not keystore.has_seed():
return
self.extension = bool(keystore.get_passphrase(password))
return keystore.get_seed(password)
    def setup_dialog(self, window):
        """Show the entry dialog: create a new revealer or type an existing code.

        Returns True if the dialog was accepted/closed via its event loop.
        """
        self.wallet = window.parent().wallet
        self.update_wallet_name(self.wallet)
        self.user_input = False  # set True by on_edit when a valid code is typed
        self.d = WindowModalDialog(window, "Setup Dialog")
        self.d.setMinimumWidth(500)
        self.d.setMinimumHeight(210)
        self.d.setMaximumHeight(320)
        self.d.setContentsMargins(11,11,1,1)
        self.hbox = QHBoxLayout(self.d)
        vbox = QVBoxLayout()
        logo = QLabel()
        self.hbox.addWidget(logo)
        logo.setPixmap(QPixmap(icon_path('revealer.png')))
        logo.setAlignment(Qt.AlignLeft)
        self.hbox.addSpacing(16)
        vbox.addWidget(WWLabel("<b>"+_("Revealer Secret Backup Plugin")+"</b><br>"
                              +_("To encrypt your backup, first we need to load some noise.")+"<br/>"))
        vbox.addSpacing(7)
        bcreate = QPushButton(_("Create a new Revealer"))
        bcreate.setMaximumWidth(181)
        bcreate.setDefault(True)
        vbox.addWidget(bcreate, Qt.AlignCenter)
        # Text box for pasting/scanning an existing revealer code.
        self.load_noise = ScanQRTextEdit()
        self.load_noise.setTabChangesFocus(True)
        self.load_noise.textChanged.connect(self.on_edit)
        self.load_noise.setMaximumHeight(33)
        self.hbox.addLayout(vbox)
        vbox.addWidget(WWLabel(_("or type an existing revealer code below and click 'next':")))
        vbox.addWidget(self.load_noise)
        vbox.addSpacing(3)
        # 'Next' stays disabled until on_edit validates the typed code.
        self.next_button = QPushButton(_("Next"), self.d)
        self.next_button.setEnabled(False)
        vbox.addLayout(Buttons(self.next_button))
        self.next_button.clicked.connect(self.d.close)
        self.next_button.clicked.connect(partial(self.cypherseed_dialog, window))
        vbox.addWidget(
            QLabel("<b>" + _("Warning") + "</b>: " + _("Each revealer should be used only once.")
                   +"<br>"+_("more information at <a href=\"https://revealer.cc/faq\">https://revealer.cc/faq</a>")))
        def mk_digital():
            # Generate a fresh revealer; on success continue to encryption.
            try:
                self.make_digital(self.d)
            except Exception:
                self.logger.exception('')
            else:
                self.cypherseed_dialog(window)
        bcreate.clicked.connect(mk_digital)
        return bool(self.d.exec_())
def get_noise(self):
text = self.load_noise.text()
return ''.join(text.split()).lower()
def on_edit(self):
txt = self.get_noise()
versioned_seed = self.get_versioned_seed_from_user_input(txt)
if versioned_seed:
self.versioned_seed = versioned_seed
self.user_input = bool(versioned_seed)
self.next_button.setEnabled(bool(versioned_seed))
    def make_digital(self, dialog):
        """Generate fresh noise + revealer PNG/PDF files, then close the setup dialog."""
        self.make_rawnoise(True)
        self.bdone(dialog)
        self.d.close()
def get_path_to_revealer_file(self, ext: str= '') -> str:
version = self.versioned_seed.version
code_id = self.versioned_seed.checksum
filename = self.filename_prefix + version + "_" + code_id + ext
path = os.path.join(self.base_dir, filename)
return os.path.normcase(os.path.abspath(path))
def get_path_to_calibration_file(self):
path = os.path.join(self.base_dir, 'calibration.pdf')
return os.path.normcase(os.path.abspath(path))
    def bcrypt(self, dialog):
        """Report where the encrypted PNG/PDF pair was written and reset noise.

        NOTE: despite the name, this has nothing to do with the bcrypt KDF;
        it only shows the post-encryption message and clears self.rawnoise.
        """
        self.rawnoise = False
        version = self.versioned_seed.version
        code_id = self.versioned_seed.checksum
        dialog.show_message(''.join([_("{} encrypted for Revealer {}_{} saved as PNG and PDF at: ").format(self.was, version, code_id),
                                     "<b>", self.get_path_to_revealer_file(), "</b>", "<br/>",
                                     "<br/>", "<b>", _("Always check your backups.")]),
                            rich_text=True)
        dialog.close()
    def ext_warning(self, dialog):
        """Warn that a seed extension (passphrase) is NOT part of the backup."""
        dialog.show_message(''.join(["<b>",_("Warning"), ": </b>",
                                     _("your seed extension will <b>not</b> be included in the encrypted backup.")]),
                            rich_text=True)
        dialog.close()
    def bdone(self, dialog):
        """Show where the freshly generated digital revealer files were saved."""
        version = self.versioned_seed.version
        code_id = self.versioned_seed.checksum
        dialog.show_message(''.join([_("Digital Revealer ({}_{}) saved as PNG and PDF at:").format(version, code_id),
                                     "<br/>","<b>", self.get_path_to_revealer_file(), '</b>']),
                            rich_text=True)
def customtxt_limits(self):
txt = self.text.text()
self.max_chars.setVisible(False)
self.char_count.setText(f"({len(txt)}/{self.MAX_PLAINTEXT_LEN})")
if len(txt)>0:
self.ctext.setEnabled(True)
if len(txt) > self.MAX_PLAINTEXT_LEN:
self.text.setPlainText(txt[:self.MAX_PLAINTEXT_LEN])
self.max_chars.setVisible(True)
    def t(self):
        """Slot for the 'Encrypt custom secret' button: encrypt the typed text."""
        self.txt = self.text.text()
        self.seed_img(is_seed=False)
    def warn_old_revealer(self):
        """Warn the user if the loaded code is a vulnerable version-0 revealer."""
        if self.versioned_seed.version == '0':
            link = "https://revealer.cc/revealer-warning-and-upgrade/"
            self.d.show_warning(("<b>{warning}: </b>{ver0}<br>"
                                 "{url}<br>"
                                 "{risk}")
                                .format(warning=_("Warning"),
                                        ver0=_("Revealers starting with 0 are not secure due to a vulnerability."),
                                        url=_("More info at: {}").format(f'<a href="{link}">{link}</a>'),
                                        risk=_("Proceed at your own risk.")),
                                rich_text=True)
    def cypherseed_dialog(self, window):
        """Show the encryption dialog: encrypt the wallet seed or a custom secret.

        Returns True if the dialog was accepted/closed via its event loop.
        """
        self.warn_old_revealer()
        d = WindowModalDialog(window, "Encryption Dialog")
        d.setMinimumWidth(500)
        d.setMinimumHeight(210)
        d.setMaximumHeight(450)
        d.setContentsMargins(11, 11, 1, 1)
        self.c_dialog = d  # kept so later steps (bcrypt/ext_warning) can close it
        hbox = QHBoxLayout(d)
        self.vbox = QVBoxLayout()
        logo = QLabel()
        hbox.addWidget(logo)
        logo.setPixmap(QPixmap(icon_path('revealer.png')))
        logo.setAlignment(Qt.AlignLeft)
        hbox.addSpacing(16)
        self.vbox.addWidget(WWLabel("<b>" + _("Revealer Secret Backup Plugin") + "</b><br>"
                                    + _("Ready to encrypt for revealer {}")
                                    .format(self.versioned_seed.version+'_'+self.versioned_seed.checksum)))
        self.vbox.addSpacing(11)
        hbox.addLayout(self.vbox)
        grid = QGridLayout()
        self.vbox.addLayout(grid)
        # Option 1: encrypt the wallet's own seed.
        cprint = QPushButton(_("Encrypt {}'s seed").format(self.wallet_name))
        cprint.setMaximumWidth(250)
        cprint.clicked.connect(partial(self.seed_img, True))
        self.vbox.addWidget(cprint)
        self.vbox.addSpacing(1)
        # Option 2: encrypt arbitrary user-typed text.
        self.vbox.addWidget(WWLabel("<b>"+_("OR")+"</b> "+_("type a custom alphanumerical secret below:")))
        self.text = ScanQRTextEdit()
        self.text.setTabChangesFocus(True)
        self.text.setMaximumHeight(70)
        self.text.textChanged.connect(self.customtxt_limits)
        self.vbox.addWidget(self.text)
        self.char_count = WWLabel("")
        self.char_count.setAlignment(Qt.AlignRight)
        self.vbox.addWidget(self.char_count)
        self.max_chars = WWLabel("<font color='red'>"
                                 + _("This version supports a maximum of {} characters.").format(self.MAX_PLAINTEXT_LEN)
                                 +"</font>")
        self.vbox.addWidget(self.max_chars)
        self.max_chars.setVisible(False)
        # Disabled until customtxt_limits sees some text.
        self.ctext = QPushButton(_("Encrypt custom secret"))
        self.ctext.clicked.connect(self.t)
        self.vbox.addWidget(self.ctext)
        self.ctext.setEnabled(False)
        self.vbox.addSpacing(11)
        self.vbox.addLayout(Buttons(CloseButton(d)))
        return bool(d.exec_())
    def update_wallet_name(self, name):
        """Remember the wallet name (used in output filenames and messages)."""
        self.wallet_name = str(name)
    def seed_img(self, is_seed = True):
        """Render the secret as a mono image and XOR-encrypt it with the noise.

        When is_seed is True the wallet seed is fetched (may prompt for the
        password); otherwise the previously captured custom text (self.txt)
        is used.  Returns the plaintext QImage, or None if aborted.
        """
        if is_seed:
            try:
                cseed = self.get_seed()
            except UserCancelled:
                return
            except InvalidPassword as e:
                self.d.show_error(str(e))
                return
            if not cseed:
                self.d.show_message(_("This wallet has no seed"))
                return
            txt = cseed.upper()
        else:
            txt = self.txt.upper()
        img = QImage(self.SIZE[0], self.SIZE[1], QImage.Format_Mono)
        bitmap = QBitmap.fromImage(img, Qt.MonoOnly)
        bitmap.fill(Qt.white)
        painter = QPainter()
        painter.begin(bitmap)
        QFontDatabase.addApplicationFont(os.path.join(os.path.dirname(__file__), 'SourceSansPro-Bold.otf') )
        # Layout parameters depend on text length: short secrets get a larger
        # font and fewer, wider lines.
        if len(txt) < 102 :
            fontsize = 15
            linespace = 15
            max_letters = 17
            max_lines = 6
            max_words = 3
        else:
            fontsize = 12
            linespace = 10
            max_letters = 21
            max_lines = 9
            max_words = int(max_letters/4)
        font = QFont('Source Sans Pro', fontsize, QFont.Bold)
        font.setLetterSpacing(QFont.PercentageSpacing, 100)
        font.setPixelSize(fontsize)
        painter.setFont(font)
        seed_array = txt.split(' ')
        # Greedily fit up to max_words per line, shrinking the word count
        # until the line fits within max_letters.
        for n in range(max_lines):
            nwords = max_words
            temp_seed = seed_array[:nwords]
            while len(' '.join(map(str, temp_seed))) > max_letters:
                nwords = nwords - 1
                temp_seed = seed_array[:nwords]
            painter.drawText(QRect(0, linespace*n , self.SIZE[0], self.SIZE[1]), Qt.AlignHCenter, ' '.join(map(str, temp_seed)))
            del seed_array[:nwords]
        painter.end()
        img = bitmap.toImage()
        # NOTE(review): `== False` relies on QImage never comparing equal to
        # False; `is False` would state the intent (no noise generated yet)
        # more precisely.
        if (self.rawnoise == False):
            self.make_rawnoise()
        self.make_cypherseed(img, self.rawnoise, False, is_seed)
        return img
def make_rawnoise(self, create_revealer=False):
if not self.user_input:
self.versioned_seed = self.gen_random_versioned_seed()
assert self.versioned_seed
w, h = self.SIZE
rawnoise = QImage(w, h, QImage.Format_Mono)
noise_map = self.get_noise_map(self.versioned_seed)
for (x,y), pixel in noise_map.items():
rawnoise.setPixel(x, y, pixel)
self.rawnoise = rawnoise
if create_revealer:
self.make_revealer()
def make_calnoise(self):
random.seed(self.calibration_noise)
w, h = self.SIZE
rawnoise = QImage(w, h, QImage.Format_Mono)
for x in range(w):
for y in range(h):
rawnoise.setPixel(x,y,random.randint(0, 1))
self.calnoise = self.pixelcode_2x2(rawnoise)
    def make_revealer(self):
        """Render the physical revealer (inverted 2x2-coded noise) to PNG and PDF."""
        revealer = self.pixelcode_2x2(self.rawnoise)
        revealer.invertPixels()
        revealer = QBitmap.fromImage(revealer)
        revealer = revealer.scaled(self.f_size, Qt.KeepAspectRatio)
        revealer = self.overlay_marks(revealer)
        self.filename_prefix = 'revealer_'
        revealer.save(self.get_path_to_revealer_file('.png'))
        self.toPdf(QImage(revealer))
        # Open the generated PDF in the system viewer.
        QDesktopServices.openUrl(QUrl.fromLocalFile(self.get_path_to_revealer_file('.pdf')))
    def make_cypherseed(self, img, rawnoise, calibration=False, is_seed = True):
        """XOR the plaintext image with the noise and save/visualise the result.

        Unless this is a calibration run, the encrypted image is written out
        as PNG and PDF and a confirmation message is shown.  Returns the
        encrypted QBitmap.
        """
        img = img.convertToFormat(QImage.Format_Mono)
        p = QPainter()
        p.begin(img)
        # Composition mode 26 is RasterOp_SourceXorDestination: this is the
        # one-time-pad step that makes the output unreadable without the noise.
        p.setCompositionMode(26) #xor
        p.drawImage(0, 0, rawnoise)
        p.end()
        cypherseed = self.pixelcode_2x2(img)
        cypherseed = QBitmap.fromImage(cypherseed)
        cypherseed = cypherseed.scaled(self.f_size, Qt.KeepAspectRatio)
        cypherseed = self.overlay_marks(cypherseed, True, calibration)
        # Pick output filename prefix and the label used in messages.
        if not is_seed:
            self.filename_prefix = 'custom_secret_'
            self.was = _('Custom secret')
        else:
            self.filename_prefix = self.wallet_name + '_seed_'
            self.was = self.wallet_name + ' ' + _('seed')
        if self.extension:
            self.ext_warning(self.c_dialog)
        if not calibration:
            self.toPdf(QImage(cypherseed))
            QDesktopServices.openUrl(QUrl.fromLocalFile(self.get_path_to_revealer_file('.pdf')))
            cypherseed.save(self.get_path_to_revealer_file('.png'))
            self.bcrypt(self.c_dialog)
        return cypherseed
    def calibration(self):
        """Generate the calibration noise image, write the PDF and open it."""
        img = QImage(self.SIZE[0], self.SIZE[1], QImage.Format_Mono)
        bitmap = QBitmap.fromImage(img, Qt.MonoOnly)
        bitmap.fill(Qt.black)
        self.make_calnoise()
        img = self.overlay_marks(self.calnoise.scaledToHeight(self.f_size.height()), False, True)
        self.calibration_pdf(img)
        QDesktopServices.openUrl(QUrl.fromLocalFile(self.get_path_to_calibration_file()))
        return img
    def toPdf(self, image):
        """Write *image* to the revealer PDF (A4, 600 dpi), calibration-corrected."""
        printer = QPrinter()
        printer.setPaperSize(QSizeF(210, 297), QPrinter.Millimeter)  # A4
        printer.setResolution(600)
        printer.setOutputFormat(QPrinter.PdfFormat)
        printer.setOutputFileName(self.get_path_to_revealer_file('.pdf'))
        printer.setPageMargins(0,0,0,0,6)
        painter = QPainter()
        painter.begin(printer)
        # Stretch the image slightly to compensate for the printer's scaling
        # error, as measured by the user with the calibration sheet.
        # NOTE(review): the 2028/1284 constants look like the nominal printed
        # size in device pixels -- confirm against the calibration sheet.
        delta_h = round(image.width()/self.abstand_v)
        delta_v = round(image.height()/self.abstand_h)
        size_h = 2028+((int(self.calibration_h)*2028/(2028-(delta_h*2)+int(self.calibration_h)))/2)
        size_v = 1284+((int(self.calibration_v)*1284/(1284-(delta_v*2)+int(self.calibration_v)))/2)
        image = image.scaled(size_h, size_v)
        painter.drawImage(553,533, image)
        # Rounded cut-line frame around the printed image.
        wpath = QPainterPath()
        wpath.addRoundedRect(QRectF(553,533, size_h, size_v), 19, 19)
        painter.setPen(QPen(Qt.black, 1))
        painter.drawPath(wpath)
        painter.end()
    def calibration_pdf(self, image):
        """Write the printer-calibration sheet (noise image + instructions) as PDF."""
        printer = QPrinter()
        printer.setPaperSize(QSizeF(210, 297), QPrinter.Millimeter)  # A4
        printer.setResolution(600)
        printer.setOutputFormat(QPrinter.PdfFormat)
        printer.setOutputFileName(self.get_path_to_calibration_file())
        printer.setPageMargins(0,0,0,0,6)
        painter = QPainter()
        painter.begin(printer)
        painter.drawImage(553,533, image)
        font = QFont('Source Sans Pro', 10, QFont.Bold)
        painter.setFont(font)
        painter.drawText(254,277, _("Calibration sheet"))
        font = QFont('Source Sans Pro', 7, QFont.Bold)
        painter.setFont(font)
        painter.drawText(600,2077, _("Instructions:"))
        font = QFont('Source Sans Pro', 7, QFont.Normal)
        painter.setFont(font)
        painter.drawText(700, 2177, _("1. Place this paper on a flat and well iluminated surface."))
        painter.drawText(700, 2277, _("2. Align your Revealer borderlines to the dashed lines on the top and left."))
        painter.drawText(700, 2377, _("3. Press slightly the Revealer against the paper and read the numbers that best "
                                      "match on the opposite sides. "))
        painter.drawText(700, 2477, _("4. Type the numbers in the software"))
        painter.end()
def pixelcode_2x2(self, img):
result = QImage(img.width()*2, img.height()*2, QImage.Format_ARGB32 )
white = qRgba(255,255,255,0)
black = qRgba(0,0,0,255)
for x in range(img.width()):
for y in range(img.height()):
c = img.pixel(QPoint(x,y))
colors = QColor(c).getRgbF()
if colors[0]:
result.setPixel(x*2+1,y*2+1, black)
result.setPixel(x*2,y*2+1, white)
result.setPixel(x*2+1,y*2, white)
result.setPixel(x*2, y*2, black)
else:
result.setPixel(x*2+1,y*2+1, white)
result.setPixel(x*2,y*2+1, black)
result.setPixel(x*2+1,y*2, black)
result.setPixel(x*2, y*2, white)
return result
    def overlay_marks(self, img, is_cseed=False, calibration_sheet=False):
        """Compose the final printable sheet around the noise image *img*.

        Draws *img* centered on a white canvas of ``self.f_size`` with frame
        lines, alignment crosshairs and mode-specific decorations:

        - ``is_cseed=True``: the secret share -- dashed cut lines, logo and
          the version+checksum text;
        - default: the revealer share -- solid cut lines, logo, the full
          version+seed string, checksum and a QR code;
        - ``calibration_sheet=True``: only the frame, plus numbered
          calibration tick marks on an enlarged canvas.

        Returns the finished QImage.
        """
        border_color = Qt.white
        base_img = QImage(self.f_size.width(),self.f_size.height(), QImage.Format_ARGB32)
        base_img.fill(border_color)
        img = QImage(img)
        painter = QPainter()
        painter.begin(base_img)
        # Margin between canvas edge and image; derived from the configured
        # spacing (abstand_v -- presumably "distance"; verify).
        total_distance_h = round(base_img.width() / self.abstand_v)
        dist_v = round(total_distance_h) / 2
        dist_h = round(total_distance_h) / 2
        img = img.scaledToWidth(base_img.width() - (2 * (total_distance_h)))
        painter.drawImage(total_distance_h,
                          total_distance_h,
                          img)
        #frame around image
        pen = QPen(Qt.black, 2)
        painter.setPen(pen)
        #horz
        painter.drawLine(0, total_distance_h, base_img.width(), total_distance_h)
        painter.drawLine(0, base_img.height()-(total_distance_h), base_img.width(), base_img.height()-(total_distance_h))
        #vert
        painter.drawLine(total_distance_h, 0, total_distance_h, base_img.height())
        painter.drawLine(base_img.width()-(total_distance_h), 0, base_img.width()-(total_distance_h), base_img.height())
        #border around img
        border_thick = 6
        Rpath = QPainterPath()
        Rpath.addRect(QRectF((total_distance_h)+(border_thick/2),
                             (total_distance_h)+(border_thick/2),
                             base_img.width()-((total_distance_h)*2)-((border_thick)-1),
                             (base_img.height()-((total_distance_h))*2)-((border_thick)-1)))
        pen = QPen(Qt.black, border_thick)
        pen.setJoinStyle (Qt.MiterJoin)
        painter.setPen(pen)
        painter.drawPath(Rpath)
        Bpath = QPainterPath()
        Bpath.addRect(QRectF((total_distance_h), (total_distance_h),
                             base_img.width()-((total_distance_h)*2), (base_img.height()-((total_distance_h))*2)))
        pen = QPen(Qt.black, 1)
        painter.setPen(pen)
        painter.drawPath(Bpath)
        # Short crosshair ticks at the midpoint of each edge, for alignment.
        pen = QPen(Qt.black, 1)
        painter.setPen(pen)
        painter.drawLine(0, base_img.height()/2, total_distance_h, base_img.height()/2)
        painter.drawLine(base_img.width()/2, 0, base_img.width()/2, total_distance_h)
        painter.drawLine(base_img.width()-total_distance_h, base_img.height()/2, base_img.width(), base_img.height()/2)
        painter.drawLine(base_img.width()/2, base_img.height(), base_img.width()/2, base_img.height() - total_distance_h)
        #print code
        f_size = 37
        QFontDatabase.addApplicationFont(os.path.join(os.path.dirname(__file__), 'DejaVuSansMono-Bold.ttf'))
        font = QFont("DejaVu Sans Mono", f_size-11, QFont.Bold)
        font.setPixelSize(35)
        painter.setFont(font)
        if not calibration_sheet:
            if is_cseed: # the secret (cseed) sheet
                # Dashed lines mark where the paper is to be cut.
                painter.setPen(QPen(Qt.black, 1, Qt.DashDotDotLine))
                painter.drawLine(0, dist_v, base_img.width(), dist_v)
                painter.drawLine(dist_h, 0, dist_h, base_img.height())
                painter.drawLine(0, base_img.height()-dist_v, base_img.width(), base_img.height()-(dist_v))
                painter.drawLine(base_img.width()-(dist_h), 0, base_img.width()-(dist_h), base_img.height())
                painter.drawImage(((total_distance_h))+11, ((total_distance_h))+11,
                                  QImage(icon_path('electrumb.png')).scaledToWidth(2.1*(total_distance_h), Qt.SmoothTransformation))
                # Wide white stroke blanks out noise behind the text below.
                painter.setPen(QPen(Qt.white, border_thick*8))
                painter.drawLine(base_img.width()-((total_distance_h))-(border_thick*8)/2-(border_thick/2)-2,
                                 (base_img.height()-((total_distance_h)))-((border_thick*8)/2)-(border_thick/2)-2,
                                 base_img.width()-((total_distance_h))-(border_thick*8)/2-(border_thick/2)-2 - 77,
                                 (base_img.height()-((total_distance_h)))-((border_thick*8)/2)-(border_thick/2)-2)
                painter.setPen(QColor(0,0,0,255))
                painter.drawText(QRect(0, base_img.height()-107, base_img.width()-total_distance_h - border_thick - 11,
                                       base_img.height()-total_distance_h - border_thick), Qt.AlignRight,
                                 self.versioned_seed.version + '_'+self.versioned_seed.checksum)
                painter.end()
            else: # revealer
                # Paint over then re-draw the cut lines, so they are solid
                # (not dashed) on the revealer sheet.
                painter.setPen(QPen(border_color, 17))
                painter.drawLine(0, dist_v, base_img.width(), dist_v)
                painter.drawLine(dist_h, 0, dist_h, base_img.height())
                painter.drawLine(0, base_img.height()-dist_v, base_img.width(), base_img.height()-(dist_v))
                painter.drawLine(base_img.width()-(dist_h), 0, base_img.width()-(dist_h), base_img.height())
                painter.setPen(QPen(Qt.black, 2))
                painter.drawLine(0, dist_v, base_img.width(), dist_v)
                painter.drawLine(dist_h, 0, dist_h, base_img.height())
                painter.drawLine(0, base_img.height()-dist_v, base_img.width(), base_img.height()-(dist_v))
                painter.drawLine(base_img.width()-(dist_h), 0, base_img.width()-(dist_h), base_img.height())
                logo = QImage(icon_path('revealer_c.png')).scaledToWidth(1.3*(total_distance_h))
                # NOTE(review): in this drawImage(x, y, image, ...) overload the 4th
                # positional argument is sx (source x offset), so passing
                # Qt.SmoothTransformation here likely does not enable smoothing --
                # verify against the QPainter.drawImage overload list.
                painter.drawImage((total_distance_h)+ (border_thick), ((total_distance_h))+ (border_thick), logo, Qt.SmoothTransformation)
                #frame around logo
                painter.setPen(QPen(Qt.black, border_thick))
                painter.drawLine(total_distance_h+border_thick, total_distance_h+logo.height()+3*(border_thick/2),
                                 total_distance_h+logo.width()+border_thick, total_distance_h+logo.height()+3*(border_thick/2))
                painter.drawLine(logo.width()+total_distance_h+3*(border_thick/2), total_distance_h+(border_thick),
                                 total_distance_h+logo.width()+3*(border_thick/2), total_distance_h+logo.height()+(border_thick))
                #frame around code/qr
                qr_size = 179
                painter.drawLine((base_img.width()-((total_distance_h))-(border_thick/2)-2)-qr_size,
                                 (base_img.height()-((total_distance_h)))-((border_thick*8))-(border_thick/2)-2,
                                 (base_img.width()/2+(total_distance_h/2)-border_thick-(border_thick*8)/2)-qr_size,
                                 (base_img.height()-((total_distance_h)))-((border_thick*8))-(border_thick/2)-2)
                painter.drawLine((base_img.width()/2+(total_distance_h/2)-border_thick-(border_thick*8)/2)-qr_size,
                                 (base_img.height()-((total_distance_h)))-((border_thick*8))-(border_thick/2)-2,
                                 base_img.width()/2 + (total_distance_h/2)-border_thick-(border_thick*8)/2-qr_size,
                                 ((base_img.height()-((total_distance_h)))-(border_thick/2)-2))
                # White band behind the seed/checksum text.
                painter.setPen(QPen(Qt.white, border_thick * 8))
                painter.drawLine(
                    base_img.width() - ((total_distance_h)) - (border_thick * 8) / 2 - (border_thick / 2) - 2,
                    (base_img.height() - ((total_distance_h))) - ((border_thick * 8) / 2) - (border_thick / 2) - 2,
                    base_img.width() / 2 + (total_distance_h / 2) - border_thick - qr_size,
                    (base_img.height() - ((total_distance_h))) - ((border_thick * 8) / 2) - (border_thick / 2) - 2)
                painter.setPen(QColor(0,0,0,255))
                painter.drawText(QRect(((base_img.width()/2) +21)-qr_size, base_img.height()-107,
                                       base_img.width()-total_distance_h - border_thick -93,
                                       base_img.height()-total_distance_h - border_thick), Qt.AlignLeft, self.versioned_seed.get_ui_string_version_plus_seed())
                painter.drawText(QRect(0, base_img.height()-107, base_img.width()-total_distance_h - border_thick -3 -qr_size,
                                       base_img.height()-total_distance_h - border_thick), Qt.AlignRight, self.versioned_seed.checksum)
                # draw qr code
                qr_qt = self.paintQR(self.versioned_seed.get_ui_string_version_plus_seed()
                                     + self.versioned_seed.checksum)
                target = QRectF(base_img.width()-65-qr_size,
                                base_img.height()-65-qr_size,
                                qr_size, qr_size )
                painter.drawImage(target, qr_qt)
                # L-shaped frame on the top/left of the QR code.
                painter.setPen(QPen(Qt.black, 4))
                painter.drawLine(base_img.width()-65-qr_size,
                                 base_img.height()-65-qr_size,
                                 base_img.width() - 65 - qr_size,
                                 (base_img.height() - ((total_distance_h))) - ((border_thick * 8)) - (border_thick / 2) - 4
                                 )
                painter.drawLine(base_img.width()-65-qr_size,
                                 base_img.height()-65-qr_size,
                                 base_img.width() - 65,
                                 base_img.height()-65-qr_size
                                 )
                painter.end()
        else: # calibration only
            painter.end()
            # Calibration sheet gets a larger canvas so the numbered tick
            # scales fit outside the frame.
            cal_img = QImage(self.f_size.width() + 100, self.f_size.height() + 100,
                             QImage.Format_ARGB32)
            cal_img.fill(Qt.white)
            cal_painter = QPainter()
            cal_painter.begin(cal_img)
            cal_painter.drawImage(0,0, base_img)
            #black lines in the middle of border top left only
            cal_painter.setPen(QPen(Qt.black, 1, Qt.DashDotDotLine))
            cal_painter.drawLine(0, dist_v, base_img.width(), dist_v)
            cal_painter.drawLine(dist_h, 0, dist_h, base_img.height())
            pen = QPen(Qt.black, 2, Qt.DashDotDotLine)
            cal_painter.setPen(pen)
            n=15
            cal_painter.setFont(QFont("DejaVu Sans Mono", 21, QFont.Bold))
            for x in range(-n,n):
                #lines on bottom (vertical calibration)
                cal_painter.drawLine((((base_img.width())/(n*2)) *(x))+ (base_img.width()/2)-13,
                                     x+2+base_img.height()-(dist_v),
                                     (((base_img.width())/(n*2)) *(x))+ (base_img.width()/2)+13,
                                     x+2+base_img.height()-(dist_v))
                # num_pos nudges the label left so multi-digit / negative
                # numbers stay centered under their tick.
                num_pos = 9
                if x > 9 : num_pos = 17
                if x < 0 : num_pos = 20
                if x < -9: num_pos = 27
                cal_painter.drawText((((base_img.width())/(n*2)) *(x))+ (base_img.width()/2)-num_pos,
                                     50+base_img.height()-(dist_v),
                                     str(x))
                #lines on the right (horizontal calibrations)
                cal_painter.drawLine(x+2+(base_img.width()-(dist_h)),
                                     ((base_img.height()/(2*n)) *(x))+ (base_img.height()/n)+(base_img.height()/2)-13,
                                     x+2+(base_img.width()-(dist_h)),
                                     ((base_img.height()/(2*n)) *(x))+ (base_img.height()/n)+(base_img.height()/2)+13)
                cal_painter.drawText(30+(base_img.width()-(dist_h)),
                                     ((base_img.height()/(2*n)) *(x))+ (base_img.height()/2)+13, str(x))
            cal_painter.end()
            base_img = cal_img
        return base_img
def paintQR(self, data):
if not data:
return
qr = qrcode.QRCode()
qr.add_data(data)
matrix = qr.get_matrix()
k = len(matrix)
border_color = Qt.white
base_img = QImage(k * 5, k * 5, QImage.Format_ARGB32)
base_img.fill(border_color)
qrpainter = QPainter()
qrpainter.begin(base_img)
boxsize = 5
size = k * boxsize
left = (base_img.width() - size)/2
top = (base_img.height() - size)/2
qrpainter.setBrush(Qt.black)
qrpainter.setPen(Qt.black)
for r in range(k):
for c in range(k):
if matrix[r][c]:
qrpainter.drawRect(left+c*boxsize, top+r*boxsize, boxsize - 1, boxsize - 1)
qrpainter.end()
return base_img
    def calibration_dialog(self, window):
        """Show the modal printer-calibration dialog and persist entered values.

        Lets the user open the calibration PDF, then stores the two integer
        corrections under the 'calibration_h' / 'calibration_v' config keys.
        """
        d = WindowModalDialog(window, _("Revealer - Printer calibration settings"))
        d.setMinimumSize(100, 200)
        vbox = QVBoxLayout(d)
        vbox.addWidget(QLabel(''.join(["<br/>", _("If you have an old printer, or want optimal precision"),"<br/>",
                            _("print the calibration pdf and follow the instructions "), "<br/>","<br/>",
                            ])))
        # Pre-fill the fields with the currently configured values.
        self.calibration_h = self.config.get('calibration_h')
        self.calibration_v = self.config.get('calibration_v')
        cprint = QPushButton(_("Open calibration pdf"))
        cprint.clicked.connect(self.calibration)
        vbox.addWidget(cprint)
        vbox.addWidget(QLabel(_('Calibration values:')))
        grid = QGridLayout()
        vbox.addLayout(grid)
        grid.addWidget(QLabel(_('Right side')), 0, 0)
        horizontal = QLineEdit()
        horizontal.setText(str(self.calibration_h))
        grid.addWidget(horizontal, 0, 1)
        grid.addWidget(QLabel(_('Bottom')), 1, 0)
        vertical = QLineEdit()
        vertical.setText(str(self.calibration_v))
        grid.addWidget(vertical, 1, 1)
        vbox.addStretch()
        vbox.addSpacing(13)
        vbox.addLayout(Buttons(CloseButton(d), OkButton(d)))
        # Bail out without saving when the dialog is dismissed/cancelled.
        if not d.exec_():
            return
        # NOTE(review): int(Decimal(...)) raises on empty or non-numeric
        # input -- there is no validation here.
        self.calibration_h = int(Decimal(horizontal.text()))
        self.config.set_key('calibration_h', self.calibration_h)
        self.calibration_v = int(Decimal(vertical.text()))
        self.config.set_key('calibration_v', self.calibration_v)
| 43.717848 | 159 | 0.584877 | [
"MIT"
] | anonymouszar/electrum-vestx | electrum_vestx/plugins/revealer/qt.py | 33,313 | Python |
# coding=utf-8
# *** WARNING: this file was generated by the Pulumi SDK Generator. ***
# *** Do not edit by hand unless you're certain you know what you are doing! ***
import warnings
import pulumi
import pulumi.runtime
from typing import Any, Mapping, Optional, Sequence, Union
from ... import _utilities, _tables
from . import outputs
__all__ = [
'GetNetworkInterfaceTapConfigurationResult',
'AwaitableGetNetworkInterfaceTapConfigurationResult',
'get_network_interface_tap_configuration',
]
@pulumi.output_type
class GetNetworkInterfaceTapConfigurationResult:
    """
    Tap configuration in a Network Interface.
    """
    def __init__(__self__, etag=None, name=None, provisioning_state=None, type=None, virtual_network_tap=None):
        # NOTE: the `if value and not isinstance(...)` pattern skips type
        # validation for falsy values -- None (and '') pass through unchecked.
        if etag and not isinstance(etag, str):
            raise TypeError("Expected argument 'etag' to be a str")
        pulumi.set(__self__, "etag", etag)
        if name and not isinstance(name, str):
            raise TypeError("Expected argument 'name' to be a str")
        pulumi.set(__self__, "name", name)
        if provisioning_state and not isinstance(provisioning_state, str):
            raise TypeError("Expected argument 'provisioning_state' to be a str")
        pulumi.set(__self__, "provisioning_state", provisioning_state)
        if type and not isinstance(type, str):
            raise TypeError("Expected argument 'type' to be a str")
        pulumi.set(__self__, "type", type)
        if virtual_network_tap and not isinstance(virtual_network_tap, dict):
            raise TypeError("Expected argument 'virtual_network_tap' to be a dict")
        pulumi.set(__self__, "virtual_network_tap", virtual_network_tap)
    @property
    @pulumi.getter
    def etag(self) -> str:
        """
        A unique read-only string that changes whenever the resource is updated.
        """
        return pulumi.get(self, "etag")
    @property
    @pulumi.getter
    def name(self) -> Optional[str]:
        """
        The name of the resource that is unique within a resource group. This name can be used to access the resource.
        """
        return pulumi.get(self, "name")
    @property
    @pulumi.getter(name="provisioningState")
    def provisioning_state(self) -> str:
        """
        The provisioning state of the network interface tap configuration resource.
        """
        return pulumi.get(self, "provisioning_state")
    @property
    @pulumi.getter
    def type(self) -> str:
        """
        Sub Resource type.
        """
        return pulumi.get(self, "type")
    @property
    @pulumi.getter(name="virtualNetworkTap")
    def virtual_network_tap(self) -> Optional['outputs.VirtualNetworkTapResponse']:
        """
        The reference to the Virtual Network Tap resource.
        """
        return pulumi.get(self, "virtual_network_tap")
class AwaitableGetNetworkInterfaceTapConfigurationResult(GetNetworkInterfaceTapConfigurationResult):
    """Awaitable variant of the result type, so callers may ``await`` the lookup."""
    # pylint: disable=using-constant-test
    def __await__(self):
        # The unreachable `yield` makes this method a generator (and therefore
        # awaitable); awaiting it resolves immediately to a plain result copy.
        if False:
            yield self
        return GetNetworkInterfaceTapConfigurationResult(
            etag=self.etag,
            name=self.name,
            provisioning_state=self.provisioning_state,
            type=self.type,
            virtual_network_tap=self.virtual_network_tap)
def get_network_interface_tap_configuration(network_interface_name: Optional[str] = None,
                                            resource_group_name: Optional[str] = None,
                                            tap_configuration_name: Optional[str] = None,
                                            opts: Optional[pulumi.InvokeOptions] = None) -> AwaitableGetNetworkInterfaceTapConfigurationResult:
    """
    Use this data source to access information about an existing resource.

    :param str network_interface_name: The name of the network interface.
    :param str resource_group_name: The name of the resource group.
    :param str tap_configuration_name: The name of the tap configuration.
    """
    # Invoke arguments use the provider's camelCase wire names.
    __args__ = {
        'networkInterfaceName': network_interface_name,
        'resourceGroupName': resource_group_name,
        'tapConfigurationName': tap_configuration_name,
    }
    if opts is None:
        opts = pulumi.InvokeOptions()
    if opts.version is None:
        opts.version = _utilities.get_version()
    __ret__ = pulumi.runtime.invoke('azure-nextgen:network/v20200301:getNetworkInterfaceTapConfiguration', __args__, opts=opts, typ=GetNetworkInterfaceTapConfigurationResult).value
    # Re-wrap the plain result in its awaitable counterpart.
    return AwaitableGetNetworkInterfaceTapConfigurationResult(
        etag=__ret__.etag,
        name=__ret__.name,
        provisioning_state=__ret__.provisioning_state,
        type=__ret__.type,
        virtual_network_tap=__ret__.virtual_network_tap)
"Apache-2.0"
] | test-wiz-sec/pulumi-azure-nextgen | sdk/python/pulumi_azure_nextgen/network/v20200301/get_network_interface_tap_configuration.py | 4,809 | Python |
# -*- coding: utf-8 -*-
import codecs
import re
import sys
from distutils.core import setup
import os
# Refuse to install on unsupported interpreters.
if sys.version_info < (3, 5, 0):
    raise RuntimeError("aio-space-track-api requires Python 3.5.0+")
# Directory containing this setup.py; files below are resolved against it.
PROJECT_DIR = os.path.abspath(os.path.dirname(__file__))
# Captures the version from a line like: __version__ = '1.2.3'
VERSION_REGEXP = re.compile(r"^__version__ = [\'\"](.+?)[\'\"]$", re.MULTILINE)
def read(fn):
    """Return the stripped UTF-8 text of *fn*, resolved relative to PROJECT_DIR.

    Uses the built-in ``open`` instead of ``codecs.open``: the builtin applies
    universal newline translation, whereas ``codecs.open`` would leave ``\\r``
    characters in a CRLF-formatted file and break VERSION_REGEXP's
    ``$``-anchored match.
    """
    with open(os.path.join(PROJECT_DIR, fn), encoding='utf-8') as f:
        return f.read().strip()
def version():
    """Extract the package version from ``aio_space_track_api/__init__.py``.

    Raises RuntimeError when no ``__version__`` assignment is found.
    """
    init_text = read(os.path.join('aio_space_track_api', '__init__.py'))
    try:
        return VERSION_REGEXP.findall(init_text)[0]
    except IndexError:
        raise RuntimeError('Unable to determine version.')
# Single source of truth for the version: read it out of the package itself.
vn = version()
url = 'https://github.com/nkoshell/aio-space-track-api'
setup(
    name='aio-space-track-api',
    description='Small async wrapper for "space-track-api" package.',
    long_description=read('README.rst'),
    version=vn,
    packages=['aio_space_track_api'],
    url=url,
    # GitHub serves a tarball for every tag matching the version string.
    download_url='{url}/archive/{version}.tar.gz'.format(url=url, version=vn),
    license='MIT',
    author='NikitaKoshelev',
    author_email='[email protected]',
    install_requires=['aiohttp>=2.0.7', 'space-track-api>=1.0.2'],
)
| 26.866667 | 98 | 0.671629 | [
"MIT"
] | NikitaKoshelev/aio-space-track-api | setup.py | 1,209 | Python |
# https://github.com/facebookresearch/torchbeast/blob/master/torchbeast/core/environment.py
import numpy as np
from collections import deque
import gym
from gym import spaces
import cv2
cv2.ocl.setUseOpenCL(False)
class NoopResetEnv(gym.Wrapper):
    def __init__(self, env, noop_max=30):
        """Sample initial states by taking random number of no-ops on reset.
        No-op is assumed to be action 0.

        :param env: the Atari env to wrap.
        :param noop_max: upper bound (inclusive) on the sampled no-op count.
        """
        gym.Wrapper.__init__(self, env)
        self.noop_max = noop_max
        # When set, tests can pin the exact number of no-ops instead of sampling.
        self.override_num_noops = None
        self.noop_action = 0
        assert env.unwrapped.get_action_meanings()[0] == 'NOOP'
    def reset(self, **kwargs):
        """ Do no-op action for a number of steps in [1, noop_max]."""
        self.env.reset(**kwargs)
        if self.override_num_noops is not None:
            noops = self.override_num_noops
        else:
            # Use the env's own RNG so seeding the env makes this reproducible.
            noops = self.unwrapped.np_random.randint(1, self.noop_max + 1) #pylint: disable=E1101
        assert noops > 0
        obs = None
        for _ in range(noops):
            obs, _, done, _ = self.env.step(self.noop_action)
            if done:
                # Episode ended during the warm-up no-ops; start over.
                obs = self.env.reset(**kwargs)
        return obs
    def step(self, ac):
        return self.env.step(ac)
class FireResetEnv(gym.Wrapper):
    """Press FIRE (then one more action) after every reset, for games that
    stay idle until the fire button starts them."""
    def __init__(self, env):
        gym.Wrapper.__init__(self, env)
        meanings = env.unwrapped.get_action_meanings()
        assert meanings[1] == 'FIRE'
        assert len(meanings) >= 3
    def reset(self, **kwargs):
        self.env.reset(**kwargs)
        # Take actions 1 then 2; restart the episode if either ends it.
        for starter_action in (1, 2):
            obs, _, done, _ = self.env.step(starter_action)
            if done:
                self.env.reset(**kwargs)
        return obs
    def step(self, ac):
        return self.env.step(ac)
class EpisodicLifeEnv(gym.Wrapper):
    def __init__(self, env):
        """Make end-of-life == end-of-episode, but only reset on true game over.
        Done by DeepMind for the DQN and co. since it helps value estimation.
        """
        gym.Wrapper.__init__(self, env)
        self.lives = 0
        # True when the underlying env reported done (real game over).
        self.was_real_done = True
    def step(self, action):
        obs, reward, done, info = self.env.step(action)
        self.was_real_done = done
        # check current lives, make loss of life terminal,
        # then update lives to handle bonus lives
        lives = self.env.unwrapped.ale.lives()
        if lives < self.lives and lives > 0:
            # for Qbert sometimes we stay in lives == 0 condition for a few frames
            # so it's important to keep lives > 0, so that we only reset once
            # the environment advertises done.
            done = True
        self.lives = lives
        return obs, reward, done, info
    def reset(self, **kwargs):
        """Reset only when lives are exhausted.
        This way all states are still reachable even though lives are episodic,
        and the learner need not know about any of this behind-the-scenes.
        """
        if self.was_real_done:
            obs = self.env.reset(**kwargs)
        else:
            # no-op step to advance from terminal/lost life state
            obs, _, _, _ = self.env.step(0)
        self.lives = self.env.unwrapped.ale.lives()
        return obs
class MaxAndSkipEnv(gym.Wrapper):
    def __init__(self, env, skip=4):
        """Return only every `skip`-th frame

        The action is repeated `skip` times; the returned observation is the
        pixel-wise max of the last two raw frames (removes Atari sprite flicker).
        """
        gym.Wrapper.__init__(self, env)
        # most recent raw observations (for max pooling across time steps)
        self._obs_buffer = np.zeros((2,)+env.observation_space.shape, dtype=np.uint8)
        self._skip = skip
    def step(self, action):
        """Repeat action, sum reward, and max over last observations."""
        total_reward = 0.0
        done = None
        for i in range(self._skip):
            obs, reward, done, info = self.env.step(action)
            # Only the last two frames of the skip window are kept.
            if i == self._skip - 2: self._obs_buffer[0] = obs
            if i == self._skip - 1: self._obs_buffer[1] = obs
            total_reward += reward
            if done:
                break
        # Note that the observation on the done=True frame
        # doesn't matter
        max_frame = self._obs_buffer.max(axis=0)
        return max_frame, total_reward, done, info
    def reset(self, **kwargs):
        return self.env.reset(**kwargs)
class ClipRewardEnv(gym.RewardWrapper):
    """Reward wrapper that clips every reward to its sign: -1, 0 or +1."""
    def __init__(self, env):
        gym.RewardWrapper.__init__(self, env)
    def reward(self, reward):
        """Bin reward to {+1, 0, -1} by its sign."""
        return np.sign(reward)
class WarpFrame(gym.ObservationWrapper):
    def __init__(self, env, width=84, height=84, grayscale=True, dict_space_key=None):
        """
        Warp frames to 84x84 as done in the Nature paper and later work.
        If the environment uses dictionary observations, `dict_space_key` can be specified which indicates which
        observation should be warped.

        :param width: output frame width in pixels.
        :param height: output frame height in pixels.
        :param grayscale: convert RGB to a single-channel grayscale frame.
        :param dict_space_key: key of the sub-observation to warp, or None
            when the observation is a plain array.
        """
        super().__init__(env)
        self._width = width
        self._height = height
        self._grayscale = grayscale
        self._key = dict_space_key
        if self._grayscale:
            num_colors = 1
        else:
            num_colors = 3
        new_space = gym.spaces.Box(
            low=0,
            high=255,
            shape=(self._height, self._width, num_colors),
            dtype=np.uint8,
        )
        # Replace either the whole observation space or just the keyed entry.
        if self._key is None:
            original_space = self.observation_space
            self.observation_space = new_space
        else:
            original_space = self.observation_space.spaces[self._key]
            self.observation_space.spaces[self._key] = new_space
        assert original_space.dtype == np.uint8 and len(original_space.shape) == 3
    def observation(self, obs):
        if self._key is None:
            frame = obs
        else:
            frame = obs[self._key]
        if self._grayscale:
            frame = cv2.cvtColor(frame, cv2.COLOR_RGB2GRAY)
        frame = cv2.resize(
            frame, (self._width, self._height), interpolation=cv2.INTER_AREA
        )
        if self._grayscale:
            # cvtColor drops the channel axis; restore it as a trailing dim.
            frame = np.expand_dims(frame, -1)
        if self._key is None:
            obs = frame
        else:
            # Copy so the caller's dict observation is not mutated in place.
            obs = obs.copy()
            obs[self._key] = frame
        return obs
class FrameStack(gym.Wrapper):
    def __init__(self, env, k):
        """Stack k last frames.
        Returns lazy array, which is much more memory efficient.
        See Also
        --------
        baselines.common.atari_wrappers.LazyFrames

        :param k: number of consecutive frames to stack along axis 0.
        """
        gym.Wrapper.__init__(self, env)
        self.k = k
        # deque(maxlen=k) drops the oldest frame automatically on append.
        self.frames = deque([], maxlen=k)
        shp = env.observation_space.shape
        self.observation_space = spaces.Box(low=0, high=255, shape=((shp[0] * k,)+shp[1:]), dtype=env.observation_space.dtype)
    def reset(self):
        ob = self.env.reset()
        # Seed the stack by repeating the first frame k times.
        for _ in range(self.k):
            self.frames.append(ob)
        return self._get_ob()
    def step(self, action):
        ob, reward, done, info = self.env.step(action)
        self.frames.append(ob)
        return self._get_ob(), reward, done, info
    def _get_ob(self):
        assert len(self.frames) == self.k
        # LazyFrames defers the concatenation until the array is actually used.
        return LazyFrames(list(self.frames))
class ScaledFloatFrame(gym.ObservationWrapper):
    """Rescale uint8 observations to float32 values in [0, 1]."""
    def __init__(self, env):
        gym.ObservationWrapper.__init__(self, env)
        self.observation_space = gym.spaces.Box(low=0, high=1, shape=env.observation_space.shape, dtype=np.float32)
    def observation(self, observation):
        # careful! Materializing the observation undoes the LazyFrames memory
        # optimization -- use with smaller replay buffers only.
        as_float = np.array(observation).astype(np.float32)
        return as_float / 255.0
class LazyFrames(object):
    """Memory-saving container for a list of stacked frames.

    Frames shared between consecutive observations are stored once; the
    concatenation along axis 0 happens lazily, on first array-like access,
    after which the individual frame references are dropped. Convert to a
    numpy array only right before feeding the model.
    """
    def __init__(self, frames):
        self._frames = frames
        self._out = None
    def _force(self):
        # Concatenate exactly once, then release the per-frame references.
        if self._out is None:
            self._out = np.concatenate(self._frames, axis=0)
            self._frames = None
        return self._out
    def __array__(self, dtype=None):
        stacked = self._force()
        return stacked if dtype is None else stacked.astype(dtype)
    def __len__(self):
        return len(self._force())
    def __getitem__(self, i):
        return self._force()[i]
    def count(self):
        stacked = self._force()
        return stacked.shape[stacked.ndim - 1]
    def frame(self, i):
        return self._force()[..., i]
def wrap_atari(env, max_episode_steps=None):
    """Apply the raw-frame Atari wrappers (random no-op resets, 4-frame
    action repeat with flicker max-pooling) to a *NoFrameskip* env.

    `max_episode_steps` is accepted for API compatibility but must be None.
    """
    assert 'NoFrameskip' in env.spec.id
    wrapped = NoopResetEnv(env, noop_max=30)
    wrapped = MaxAndSkipEnv(wrapped, skip=4)
    assert max_episode_steps is None
    return wrapped
class ImageToPyTorch(gym.ObservationWrapper):
    """
    Move the channel axis first: (H, W, C) observations become (C, H, W),
    the layout PyTorch convolutions expect.
    """
    def __init__(self, env):
        super(ImageToPyTorch, self).__init__(env)
        old_shape = self.observation_space.shape
        channels_first = (old_shape[-1], old_shape[0], old_shape[1])
        self.observation_space = gym.spaces.Box(
            low=0,
            high=255,
            shape=channels_first,
            dtype=np.uint8,
        )
    def observation(self, observation):
        return np.transpose(observation, axes=(2, 0, 1))
def wrap_deepmind(env, episode_life=True, clip_rewards=True, frame_stack=False, scale=False):
    """Configure environment for DeepMind-style Atari.

    Applies, in order: episodic-life, FIRE-on-reset (when supported),
    84x84 grayscale warping, optional [0,1] scaling, optional sign reward
    clipping, channels-first conversion and optional 4-frame stacking.
    """
    wrapped = EpisodicLifeEnv(env) if episode_life else env
    if 'FIRE' in wrapped.unwrapped.get_action_meanings():
        wrapped = FireResetEnv(wrapped)
    wrapped = WarpFrame(wrapped)
    if scale:
        wrapped = ScaledFloatFrame(wrapped)
    if clip_rewards:
        wrapped = ClipRewardEnv(wrapped)
    wrapped = ImageToPyTorch(wrapped)
    if frame_stack:
        wrapped = FrameStack(wrapped, 4)
    return wrapped
# Reference: https://www.cs.toronto.edu/~vmnih/docs/dqn.pdf
import torch
import torch.nn as nn
import torch.optim as optim
import torch.nn.functional as F
from torch.utils.tensorboard import SummaryWriter
import argparse
from distutils.util import strtobool
import collections
import numpy as np
import gym
from gym.wrappers import TimeLimit, Monitor
from gym.spaces import Discrete, Box, MultiBinary, MultiDiscrete, Space
import time
import random
import os
import matplotlib
matplotlib.use('Agg')
import seaborn as sns
import matplotlib.pyplot as plt
import pandas as pd
from PIL import Image
if __name__ == "__main__":
    parser = argparse.ArgumentParser(description='Double DQN Agent')
    # Common arguments
    # NOTE: the default experiment name strips the extension with splitext.
    # The previous `.rstrip(".py")` removed any trailing '.', 'p' or 'y'
    # characters (e.g. "happy.py" -> "ha"), not the suffix.
    parser.add_argument('--exp-name', type=str, default=os.path.splitext(os.path.basename(__file__))[0],
                        help='the name of this experiment')
    parser.add_argument('--gym-id', type=str, default="BreakoutNoFrameskip-v4",
                        help='the id of the gym environment')
    parser.add_argument('--learning-rate', type=float, default=1e-4,
                        help='the learning rate of the optimizer')
    parser.add_argument('--seed', type=int, default=2,
                        help='seed of the experiment')
    parser.add_argument('--total-timesteps', type=int, default=10000000,
                        help='total timesteps of the experiments')
    parser.add_argument('--torch-deterministic', type=lambda x:bool(strtobool(x)), default=True, nargs='?', const=True,
                        help='if toggled, `torch.backends.cudnn.deterministic=False`')
    parser.add_argument('--cuda', type=lambda x:bool(strtobool(x)), default=True, nargs='?', const=True,
                        help='if toggled, cuda will not be enabled by default')
    parser.add_argument('--prod-mode', type=lambda x:bool(strtobool(x)), default=False, nargs='?', const=True,
                        help='run the script in production mode and use wandb to log outputs')
    parser.add_argument('--capture-video', type=lambda x:bool(strtobool(x)), default=False, nargs='?', const=True,
                        help='weather to capture videos of the agent performances (check out `videos` folder)')
    parser.add_argument('--wandb-project-name', type=str, default="cleanRL",
                        help="the wandb's project name")
    parser.add_argument('--wandb-entity', type=str, default=None,
                        help="the entity (team) of wandb's project")
    # Algorithm specific arguments
    parser.add_argument('--buffer-size', type=int, default=1000000,
                        help='the replay memory buffer size')
    parser.add_argument('--gamma', type=float, default=0.99,
                        help='the discount factor gamma')
    parser.add_argument('--target-network-frequency', type=int, default=1000,
                        help="the timesteps it takes to update the target network")
    parser.add_argument('--max-grad-norm', type=float, default=0.5,
                        help='the maximum norm for the gradient clipping')
    parser.add_argument('--batch-size', type=int, default=32,
                        help="the batch size of sample from the reply memory")
    parser.add_argument('--start-e', type=float, default=1.,
                        help="the starting epsilon for exploration")
    parser.add_argument('--end-e', type=float, default=0.02,
                        help="the ending epsilon for exploration")
    parser.add_argument('--exploration-fraction', type=float, default=0.10,
                        help="the fraction of `total-timesteps` it takes from start-e to go end-e")
    parser.add_argument('--learning-starts', type=int, default=80000,
                        help="timestep to start learning")
    parser.add_argument('--train-frequency', type=int, default=4,
                        help="the frequency of training")
    args = parser.parse_args()
    # A seed of 0 is treated as "unset": substitute the current unix time.
    if not args.seed:
        args.seed = int(time.time())
    class QValueVisualizationWrapper(gym.Wrapper):
        """Render the env frame side-by-side with a bar chart of the agent's
        current Q-values (used when capturing videos).

        The training loop must call `set_q_values` each step; a subsequent
        `render(mode="rgb_array")` returns the env frame horizontally
        concatenated with the chart image.
        """
        def __init__(self, env):
            super().__init__(env)
            self.env.reset()
            # Native frame size; the chart is sized to match it.
            self.image_shape = self.env.render(mode="rgb_array").shape
            # Placeholder so render() works before the first set_q_values().
            self.q_values = [[0.,0.,0.,0.]]
            # self.metadata['video.frames_per_second'] = 60
        def set_q_values(self, q_values):
            self.q_values = q_values
        def render(self, mode="human"):
            if mode=="rgb_array":
                env_rgb_array = super().render(mode)
                # Draw the Q-value bar chart at the same pixel height as the frame.
                fig, ax = plt.subplots(figsize=(self.image_shape[1]/100,self.image_shape[0]/100), constrained_layout=True, dpi=100)
                df = pd.DataFrame(np.array(self.q_values).T)
                sns.barplot(x=df.index, y=0, data=df, ax=ax)
                ax.set(xlabel='actions', ylabel='q-values')
                fig.canvas.draw()
                X = np.array(fig.canvas.renderer.buffer_rgba())
                # NOTE(review): result discarded -- looks like leftover debugging.
                Image.fromarray(X)
                # Drop the alpha channel so both halves are RGB.
                rgb_image = np.array(Image.fromarray(X).convert('RGB'))
                plt.close(fig)
                q_value_rgb_array = rgb_image
                return np.append(env_rgb_array, q_value_rgb_array, axis=1)
            else:
                # Non-rgb modes delegate to the env (and return None here).
                super().render(mode)
    # TRY NOT TO MODIFY: setup the environment
    experiment_name = f"{args.gym_id}__{args.exp_name}__{args.seed}__{int(time.time())}"
    writer = SummaryWriter(f"runs/{experiment_name}")
    # Log all hyperparameters as a markdown table in TensorBoard.
    writer.add_text('hyperparameters', "|param|value|\n|-|-|\n%s" % (
        '\n'.join([f"|{key}|{value}|" for key, value in vars(args).items()])))
    if args.prod_mode:
        import wandb
        wandb.init(project=args.wandb_project_name, entity=args.wandb_entity, sync_tensorboard=True, config=vars(args), name=experiment_name, monitor_gym=True, save_code=True)
        # wandb syncs the TensorBoard events, so write them to a temp dir.
        writer = SummaryWriter(f"/tmp/{experiment_name}")
    # TRY NOT TO MODIFY: seeding
    device = torch.device('cuda' if torch.cuda.is_available() and args.cuda else 'cpu')
    env = gym.make(args.gym_id)
    env = wrap_atari(env)
    env = gym.wrappers.RecordEpisodeStatistics(env) # records episode reward in `info['episode']['r']`
    if args.capture_video:
        # Monitor must wrap the Q-value visualizer so its side-by-side frames
        # end up in the recorded videos.
        env = QValueVisualizationWrapper(env)
        env = Monitor(env, f'videos/{experiment_name}')
    env = wrap_deepmind(
        env,
        clip_rewards=True,
        frame_stack=True,
        scale=False,
    )
    # Seed every RNG the run touches for reproducibility.
    random.seed(args.seed)
    np.random.seed(args.seed)
    torch.manual_seed(args.seed)
    torch.backends.cudnn.deterministic = args.torch_deterministic
    env.seed(args.seed)
    env.action_space.seed(args.seed)
    env.observation_space.seed(args.seed)
    # respect the default timelimit
    assert isinstance(env.action_space, Discrete), "only discrete action space is supported"
# modified from https://github.com/seungeunrho/minimalRL/blob/master/dqn.py#
class ReplayBuffer():
def __init__(self, buffer_limit):
self.buffer = collections.deque(maxlen=buffer_limit)
def put(self, transition):
self.buffer.append(transition)
def sample(self, n):
mini_batch = random.sample(self.buffer, n)
s_lst, a_lst, r_lst, s_prime_lst, done_mask_lst = [], [], [], [], []
for transition in mini_batch:
s, a, r, s_prime, done_mask = transition
s_lst.append(s)
a_lst.append(a)
r_lst.append(r)
s_prime_lst.append(s_prime)
done_mask_lst.append(done_mask)
return np.array(s_lst), np.array(a_lst), \
np.array(r_lst), np.array(s_prime_lst), \
np.array(done_mask_lst)
# ALGO LOGIC: initialize agent here:
# tricks taken from https://github.com/cpnota/autonomous-learning-library/blob/6d1111afce0d1582de463326f7d078a86e850551/all/presets/atari/models/__init__.py#L16
# apparently matters
class Linear0(nn.Linear):
def reset_parameters(self):
nn.init.constant_(self.weight, 0.0)
if self.bias is not None:
nn.init.constant_(self.bias, 0.0)
class Scale(nn.Module):
def __init__(self, scale):
super().__init__()
self.scale = scale
def forward(self, x):
return x * self.scale
    class QNetwork(nn.Module):
        """Nature-DQN convolutional Q-network.

        Takes a stack of `frames` image planes and outputs one Q-value per
        action (`env.action_space.n`). Assumes 84x84 inputs: the flattened
        conv output is 3136 = 64 channels * 7 * 7 -- TODO confirm if the
        preprocessing resolution ever changes.
        """
        def __init__(self, frames=4):
            super(QNetwork, self).__init__()
            self.network = nn.Sequential(
                Scale(1/255),
                nn.Conv2d(frames, 32, 8, stride=4),
                nn.ReLU(),
                nn.Conv2d(32, 64, 4, stride=2),
                nn.ReLU(),
                nn.Conv2d(64, 64, 3, stride=1),
                nn.ReLU(),
                nn.Flatten(),
                nn.Linear(3136, 512),
                nn.ReLU(),
                # Zero-initialized head: all Q-values start at 0.
                Linear0(512, env.action_space.n)
            )
        def forward(self, x):
            # Accepts numpy input as well; copies onto the global `device`.
            x = torch.Tensor(x).to(device)
            return self.network(x)
def linear_schedule(start_e: float, end_e: float, duration: int, t: int):
    """Epsilon schedule: interpolate from start_e toward end_e over
    `duration` steps, floored at end_e afterwards."""
    interpolated = (end_e - start_e) / duration * t + start_e
    return max(interpolated, end_e)
# Build the replay buffer, online/target networks (target starts as an
# exact copy of the online weights), optimizer and TD loss.
rb = ReplayBuffer(args.buffer_size)
q_network = QNetwork().to(device)
target_network = QNetwork().to(device)
target_network.load_state_dict(q_network.state_dict())
optimizer = optim.Adam(q_network.parameters(), lr=args.learning_rate)
loss_fn = nn.MSELoss()
print(device.__repr__())
print(q_network)
# TRY NOT TO MODIFY: start the game
obs = env.reset()
episode_reward = 0
for global_step in range(args.total_timesteps):
    # ALGO LOGIC: put action logic here
    # epsilon-greedy exploration, annealed over exploration_fraction of training
    epsilon = linear_schedule(args.start_e, args.end_e, args.exploration_fraction*args.total_timesteps, global_step)
    obs = np.array(obs)
    logits = q_network.forward(obs.reshape((1,)+obs.shape))
    if args.capture_video:
        env.set_q_values(logits.tolist())
    if random.random() < epsilon:
        action = env.action_space.sample()
    else:
        action = torch.argmax(logits, dim=1).tolist()[0]
    # TRY NOT TO MODIFY: execute the game and log data.
    next_obs, reward, done, info = env.step(action)
    episode_reward += reward
    # TRY NOT TO MODIFY: record rewards for plotting purposes
    if 'episode' in info.keys():
        print(f"global_step={global_step}, episode_reward={info['episode']['r']}")
        writer.add_scalar("charts/episode_reward", info['episode']['r'], global_step)
        writer.add_scalar("charts/epsilon", epsilon, global_step)
    # ALGO LOGIC: training.
    rb.put((obs, action, reward, next_obs, done))
    if global_step > args.learning_starts and global_step % args.train_frequency == 0:
        s_obs, s_actions, s_rewards, s_next_obses, s_dones = rb.sample(args.batch_size)
        with torch.no_grad():
            # target_max = torch.max(target_network.forward(s_next_obses), dim=1)[0]
            # Double-DQN style target: the online network selects the argmax
            # action, the target network evaluates it.
            current_value = q_network.forward(s_next_obses)
            target_value = target_network.forward(s_next_obses)
            target_max = target_value.gather(1, torch.max(current_value, 1)[1].unsqueeze(1)).squeeze(1)
        # Bootstrap is masked out on terminal transitions via (1 - done).
        td_target = torch.Tensor(s_rewards).to(device) + args.gamma * target_max * (1 - torch.Tensor(s_dones).to(device))
        old_val = q_network.forward(s_obs).gather(1, torch.LongTensor(s_actions).view(-1,1).to(device)).squeeze()
        loss = loss_fn(td_target, old_val)
        writer.add_scalar("losses/td_loss", loss, global_step)
        # optimize the model
        optimizer.zero_grad()
        loss.backward()
        nn.utils.clip_grad_norm_(list(q_network.parameters()), args.max_grad_norm)
        optimizer.step()
        # update the target network
        if global_step % args.target_network_frequency == 0:
            target_network.load_state_dict(q_network.state_dict())
    # TRY NOT TO MODIFY: CRUCIAL step easy to overlook
    obs = next_obs
    if done:
        # important to note that because `EpisodicLifeEnv` wrapper is applied,
        # the real episode reward is actually the sum of episode reward of 5 lives
        # which we record through `info['episode']['r']` provided by gym.wrappers.RecordEpisodeStatistics
        obs, episode_reward = env.reset(), 0
env.close()
writer.close()
| 38.129758 | 171 | 0.632742 | [
"MIT"
] | HelgeS/cleanrl | cleanrl/experiments/dqn2_atari_visual.py | 22,039 | Python |
#coding:utf-8
'''
filename:get_numbers.py
chap:8
subject:2
conditions:file [data],contains: numbers,annotations,empty line
solution:function get_numbers
'''
import sys
def get_numbers(file):
    """Read integers from *file*, one per line.

    Lines that are not pure integers (annotations, empty lines) are
    skipped with a notice printed to stdout. A missing or unreadable
    file is reported and yields an empty list.

    Args:
        file: path of the data file to read.

    Returns:
        list of int: the integers found, in file order.
    """
    numbers = []
    try:
        with open(file, 'rt') as f:
            for line in f:
                try:
                    # int() tolerates surrounding whitespace/newlines.
                    numbers.append(int(line))
                except ValueError as e:
                    print('PASS:this line is not pure number:', e)
    except OSError as e:
        print('Opening file error:', e)
    except BaseException as e:
        # Deliberate catch-all kept from the exercise statement so the
        # caller always gets a list back.
        print('Something is wrong :', e)
    return numbers
if __name__ == '__main__':
    # Usage: python get_numbers.py <data-file>
    numbers = get_numbers(sys.argv[1])
    print(numbers)
| 21.028571 | 67 | 0.569293 | [
"MIT"
] | marble-git/python-laoqi | chap8/get_numbers.py | 736 | Python |
#Author-HeNeos
#Description-Many triangles, I love triangles
import adsk.core, adsk.fusion, adsk.cam, traceback
import math
def get_points(n, angle, r):
    """Return the n vertices of a regular n-gon of radius r as [x, y]
    pairs, rotated by `angle` radians about the origin."""
    return [[r*math.cos(angle + 2*i*math.pi/n), r*math.sin(angle + 2*i*math.pi/n)]
            for i in range(n)]
def run(context):
    """Fusion 360 entry point: lofts 100 rounded-hexagon profiles, each
    offset 1 cm along Z and twisted by a sine-driven angle, into one
    solid body.
    """
    try:
        app = adsk.core.Application.get()
        ui = app.userInterface
        ui.messageBox('Are you ready')
        product = app.activeProduct
        design = adsk.fusion.Design.cast(product)
        rootComp = design.rootComponent
        sketches = rootComp.sketches
        xyPlane = rootComp.xYConstructionPlane
        # Create a new ObjectCollection.
        # NOTE(review): `revolves` is never used below — leftover from an
        # earlier version?
        revolves = rootComp.features.revolveFeatures
        r = 4
        loftFeats = rootComp.features.loftFeatures
        loftInput = loftFeats.createInput(adsk.fusion.FeatureOperations.NewBodyFeatureOperation)
        loftSectionsObj = loftInput.loftSections
        n = 6
        for i in range(0, 100):
            # Twist angle oscillates with the section index.
            angle = (math.pi)*abs(math.sin(i/10))
            ctorPlanes = rootComp.constructionPlanes
            plane = ctorPlanes.createInput()
            offset = adsk.core.ValueInput.createByString(str(i)+" cm")
            plane.setByOffset(xyPlane, offset)
            Plane = ctorPlanes.add(plane)
            sketch = sketches.add(Plane)
            lines = sketch.sketchCurves.sketchLines
            Points = []
            Lines = []
            p = get_points(n, angle, r)
            for j in range(0, n):
                point = adsk.core.Point3D.create(p[j][0], p[j][1], 0)
                Points.append(point)
            # Connect consecutive vertices, then close the polygon.
            for j in range(0, n-1):
                line = lines.addByTwoPoints(Points[j], Points[j+1])
                Lines.append(line)
            Lines.append(lines.addByTwoPoints(Points[n-1], Points[0]))
            # Fillet every corner with radius 0.5.
            # NOTE(review): this inner loop reuses `i`, shadowing the outer
            # section index; harmless here because `i` is not read again in
            # this iteration, but renaming it would be clearer.
            for i in range(0, n-1):
                sketch.sketchCurves.sketchArcs.addFillet(Lines[i], Lines[i].endSketchPoint.geometry, Lines[i+1], Lines[i+1].startSketchPoint.geometry, 0.5)
            sketch.sketchCurves.sketchArcs.addFillet(Lines[n-1], Lines[n-1].endSketchPoint.geometry, Lines[0], Lines[0].startSketchPoint.geometry, 0.5)
            profile = sketch.profiles.item(0)
            # Hide the per-section sketch and plane to keep the view clean.
            sketch.isVisible = False
            Plane.isLightBulbOn = False
            loftSectionsObj.add(profile)
        loftInput.isSolid=True
        loftFeats.add(loftInput)
    except:
        # NOTE(review): if Application.get() itself fails, `ui` is unbound
        # here and this raises NameError — same pattern fixed in stop().
        if ui:
            ui.messageBox('Failed:\n{}'.format(traceback.format_exc()))
#axis = lines.addByTwoPoints(adsk.core.Point3D.create(-1,-4,0), adsk.core.Point3D.create(1,-4,0))
#circle1 = circles.addByCenterRadius(adsk.core.Point3D.create(0,0,0), 2)
def stop(context):
    """Fusion 360 teardown hook: notify the user the script finished."""
    ui = None
    try:
        app = adsk.core.Application.get()
        ui = app.userInterface
        ui.messageBox('Finished')
    except:
        # Only surface the traceback if the UI handle was obtained;
        # previously `ui` could be unbound here, raising a NameError.
        if ui:
            ui.messageBox('Failed:\n{}'.format(traceback.format_exc()))
| 35.714286 | 155 | 0.595 | [
"MIT"
] | HeNeos/autodesk_scripts | Fusion/fillet_polygon.py | 3,000 | Python |
from django.urls import path
from . import views
# Wire up our API using automatic URL routing.
# Additionally, we include login URLs for the browsable API.
urlpatterns = [
    # Project settings page/endpoint.
    path('settings', views.project_settings, name='settings'),
    # OS environment variables endpoint.
    path('envs', views.os_envs, name='envs'),
]
| 26 | 62 | 0.723776 | [
"MIT"
] | Nenu1985/blog | products/urls.py | 286 | Python |
# coding: utf-8
#
# Copyright 2019 The Oppia Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS-IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Unit tests for core.domain.prod_validation_jobs_one_off."""
import ast
import datetime
import math
import random
import time
import types
from constants import constants
from core import jobs_registry
from core.domain import collection_domain
from core.domain import collection_services
from core.domain import exp_domain
from core.domain import exp_services
from core.domain import feedback_services
from core.domain import prod_validation_jobs_one_off
from core.domain import rating_services
from core.domain import recommendations_services
from core.domain import rights_manager
from core.domain import story_domain
from core.domain import story_services
from core.domain import subscription_services
from core.domain import topic_domain
from core.domain import topic_services
from core.domain import user_services
from core.platform import models
from core.platform.taskqueue import gae_taskqueue_services as taskqueue_services
from core.tests import test_utils
import feconf
from google.appengine.api import datastore_types
from google.appengine.ext import db
gae_search_services = models.Registry.import_search_services()
# Credentials for a generic non-admin user created in several tests.
USER_EMAIL = '[email protected]'
USER_NAME = 'username'
# Captured at import time so tests can build timestamps relative to the
# moment this module was loaded.
CURRENT_DATETIME = datetime.datetime.utcnow()
(
    activity_models, audit_models, base_models,
    collection_models, config_models, email_models,
    exp_models, feedback_models, file_models,
    recommendations_models, story_models,
    user_models,) = (
        models.Registry.import_models([
            models.NAMES.activity, models.NAMES.audit, models.NAMES.base_model,
            models.NAMES.collection, models.NAMES.config, models.NAMES.email,
            models.NAMES.exploration, models.NAMES.feedback, models.NAMES.file,
            models.NAMES.recommendations, models.NAMES.story,
            models.NAMES.user]))
# Alias kept so PatchedDatetimeType can still recognize real datetime
# instances after datetime.datetime is swapped out in tests.
OriginalDatetimeType = datetime.datetime
class PatchedDatetimeType(type):
    """Metaclass that makes isinstance() checks against the patched class
    succeed for real datetime instances."""
    def __instancecheck__(cls, other):
        """Validates whether the given instance is a datetime
        instance.
        """
        return isinstance(other, OriginalDatetimeType)
class MockDatetime13Hours(datetime.datetime):
    """datetime whose utcnow() is fixed 13 hours in the past, used to make
    last_updated look newer than the time the job ran."""
    __metaclass__ = PatchedDatetimeType
    @classmethod
    def utcnow(cls):
        """Returns the current date and time 13 hours behind UTC."""
        return CURRENT_DATETIME - datetime.timedelta(hours=13)
def run_job_and_check_output(
        self, expected_output, sort=False, literal_eval=False):
    """Helper function to run job and compare output.

    Enqueues `self.job_class`, flushes the one-off task queue, and
    compares the job output against `expected_output` — either exactly,
    after sorting, or (with literal_eval) as parsed (key, value) pairs so
    that list ordering inside each entry does not matter.
    """
    job_id = self.job_class.create_new()
    self.assertEqual(
        self.count_jobs_in_taskqueue(
            taskqueue_services.QUEUE_NAME_ONE_OFF_JOBS), 0)
    self.job_class.enqueue(job_id)
    self.assertEqual(
        self.count_jobs_in_taskqueue(
            taskqueue_services.QUEUE_NAME_ONE_OFF_JOBS), 1)
    self.process_and_flush_pending_tasks()
    actual_output = self.job_class.get_output(job_id)
    if literal_eval:
        actual_output_dict = {}
        expected_output_dict = {}
        for item in [ast.literal_eval(value) for value in actual_output]:
            value = item[1]
            if isinstance(value, list):
                value = sorted(value)
            actual_output_dict[item[0]] = value
        for item in [ast.literal_eval(value) for value in expected_output]:
            value = item[1]
            if isinstance(value, list):
                value = sorted(value)
            expected_output_dict[item[0]] = value
        self.assertEqual(
            sorted(actual_output_dict.keys()),
            sorted(expected_output_dict.keys()))
        for key in actual_output_dict:
            self.assertEqual(actual_output_dict[key], expected_output_dict[key])
    elif sort:
        self.assertEqual(sorted(actual_output), sorted(expected_output))
    else:
        self.assertEqual(actual_output, expected_output)
def update_datastore_types_for_mock_datetime():
    """Updates datastore types for MockDatetime13Hours to ensure that validation
    of ndb datetime properties does not fail.
    """
    # Registers the mock class with the datastore's private validation,
    # packing and meaning tables so it is treated like a real datetime.
    # pylint: disable=protected-access
    datastore_types._VALIDATE_PROPERTY_VALUES[MockDatetime13Hours] = (
        datastore_types.ValidatePropertyNothing)
    datastore_types._PACK_PROPERTY_VALUES[MockDatetime13Hours] = (
        datastore_types.PackDatetime)
    datastore_types._PROPERTY_MEANINGS[MockDatetime13Hours] = (
        datastore_types.entity_pb.Property.GD_WHEN)
    # pylint: enable=protected-access
class MockModel(base_models.BaseModel):
    """Bare BaseModel subclass used as a generic test entity."""
    pass
class MockSnapshotModel(base_models.BaseModel):
    """Minimal snapshot-like model with a fixed commit type and no cmds."""
    commit_type = 'edit'
    commit_cmds = []
class MockBaseModelValidator(prod_validation_jobs_one_off.BaseModelValidator):
    """Validator that overrides nothing, to exercise the base class's
    NotImplementedError paths."""
    pass
class MockSummaryModelValidator(
        prod_validation_jobs_one_off.BaseSummaryModelValidator):
    """Summary validator with no external id relationships; the remaining
    abstract hooks are deliberately left undefined."""
    @classmethod
    def _get_external_id_relationships(cls, item):
        return {}
class MockSnapshotContentModelValidator(
        prod_validation_jobs_one_off.BaseSnapshotContentModelValidator):
    """Snapshot-content validator that deliberately omits the external
    model name, to exercise the corresponding error path."""
    @classmethod
    def _get_external_id_relationships(cls, item):
        return {}
class MockSnapshotMetadataModelValidator(
        prod_validation_jobs_one_off.BaseSnapshotMetadataModelValidator):
    """Snapshot-metadata validator pointing at MockModel, with the
    change-domain hook deliberately left undefined."""
    EXTERNAL_MODEL_NAME = 'external model'
    @classmethod
    def _get_external_id_relationships(cls, item):
        return {
            'external_model_ids': (MockModel, [])
        }
class NotImplementedErrorTests(test_utils.GenericTestBase):
    """Checks that the abstract validator base classes raise
    NotImplementedError (or an explicit Exception) when required hooks are
    left undefined by a subclass.
    """
    def setUp(self):
        super(NotImplementedErrorTests, self).setUp()
        self.item = MockModel(id='mockmodel')
        self.item.put()
    def test_error_is_raised_if_fetch_external_properties_is_undefined(self):
        with self.assertRaises(NotImplementedError):
            MockBaseModelValidator().validate(self.item)
    def test_error_is_get_external_model_properties_is_undefined(self):
        with self.assertRaises(NotImplementedError):
            MockSummaryModelValidator().validate(self.item)
    def test_error_is_raised_if_external_model_name_is_undefined(self):
        with self.assertRaisesRegexp(
            Exception, 'External model name should be specified'):
            MockSnapshotContentModelValidator().validate(self.item)
    def test_error_is_raised_if_get_change_domain_class_is_undefined(self):
        with self.assertRaises(NotImplementedError):
            snapshot_model = MockSnapshotModel(id='mockmodel')
            snapshot_model.put()
            MockSnapshotMetadataModelValidator().validate(snapshot_model)
    def test_error_is_raised_if_entity_classes_to_map_over_is_undefined(self):
        job_class = prod_validation_jobs_one_off.ProdValidationAuditOneOffJob
        with self.assertRaises(NotImplementedError), self.swap(
            jobs_registry, 'ONE_OFF_JOB_MANAGERS', [job_class]):
            job_id = job_class.create_new()
            job_class.enqueue(job_id)
            self.process_and_flush_pending_tasks()
class ActivityReferencesModelValidatorTests(test_utils.GenericTestBase):
    """Audit-job tests for ActivityReferencesModel: one valid exploration
    reference plus one valid collection reference, then each failure mode
    (time fields, malformed/invalid references, bad id)."""
    def setUp(self):
        super(ActivityReferencesModelValidatorTests, self).setUp()
        self.signup(self.OWNER_EMAIL, self.OWNER_USERNAME)
        self.owner_id = self.get_user_id_from_email(self.OWNER_EMAIL)
        self.owner = user_services.UserActionsInfo(self.owner_id)
        exploration = exp_domain.Exploration.create_default_exploration(
            '1exp', title='title', category='category')
        exp_services.save_new_exploration(self.owner_id, exploration)
        collection = collection_domain.Collection.create_default_collection(
            '1col', title='title', category='category')
        collection_services.save_new_collection(self.owner_id, collection)
        self.model_instance = (
            activity_models.ActivityReferencesModel.get_or_create('featured'))
        self.model_instance.activity_references = [{
            'type': constants.ACTIVITY_TYPE_EXPLORATION,
            'id': '1exp',
        }, {
            'type': constants.ACTIVITY_TYPE_COLLECTION,
            'id': '1col',
        }]
        self.model_instance.put()
        self.job_class = (
            prod_validation_jobs_one_off.ActivityReferencesModelAuditOneOffJob)
    def test_standard_model(self):
        expected_output = [u'[u\'fully-validated ActivityReferencesModel\', 1]']
        run_job_and_check_output(self, expected_output)
    def test_model_with_created_on_greater_than_last_updated(self):
        self.model_instance.created_on = (
            self.model_instance.last_updated + datetime.timedelta(days=1))
        self.model_instance.put()
        expected_output = [(
            u'[u\'failed validation check for time field relation check '
            'of ActivityReferencesModel\', '
            '[u\'Entity id featured: The created_on field has a value '
            '%s which is greater than the value '
            '%s of last_updated field\']]') % (
                self.model_instance.created_on, self.model_instance.last_updated
            )]
        run_job_and_check_output(self, expected_output)
    def test_model_with_last_updated_greater_than_current_time(self):
        expected_output = [(
            u'[u\'failed validation check for current time check of '
            'ActivityReferencesModel\', '
            '[u\'Entity id %s: The last_updated field has a '
            'value %s which is greater than the time when the job was run\']]'
        ) % (self.model_instance.id, self.model_instance.last_updated)]
        # Pretend the job runs 13 hours in the past so last_updated looks
        # like a future timestamp.
        with self.swap(datetime, 'datetime', MockDatetime13Hours), self.swap(
            db.DateTimeProperty, 'data_type', MockDatetime13Hours):
            update_datastore_types_for_mock_datetime()
            run_job_and_check_output(self, expected_output)
    def test_model_with_missing_id_in_activity_references(self):
        self.model_instance.activity_references = [{
            'type': 'exploration',
        }]
        self.model_instance.put()
        expected_output = [(
            u'[u\'failed validation check for fetch properties of '
            'ActivityReferencesModel\', '
            '[u"Entity id featured: Entity properties cannot be fetched '
            'completely with the error \'id\'"]]')]
        run_job_and_check_output(self, expected_output, sort=True)
    def test_model_with_invalid_type_in_activity_references(self):
        self.model_instance.activity_references = [{
            'type': 'invalid_type',
            'id': '0'
        }]
        self.model_instance.put()
        expected_output = [(
            u'[u\'failed validation check for domain object check of '
            'ActivityReferencesModel\', '
            '[u\'Entity id featured: Entity fails domain validation with the '
            'error Invalid activity type: invalid_type\']]')]
        run_job_and_check_output(self, expected_output)
    def test_model_with_invalid_id_in_activity_references(self):
        self.model_instance.activity_references = [{
            'type': 'exploration',
            'id': '1col'
        }]
        self.model_instance.put()
        expected_output = [(
            u'[u\'failed validation check for exploration_ids field check of '
            'ActivityReferencesModel\', '
            '[u"Entity id featured: based on field exploration_ids having '
            'value 1col, expect model ExplorationModel with id 1col but '
            'it doesn\'t exist"]]')]
        run_job_and_check_output(self, expected_output)
    def test_mock_model_with_invalid_id(self):
        model_instance_with_invalid_id = (
            activity_models.ActivityReferencesModel(id='invalid'))
        model_instance_with_invalid_id.put()
        expected_output = [(
            u'[u\'fully-validated ActivityReferencesModel\', 1]'
        ), (
            u'[u\'failed validation check for model id check of '
            'ActivityReferencesModel\', '
            '[u\'Entity id invalid: Entity id does not match regex pattern\']]'
        )]
        run_job_and_check_output(self, expected_output, sort=True)
class RoleQueryAuditModelValidatorTests(test_utils.GenericTestBase):
    """Audit-job tests for RoleQueryAuditModel, whose id encodes
    user_id.timestamp.intent.random — valid model, time-field failures,
    missing user and malformed id."""
    def setUp(self):
        super(RoleQueryAuditModelValidatorTests, self).setUp()
        self.signup(self.ADMIN_EMAIL, self.ADMIN_USERNAME)
        self.signup(USER_EMAIL, USER_NAME)
        self.admin_id = self.get_user_id_from_email(self.ADMIN_EMAIL)
        admin_model = user_models.UserSettingsModel.get_by_id(self.admin_id)
        admin_model.role = feconf.ROLE_ID_ADMIN
        admin_model.put()
        model_id = '%s.%s.%s.%s' % (
            self.admin_id, int(math.floor(time.time())),
            feconf.ROLE_ACTION_UPDATE, random.randint(0, 1000))
        self.model_instance = audit_models.RoleQueryAuditModel(
            id=model_id, user_id=self.admin_id,
            intent=feconf.ROLE_ACTION_UPDATE, role='c', username='d')
        self.model_instance.put()
        self.job_class = (
            prod_validation_jobs_one_off.RoleQueryAuditModelAuditOneOffJob)
    def test_standard_model(self):
        expected_output = [u'[u\'fully-validated RoleQueryAuditModel\', 1]']
        run_job_and_check_output(self, expected_output)
    def test_model_with_created_on_greater_than_last_updated(self):
        self.model_instance.created_on = (
            self.model_instance.last_updated + datetime.timedelta(days=1))
        self.model_instance.put()
        expected_output = [(
            u'[u\'failed validation check for time field relation check '
            'of RoleQueryAuditModel\', '
            '[u\'Entity id %s: The created_on field has a value '
            '%s which is greater than the value '
            '%s of last_updated field\']]') % (
                self.model_instance.id, self.model_instance.created_on,
                self.model_instance.last_updated
            )]
        run_job_and_check_output(self, expected_output)
    def test_model_with_last_updated_greater_than_current_time(self):
        expected_output = [(
            u'[u\'failed validation check for current time check of '
            'RoleQueryAuditModel\', '
            '[u\'Entity id %s: The last_updated field has a '
            'value %s which is greater than the time when the job was run\']]'
        ) % (self.model_instance.id, self.model_instance.last_updated)]
        with self.swap(datetime, 'datetime', MockDatetime13Hours), self.swap(
            db.DateTimeProperty, 'data_type', MockDatetime13Hours):
            update_datastore_types_for_mock_datetime()
            run_job_and_check_output(self, expected_output)
    def test_model_with_non_existent_user_id(self):
        user_models.UserSettingsModel.get(self.admin_id).delete()
        expected_output = [(
            u'[u\'failed validation check for user_ids field check of '
            'RoleQueryAuditModel\', '
            '[u"Entity id %s: based on field user_ids having value '
            '%s, expect model UserSettingsModel with '
            'id %s but it doesn\'t exist"]]') % (
                self.model_instance.id, self.admin_id, self.admin_id)]
        run_job_and_check_output(self, expected_output)
    def test_model_with_invalid_id(self):
        model_invalid_id = '%s.%s.%s.%s' % (
            'a', int(math.floor(time.time())), feconf.ROLE_ACTION_UPDATE,
            random.randint(0, 1000))
        model_instance_with_invalid_id = audit_models.RoleQueryAuditModel(
            id=model_invalid_id, user_id=self.admin_id,
            intent=feconf.ROLE_ACTION_UPDATE, role='c', username='d')
        model_instance_with_invalid_id.put()
        expected_output = [(
            u'[u\'fully-validated RoleQueryAuditModel\', 1]'
        ), (
            u'[u\'failed validation check for model id check of '
            'RoleQueryAuditModel\', '
            '[u\'Entity id %s: Entity id does not match regex pattern\']]'
        ) % model_invalid_id]
        run_job_and_check_output(self, expected_output, sort=True)
class CollectionModelValidatorTests(test_utils.GenericTestBase):
    """Audit-job tests for CollectionModel: three collections each
    referencing two explorations, then one failure case per external
    relationship (explorations, commit logs, summaries, rights,
    snapshot metadata/content) plus time-field and schema failures."""
    def setUp(self):
        super(CollectionModelValidatorTests, self).setUp()
        self.signup(self.OWNER_EMAIL, self.OWNER_USERNAME)
        self.owner_id = self.get_user_id_from_email(self.OWNER_EMAIL)
        explorations = [exp_domain.Exploration.create_default_exploration(
            '%s' % i,
            title='title %d' % i,
            category='category%d' % i,
        ) for i in xrange(6)]
        for exp in explorations:
            exp_services.save_new_exploration(self.owner_id, exp)
        language_codes = ['ar', 'en', 'en']
        collections = [collection_domain.Collection.create_default_collection(
            '%s' % i,
            title='title %d' % i,
            category='category%d' % i,
            objective='objective%d' % i,
            language_code=language_codes[i]
        ) for i in xrange(3)]
        for index, collection in enumerate(collections):
            # Collection i owns explorations 2i and 2i+1.
            collection.add_node('%s' % (index * 2))
            collection.add_node('%s' % (index * 2 + 1))
            collection_services.save_new_collection(self.owner_id, collection)
        self.model_instance_0 = collection_models.CollectionModel.get_by_id('0')
        self.model_instance_1 = collection_models.CollectionModel.get_by_id('1')
        self.model_instance_2 = collection_models.CollectionModel.get_by_id('2')
        self.job_class = (
            prod_validation_jobs_one_off.CollectionModelAuditOneOffJob)
    def test_standard_operation(self):
        collection_services.update_collection(
            self.owner_id, '0', [{
                'cmd': 'edit_collection_property',
                'property_name': 'title',
                'new_value': 'New title'
            }], 'Changes.')
        expected_output = [
            u'[u\'fully-validated CollectionModel\', 3]']
        run_job_and_check_output(self, expected_output)
    def test_model_with_created_on_greater_than_last_updated(self):
        self.model_instance_0.created_on = (
            self.model_instance_0.last_updated + datetime.timedelta(days=1))
        self.model_instance_0.commit(
            feconf.SYSTEM_COMMITTER_ID, 'created_on test', [])
        expected_output = [
            (
                u'[u\'failed validation check for time field relation check '
                'of CollectionModel\', '
                '[u\'Entity id %s: The created_on field has a value '
                '%s which is greater than the value '
                '%s of last_updated field\']]') % (
                    self.model_instance_0.id,
                    self.model_instance_0.created_on,
                    self.model_instance_0.last_updated
                ),
            u'[u\'fully-validated CollectionModel\', 2]']
        run_job_and_check_output(self, expected_output, sort=True)
    def test_model_with_last_updated_greater_than_current_time(self):
        self.model_instance_1.delete(feconf.SYSTEM_COMMITTER_ID, 'delete')
        self.model_instance_2.delete(feconf.SYSTEM_COMMITTER_ID, 'delete')
        expected_output = [(
            u'[u\'failed validation check for current time check of '
            'CollectionModel\', '
            '[u\'Entity id %s: The last_updated field has a '
            'value %s which is greater than the time when the job was run\']]'
        ) % (self.model_instance_0.id, self.model_instance_0.last_updated)]
        with self.swap(datetime, 'datetime', MockDatetime13Hours), self.swap(
            db.DateTimeProperty, 'data_type', MockDatetime13Hours):
            update_datastore_types_for_mock_datetime()
            run_job_and_check_output(self, expected_output, sort=True)
    def test_model_with_invalid_collection_schema(self):
        expected_output = [
            (
                u'[u\'failed validation check for domain object check of '
                'CollectionModel\', '
                '[u\'Entity id %s: Entity fails domain validation with the '
                'error Invalid language code: %s\']]'
            ) % (self.model_instance_0.id, self.model_instance_0.language_code),
            u'[u\'fully-validated CollectionModel\', 2]']
        # Restrict allowed languages so the 'ar' collection becomes invalid.
        with self.swap(
            constants, 'ALL_LANGUAGE_CODES', [{
                'code': 'en', 'description': 'English'}]):
            run_job_and_check_output(self, expected_output, sort=True)
    def test_missing_exploration_model_failure(self):
        exp_models.ExplorationModel.get_by_id('1').delete(
            self.owner_id, '', [])
        expected_output = [
            (
                u'[u\'failed validation check for '
                'exploration_ids field check of CollectionModel\', '
                '[u"Entity id 0: based on field exploration_ids having value '
                '1, expect model ExplorationModel '
                'with id 1 but it doesn\'t exist"]]'
            ),
            u'[u\'fully-validated CollectionModel\', 2]']
        run_job_and_check_output(self, expected_output, sort=True)
    def test_missing_collection_commit_log_entry_model_failure(self):
        collection_services.update_collection(
            self.owner_id, '0', [{
                'cmd': 'edit_collection_property',
                'property_name': 'title',
                'new_value': 'New title'
            }], 'Changes.')
        collection_models.CollectionCommitLogEntryModel.get_by_id(
            'collection-0-1').delete()
        expected_output = [
            (
                u'[u\'failed validation check for '
                'collection_commit_log_entry_ids field check of '
                'CollectionModel\', '
                '[u"Entity id 0: based on field '
                'collection_commit_log_entry_ids having value '
                'collection-0-1, expect model CollectionCommitLogEntryModel '
                'with id collection-0-1 but it doesn\'t exist"]]'),
            u'[u\'fully-validated CollectionModel\', 2]']
        run_job_and_check_output(self, expected_output, sort=True)
    def test_missing_summary_model_failure(self):
        collection_models.CollectionSummaryModel.get_by_id('0').delete()
        expected_output = [
            (
                u'[u\'failed validation check for collection_summary_ids '
                'field check of CollectionModel\', '
                '[u"Entity id 0: based on field collection_summary_ids '
                'having value 0, expect model CollectionSummaryModel with '
                'id 0 but it doesn\'t exist"]]'),
            u'[u\'fully-validated CollectionModel\', 2]']
        run_job_and_check_output(self, expected_output, sort=True)
    def test_missing_collection_rights_model_failure(self):
        collection_models.CollectionRightsModel.get_by_id(
            '0').delete(feconf.SYSTEM_COMMITTER_ID, '', [])
        expected_output = [
            (
                u'[u\'failed validation check for collection_rights_ids '
                'field check of CollectionModel\', '
                '[u"Entity id 0: based on field collection_rights_ids having '
                'value 0, expect model CollectionRightsModel with id 0 but '
                'it doesn\'t exist"]]'),
            u'[u\'fully-validated CollectionModel\', 2]']
        run_job_and_check_output(self, expected_output, sort=True)
    def test_missing_snapshot_metadata_model_failure(self):
        collection_models.CollectionSnapshotMetadataModel.get_by_id(
            '0-1').delete()
        expected_output = [
            (
                u'[u\'failed validation check for snapshot_metadata_ids '
                'field check of CollectionModel\', '
                '[u"Entity id 0: based on field snapshot_metadata_ids having '
                'value 0-1, expect model CollectionSnapshotMetadataModel '
                'with id 0-1 but it doesn\'t exist"]]'),
            u'[u\'fully-validated CollectionModel\', 2]']
        run_job_and_check_output(self, expected_output, sort=True)
    def test_missing_snapshot_content_model_failure(self):
        collection_models.CollectionSnapshotContentModel.get_by_id(
            '0-1').delete()
        expected_output = [
            (
                u'[u\'failed validation check for snapshot_content_ids '
                'field check of CollectionModel\', '
                '[u"Entity id 0: based on field snapshot_content_ids having '
                'value 0-1, expect model CollectionSnapshotContentModel '
                'with id 0-1 but it doesn\'t exist"]]'),
            u'[u\'fully-validated CollectionModel\', 2]']
        run_job_and_check_output(self, expected_output, sort=True)
class CollectionSnapshotMetadataModelValidatorTests(
test_utils.GenericTestBase):
def setUp(self):
super(CollectionSnapshotMetadataModelValidatorTests, self).setUp()
self.signup(self.OWNER_EMAIL, self.OWNER_USERNAME)
self.signup(USER_EMAIL, USER_NAME)
self.owner_id = self.get_user_id_from_email(self.OWNER_EMAIL)
self.user_id = self.get_user_id_from_email(USER_EMAIL)
explorations = [exp_domain.Exploration.create_default_exploration(
'%s' % i,
title='title %d' % i,
category='category%d' % i,
) for i in xrange(6)]
for exp in explorations:
exp_services.save_new_exploration(self.owner_id, exp)
collections = [collection_domain.Collection.create_default_collection(
'%s' % i,
title='title %d' % i,
category='category%d' % i,
objective='objective%d' % i,
) for i in xrange(3)]
for index, collection in enumerate(collections):
collection.add_node('%s' % (index * 2))
collection.add_node('%s' % (index * 2 + 1))
if collection.id != '0':
collection_services.save_new_collection(
self.owner_id, collection)
else:
collection_services.save_new_collection(
self.user_id, collection)
self.model_instance_0 = (
collection_models.CollectionSnapshotMetadataModel.get_by_id(
'0-1'))
self.model_instance_1 = (
collection_models.CollectionSnapshotMetadataModel.get_by_id(
'1-1'))
self.model_instance_2 = (
collection_models.CollectionSnapshotMetadataModel.get_by_id(
'2-1'))
self.job_class = (
prod_validation_jobs_one_off
.CollectionSnapshotMetadataModelAuditOneOffJob)
def test_standard_operation(self):
collection_services.update_collection(
self.owner_id, '0', [{
'cmd': 'edit_collection_property',
'property_name': 'title',
'new_value': 'New title'
}], 'Changes.')
expected_output = [
u'[u\'fully-validated CollectionSnapshotMetadataModel\', 4]']
run_job_and_check_output(self, expected_output)
def test_model_with_created_on_greater_than_last_updated(self):
self.model_instance_0.created_on = (
self.model_instance_0.last_updated + datetime.timedelta(days=1))
self.model_instance_0.put()
expected_output = [(
u'[u\'failed validation check for time field relation check '
'of CollectionSnapshotMetadataModel\', '
'[u\'Entity id %s: The created_on field has a value '
'%s which is greater than the value '
'%s of last_updated field\']]') % (
self.model_instance_0.id,
self.model_instance_0.created_on,
self.model_instance_0.last_updated
), (
u'[u\'fully-validated '
'CollectionSnapshotMetadataModel\', 2]')]
run_job_and_check_output(self, expected_output, sort=True)
def test_model_with_last_updated_greater_than_current_time(self):
self.model_instance_1.delete()
self.model_instance_2.delete()
expected_output = [(
u'[u\'failed validation check for current time check of '
'CollectionSnapshotMetadataModel\', '
'[u\'Entity id %s: The last_updated field has a '
'value %s which is greater than the time when the job was run\']]'
) % (self.model_instance_0.id, self.model_instance_0.last_updated)]
with self.swap(datetime, 'datetime', MockDatetime13Hours), self.swap(
db.DateTimeProperty, 'data_type', MockDatetime13Hours):
update_datastore_types_for_mock_datetime()
run_job_and_check_output(self, expected_output, sort=True)
def test_missing_collection_model_failure(self):
collection_models.CollectionModel.get_by_id('0').delete(
self.user_id, '', [])
expected_output = [
(
u'[u\'failed validation check for collection_ids '
'field check of CollectionSnapshotMetadataModel\', '
'[u"Entity id 0-1: based on field collection_ids '
'having value 0, expect model CollectionModel with '
'id 0 but it doesn\'t exist", u"Entity id 0-2: based on field '
'collection_ids having value 0, expect model '
'CollectionModel with id 0 but it doesn\'t exist"]]'
), (
u'[u\'fully-validated '
'CollectionSnapshotMetadataModel\', 2]')]
run_job_and_check_output(self, expected_output, sort=True)
def test_missing_committer_model_failure(self):
user_models.UserSettingsModel.get_by_id(self.user_id).delete()
expected_output = [
(
u'[u\'failed validation check for committer_ids field '
'check of CollectionSnapshotMetadataModel\', '
'[u"Entity id 0-1: based on field committer_ids having '
'value %s, expect model UserSettingsModel with id %s '
'but it doesn\'t exist"]]'
) % (self.user_id, self.user_id), (
u'[u\'fully-validated '
'CollectionSnapshotMetadataModel\', 2]')]
run_job_and_check_output(self, expected_output, sort=True)
def test_invalid_collection_version_in_model_id(self):
model_with_invalid_version_in_id = (
collection_models.CollectionSnapshotMetadataModel(
id='0-3', committer_id=self.owner_id, commit_type='edit',
commit_message='msg', commit_cmds=[{}]))
model_with_invalid_version_in_id.put()
expected_output = [
(
u'[u\'failed validation check for collection model '
'version check of CollectionSnapshotMetadataModel\', '
'[u\'Entity id 0-3: Collection model corresponding to '
'id 0 has a version 1 which is less than the version 3 in '
'snapshot metadata model id\']]'
), (
u'[u\'fully-validated CollectionSnapshotMetadataModel\', '
'3]')]
run_job_and_check_output(self, expected_output, sort=True)
    # NOTE(review): 'schmea' in the method name is a typo for 'schema';
    # renaming would change the test id, so it is left as-is.
    def test_model_with_invalid_commit_cmd_schmea(self):
        """Commit cmds with a missing required attribute and with an extra
        attribute should each produce a commit-cmd validation failure.
        """
        self.model_instance_0.commit_cmds = [{
            'cmd': 'add_collection_node',
        }, {
            'cmd': 'delete_collection_node',
            'invalid_attribute': 'invalid'
        }]
        self.model_instance_0.put()
        expected_output = [
            (
                u'[u\'failed validation check for commit cmd '
                'delete_collection_node check of '
                'CollectionSnapshotMetadataModel\', '
                '[u"Entity id 0-1: Commit command domain validation '
                'for command: {u\'cmd\': u\'delete_collection_node\', '
                'u\'invalid_attribute\': u\'invalid\'} failed with error: '
                'The following required attributes are missing: '
                'exploration_id, The following extra attributes '
                'are present: invalid_attribute"]]'
            ), (
                u'[u\'failed validation check for commit cmd '
                'add_collection_node check of '
                'CollectionSnapshotMetadataModel\', '
                '[u"Entity id 0-1: Commit command domain validation '
                'for command: {u\'cmd\': u\'add_collection_node\'} failed '
                'with error: The following required attributes are '
                'missing: exploration_id"]]'
            ), u'[u\'fully-validated CollectionSnapshotMetadataModel\', 2]']
        run_job_and_check_output(self, expected_output, sort=True)
class CollectionSnapshotContentModelValidatorTests(test_utils.GenericTestBase):
    """Unit tests for the CollectionSnapshotContentModel audit one-off job."""

    def setUp(self):
        """Create 6 explorations and 3 two-node collections so that
        snapshot content models '0-1', '1-1' and '2-1' exist.
        """
        super(CollectionSnapshotContentModelValidatorTests, self).setUp()
        self.signup(self.OWNER_EMAIL, self.OWNER_USERNAME)
        self.owner_id = self.get_user_id_from_email(self.OWNER_EMAIL)
        explorations = [exp_domain.Exploration.create_default_exploration(
            '%s' % i,
            title='title %d' % i,
            category='category%d' % i,
        ) for i in xrange(6)]
        for exp in explorations:
            exp_services.save_new_exploration(self.owner_id, exp)
        collections = [collection_domain.Collection.create_default_collection(
            '%s' % i,
            title='title %d' % i,
            category='category%d' % i,
            objective='objective%d' % i,
        ) for i in xrange(3)]
        # Collection i contains explorations 2i and 2i+1.
        for index, collection in enumerate(collections):
            collection.add_node('%s' % (index * 2))
            collection.add_node('%s' % (index * 2 + 1))
            collection_services.save_new_collection(self.owner_id, collection)
        self.model_instance_0 = (
            collection_models.CollectionSnapshotContentModel.get_by_id(
                '0-1'))
        self.model_instance_1 = (
            collection_models.CollectionSnapshotContentModel.get_by_id(
                '1-1'))
        self.model_instance_2 = (
            collection_models.CollectionSnapshotContentModel.get_by_id(
                '2-1'))
        self.job_class = (
            prod_validation_jobs_one_off
            .CollectionSnapshotContentModelAuditOneOffJob)

    def test_standard_operation(self):
        """All snapshot content models (including the new '0-2' created by
        the update) should be fully validated.
        """
        collection_services.update_collection(
            self.owner_id, '0', [{
                'cmd': 'edit_collection_property',
                'property_name': 'title',
                'new_value': 'New title'
            }], 'Changes.')
        expected_output = [
            u'[u\'fully-validated CollectionSnapshotContentModel\', 4]']
        run_job_and_check_output(self, expected_output)

    def test_model_with_created_on_greater_than_last_updated(self):
        """created_on later than last_updated should fail the time field
        relation check.
        """
        self.model_instance_0.created_on = (
            self.model_instance_0.last_updated + datetime.timedelta(days=1))
        self.model_instance_0.put()
        expected_output = [(
            u'[u\'failed validation check for time field relation check '
            'of CollectionSnapshotContentModel\', '
            '[u\'Entity id %s: The created_on field has a value '
            '%s which is greater than the value '
            '%s of last_updated field\']]') % (
                self.model_instance_0.id,
                self.model_instance_0.created_on,
                self.model_instance_0.last_updated
            ), (
                u'[u\'fully-validated '
                'CollectionSnapshotContentModel\', 2]')]
        run_job_and_check_output(self, expected_output, sort=True)

    def test_model_with_last_updated_greater_than_current_time(self):
        """With the job clock mocked 13 hours back, last_updated appears to
        be in the future and fails the current time check.
        """
        self.model_instance_1.delete()
        self.model_instance_2.delete()
        expected_output = [(
            u'[u\'failed validation check for current time check of '
            'CollectionSnapshotContentModel\', '
            '[u\'Entity id %s: The last_updated field has a '
            'value %s which is greater than the time when the job was run\']]'
        ) % (self.model_instance_0.id, self.model_instance_0.last_updated)]
        with self.swap(datetime, 'datetime', MockDatetime13Hours), self.swap(
            db.DateTimeProperty, 'data_type', MockDatetime13Hours):
            update_datastore_types_for_mock_datetime()
            run_job_and_check_output(self, expected_output, sort=True)

    def test_missing_collection_model_failure(self):
        """Deleting the referenced CollectionModel should fail the
        collection_ids field check for both snapshot versions of '0'.
        """
        collection_models.CollectionModel.get_by_id('0').delete(
            self.owner_id, '', [])
        expected_output = [
            (
                u'[u\'failed validation check for collection_ids '
                'field check of CollectionSnapshotContentModel\', '
                '[u"Entity id 0-1: based on field collection_ids '
                'having value 0, expect model CollectionModel with '
                'id 0 but it doesn\'t exist", u"Entity id 0-2: based on field '
                'collection_ids having value 0, expect model '
                'CollectionModel with id 0 but it doesn\'t exist"]]'
            ), (
                u'[u\'fully-validated '
                'CollectionSnapshotContentModel\', 2]')]
        run_job_and_check_output(self, expected_output, sort=True)

    def test_invalid_collection_version_in_model_id(self):
        """A snapshot content model id claiming version 3 while the
        collection is at version 1 should fail the version check.
        """
        model_with_invalid_version_in_id = (
            collection_models.CollectionSnapshotContentModel(
                id='0-3'))
        model_with_invalid_version_in_id.content = {}
        model_with_invalid_version_in_id.put()
        expected_output = [
            (
                u'[u\'failed validation check for collection model '
                'version check of CollectionSnapshotContentModel\', '
                '[u\'Entity id 0-3: Collection model corresponding to '
                'id 0 has a version 1 which is less than '
                'the version 3 in snapshot content model id\']]'
            ), (
                u'[u\'fully-validated CollectionSnapshotContentModel\', '
                '3]')]
        run_job_and_check_output(self, expected_output, sort=True)
class CollectionRightsModelValidatorTests(test_utils.GenericTestBase):
    """Unit tests for the CollectionRightsModel audit one-off job."""

    def setUp(self):
        """Create 3 collections (owned by self.owner_id), assign an editor
        to collection '0' and a viewer to collection '2', and look up the
        corresponding CollectionRightsModel instances.
        """
        super(CollectionRightsModelValidatorTests, self).setUp()
        self.signup(self.OWNER_EMAIL, self.OWNER_USERNAME)
        self.signup(USER_EMAIL, USER_NAME)
        self.user_id = self.get_user_id_from_email(USER_EMAIL)
        self.owner_id = self.get_user_id_from_email(self.OWNER_EMAIL)
        self.owner = user_services.UserActionsInfo(self.owner_id)
        editor_email = '[email protected]'
        viewer_email = '[email protected]'
        self.signup(editor_email, 'editor')
        self.signup(viewer_email, 'viewer')
        self.editor_id = self.get_user_id_from_email(editor_email)
        self.viewer_id = self.get_user_id_from_email(viewer_email)
        explorations = [exp_domain.Exploration.create_default_exploration(
            '%s' % i,
            title='title %d' % i,
            category='category%d' % i,
        ) for i in xrange(6)]
        for exp in explorations:
            exp_services.save_new_exploration(self.owner_id, exp)
        collections = [collection_domain.Collection.create_default_collection(
            '%s' % i,
            title='title %d' % i,
            category='category%d' % i,
            objective='objective%d' % i,
        ) for i in xrange(3)]
        # Collection i contains explorations 2i and 2i+1.
        for index, collection in enumerate(collections):
            collection.add_node('%s' % (index * 2))
            collection.add_node('%s' % (index * 2 + 1))
            collection_services.save_new_collection(self.owner_id, collection)
        rights_manager.assign_role_for_collection(
            self.owner, '0', self.editor_id, rights_manager.ROLE_EDITOR)
        rights_manager.assign_role_for_collection(
            self.owner, '2', self.viewer_id, rights_manager.ROLE_VIEWER)
        self.model_instance_0 = (
            collection_models.CollectionRightsModel.get_by_id('0'))
        self.model_instance_1 = (
            collection_models.CollectionRightsModel.get_by_id('1'))
        self.model_instance_2 = (
            collection_models.CollectionRightsModel.get_by_id('2'))
        self.job_class = (
            prod_validation_jobs_one_off.CollectionRightsModelAuditOneOffJob)

    def test_standard_operation(self):
        """All three rights models should be fully validated."""
        rights_manager.publish_collection(self.owner, '0')
        expected_output = [
            u'[u\'fully-validated CollectionRightsModel\', 3]']
        run_job_and_check_output(self, expected_output)

    def test_model_with_created_on_greater_than_last_updated(self):
        """created_on later than last_updated should fail the time field
        relation check.
        """
        self.model_instance_0.created_on = (
            self.model_instance_0.last_updated + datetime.timedelta(days=1))
        # Rights models are versioned, so changes go through commit().
        self.model_instance_0.commit(
            feconf.SYSTEM_COMMITTER_ID, 'created_on test', [])
        expected_output = [(
            u'[u\'failed validation check for time field relation check '
            'of CollectionRightsModel\', '
            '[u\'Entity id %s: The created_on field has a value '
            '%s which is greater than the value '
            '%s of last_updated field\']]') % (
                self.model_instance_0.id,
                self.model_instance_0.created_on,
                self.model_instance_0.last_updated
            ), u'[u\'fully-validated CollectionRightsModel\', 2]']
        run_job_and_check_output(self, expected_output, sort=True)

    def test_model_with_last_updated_greater_than_current_time(self):
        """With the job clock mocked 13 hours back, last_updated appears to
        be in the future and fails the current time check.
        """
        self.model_instance_1.delete(feconf.SYSTEM_COMMITTER_ID, 'delete')
        self.model_instance_2.delete(feconf.SYSTEM_COMMITTER_ID, 'delete')
        expected_output = [(
            u'[u\'failed validation check for current time check of '
            'CollectionRightsModel\', '
            '[u\'Entity id %s: The last_updated field has a '
            'value %s which is greater than the time when the job was run\']]'
        ) % (self.model_instance_0.id, self.model_instance_0.last_updated)]
        with self.swap(datetime, 'datetime', MockDatetime13Hours), self.swap(
            db.DateTimeProperty, 'data_type', MockDatetime13Hours):
            update_datastore_types_for_mock_datetime()
            run_job_and_check_output(self, expected_output, sort=True)

    # NOTE(review): the name reads awkwardly; presumably it means
    # 'first_published_msec greater than current time'. Renaming would
    # change the test id, so left as-is.
    def test_model_with_first_published_datetime_than_current_time(self):
        """A first_published_msec in the future should fail the first
        published msec check.
        """
        rights_manager.publish_collection(self.owner, '0')
        rights_manager.publish_collection(self.owner, '1')
        # Scale the timestamp far into the future.
        self.model_instance_0.first_published_msec = (
            self.model_instance_0.first_published_msec * 1000000.0)
        self.model_instance_0.commit(feconf.SYSTEM_COMMITTER_ID, '', [])
        expected_output = [
            (
                u'[u\'failed validation check for first published msec check '
                'of CollectionRightsModel\', '
                '[u\'Entity id 0: The first_published_msec field has a '
                'value %s which is greater than the time when the job was '
                'run\']]'
            ) % (self.model_instance_0.first_published_msec),
            u'[u\'fully-validated CollectionRightsModel\', 2]']
        run_job_and_check_output(self, expected_output, sort=True)

    def test_missing_collection_model_failure(self):
        """Deleting the referenced CollectionModel should fail the
        collection_ids field check.
        """
        collection_models.CollectionModel.get_by_id('0').delete(
            feconf.SYSTEM_COMMITTER_ID, '', [])
        expected_output = [
            (
                u'[u\'failed validation check for collection_ids '
                'field check of CollectionRightsModel\', '
                '[u"Entity id 0: based on field collection_ids having '
                'value 0, expect model CollectionModel with id 0 but '
                'it doesn\'t exist"]]'),
            u'[u\'fully-validated CollectionRightsModel\', 2]']
        run_job_and_check_output(self, expected_output, sort=True)

    def test_missing_owner_user_model_failure(self):
        """Deleting an owner's UserSettingsModel should fail the
        owner_user_ids field check.
        """
        rights_manager.assign_role_for_collection(
            self.owner, '0', self.user_id, rights_manager.ROLE_OWNER)
        user_models.UserSettingsModel.get_by_id(self.user_id).delete()
        expected_output = [
            (
                u'[u\'failed validation check for owner_user_ids '
                'field check of CollectionRightsModel\', '
                '[u"Entity id 0: based on field owner_user_ids having '
                'value %s, expect model UserSettingsModel with id %s '
                'but it doesn\'t exist"]]') % (self.user_id, self.user_id),
            u'[u\'fully-validated CollectionRightsModel\', 2]']
        run_job_and_check_output(self, expected_output, sort=True)

    def test_missing_editor_user_model_failure(self):
        """Deleting the editor's UserSettingsModel should fail the
        editor_user_ids field check.
        """
        user_models.UserSettingsModel.get_by_id(self.editor_id).delete()
        expected_output = [
            (
                u'[u\'failed validation check for editor_user_ids '
                'field check of CollectionRightsModel\', '
                '[u"Entity id 0: based on field editor_user_ids having '
                'value %s, expect model UserSettingsModel with id %s but '
                'it doesn\'t exist"]]') % (
                    self.editor_id, self.editor_id),
            u'[u\'fully-validated CollectionRightsModel\', 2]']
        run_job_and_check_output(self, expected_output, sort=True)

    def test_missing_viewer_user_model_failure(self):
        """Deleting the viewer's UserSettingsModel should fail the
        viewer_user_ids field check (viewer was assigned to collection '2').
        """
        user_models.UserSettingsModel.get_by_id(self.viewer_id).delete()
        expected_output = [
            (
                u'[u\'failed validation check for viewer_user_ids '
                'field check of CollectionRightsModel\', '
                '[u"Entity id 2: based on field viewer_user_ids having '
                'value %s, expect model UserSettingsModel with id %s but '
                'it doesn\'t exist"]]') % (
                    self.viewer_id, self.viewer_id),
            u'[u\'fully-validated CollectionRightsModel\', 2]']
        run_job_and_check_output(self, expected_output, sort=True)

    def test_missing_snapshot_metadata_model_failure(self):
        """Deleting the rights snapshot metadata model should fail the
        snapshot_metadata_ids field check.
        """
        collection_models.CollectionRightsSnapshotMetadataModel.get_by_id(
            '0-1').delete()
        expected_output = [
            (
                u'[u\'failed validation check for snapshot_metadata_ids '
                'field check of CollectionRightsModel\', '
                '[u"Entity id 0: based on field snapshot_metadata_ids having '
                'value 0-1, expect model '
                'CollectionRightsSnapshotMetadataModel '
                'with id 0-1 but it doesn\'t exist"]]'
            ),
            u'[u\'fully-validated CollectionRightsModel\', 2]']
        run_job_and_check_output(self, expected_output, sort=True)

    def test_missing_snapshot_content_model_failure(self):
        """Deleting the rights snapshot content model should fail the
        snapshot_content_ids field check.
        """
        collection_models.CollectionRightsSnapshotContentModel.get_by_id(
            '0-1').delete()
        expected_output = [
            (
                u'[u\'failed validation check for snapshot_content_ids '
                'field check of CollectionRightsModel\', '
                '[u"Entity id 0: based on field snapshot_content_ids having '
                'value 0-1, expect model CollectionRightsSnapshotContentModel '
                'with id 0-1 but it doesn\'t exist"]]'),
            u'[u\'fully-validated CollectionRightsModel\', 2]']
        run_job_and_check_output(self, expected_output, sort=True)
class CollectionRightsSnapshotMetadataModelValidatorTests(
        test_utils.GenericTestBase):
    """Unit tests for the CollectionRightsSnapshotMetadataModel audit
    one-off job.
    """

    def setUp(self):
        """Create 3 collections; collection '0' is saved by self.user_id so
        its rights snapshot is committed by a different user than the rest.
        """
        super(CollectionRightsSnapshotMetadataModelValidatorTests, self).setUp(
            )
        self.signup(self.OWNER_EMAIL, self.OWNER_USERNAME)
        self.signup(USER_EMAIL, USER_NAME)
        self.owner_id = self.get_user_id_from_email(self.OWNER_EMAIL)
        self.user_id = self.get_user_id_from_email(USER_EMAIL)
        explorations = [exp_domain.Exploration.create_default_exploration(
            '%s' % i,
            title='title %d' % i,
            category='category%d' % i,
        ) for i in xrange(6)]
        for exp in explorations:
            exp_services.save_new_exploration(self.owner_id, exp)
        collections = [collection_domain.Collection.create_default_collection(
            '%s' % i,
            title='title %d' % i,
            category='category%d' % i,
            objective='objective%d' % i,
        ) for i in xrange(3)]
        # Collection i contains explorations 2i and 2i+1.
        for index, collection in enumerate(collections):
            collection.add_node('%s' % (index * 2))
            collection.add_node('%s' % (index * 2 + 1))
            if collection.id != '0':
                collection_services.save_new_collection(
                    self.owner_id, collection)
            else:
                collection_services.save_new_collection(
                    self.user_id, collection)
        self.model_instance_0 = (
            collection_models.CollectionRightsSnapshotMetadataModel.get_by_id(
                '0-1'))
        self.model_instance_1 = (
            collection_models.CollectionRightsSnapshotMetadataModel.get_by_id(
                '1-1'))
        self.model_instance_2 = (
            collection_models.CollectionRightsSnapshotMetadataModel.get_by_id(
                '2-1'))
        self.job_class = (
            prod_validation_jobs_one_off
            .CollectionRightsSnapshotMetadataModelAuditOneOffJob)

    def test_standard_operation(self):
        """All three snapshot metadata models should be fully validated."""
        expected_output = [
            u'[u\'fully-validated CollectionRightsSnapshotMetadataModel\', 3]']
        run_job_and_check_output(self, expected_output)

    def test_model_with_created_on_greater_than_last_updated(self):
        """created_on later than last_updated should fail the time field
        relation check.
        """
        self.model_instance_0.created_on = (
            self.model_instance_0.last_updated + datetime.timedelta(days=1))
        self.model_instance_0.put()
        expected_output = [(
            u'[u\'failed validation check for time field relation check '
            'of CollectionRightsSnapshotMetadataModel\', '
            '[u\'Entity id %s: The created_on field has a value '
            '%s which is greater than the value '
            '%s of last_updated field\']]') % (
                self.model_instance_0.id,
                self.model_instance_0.created_on,
                self.model_instance_0.last_updated
            ), (
                u'[u\'fully-validated '
                'CollectionRightsSnapshotMetadataModel\', 2]')]
        run_job_and_check_output(self, expected_output, sort=True)

    def test_model_with_last_updated_greater_than_current_time(self):
        """With the job clock mocked 13 hours back, last_updated appears to
        be in the future and fails the current time check.
        """
        self.model_instance_1.delete()
        self.model_instance_2.delete()
        expected_output = [(
            u'[u\'failed validation check for current time check of '
            'CollectionRightsSnapshotMetadataModel\', '
            '[u\'Entity id %s: The last_updated field has a '
            'value %s which is greater than the time when the job was run\']]'
        ) % (self.model_instance_0.id, self.model_instance_0.last_updated)]
        with self.swap(datetime, 'datetime', MockDatetime13Hours), self.swap(
            db.DateTimeProperty, 'data_type', MockDatetime13Hours):
            update_datastore_types_for_mock_datetime()
            run_job_and_check_output(self, expected_output, sort=True)

    def test_missing_collection_rights_model_failure(self):
        """Deleting the referenced CollectionRightsModel should fail the
        collection_rights_ids field check for both snapshot versions.
        """
        collection_models.CollectionRightsModel.get_by_id('0').delete(
            self.user_id, '', [])
        expected_output = [
            (
                u'[u\'failed validation check for collection_rights_ids '
                'field check of CollectionRightsSnapshotMetadataModel\', '
                '[u"Entity id 0-1: based on field collection_rights_ids '
                'having value 0, expect model CollectionRightsModel with '
                'id 0 but it doesn\'t exist", u"Entity id 0-2: based on field '
                'collection_rights_ids having value 0, expect model '
                'CollectionRightsModel with id 0 but it doesn\'t exist"]]'
            ), (
                u'[u\'fully-validated '
                'CollectionRightsSnapshotMetadataModel\', 2]')]
        run_job_and_check_output(self, expected_output, sort=True)

    def test_missing_committer_model_failure(self):
        """Deleting the committer's UserSettingsModel should fail the
        committer_ids field check.
        """
        user_models.UserSettingsModel.get_by_id(self.user_id).delete()
        expected_output = [
            (
                u'[u\'failed validation check for committer_ids field '
                'check of CollectionRightsSnapshotMetadataModel\', '
                '[u"Entity id 0-1: based on field committer_ids having '
                'value %s, expect model UserSettingsModel with id %s '
                'but it doesn\'t exist"]]'
            ) % (self.user_id, self.user_id), (
                u'[u\'fully-validated '
                'CollectionRightsSnapshotMetadataModel\', 2]')]
        run_job_and_check_output(self, expected_output, sort=True)

    def test_invalid_collection_version_in_model_id(self):
        """A snapshot metadata id claiming version 3 while the rights model
        is at version 1 should fail the version check.
        """
        model_with_invalid_version_in_id = (
            collection_models.CollectionRightsSnapshotMetadataModel(
                id='0-3', committer_id=self.owner_id, commit_type='edit',
                commit_message='msg', commit_cmds=[{}]))
        model_with_invalid_version_in_id.put()
        expected_output = [
            (
                u'[u\'failed validation check for collection rights model '
                'version check of CollectionRightsSnapshotMetadataModel\', '
                '[u\'Entity id 0-3: CollectionRights model corresponding to '
                'id 0 has a version 1 which is less than the version 3 in '
                'snapshot metadata model id\']]'
            ), (
                u'[u\'fully-validated '
                'CollectionRightsSnapshotMetadataModel\', 3]')]
        run_job_and_check_output(self, expected_output, sort=True)

    # NOTE(review): 'schmea' in the method name is a typo for 'schema';
    # renaming would change the test id, so it is left as-is.
    def test_model_with_invalid_commit_cmd_schmea(self):
        """Commit cmds with a missing required attribute and with an extra
        attribute should each produce a commit-cmd validation failure.
        """
        self.model_instance_0.commit_cmds = [{
            'cmd': 'change_collection_status',
            'old_status': rights_manager.ACTIVITY_STATUS_PUBLIC,
        }, {
            'cmd': 'release_ownership',
            'invalid_attribute': 'invalid'
        }]
        self.model_instance_0.put()
        expected_output = [
            (
                u'[u\'failed validation check for commit cmd '
                'change_collection_status check of '
                'CollectionRightsSnapshotMetadataModel\', '
                '[u"Entity id 0-1: Commit command domain validation for '
                'command: {u\'old_status\': u\'public\', '
                'u\'cmd\': u\'change_collection_status\'} failed with error: '
                'The following required attributes are missing: '
                'new_status"]]'
            ), (
                u'[u\'failed validation check for commit cmd '
                'release_ownership check of '
                'CollectionRightsSnapshotMetadataModel\', '
                '[u"Entity id 0-1: Commit command domain validation '
                'for command: {u\'cmd\': u\'release_ownership\', '
                'u\'invalid_attribute\': u\'invalid\'} failed with error: '
                'The following extra attributes are present: '
                'invalid_attribute"]]'
            ), (
                u'[u\'fully-validated '
                'CollectionRightsSnapshotMetadataModel\', 2]')]
        run_job_and_check_output(self, expected_output, sort=True)
class CollectionRightsSnapshotContentModelValidatorTests(
        test_utils.GenericTestBase):
    """Unit tests for the CollectionRightsSnapshotContentModel audit
    one-off job.
    """

    def setUp(self):
        """Create 6 explorations and 3 two-node collections so that rights
        snapshot content models '0-1', '1-1' and '2-1' exist.
        """
        super(CollectionRightsSnapshotContentModelValidatorTests, self).setUp(
            )
        self.signup(self.OWNER_EMAIL, self.OWNER_USERNAME)
        self.owner_id = self.get_user_id_from_email(self.OWNER_EMAIL)
        explorations = [exp_domain.Exploration.create_default_exploration(
            '%s' % i,
            title='title %d' % i,
            category='category%d' % i,
        ) for i in xrange(6)]
        for exp in explorations:
            exp_services.save_new_exploration(self.owner_id, exp)
        collections = [collection_domain.Collection.create_default_collection(
            '%s' % i,
            title='title %d' % i,
            category='category%d' % i,
            objective='objective%d' % i,
        ) for i in xrange(3)]
        # Collection i contains explorations 2i and 2i+1.
        for index, collection in enumerate(collections):
            collection.add_node('%s' % (index * 2))
            collection.add_node('%s' % (index * 2 + 1))
            collection_services.save_new_collection(self.owner_id, collection)
        self.model_instance_0 = (
            collection_models.CollectionRightsSnapshotContentModel.get_by_id(
                '0-1'))
        self.model_instance_1 = (
            collection_models.CollectionRightsSnapshotContentModel.get_by_id(
                '1-1'))
        self.model_instance_2 = (
            collection_models.CollectionRightsSnapshotContentModel.get_by_id(
                '2-1'))
        self.job_class = (
            prod_validation_jobs_one_off
            .CollectionRightsSnapshotContentModelAuditOneOffJob)

    def test_standard_operation(self):
        """All three snapshot content models should be fully validated."""
        expected_output = [
            u'[u\'fully-validated CollectionRightsSnapshotContentModel\', 3]']
        run_job_and_check_output(self, expected_output)

    def test_model_with_created_on_greater_than_last_updated(self):
        """created_on later than last_updated should fail the time field
        relation check.
        """
        self.model_instance_0.created_on = (
            self.model_instance_0.last_updated + datetime.timedelta(days=1))
        self.model_instance_0.put()
        expected_output = [(
            u'[u\'failed validation check for time field relation check '
            'of CollectionRightsSnapshotContentModel\', '
            '[u\'Entity id %s: The created_on field has a value '
            '%s which is greater than the value '
            '%s of last_updated field\']]') % (
                self.model_instance_0.id,
                self.model_instance_0.created_on,
                self.model_instance_0.last_updated
            ), (
                u'[u\'fully-validated '
                'CollectionRightsSnapshotContentModel\', 2]')]
        run_job_and_check_output(self, expected_output, sort=True)

    def test_model_with_last_updated_greater_than_current_time(self):
        """With the job clock mocked 13 hours back, last_updated appears to
        be in the future and fails the current time check.
        """
        self.model_instance_1.delete()
        self.model_instance_2.delete()
        expected_output = [(
            u'[u\'failed validation check for current time check of '
            'CollectionRightsSnapshotContentModel\', '
            '[u\'Entity id %s: The last_updated field has a '
            'value %s which is greater than the time when the job was run\']]'
        ) % (self.model_instance_0.id, self.model_instance_0.last_updated)]
        with self.swap(datetime, 'datetime', MockDatetime13Hours), self.swap(
            db.DateTimeProperty, 'data_type', MockDatetime13Hours):
            update_datastore_types_for_mock_datetime()
            run_job_and_check_output(self, expected_output, sort=True)

    # NOTE(review): despite the name, this deletes the CollectionRightsModel
    # (not the CollectionModel); the assertion below matches that behavior.
    def test_missing_collection_model_failure(self):
        """Deleting the referenced CollectionRightsModel should fail the
        collection_rights_ids field check for both snapshot versions.
        """
        collection_models.CollectionRightsModel.get_by_id('0').delete(
            self.owner_id, '', [])
        expected_output = [
            (
                u'[u\'failed validation check for collection_rights_ids '
                'field check of CollectionRightsSnapshotContentModel\', '
                '[u"Entity id 0-1: based on field collection_rights_ids '
                'having value 0, expect model CollectionRightsModel with '
                'id 0 but it doesn\'t exist", u"Entity id 0-2: based on field '
                'collection_rights_ids having value 0, expect model '
                'CollectionRightsModel with id 0 but it doesn\'t exist"]]'
            ), (
                u'[u\'fully-validated '
                'CollectionRightsSnapshotContentModel\', 2]')]
        run_job_and_check_output(self, expected_output, sort=True)

    def test_invalid_collection_version_in_model_id(self):
        """A snapshot content id claiming version 3 while the rights model
        is at version 1 should fail the version check.
        """
        model_with_invalid_version_in_id = (
            collection_models.CollectionRightsSnapshotContentModel(
                id='0-3'))
        model_with_invalid_version_in_id.content = {}
        model_with_invalid_version_in_id.put()
        expected_output = [
            (
                u'[u\'failed validation check for collection rights model '
                'version check of CollectionRightsSnapshotContentModel\', '
                '[u\'Entity id 0-3: CollectionRights model corresponding to '
                'id 0 has a version 1 which is less than the version 3 in '
                'snapshot content model id\']]'
            ), (
                u'[u\'fully-validated CollectionRightsSnapshotContentModel\', '
                '3]')]
        run_job_and_check_output(self, expected_output, sort=True)
class CollectionCommitLogEntryModelValidatorTests(test_utils.GenericTestBase):
    """Unit tests for the CollectionCommitLogEntryModel audit one-off job."""

    def setUp(self):
        """Create 3 collections plus a hand-built 'rights-1-1' commit log
        entry, and look up the 'collection-<id>-1' entries created by the
        collection saves.
        """
        super(CollectionCommitLogEntryModelValidatorTests, self).setUp()
        self.signup(self.OWNER_EMAIL, self.OWNER_USERNAME)
        self.owner_id = self.get_user_id_from_email(self.OWNER_EMAIL)
        explorations = [exp_domain.Exploration.create_default_exploration(
            '%s' % i,
            title='title %d' % i,
            category='category%d' % i,
        ) for i in xrange(6)]
        for exp in explorations:
            exp_services.save_new_exploration(self.owner_id, exp)
        collections = [collection_domain.Collection.create_default_collection(
            '%s' % i,
            title='title %d' % i,
            category='category%d' % i,
            objective='objective%d' % i,
        ) for i in xrange(3)]
        # Collection i contains explorations 2i and 2i+1.
        for index, collection in enumerate(collections):
            collection.add_node('%s' % (index * 2))
            collection.add_node('%s' % (index * 2 + 1))
            collection_services.save_new_collection(self.owner_id, collection)
        # A rights-change commit log entry, stored directly since no
        # rights-changing operation is performed in this setup.
        self.rights_model_instance = (
            collection_models.CollectionCommitLogEntryModel(
                id='rights-1-1', user_id=self.owner_id,
                username=self.OWNER_USERNAME, collection_id='1',
                commit_type='edit', commit_message='', commit_cmds=[],
                post_commit_status=constants.ACTIVITY_STATUS_PUBLIC,
                post_commit_community_owned=False,
                post_commit_is_private=False))
        self.rights_model_instance.put()
        self.model_instance_0 = (
            collection_models.CollectionCommitLogEntryModel.get_by_id(
                'collection-0-1'))
        self.model_instance_1 = (
            collection_models.CollectionCommitLogEntryModel.get_by_id(
                'collection-1-1'))
        self.model_instance_2 = (
            collection_models.CollectionCommitLogEntryModel.get_by_id(
                'collection-2-1'))
        self.job_class = (
            prod_validation_jobs_one_off
            .CollectionCommitLogEntryModelAuditOneOffJob)

    def test_standard_operation(self):
        """All five entries (3 saves + 1 rights entry + 1 update) should be
        fully validated.
        """
        collection_services.update_collection(
            self.owner_id, '0', [{
                'cmd': 'edit_collection_property',
                'property_name': 'title',
                'new_value': 'New title'
            }], 'Changes.')
        expected_output = [
            u'[u\'fully-validated CollectionCommitLogEntryModel\', 5]']
        run_job_and_check_output(self, expected_output)

    def test_model_with_created_on_greater_than_last_updated(self):
        """created_on later than last_updated should fail the time field
        relation check.
        """
        self.model_instance_0.created_on = (
            self.model_instance_0.last_updated + datetime.timedelta(days=1))
        self.model_instance_0.put()
        expected_output = [(
            u'[u\'failed validation check for time field relation check '
            'of CollectionCommitLogEntryModel\', '
            '[u\'Entity id %s: The created_on field has a value '
            '%s which is greater than the value '
            '%s of last_updated field\']]') % (
                self.model_instance_0.id,
                self.model_instance_0.created_on,
                self.model_instance_0.last_updated
            ), u'[u\'fully-validated CollectionCommitLogEntryModel\', 3]']
        run_job_and_check_output(self, expected_output, sort=True)

    def test_model_with_last_updated_greater_than_current_time(self):
        """With the job clock mocked 13 hours back, last_updated appears to
        be in the future and fails the current time check.
        """
        self.model_instance_1.delete()
        self.model_instance_2.delete()
        self.rights_model_instance.delete()
        expected_output = [(
            u'[u\'failed validation check for current time check of '
            'CollectionCommitLogEntryModel\', '
            '[u\'Entity id %s: The last_updated field has a '
            'value %s which is greater than the time when the job was run\']]'
        ) % (self.model_instance_0.id, self.model_instance_0.last_updated)]
        with self.swap(datetime, 'datetime', MockDatetime13Hours), self.swap(
            db.DateTimeProperty, 'data_type', MockDatetime13Hours):
            update_datastore_types_for_mock_datetime()
            run_job_and_check_output(self, expected_output, sort=True)

    def test_missing_collection_model_failure(self):
        """Deleting the referenced CollectionModel should fail the
        collection_ids field check for both commit log versions.
        """
        collection_models.CollectionModel.get_by_id('0').delete(
            feconf.SYSTEM_COMMITTER_ID, '', [])
        expected_output = [
            (
                u'[u\'failed validation check for collection_ids '
                'field check of CollectionCommitLogEntryModel\', '
                '[u"Entity id collection-0-1: based on field collection_ids '
                'having value 0, expect model CollectionModel with id 0 '
                'but it doesn\'t exist", u"Entity id collection-0-2: based '
                'on field collection_ids having value 0, expect model '
                'CollectionModel with id 0 but it doesn\'t exist"]]'
            ), u'[u\'fully-validated CollectionCommitLogEntryModel\', 3]']
        # NOTE(review): this test compares with literal_eval=True (unlike the
        # sort=True used elsewhere in this class) — presumably because the
        # ordering of the two entity messages is nondeterministic; confirm.
        run_job_and_check_output(
            self, expected_output, literal_eval=True)

    def test_missing_collection_rights_model_failure(self):
        """Deleting the referenced CollectionRightsModel should fail the
        collection_rights_ids field check for the 'rights-1-1' entry.
        """
        collection_models.CollectionRightsModel.get_by_id('1').delete(
            feconf.SYSTEM_COMMITTER_ID, '', [])
        expected_output = [
            (
                u'[u\'failed validation check for collection_rights_ids '
                'field check of CollectionCommitLogEntryModel\', '
                '[u"Entity id rights-1-1: based on field '
                'collection_rights_ids having value 1, expect model '
                'CollectionRightsModel with id 1 but it doesn\'t exist"]]'
            ), u'[u\'fully-validated CollectionCommitLogEntryModel\', 3]']
        run_job_and_check_output(
            self, expected_output, sort=True)

    def test_invalid_collection_version_in_model_id(self):
        """A commit log entry id claiming version 3 while the collection is
        at version 1 should fail the version check.
        """
        model_with_invalid_version_in_id = (
            collection_models.CollectionCommitLogEntryModel.create(
                '0', 3, self.owner_id, self.OWNER_USERNAME, 'edit',
                'msg', [{}],
                constants.ACTIVITY_STATUS_PUBLIC, False))
        model_with_invalid_version_in_id.collection_id = '0'
        model_with_invalid_version_in_id.put()
        expected_output = [
            (
                u'[u\'failed validation check for collection model '
                'version check of CollectionCommitLogEntryModel\', '
                '[u\'Entity id %s: Collection model corresponding '
                'to id 0 has a version 1 which is less than '
                'the version 3 in commit log entry model id\']]'
            ) % (model_with_invalid_version_in_id.id),
            u'[u\'fully-validated CollectionCommitLogEntryModel\', 4]']
        run_job_and_check_output(self, expected_output, sort=True)

    def test_model_with_invalid_id(self):
        """An entry whose id does not match the expected regex should fail
        the model id check (and the commit cmd check for its empty cmd).
        """
        model_with_invalid_id = (
            collection_models.CollectionCommitLogEntryModel(
                id='invalid-0-1', user_id=self.owner_id,
                username=self.OWNER_USERNAME, commit_type='edit',
                commit_message='msg', commit_cmds=[{}],
                post_commit_status=constants.ACTIVITY_STATUS_PUBLIC,
                post_commit_is_private=False))
        model_with_invalid_id.collection_id = '0'
        model_with_invalid_id.put()
        expected_output = [
            (
                u'[u\'failed validation check for model id check of '
                'CollectionCommitLogEntryModel\', '
                '[u\'Entity id %s: Entity id does not match regex pattern\']]'
            ) % (model_with_invalid_id.id), (
                u'[u\'failed validation check for commit cmd check of '
                'CollectionCommitLogEntryModel\', [u\'Entity id invalid-0-1: '
                'No commit command domain object defined for entity with '
                'commands: [{}]\']]'),
            u'[u\'fully-validated CollectionCommitLogEntryModel\', 4]']
        run_job_and_check_output(self, expected_output, sort=True)

    def test_model_with_invalid_commit_type(self):
        """A commit_type outside the allowed set should fail the commit
        type check.
        """
        self.model_instance_0.commit_type = 'invalid'
        self.model_instance_0.put()
        expected_output = [
            (
                u'[u\'failed validation check for commit type check of '
                'CollectionCommitLogEntryModel\', '
                '[u\'Entity id collection-0-1: Commit type invalid is '
                'not allowed\']]'
            ), u'[u\'fully-validated CollectionCommitLogEntryModel\', 3]']
        run_job_and_check_output(self, expected_output, sort=True)

    def test_model_with_invalid_post_commit_status(self):
        """A post_commit_status outside the allowed set should fail the post
        commit status check.
        """
        self.model_instance_0.post_commit_status = 'invalid'
        self.model_instance_0.put()
        expected_output = [
            (
                u'[u\'failed validation check for post commit status check '
                'of CollectionCommitLogEntryModel\', '
                '[u\'Entity id collection-0-1: Post commit status invalid '
                'is invalid\']]'
            ), u'[u\'fully-validated CollectionCommitLogEntryModel\', 3]']
        run_job_and_check_output(self, expected_output, sort=True)

    def test_model_with_invalid_true_post_commit_is_private(self):
        """post_commit_is_private=True contradicts a public post commit
        status and should fail the post-commit-is-private check.
        """
        self.model_instance_0.post_commit_status = (
            feconf.POST_COMMIT_STATUS_PUBLIC)
        self.model_instance_0.post_commit_is_private = True
        self.model_instance_0.put()
        expected_output = [
            (
                u'[u\'failed validation check for post commit is private '
                'check of CollectionCommitLogEntryModel\', '
                '[u\'Entity id %s: Post commit status is '
                '%s but post_commit_is_private is True\']]'
            ) % (self.model_instance_0.id, feconf.POST_COMMIT_STATUS_PUBLIC),
            u'[u\'fully-validated CollectionCommitLogEntryModel\', 3]']
        run_job_and_check_output(self, expected_output, sort=True)

    def test_model_with_invalid_false_post_commit_is_private(self):
        """post_commit_is_private=False contradicts a private post commit
        status and should fail the post-commit-is-private check.
        """
        self.model_instance_0.post_commit_status = (
            feconf.POST_COMMIT_STATUS_PRIVATE)
        self.model_instance_0.post_commit_is_private = False
        self.model_instance_0.put()
        expected_output = [
            (
                u'[u\'failed validation check for post commit is private '
                'check of CollectionCommitLogEntryModel\', '
                '[u\'Entity id %s: Post commit status is '
                '%s but post_commit_is_private is False\']]'
            ) % (self.model_instance_0.id, feconf.POST_COMMIT_STATUS_PRIVATE),
            u'[u\'fully-validated CollectionCommitLogEntryModel\', 3]']
        run_job_and_check_output(self, expected_output, sort=True)

    # NOTE(review): 'schmea' in the method name is a typo for 'schema';
    # renaming would change the test id, so it is left as-is.
    def test_model_with_invalid_commit_cmd_schmea(self):
        """Commit cmds with a missing required attribute and with an extra
        attribute should each produce a commit-cmd validation failure.
        """
        self.model_instance_0.commit_cmds = [{
            'cmd': 'add_collection_node'
        }, {
            'cmd': 'delete_collection_node',
            'invalid_attribute': 'invalid'
        }]
        self.model_instance_0.put()
        expected_output = [
            (
                u'[u\'failed validation check for commit cmd '
                'delete_collection_node check of '
                'CollectionCommitLogEntryModel\', '
                '[u"Entity id collection-0-1: Commit command domain validation '
                'for command: {u\'cmd\': u\'delete_collection_node\', '
                'u\'invalid_attribute\': u\'invalid\'} failed with error: '
                'The following required attributes are missing: '
                'exploration_id, The following extra attributes '
                'are present: invalid_attribute"]]'
            ), (
                u'[u\'failed validation check for commit cmd '
                'add_collection_node check of CollectionCommitLogEntryModel\', '
                '[u"Entity id collection-0-1: Commit command domain validation '
                'for command: {u\'cmd\': u\'add_collection_node\'} '
                'failed with error: The following required attributes '
                'are missing: exploration_id"]]'),
            u'[u\'fully-validated CollectionCommitLogEntryModel\', 3]']
        run_job_and_check_output(self, expected_output, sort=True)
class CollectionSummaryModelValidatorTests(test_utils.GenericTestBase):
    """Unit tests for CollectionSummaryModelAuditOneOffJob.

    Each test corrupts one field of a CollectionSummaryModel (or deletes
    a related model) and asserts that the one-off audit job emits the
    matching validation-failure message.
    """
    def setUp(self):
        """Creates 6 explorations and 3 collections (2 nodes each, with
        owner/editor/viewer/contributor roles assigned) and caches the
        three CollectionSummaryModel instances the tests mutate.
        """
        super(CollectionSummaryModelValidatorTests, self).setUp()
        self.signup(self.OWNER_EMAIL, self.OWNER_USERNAME)
        self.signup(USER_EMAIL, USER_NAME)
        self.user_id = self.get_user_id_from_email(USER_EMAIL)
        self.owner_id = self.get_user_id_from_email(self.OWNER_EMAIL)
        self.owner = user_services.UserActionsInfo(self.owner_id)
        editor_email = '[email protected]'
        viewer_email = '[email protected]'
        contributor_email = '[email protected]'
        self.signup(editor_email, 'editor')
        self.signup(viewer_email, 'viewer')
        self.signup(contributor_email, 'contributor')
        self.editor_id = self.get_user_id_from_email(editor_email)
        self.viewer_id = self.get_user_id_from_email(viewer_email)
        self.contributor_id = self.get_user_id_from_email(contributor_email)
        # Six explorations so each of the three collections below can
        # reference two of them as nodes.
        explorations = [exp_domain.Exploration.create_default_exploration(
            '%s' % i,
            title='title %d' % i,
            category='category%d' % i,
        ) for i in xrange(6)]
        for exp in explorations:
            exp_services.save_new_exploration(self.owner_id, exp)
        language_codes = ['ar', 'en', 'en']
        collections = [collection_domain.Collection.create_default_collection(
            '%s' % i,
            title='title %d' % i,
            category='category%d' % i,
            objective='objective%d' % i,
            language_code=language_codes[i]
        ) for i in xrange(3)]
        for index, collection in enumerate(collections):
            collection.add_node('%s' % (index * 2))
            collection.add_node('%s' % (index * 2 + 1))
            collection.tags = ['math', 'art']
            collection_services.save_new_collection(self.owner_id, collection)
        rights_manager.assign_role_for_collection(
            self.owner, '0', self.editor_id, rights_manager.ROLE_EDITOR)
        # The contributor edits collection '0' so contributor ids appear
        # in its summary's contributors_summary.
        collection_services.update_collection(
            self.contributor_id, '0', [{
                'cmd': 'edit_collection_property',
                'property_name': 'title',
                'new_value': 'New title'
            }], 'Changes.')
        rights_manager.assign_role_for_collection(
            self.owner, '2', self.viewer_id, rights_manager.ROLE_VIEWER)
        self.model_instance_0 = (
            collection_models.CollectionSummaryModel.get_by_id('0'))
        self.model_instance_0.put()
        self.model_instance_1 = (
            collection_models.CollectionSummaryModel.get_by_id('1'))
        self.model_instance_2 = (
            collection_models.CollectionSummaryModel.get_by_id('2'))
        self.job_class = (
            prod_validation_jobs_one_off.CollectionSummaryModelAuditOneOffJob)
    def test_standard_operation(self):
        """With unmodified models, the job reports all 3 summaries as
        fully validated.
        """
        rights_manager.publish_collection(self.owner, '0')
        collection_services.update_collection(
            self.owner_id, '1', [{
                'cmd': 'edit_collection_property',
                'property_name': 'title',
                'new_value': 'New title'
            }], 'Changes.')
        expected_output = [
            u'[u\'fully-validated CollectionSummaryModel\', 3]']
        run_job_and_check_output(self, expected_output)
    def test_model_with_created_on_greater_than_last_updated(self):
        """created_on later than last_updated fails the time field
        relation check.
        """
        self.model_instance_0.created_on = (
            self.model_instance_0.last_updated + datetime.timedelta(days=1))
        self.model_instance_0.put()
        expected_output = [(
            u'[u\'failed validation check for time field relation check '
            'of CollectionSummaryModel\', '
            '[u\'Entity id %s: The created_on field has a value '
            '%s which is greater than the value '
            '%s of last_updated field\']]') % (
                self.model_instance_0.id,
                self.model_instance_0.created_on,
                self.model_instance_0.last_updated
            ), u'[u\'fully-validated CollectionSummaryModel\', 2]']
        run_job_and_check_output(self, expected_output, sort=True)
    def test_model_with_last_updated_greater_than_current_time(self):
        """Runs the job under a swapped datetime (MockDatetime13Hours)
        so last_updated appears later than the job's clock.
        """
        collection_services.delete_collection(self.owner_id, '1')
        collection_services.delete_collection(self.owner_id, '2')
        expected_output = [(
            u'[u\'failed validation check for current time check of '
            'CollectionSummaryModel\', '
            '[u\'Entity id %s: The last_updated field has a '
            'value %s which is greater than the time when the job was run\']]'
        ) % (self.model_instance_0.id, self.model_instance_0.last_updated)]
        with self.swap(datetime, 'datetime', MockDatetime13Hours), self.swap(
            db.DateTimeProperty, 'data_type', MockDatetime13Hours):
            update_datastore_types_for_mock_datetime()
            run_job_and_check_output(self, expected_output, sort=True)
    def test_missing_collection_model_failure(self):
        """Deleting the backing CollectionModel fails the
        collection_ids external-reference check.
        """
        collection_model = collection_models.CollectionModel.get_by_id('0')
        collection_model.delete(feconf.SYSTEM_COMMITTER_ID, '', [])
        self.model_instance_0.collection_model_last_updated = (
            collection_model.last_updated)
        self.model_instance_0.put()
        expected_output = [
            (
                u'[u\'failed validation check for collection_ids '
                'field check of CollectionSummaryModel\', '
                '[u"Entity id 0: based on field collection_ids having '
                'value 0, expect model CollectionModel with id 0 but '
                'it doesn\'t exist"]]'),
            u'[u\'fully-validated CollectionSummaryModel\', 2]']
        run_job_and_check_output(self, expected_output, sort=True)
    def test_missing_owner_user_model_failure(self):
        """Deleting an owner's UserSettingsModel fails the
        owner_user_ids check.
        """
        rights_manager.assign_role_for_collection(
            self.owner, '0', self.user_id, rights_manager.ROLE_OWNER)
        user_models.UserSettingsModel.get_by_id(self.user_id).delete()
        expected_output = [
            (
                u'[u\'failed validation check for owner_user_ids '
                'field check of CollectionSummaryModel\', '
                '[u"Entity id 0: based on field owner_user_ids having '
                'value %s, expect model UserSettingsModel with id %s '
                'but it doesn\'t exist"]]') % (self.user_id, self.user_id),
            u'[u\'fully-validated CollectionSummaryModel\', 2]']
        run_job_and_check_output(self, expected_output, sort=True)
    def test_missing_editor_user_model_failure(self):
        """Deleting the editor's UserSettingsModel fails the
        editor_user_ids check.
        """
        user_models.UserSettingsModel.get_by_id(self.editor_id).delete()
        expected_output = [
            (
                u'[u\'failed validation check for editor_user_ids '
                'field check of CollectionSummaryModel\', '
                '[u"Entity id 0: based on field editor_user_ids having '
                'value %s, expect model UserSettingsModel with id %s but '
                'it doesn\'t exist"]]') % (
                    self.editor_id, self.editor_id),
            u'[u\'fully-validated CollectionSummaryModel\', 2]']
        run_job_and_check_output(self, expected_output, sort=True)
    def test_missing_viewer_user_model_failure(self):
        """Deleting the viewer's UserSettingsModel fails the
        viewer_user_ids check (viewer is on collection '2').
        """
        user_models.UserSettingsModel.get_by_id(self.viewer_id).delete()
        expected_output = [
            (
                u'[u\'failed validation check for viewer_user_ids '
                'field check of CollectionSummaryModel\', '
                '[u"Entity id 2: based on field viewer_user_ids having '
                'value %s, expect model UserSettingsModel with id %s but '
                'it doesn\'t exist"]]') % (
                    self.viewer_id, self.viewer_id),
            u'[u\'fully-validated CollectionSummaryModel\', 2]']
        run_job_and_check_output(self, expected_output, sort=True)
    def test_missing_contributor_user_model_failure(self):
        """Deleting the contributor's UserSettingsModel fails the
        contributor_user_ids check.
        """
        user_models.UserSettingsModel.get_by_id(self.contributor_id).delete()
        expected_output = [
            (
                u'[u\'failed validation check for contributor_user_ids '
                'field check of CollectionSummaryModel\', '
                '[u"Entity id 0: based on field contributor_user_ids having '
                'value %s, expect model UserSettingsModel with id %s but '
                'it doesn\'t exist"]]') % (
                    self.contributor_id, self.contributor_id),
            u'[u\'fully-validated CollectionSummaryModel\', 2]']
        run_job_and_check_output(self, expected_output, sort=True)
    def test_model_with_invalid_contributors_summary(self):
        """A contributors_summary whose keys disagree with
        contributor_ids fails the contributors summary check.
        """
        sorted_contributor_ids = sorted(
            self.model_instance_0.contributors_summary.keys())
        self.model_instance_0.contributors_summary = {'invalid': 1}
        self.model_instance_0.put()
        expected_output = [
            (
                u'[u\'failed validation check for contributors summary '
                'check of CollectionSummaryModel\', '
                '[u"Entity id 0: Contributor ids: [u\'%s\', u\'%s\'] do '
                'not match the contributor ids obtained using '
                'contributors summary: [u\'invalid\']"]]'
            ) % (sorted_contributor_ids[0], sorted_contributor_ids[1]),
            u'[u\'fully-validated CollectionSummaryModel\', 2]']
        run_job_and_check_output(self, expected_output, sort=True)
    def test_model_with_invalid_node_count(self):
        """A node_count that disagrees with collection_contents fails
        the node count check.
        """
        self.model_instance_0.node_count = 10
        self.model_instance_0.put()
        expected_output = [
            (
                u'[u\'failed validation check for node count check '
                'of CollectionSummaryModel\', '
                '[u"Entity id 0: Node count: 10 does not match the number '
                'of nodes in collection_contents dict: [{u\'exploration_id\': '
                'u\'0\'}, {u\'exploration_id\': u\'1\'}]"]]'
            ), u'[u\'fully-validated CollectionSummaryModel\', 2]']
        run_job_and_check_output(self, expected_output, sort=True)
    def test_model_with_invalid_ratings(self):
        """Non-empty ratings on a collection summary fail the ratings
        check (expected to be empty).
        """
        self.model_instance_0.ratings = {'1': 0, '2': 1}
        self.model_instance_0.put()
        self.model_instance_1.ratings = {}
        self.model_instance_1.put()
        expected_output = [(
            u'[u\'failed validation check for ratings check of '
            'CollectionSummaryModel\', '
            '[u"Entity id 0: Expected ratings for the entity to be empty '
            'but received {u\'1\': 0, u\'2\': 1}"]]'
        ), u'[u\'fully-validated CollectionSummaryModel\', 2]']
        run_job_and_check_output(self, expected_output, sort=True)
    def test_model_with_invalid_collection_related_property(self):
        """A summary title that disagrees with the collection's title
        fails the title field check.
        """
        self.model_instance_0.title = 'invalid'
        self.model_instance_0.put()
        expected_output = [
            (
                u'[u\'failed validation check for title field check of '
                'CollectionSummaryModel\', '
                '[u\'Entity id %s: title field in entity: invalid does not '
                'match corresponding collection title field: New title\']]'
            ) % self.model_instance_0.id,
            u'[u\'fully-validated CollectionSummaryModel\', 2]']
        run_job_and_check_output(self, expected_output, sort=True)
    def test_model_with_invalid_collection_rights_related_property(self):
        """A summary status that disagrees with the collection rights
        status fails the status field check.
        """
        self.model_instance_0.status = 'public'
        self.model_instance_0.put()
        expected_output = [
            (
                u'[u\'failed validation check for status field check of '
                'CollectionSummaryModel\', '
                '[u\'Entity id %s: status field in entity: public does not '
                'match corresponding collection rights status field: '
                'private\']]'
            ) % self.model_instance_0.id,
            u'[u\'fully-validated CollectionSummaryModel\', 2]']
        run_job_and_check_output(self, expected_output, sort=True)
class ConfigPropertyModelValidatorTests(test_utils.GenericTestBase):
    """Unit tests for ConfigPropertyModelAuditOneOffJob.

    Uses a freshly committed 'config_model' entity plus the
    'oppia_csrf_secret' config property that already exists in the
    test environment.
    """
    def setUp(self):
        """Commits a ConfigPropertyModel and caches it alongside the
        pre-existing oppia_csrf_secret model.
        """
        super(ConfigPropertyModelValidatorTests, self).setUp()
        self.signup(self.ADMIN_EMAIL, self.ADMIN_USERNAME)
        self.admin_id = self.get_user_id_from_email(self.ADMIN_EMAIL)
        self.model_instance = config_models.ConfigPropertyModel(
            id='config_model', value='c')
        self.model_instance.commit(feconf.SYSTEM_COMMITTER_ID, [])
        self.csrf_model_instance = config_models.ConfigPropertyModel.get_by_id(
            'oppia_csrf_secret')
        self.job_class = (
            prod_validation_jobs_one_off.ConfigPropertyModelAuditOneOffJob)
    def test_standard_operation(self):
        """Both config property models validate cleanly."""
        expected_output = [
            u'[u\'fully-validated ConfigPropertyModel\', 2]']
        run_job_and_check_output(self, expected_output)
    def test_model_with_created_on_greater_than_last_updated(self):
        """created_on later than last_updated fails the time field
        relation check.
        """
        self.model_instance.created_on = (
            self.model_instance.last_updated + datetime.timedelta(days=1))
        self.model_instance.commit(self.admin_id, [])
        expected_output = [
            (
                u'[u\'failed validation check for time field relation check '
                'of ConfigPropertyModel\', '
                '[u\'Entity id %s: The created_on field has a value '
                '%s which is greater than the value '
                '%s of last_updated field\']]') % (
                    self.model_instance.id,
                    self.model_instance.created_on,
                    self.model_instance.last_updated
                ),
            u'[u\'fully-validated ConfigPropertyModel\', 1]']
        run_job_and_check_output(self, expected_output, sort=True)
    def test_model_with_last_updated_greater_than_current_time(self):
        """Runs the job under a swapped datetime (MockDatetime13Hours)
        so last_updated appears later than the job's clock.
        """
        self.csrf_model_instance.delete(self.admin_id, '', [{}])
        expected_output = [(
            u'[u\'failed validation check for current time check of '
            'ConfigPropertyModel\', '
            '[u\'Entity id %s: The last_updated field has a '
            'value %s which is greater than the time when the job was run\']]'
        ) % (self.model_instance.id, self.model_instance.last_updated)]
        with self.swap(datetime, 'datetime', MockDatetime13Hours), self.swap(
            db.DateTimeProperty, 'data_type', MockDatetime13Hours):
            update_datastore_types_for_mock_datetime()
            run_job_and_check_output(self, expected_output, sort=True)
    def test_missing_snapshot_metadata_model_failure(self):
        """Deleting the snapshot metadata model fails the
        snapshot_metadata_ids external-reference check.
        """
        config_models.ConfigPropertySnapshotMetadataModel.get_by_id(
            'config_model-1').delete()
        expected_output = [
            (
                u'[u\'failed validation check for snapshot_metadata_ids '
                'field check of ConfigPropertyModel\', '
                '[u"Entity id config_model: based on field '
                'snapshot_metadata_ids having '
                'value config_model-1, expect model '
                'ConfigPropertySnapshotMetadataModel '
                'with id config_model-1 but it doesn\'t exist"]]'),
            u'[u\'fully-validated ConfigPropertyModel\', 1]']
        run_job_and_check_output(self, expected_output, sort=True)
    def test_missing_snapshot_content_model_failure(self):
        """Deleting the snapshot content model fails the
        snapshot_content_ids external-reference check.
        """
        config_models.ConfigPropertySnapshotContentModel.get_by_id(
            'config_model-1').delete()
        expected_output = [
            (
                u'[u\'failed validation check for snapshot_content_ids '
                'field check of ConfigPropertyModel\', '
                '[u"Entity id config_model: based on field '
                'snapshot_content_ids having '
                'value config_model-1, expect model '
                'ConfigPropertySnapshotContentModel '
                'with id config_model-1 but it doesn\'t exist"]]'),
            u'[u\'fully-validated ConfigPropertyModel\', 1]']
        run_job_and_check_output(self, expected_output, sort=True)
class ConfigPropertySnapshotMetadataModelValidatorTests(
        test_utils.GenericTestBase):
    """Unit tests for ConfigPropertySnapshotMetadataModelAuditOneOffJob."""
    def setUp(self):
        """Commits a config property (producing snapshot metadata
        'config_model-1') and caches it plus the pre-existing
        'oppia_csrf_secret-1' snapshot metadata model.
        """
        super(ConfigPropertySnapshotMetadataModelValidatorTests, self).setUp()
        self.signup(self.ADMIN_EMAIL, self.ADMIN_USERNAME)
        self.admin_id = self.get_user_id_from_email(self.ADMIN_EMAIL)
        self.config_model = config_models.ConfigPropertyModel(
            id='config_model', value='c')
        self.config_model.commit(self.admin_id, [])
        user_models.UserSettingsModel(
            id=feconf.SYSTEM_COMMITTER_ID, email='[email protected]').put()
        self.model_instance = (
            config_models.ConfigPropertySnapshotMetadataModel.get_by_id(
                'config_model-1'))
        self.csrf_model_instance = (
            config_models.ConfigPropertySnapshotMetadataModel.get_by_id(
                'oppia_csrf_secret-1'))
        self.job_class = (
            prod_validation_jobs_one_off
            .ConfigPropertySnapshotMetadataModelAuditOneOffJob)
    def test_standard_operation(self):
        """A second commit adds another snapshot; all 3 metadata models
        validate cleanly.
        """
        self.config_model.commit(self.admin_id, [])
        expected_output = [
            u'[u\'fully-validated ConfigPropertySnapshotMetadataModel\', 3]']
        run_job_and_check_output(self, expected_output)
    def test_model_with_created_on_greater_than_last_updated(self):
        """created_on later than last_updated fails the time field
        relation check.
        """
        self.model_instance.created_on = (
            self.model_instance.last_updated + datetime.timedelta(days=1))
        self.model_instance.put()
        expected_output = [
            (
                u'[u\'failed validation check for time field relation check '
                'of ConfigPropertySnapshotMetadataModel\', '
                '[u\'Entity id %s: The created_on field has a value '
                '%s which is greater than the value '
                '%s of last_updated field\']]') % (
                    self.model_instance.id,
                    self.model_instance.created_on,
                    self.model_instance.last_updated),
            u'[u\'fully-validated ConfigPropertySnapshotMetadataModel\', 1]']
        run_job_and_check_output(self, expected_output, sort=True)
    def test_model_with_last_updated_greater_than_current_time(self):
        """Runs the job under a swapped datetime (MockDatetime13Hours)
        so last_updated appears later than the job's clock.
        """
        self.csrf_model_instance.delete()
        expected_output = [(
            u'[u\'failed validation check for current time check of '
            'ConfigPropertySnapshotMetadataModel\', '
            '[u\'Entity id %s: The last_updated field has a '
            'value %s which is greater than the time when the job was run\']]'
        ) % (self.model_instance.id, self.model_instance.last_updated)]
        with self.swap(datetime, 'datetime', MockDatetime13Hours), self.swap(
            db.DateTimeProperty, 'data_type', MockDatetime13Hours):
            update_datastore_types_for_mock_datetime()
            run_job_and_check_output(self, expected_output, sort=True)
    def test_missing_config_property_model_failure(self):
        """Deleting the parent config property fails the
        config_property_ids check for both of its snapshots.
        """
        self.config_model.delete(self.admin_id, '', [])
        expected_output = [
            (
                u'[u\'failed validation check for config_property_ids '
                'field check of ConfigPropertySnapshotMetadataModel\', '
                '[u"Entity id config_model-1: based on field '
                'config_property_ids having value config_model, '
                'expect model ConfigPropertyModel with '
                'id config_model but it doesn\'t exist", '
                'u"Entity id config_model-2: based on field '
                'config_property_ids having value config_model, expect model '
                'ConfigPropertyModel with id config_model but it doesn\'t '
                'exist"]]'
            ),
            u'[u\'fully-validated ConfigPropertySnapshotMetadataModel\', 1]']
        run_job_and_check_output(self, expected_output, literal_eval=True)
    def test_missing_committer_model_failure(self):
        """Deleting the committer's UserSettingsModel fails the
        committer_ids check.
        """
        user_models.UserSettingsModel.get_by_id(self.admin_id).delete()
        expected_output = [
            (
                u'[u\'failed validation check for committer_ids field '
                'check of ConfigPropertySnapshotMetadataModel\', '
                '[u"Entity id config_model-1: based on field committer_ids '
                'having value %s, expect model UserSettingsModel with id %s '
                'but it doesn\'t exist"]]'
            ) % (self.admin_id, self.admin_id),
            u'[u\'fully-validated ConfigPropertySnapshotMetadataModel\', 1]']
        run_job_and_check_output(self, expected_output, sort=True)
    def test_invalid_config_property_model_version_in_model_id(self):
        """A snapshot id whose version exceeds the parent model's
        version fails the version check.
        """
        model_with_invalid_version_in_id = (
            config_models.ConfigPropertySnapshotMetadataModel(
                id='config_model-3', committer_id=self.admin_id,
                commit_type='edit',
                commit_message='msg', commit_cmds=[{}]))
        model_with_invalid_version_in_id.put()
        expected_output = [
            (
                u'[u\'failed validation check for config property model '
                'version check of ConfigPropertySnapshotMetadataModel\', '
                '[u\'Entity id config_model-3: ConfigProperty model '
                'corresponding to id config_model has a version 1 '
                'which is less than the version 3 in '
                'snapshot metadata model id\']]'
            ),
            u'[u\'fully-validated ConfigPropertySnapshotMetadataModel\', 2]']
        run_job_and_check_output(self, expected_output, sort=True)
    # NOTE(review): 'schmea' below is a typo for 'schema'; rename in a
    # follow-up (the 'test_' prefix keeps discovery working either way).
    def test_model_with_invalid_commit_cmd_schmea(self):
        """A commit cmd with an extra attribute and a missing required
        attribute fails commit-cmd domain validation.
        """
        self.model_instance.commit_cmds = [{
            'cmd': 'change_property_value',
            'invalid_attribute': 'invalid'
        }]
        self.model_instance.put()
        expected_output = [
            (
                u'[u\'failed validation check for commit cmd '
                'change_property_value check of '
                'ConfigPropertySnapshotMetadataModel\', '
                '[u"Entity id config_model-1: Commit command domain '
                'validation for command: {u\'cmd\': '
                'u\'change_property_value\', '
                'u\'invalid_attribute\': u\'invalid\'} failed with error: '
                'The following required attributes are missing: '
                'new_value, The following extra attributes are present: '
                'invalid_attribute"]]'
            ), u'[u\'fully-validated ConfigPropertySnapshotMetadataModel\', 1]']
        run_job_and_check_output(self, expected_output, sort=True)
class ConfigPropertySnapshotContentModelValidatorTests(
        test_utils.GenericTestBase):
    """Unit tests for ConfigPropertySnapshotContentModelAuditOneOffJob."""
    def setUp(self):
        """Commits a config property (producing snapshot content
        'config_model-1') and caches it plus the pre-existing
        'oppia_csrf_secret-1' snapshot content model.
        """
        super(ConfigPropertySnapshotContentModelValidatorTests, self).setUp()
        self.signup(self.ADMIN_EMAIL, self.ADMIN_USERNAME)
        self.admin_id = self.get_user_id_from_email(self.ADMIN_EMAIL)
        self.config_model = config_models.ConfigPropertyModel(
            id='config_model', value='c')
        self.config_model.commit(self.admin_id, [])
        user_models.UserSettingsModel(
            id=feconf.SYSTEM_COMMITTER_ID, email='[email protected]').put()
        self.model_instance = (
            config_models.ConfigPropertySnapshotContentModel.get_by_id(
                'config_model-1'))
        self.csrf_model_instance = (
            config_models.ConfigPropertySnapshotContentModel.get_by_id(
                'oppia_csrf_secret-1'))
        self.job_class = (
            prod_validation_jobs_one_off
            .ConfigPropertySnapshotContentModelAuditOneOffJob)
    def test_standard_operation(self):
        """A second commit adds another snapshot; all 3 content models
        validate cleanly.
        """
        self.config_model.commit(self.admin_id, [])
        expected_output = [
            u'[u\'fully-validated ConfigPropertySnapshotContentModel\', 3]']
        run_job_and_check_output(self, expected_output)
    def test_model_with_created_on_greater_than_last_updated(self):
        """created_on later than last_updated fails the time field
        relation check.
        """
        self.model_instance.created_on = (
            self.model_instance.last_updated + datetime.timedelta(days=1))
        self.model_instance.put()
        expected_output = [
            (
                u'[u\'failed validation check for time field relation check '
                'of ConfigPropertySnapshotContentModel\', '
                '[u\'Entity id %s: The created_on field has a value '
                '%s which is greater than the value '
                '%s of last_updated field\']]') % (
                    self.model_instance.id,
                    self.model_instance.created_on,
                    self.model_instance.last_updated
                ),
            u'[u\'fully-validated ConfigPropertySnapshotContentModel\', 1]']
        run_job_and_check_output(self, expected_output, sort=True)
    def test_model_with_last_updated_greater_than_current_time(self):
        """Runs the job under a swapped datetime (MockDatetime13Hours)
        so last_updated appears later than the job's clock.
        """
        self.csrf_model_instance.delete()
        expected_output = [(
            u'[u\'failed validation check for current time check of '
            'ConfigPropertySnapshotContentModel\', '
            '[u\'Entity id %s: The last_updated field has a '
            'value %s which is greater than the time when the job was run\']]'
        ) % (self.model_instance.id, self.model_instance.last_updated)]
        with self.swap(datetime, 'datetime', MockDatetime13Hours), self.swap(
            db.DateTimeProperty, 'data_type', MockDatetime13Hours):
            update_datastore_types_for_mock_datetime()
            run_job_and_check_output(self, expected_output, sort=True)
    def test_missing_config_property_model_failure(self):
        """Deleting the parent config property fails the
        config_property_ids check for both of its snapshots.
        """
        self.config_model.delete(self.admin_id, '', [])
        expected_output = [
            (
                u'[u\'failed validation check for config_property_ids '
                'field check of ConfigPropertySnapshotContentModel\', '
                '[u"Entity id config_model-1: based on field '
                'config_property_ids having value config_model, '
                'expect model ConfigPropertyModel with '
                'id config_model but it doesn\'t exist", '
                'u"Entity id config_model-2: based on field '
                'config_property_ids having value config_model, expect model '
                'ConfigPropertyModel with id config_model but it '
                'doesn\'t exist"]]'
            ),
            u'[u\'fully-validated ConfigPropertySnapshotContentModel\', 1]']
        run_job_and_check_output(self, expected_output, literal_eval=True)
    def test_invalid_config_property_model_version_in_model_id(self):
        """A snapshot id whose version exceeds the parent model's
        version fails the version check.
        """
        model_with_invalid_version_in_id = (
            config_models.ConfigPropertySnapshotContentModel(
                id='config_model-3'))
        model_with_invalid_version_in_id.content = {}
        model_with_invalid_version_in_id.put()
        expected_output = [
            (
                u'[u\'failed validation check for config property model '
                'version check of ConfigPropertySnapshotContentModel\', '
                '[u\'Entity id config_model-3: ConfigProperty model '
                'corresponding to id config_model has a version 1 '
                'which is less than the version 3 in snapshot '
                'content model id\']]'
            ),
            u'[u\'fully-validated ConfigPropertySnapshotContentModel\', 2]']
        run_job_and_check_output(self, expected_output, sort=True)
class SentEmailModelValidatorTests(test_utils.GenericTestBase):
    """Unit tests for SentEmailModelAuditOneOffJob."""
    def setUp(self):
        """Creates sender/recipient user models and one SentEmailModel,
        with _generate_hash stubbed so the model can be fetched back by
        the known hash 'Email Hash'.
        """
        super(SentEmailModelValidatorTests, self).setUp()
        def mock_generate_hash(
                unused_cls, unused_recipient_id, unused_email_subject,
                unused_email_body):
            # Deterministic hash so get_by_hash below finds the model.
            return 'Email Hash'
        self.sender_email = '[email protected]'
        self.sender_id = 'sender'
        self.sender_model = user_models.UserSettingsModel(
            id=self.sender_id, email=self.sender_email)
        self.sender_model.put()
        self.recipient_email = '[email protected]'
        self.recipient_id = 'recipient'
        self.recipient_model = user_models.UserSettingsModel(
            id=self.recipient_id, email=self.recipient_email)
        self.recipient_model.put()
        with self.swap(
            email_models.SentEmailModel, '_generate_hash',
            types.MethodType(mock_generate_hash, email_models.SentEmailModel)):
            email_models.SentEmailModel.create(
                self.recipient_id, self.recipient_email, self.sender_id,
                self.sender_email, feconf.EMAIL_INTENT_SIGNUP,
                'Email Subject', 'Email Body', datetime.datetime.utcnow())
        self.model_instance = email_models.SentEmailModel.get_by_hash(
            'Email Hash')[0]
        self.job_class = (
            prod_validation_jobs_one_off.SentEmailModelAuditOneOffJob)
    def test_standard_model(self):
        """The unmodified model validates cleanly."""
        expected_output = [u'[u\'fully-validated SentEmailModel\', 1]']
        run_job_and_check_output(self, expected_output)
    def test_model_with_created_on_greater_than_last_updated(self):
        """created_on later than last_updated fails the time field
        relation check.
        """
        self.model_instance.created_on = (
            self.model_instance.last_updated + datetime.timedelta(days=1))
        self.model_instance.put()
        expected_output = [(
            u'[u\'failed validation check for time field relation check '
            'of SentEmailModel\', '
            '[u\'Entity id %s: The created_on field has a value '
            '%s which is greater than the value '
            '%s of last_updated field\']]') % (
                self.model_instance.id, self.model_instance.created_on,
                self.model_instance.last_updated
            )]
        run_job_and_check_output(self, expected_output)
    def test_model_with_last_updated_greater_than_current_time(self):
        """Runs the job under a swapped datetime (MockDatetime13Hours)
        so last_updated appears later than the job's clock;
        sent_datetime is backdated so only the current-time check fails.
        """
        self.model_instance.sent_datetime = (
            datetime.datetime.utcnow() - datetime.timedelta(hours=20))
        self.model_instance.put()
        expected_output = [(
            u'[u\'failed validation check for current time check of '
            'SentEmailModel\', '
            '[u\'Entity id %s: The last_updated field has a '
            'value %s which is greater than the time when the job was run\']]'
        ) % (self.model_instance.id, self.model_instance.last_updated)]
        with self.swap(datetime, 'datetime', MockDatetime13Hours), self.swap(
            db.DateTimeProperty, 'data_type', MockDatetime13Hours):
            update_datastore_types_for_mock_datetime()
            run_job_and_check_output(self, expected_output)
    def test_model_with_non_existent_sender_id(self):
        """Deleting the sender's UserSettingsModel fails the sender_id
        external-reference check.
        """
        self.sender_model.delete()
        expected_output = [(
            u'[u\'failed validation check for sender_id field check of '
            'SentEmailModel\', '
            '[u"Entity id %s: based on field sender_id having value '
            '%s, expect model UserSettingsModel with '
            'id %s but it doesn\'t exist"]]') % (
                self.model_instance.id, self.sender_id, self.sender_id)]
        run_job_and_check_output(self, expected_output)
    def test_model_with_non_existent_recipient_id(self):
        """Deleting the recipient's UserSettingsModel fails the
        recipient_id external-reference check.
        """
        self.recipient_model.delete()
        expected_output = [(
            u'[u\'failed validation check for recipient_id field check of '
            'SentEmailModel\', '
            '[u"Entity id %s: based on field recipient_id having value '
            '%s, expect model UserSettingsModel with '
            'id %s but it doesn\'t exist"]]') % (
                self.model_instance.id, self.recipient_id, self.recipient_id)]
        run_job_and_check_output(self, expected_output)
    def test_model_with_invalid_sender_email(self):
        """Changing the sender's stored email so it disagrees with the
        model's sender_email fails the sender email check.
        """
        self.sender_model.email = '[email protected]'
        self.sender_model.put()
        expected_output = [(
            u'[u\'failed validation check for sender email check of '
            'SentEmailModel\', '
            '[u\'Entity id %s: Sender email %s in entity does not match with '
            'email %s of user obtained through sender id %s\']]') % (
                self.model_instance.id, self.model_instance.sender_email,
                self.sender_model.email, self.model_instance.sender_id)]
        run_job_and_check_output(self, expected_output)
    def test_model_with_invalid_recipient_email(self):
        """Changing the recipient's stored email so it disagrees with
        the model's recipient_email fails the recipient email check.
        """
        self.recipient_model.email = '[email protected]'
        self.recipient_model.put()
        expected_output = [(
            u'[u\'failed validation check for recipient email check of '
            'SentEmailModel\', '
            '[u\'Entity id %s: Recipient email %s in entity does not match '
            'with email %s of user obtained through recipient id %s\']]') % (
                self.model_instance.id, self.model_instance.recipient_email,
                self.recipient_model.email, self.model_instance.recipient_id)]
        run_job_and_check_output(self, expected_output)
    def test_model_with_sent_datetime_greater_than_current_time(self):
        """A sent_datetime in the future fails the sent datetime check."""
        self.model_instance.sent_datetime = (
            datetime.datetime.utcnow() + datetime.timedelta(days=1))
        self.model_instance.put()
        expected_output = [(
            u'[u\'failed validation check for sent datetime check of '
            'SentEmailModel\', '
            '[u\'Entity id %s: The sent_datetime field has a value %s '
            'which is greater than the time when the job was run\']]') % (
                self.model_instance.id, self.model_instance.sent_datetime)]
        run_job_and_check_output(self, expected_output)
    def test_model_with_invalid_id(self):
        """An id that does not match the expected regex pattern fails
        the model id check.
        """
        model_instance_with_invalid_id = email_models.SentEmailModel(
            id='invalid', recipient_id=self.recipient_id,
            recipient_email=self.recipient_email, sender_id=self.sender_id,
            sender_email=self.sender_email, intent=feconf.EMAIL_INTENT_SIGNUP,
            subject='Email Subject', html_body='Email Body',
            sent_datetime=datetime.datetime.utcnow())
        model_instance_with_invalid_id.put()
        expected_output = [(
            u'[u\'fully-validated SentEmailModel\', 1]'
        ), (
            u'[u\'failed validation check for model id check of '
            'SentEmailModel\', '
            '[u\'Entity id %s: Entity id does not match regex pattern\']]'
        ) % 'invalid']
        run_job_and_check_output(self, expected_output, sort=True)
class BulkEmailModelValidatorTests(test_utils.GenericTestBase):
    """Unit tests for BulkEmailModelAuditOneOffJob."""
    def setUp(self):
        """Creates a sender, two recipients and one BulkEmailModel the
        tests mutate.
        """
        super(BulkEmailModelValidatorTests, self).setUp()
        self.sender_email = '[email protected]'
        self.sender_id = 'sender'
        self.sender_model = user_models.UserSettingsModel(
            id=self.sender_id, email=self.sender_email)
        self.sender_model.put()
        self.recipient_ids = ['recipient1', 'recipient2']
        self.recipient_model_1 = user_models.UserSettingsModel(
            id=self.recipient_ids[0], email='[email protected]')
        self.recipient_model_1.put()
        self.recipient_model_2 = user_models.UserSettingsModel(
            id=self.recipient_ids[1], email='[email protected]')
        self.recipient_model_2.put()
        self.model_id = 'bulkemailid1'
        email_models.BulkEmailModel.create(
            self.model_id, self.recipient_ids, self.sender_id,
            self.sender_email, feconf.BULK_EMAIL_INTENT_MARKETING,
            'Email Subject', 'Email Body', datetime.datetime.utcnow())
        self.model_instance = email_models.BulkEmailModel.get_by_id(
            self.model_id)
        self.job_class = (
            prod_validation_jobs_one_off.BulkEmailModelAuditOneOffJob)
    def test_standard_model(self):
        """The unmodified model validates cleanly."""
        expected_output = [u'[u\'fully-validated BulkEmailModel\', 1]']
        run_job_and_check_output(self, expected_output)
    def test_model_with_created_on_greater_than_last_updated(self):
        """created_on later than last_updated fails the time field
        relation check.
        """
        self.model_instance.created_on = (
            self.model_instance.last_updated + datetime.timedelta(days=1))
        self.model_instance.put()
        expected_output = [(
            u'[u\'failed validation check for time field relation check '
            'of BulkEmailModel\', '
            '[u\'Entity id %s: The created_on field has a value '
            '%s which is greater than the value '
            '%s of last_updated field\']]') % (
                self.model_instance.id, self.model_instance.created_on,
                self.model_instance.last_updated
            )]
        run_job_and_check_output(self, expected_output)
    def test_model_with_last_updated_greater_than_current_time(self):
        """Runs the job under a swapped datetime (MockDatetime13Hours)
        so last_updated appears later than the job's clock;
        sent_datetime is backdated so only the current-time check fails.
        """
        self.model_instance.sent_datetime = (
            datetime.datetime.utcnow() - datetime.timedelta(hours=20))
        self.model_instance.put()
        expected_output = [(
            u'[u\'failed validation check for current time check of '
            'BulkEmailModel\', '
            '[u\'Entity id %s: The last_updated field has a '
            'value %s which is greater than the time when the job was run\']]'
        ) % (self.model_instance.id, self.model_instance.last_updated)]
        with self.swap(datetime, 'datetime', MockDatetime13Hours), self.swap(
            db.DateTimeProperty, 'data_type', MockDatetime13Hours):
            update_datastore_types_for_mock_datetime()
            run_job_and_check_output(self, expected_output)
    def test_model_with_non_existent_sender_id(self):
        """Deleting the sender's UserSettingsModel fails the sender_id
        external-reference check.
        """
        self.sender_model.delete()
        expected_output = [(
            u'[u\'failed validation check for sender_id field check of '
            'BulkEmailModel\', '
            '[u"Entity id %s: based on field sender_id having value '
            '%s, expect model UserSettingsModel with '
            'id %s but it doesn\'t exist"]]') % (
                self.model_instance.id, self.sender_id, self.sender_id)]
        run_job_and_check_output(self, expected_output)
    def test_model_with_non_existent_recipient_id(self):
        """Deleting one recipient's UserSettingsModel fails the
        recipient_id external-reference check.
        """
        self.recipient_model_1.delete()
        expected_output = [(
            u'[u\'failed validation check for recipient_id field check of '
            'BulkEmailModel\', '
            '[u"Entity id %s: based on field recipient_id having value '
            '%s, expect model UserSettingsModel with '
            'id %s but it doesn\'t exist"]]') % (
                self.model_instance.id, self.recipient_ids[0],
                self.recipient_ids[0])]
        run_job_and_check_output(self, expected_output)
    def test_model_with_invalid_sender_email(self):
        """Changing the sender's stored email so it disagrees with the
        model's sender_email fails the sender email check.
        """
        self.sender_model.email = '[email protected]'
        self.sender_model.put()
        expected_output = [(
            u'[u\'failed validation check for sender email check of '
            'BulkEmailModel\', '
            '[u\'Entity id %s: Sender email %s in entity does not match with '
            'email %s of user obtained through sender id %s\']]') % (
                self.model_instance.id, self.model_instance.sender_email,
                self.sender_model.email, self.model_instance.sender_id)]
        run_job_and_check_output(self, expected_output)
    def test_model_with_sent_datetime_greater_than_current_time(self):
        """A sent_datetime in the future fails the sent datetime check."""
        self.model_instance.sent_datetime = (
            datetime.datetime.utcnow() + datetime.timedelta(days=1))
        self.model_instance.put()
        expected_output = [(
            u'[u\'failed validation check for sent datetime check of '
            'BulkEmailModel\', '
            '[u\'Entity id %s: The sent_datetime field has a value %s '
            'which is greater than the time when the job was run\']]') % (
                self.model_instance.id, self.model_instance.sent_datetime)]
        run_job_and_check_output(self, expected_output)
    def test_model_with_invalid_id(self):
        """An id containing ':' does not match the expected regex
        pattern and fails the model id check.
        """
        model_instance_with_invalid_id = email_models.BulkEmailModel(
            id='invalid:id', recipient_ids=self.recipient_ids,
            sender_id=self.sender_id, sender_email=self.sender_email,
            intent=feconf.BULK_EMAIL_INTENT_MARKETING,
            subject='Email Subject', html_body='Email Body',
            sent_datetime=datetime.datetime.utcnow())
        model_instance_with_invalid_id.put()
        expected_output = [(
            u'[u\'fully-validated BulkEmailModel\', 1]'
        ), (
            u'[u\'failed validation check for model id check of '
            'BulkEmailModel\', '
            '[u\'Entity id %s: Entity id does not match regex pattern\']]'
        ) % model_instance_with_invalid_id.id]
        run_job_and_check_output(self, expected_output, sort=True)
class GeneralFeedbackEmailReplyToIdModelValidatorTests(
        test_utils.GenericTestBase):
    """Tests for the GeneralFeedbackEmailReplyToIdModel audit one-off job.

    Each test mutates (or deletes a dependency of) the model created in
    setUp and asserts the exact stringified output the audit job emits.
    """
    def setUp(self):
        # Create one feedback thread and one reply-to-id model keyed on
        # (user_id, thread_id); every test starts from this valid state.
        super(GeneralFeedbackEmailReplyToIdModelValidatorTests, self).setUp()
        self.thread_id = feedback_services.create_thread(
            'exploration', 'expid', None, 'a subject', 'some text')
        self.signup(USER_EMAIL, USER_NAME)
        self.user_id = self.get_user_id_from_email(USER_EMAIL)
        self.model_instance = (
            email_models.GeneralFeedbackEmailReplyToIdModel.create(
                self.user_id, self.thread_id))
        self.model_instance.put()
        self.job_class = (
            prod_validation_jobs_one_off
            .GeneralFeedbackEmailReplyToIdModelAuditOneOffJob)
    def test_standard_model(self):
        """An untouched model validates fully."""
        expected_output = [(
            u'[u\'fully-validated GeneralFeedbackEmailReplyToIdModel\', 1]')]
        run_job_and_check_output(self, expected_output)
    def test_model_with_created_on_greater_than_last_updated(self):
        """created_on later than last_updated fails the time-relation check."""
        self.model_instance.created_on = (
            self.model_instance.last_updated + datetime.timedelta(days=1))
        self.model_instance.put()
        expected_output = [(
            u'[u\'failed validation check for time field relation check '
            'of GeneralFeedbackEmailReplyToIdModel\', '
            '[u\'Entity id %s: The created_on field has a value '
            '%s which is greater than the value '
            '%s of last_updated field\']]') % (
                self.model_instance.id, self.model_instance.created_on,
                self.model_instance.last_updated
            )]
        run_job_and_check_output(self, expected_output)
    def test_model_with_last_updated_greater_than_current_time(self):
        """last_updated in the future fails the current-time check."""
        expected_output = [(
            u'[u\'failed validation check for current time check of '
            'GeneralFeedbackEmailReplyToIdModel\', '
            '[u\'Entity id %s: The last_updated field has a '
            'value %s which is greater than the time when the job was run\']]'
        ) % (self.model_instance.id, self.model_instance.last_updated)]
        # Rewind the job's notion of "now" by 13 hours (MockDatetime13Hours)
        # so the model's real last_updated appears to be in the future.
        with self.swap(datetime, 'datetime', MockDatetime13Hours), self.swap(
            db.DateTimeProperty, 'data_type', MockDatetime13Hours):
            update_datastore_types_for_mock_datetime()
            run_job_and_check_output(self, expected_output)
    def test_model_with_non_existent_user_id(self):
        """Deleting the referenced UserSettingsModel fails the user_id check."""
        user_models.UserSettingsModel.get_by_id(self.user_id).delete()
        expected_output = [(
            u'[u\'failed validation check for item.id.user_id field check of '
            'GeneralFeedbackEmailReplyToIdModel\', '
            '[u"Entity id %s: based on field item.id.user_id having value '
            '%s, expect model UserSettingsModel with '
            'id %s but it doesn\'t exist"]]') % (
                self.model_instance.id, self.user_id, self.user_id)]
        run_job_and_check_output(self, expected_output)
    def test_model_with_non_existent_thread_id(self):
        """Deleting the referenced thread fails the thread_id check."""
        feedback_models.GeneralFeedbackThreadModel.get_by_id(
            self.thread_id).delete()
        expected_output = [(
            u'[u\'failed validation check for item.id.thread_id field check of '
            'GeneralFeedbackEmailReplyToIdModel\', '
            '[u"Entity id %s: based on field item.id.thread_id having value '
            '%s, expect model GeneralFeedbackThreadModel with '
            'id %s but it doesn\'t exist"]]') % (
                self.model_instance.id, self.thread_id, self.thread_id)]
        run_job_and_check_output(self, expected_output)
    def test_model_with_invalid_reply_to_id(self):
        """A reply_to_id longer than REPLY_TO_ID_LENGTH fails the length
        check.
        """
        # Grow reply_to_id until it exceeds the allowed maximum length.
        while len(
                self.model_instance.reply_to_id) <= (
                    email_models.REPLY_TO_ID_LENGTH):
            self.model_instance.reply_to_id = (
                self.model_instance.reply_to_id + 'invalid')
        self.model_instance.put()
        expected_output = [(
            u'[u\'failed validation check for reply_to_id length check of '
            'GeneralFeedbackEmailReplyToIdModel\', '
            '[u\'Entity id %s: reply_to_id %s should have length less than or '
            'equal to %s but instead has length %s\']]'
        ) % (
            self.model_instance.id, self.model_instance.reply_to_id,
            email_models.REPLY_TO_ID_LENGTH,
            len(self.model_instance.reply_to_id))]
        run_job_and_check_output(self, expected_output)
class ExplorationModelValidatorTests(test_utils.GenericTestBase):
    """Tests for the ExplorationModel audit one-off job.

    setUp creates three explorations ('0', '1', '2'); tests then break one
    model (or delete one of its related models) and assert the job's exact
    stringified output.
    """
    def setUp(self):
        super(ExplorationModelValidatorTests, self).setUp()
        self.signup(self.OWNER_EMAIL, self.OWNER_USERNAME)
        self.owner_id = self.get_user_id_from_email(self.OWNER_EMAIL)
        # 'ar' for exploration '0' is deliberate: the domain-validation test
        # below restricts ALL_LANGUAGE_CODES to English only, so only '0'
        # should fail there.
        language_codes = ['ar', 'en', 'en']
        explorations = [exp_domain.Exploration.create_default_exploration(
            '%s' % i,
            title='title %d' % i,
            category='category%d' % i,
            language_code=language_codes[i]
        ) for i in xrange(3)]
        for exp in explorations:
            exp_services.save_new_exploration(self.owner_id, exp)
        self.model_instance_0 = exp_models.ExplorationModel.get_by_id('0')
        self.model_instance_1 = exp_models.ExplorationModel.get_by_id('1')
        self.model_instance_2 = exp_models.ExplorationModel.get_by_id('2')
        self.job_class = (
            prod_validation_jobs_one_off.ExplorationModelAuditOneOffJob)
    def test_standard_operation(self):
        """Updating an exploration normally keeps all three fully valid."""
        exp_services.update_exploration(
            self.owner_id, '0', [exp_domain.ExplorationChange({
                'cmd': 'edit_exploration_property',
                'property_name': 'title',
                'new_value': 'New title'
            })], 'Changes.')
        expected_output = [
            u'[u\'fully-validated ExplorationModel\', 3]']
        run_job_and_check_output(self, expected_output)
    def test_model_with_created_on_greater_than_last_updated(self):
        """created_on later than last_updated fails the time-relation check."""
        self.model_instance_0.created_on = (
            self.model_instance_0.last_updated + datetime.timedelta(days=1))
        self.model_instance_0.commit(
            feconf.SYSTEM_COMMITTER_ID, 'created_on test', [])
        expected_output = [(
            u'[u\'failed validation check for time field relation check '
            'of ExplorationModel\', '
            '[u\'Entity id %s: The created_on field has a value '
            '%s which is greater than the value '
            '%s of last_updated field\']]') % (
                self.model_instance_0.id,
                self.model_instance_0.created_on,
                self.model_instance_0.last_updated
            ), u'[u\'fully-validated ExplorationModel\', 2]']
        run_job_and_check_output(self, expected_output, sort=True)
    def test_model_with_last_updated_greater_than_current_time(self):
        """last_updated in the future fails the current-time check."""
        # Delete '1' and '2' so only '0' is audited under the mocked clock.
        self.model_instance_1.delete(feconf.SYSTEM_COMMITTER_ID, 'delete')
        self.model_instance_2.delete(feconf.SYSTEM_COMMITTER_ID, 'delete')
        expected_output = [(
            u'[u\'failed validation check for current time check of '
            'ExplorationModel\', '
            '[u\'Entity id %s: The last_updated field has a '
            'value %s which is greater than the time when the job was run\']]'
        ) % (self.model_instance_0.id, self.model_instance_0.last_updated)]
        # MockDatetime13Hours shifts the job's "now" into the past so the
        # real last_updated value appears to be in the future.
        with self.swap(datetime, 'datetime', MockDatetime13Hours), self.swap(
            db.DateTimeProperty, 'data_type', MockDatetime13Hours):
            update_datastore_types_for_mock_datetime()
            run_job_and_check_output(self, expected_output, sort=True)
    def test_model_with_invalid_exploration_schema(self):
        """Domain validation fails for the 'ar' exploration when only
        English is an allowed language code.
        """
        expected_output = [
            (
                u'[u\'failed validation check for domain object check of '
                'ExplorationModel\', '
                '[u\'Entity id %s: Entity fails domain validation with the '
                'error Invalid language_code: %s\']]'
            ) % (self.model_instance_0.id, self.model_instance_0.language_code),
            u'[u\'fully-validated ExplorationModel\', 2]']
        with self.swap(
            constants, 'ALL_LANGUAGE_CODES', [{
                'code': 'en', 'description': 'English'}]):
            run_job_and_check_output(self, expected_output, sort=True)
    def test_missing_exploration_commit_log_entry_model_failure(self):
        """Deleting a commit-log entry fails the commit-log-ids field check."""
        exp_services.update_exploration(
            self.owner_id, '0', [exp_domain.ExplorationChange({
                'cmd': 'edit_exploration_property',
                'property_name': 'title',
                'new_value': 'New title'
            })], 'Changes.')
        exp_models.ExplorationCommitLogEntryModel.get_by_id(
            'exploration-0-1').delete()
        expected_output = [
            (
                u'[u\'failed validation check for '
                'exploration_commit_log_entry_ids field check of '
                'ExplorationModel\', '
                '[u"Entity id 0: based on field '
                'exploration_commit_log_entry_ids having value '
                'exploration-0-1, expect model ExplorationCommitLogEntryModel '
                'with id exploration-0-1 but it doesn\'t exist"]]'),
            u'[u\'fully-validated ExplorationModel\', 2]']
        run_job_and_check_output(self, expected_output, sort=True)
    def test_missing_summary_model_failure(self):
        """Deleting the ExpSummaryModel fails the exp_summary_ids check."""
        exp_models.ExpSummaryModel.get_by_id('0').delete()
        expected_output = [
            (
                u'[u\'failed validation check for exp_summary_ids '
                'field check of ExplorationModel\', '
                '[u"Entity id 0: based on field exp_summary_ids having '
                'value 0, expect model ExpSummaryModel with id 0 '
                'but it doesn\'t exist"]]'),
            u'[u\'fully-validated ExplorationModel\', 2]']
        run_job_and_check_output(self, expected_output, sort=True)
    def test_missing_exploration_rights_model_failure(self):
        """Deleting the rights model fails the exploration_rights_ids check."""
        exp_models.ExplorationRightsModel.get_by_id(
            '0').delete(feconf.SYSTEM_COMMITTER_ID, '', [])
        expected_output = [
            (
                u'[u\'failed validation check for exploration_rights_ids '
                'field check of ExplorationModel\', '
                '[u"Entity id 0: based on field exploration_rights_ids '
                'having value 0, expect model ExplorationRightsModel '
                'with id 0 but it doesn\'t exist"]]'),
            u'[u\'fully-validated ExplorationModel\', 2]']
        run_job_and_check_output(self, expected_output, sort=True)
    def test_missing_snapshot_metadata_model_failure(self):
        """Deleting snapshot metadata fails the snapshot_metadata_ids check."""
        exp_models.ExplorationSnapshotMetadataModel.get_by_id(
            '0-1').delete()
        expected_output = [
            (
                u'[u\'failed validation check for snapshot_metadata_ids '
                'field check of ExplorationModel\', '
                '[u"Entity id 0: based on field snapshot_metadata_ids having '
                'value 0-1, expect model ExplorationSnapshotMetadataModel '
                'with id 0-1 but it doesn\'t exist"]]'),
            u'[u\'fully-validated ExplorationModel\', 2]']
        run_job_and_check_output(self, expected_output, sort=True)
    def test_missing_snapshot_content_model_failure(self):
        """Deleting snapshot content fails the snapshot_content_ids check."""
        exp_models.ExplorationSnapshotContentModel.get_by_id(
            '0-1').delete()
        expected_output = [
            (
                u'[u\'failed validation check for snapshot_content_ids '
                'field check of ExplorationModel\', '
                '[u"Entity id 0: based on field snapshot_content_ids having '
                'value 0-1, expect model ExplorationSnapshotContentModel '
                'with id 0-1 but it doesn\'t exist"]]'),
            u'[u\'fully-validated ExplorationModel\', 2]']
        run_job_and_check_output(self, expected_output, sort=True)
class ExplorationSnapshotMetadataModelValidatorTests(
        test_utils.GenericTestBase):
    """Tests for the ExplorationSnapshotMetadataModel audit one-off job.

    setUp creates three explorations ('0' owned by the plain user, '1' and
    '2' by the owner) so each has a version-1 snapshot metadata model.
    """
    def setUp(self):
        super(ExplorationSnapshotMetadataModelValidatorTests, self).setUp()
        self.signup(self.OWNER_EMAIL, self.OWNER_USERNAME)
        self.signup(USER_EMAIL, USER_NAME)
        self.owner_id = self.get_user_id_from_email(self.OWNER_EMAIL)
        self.user_id = self.get_user_id_from_email(USER_EMAIL)
        explorations = [exp_domain.Exploration.create_default_exploration(
            '%s' % i,
            title='title %d' % i,
            category='category%d' % i,
        ) for i in xrange(3)]
        for exp in explorations:
            # '0' is committed by self.user_id so committer-related tests
            # can delete that user without touching the owner.
            if exp.id != '0':
                exp_services.save_new_exploration(self.owner_id, exp)
            else:
                exp_services.save_new_exploration(self.user_id, exp)
        self.model_instance_0 = (
            exp_models.ExplorationSnapshotMetadataModel.get_by_id(
                '0-1'))
        self.model_instance_1 = (
            exp_models.ExplorationSnapshotMetadataModel.get_by_id(
                '1-1'))
        self.model_instance_2 = (
            exp_models.ExplorationSnapshotMetadataModel.get_by_id(
                '2-1'))
        self.job_class = (
            prod_validation_jobs_one_off
            .ExplorationSnapshotMetadataModelAuditOneOffJob)
    def test_standard_operation(self):
        """An extra commit yields a fourth snapshot; all four validate."""
        exp_services.update_exploration(
            self.owner_id, '0', [exp_domain.ExplorationChange({
                'cmd': 'edit_exploration_property',
                'property_name': 'title',
                'new_value': 'New title'
            })], 'Changes.')
        expected_output = [
            u'[u\'fully-validated ExplorationSnapshotMetadataModel\', 4]']
        run_job_and_check_output(self, expected_output)
    def test_model_with_created_on_greater_than_last_updated(self):
        """created_on later than last_updated fails the time-relation check."""
        self.model_instance_0.created_on = (
            self.model_instance_0.last_updated + datetime.timedelta(days=1))
        self.model_instance_0.put()
        expected_output = [(
            u'[u\'failed validation check for time field relation check '
            'of ExplorationSnapshotMetadataModel\', '
            '[u\'Entity id %s: The created_on field has a value '
            '%s which is greater than the value '
            '%s of last_updated field\']]') % (
                self.model_instance_0.id,
                self.model_instance_0.created_on,
                self.model_instance_0.last_updated
            ), (
                u'[u\'fully-validated '
                'ExplorationSnapshotMetadataModel\', 2]')]
        run_job_and_check_output(self, expected_output, sort=True)
    def test_model_with_last_updated_greater_than_current_time(self):
        """last_updated in the future fails the current-time check."""
        self.model_instance_1.delete()
        self.model_instance_2.delete()
        expected_output = [(
            u'[u\'failed validation check for current time check of '
            'ExplorationSnapshotMetadataModel\', '
            '[u\'Entity id %s: The last_updated field has a '
            'value %s which is greater than the time when the job was run\']]'
        ) % (self.model_instance_0.id, self.model_instance_0.last_updated)]
        # Mock the clock 13 hours into the past so last_updated is "future".
        with self.swap(datetime, 'datetime', MockDatetime13Hours), self.swap(
            db.DateTimeProperty, 'data_type', MockDatetime13Hours):
            update_datastore_types_for_mock_datetime()
            run_job_and_check_output(self, expected_output, sort=True)
    def test_missing_exploration_model_failure(self):
        """Deleting exploration '0' fails the exploration_ids check for both
        of its snapshots ('0-1' and the deletion-commit snapshot '0-2').
        """
        exp_models.ExplorationModel.get_by_id('0').delete(
            self.user_id, '', [])
        expected_output = [
            (
                u'[u\'failed validation check for exploration_ids '
                'field check of ExplorationSnapshotMetadataModel\', '
                '[u"Entity id 0-1: based on field exploration_ids '
                'having value 0, expect model ExplorationModel with '
                'id 0 but it doesn\'t exist", u"Entity id 0-2: based on field '
                'exploration_ids having value 0, expect model '
                'ExplorationModel with id 0 but it doesn\'t exist"]]'
            ), (
                u'[u\'fully-validated '
                'ExplorationSnapshotMetadataModel\', 2]')]
        # literal_eval=True: compare parsed structures, not raw strings,
        # since the two per-snapshot errors may be ordered differently.
        run_job_and_check_output(
            self, expected_output, literal_eval=True)
    def test_missing_committer_model_failure(self):
        """Deleting the committer's settings fails the committer_ids check."""
        user_models.UserSettingsModel.get_by_id(self.user_id).delete()
        expected_output = [
            (
                u'[u\'failed validation check for committer_ids field '
                'check of ExplorationSnapshotMetadataModel\', '
                '[u"Entity id 0-1: based on field committer_ids having '
                'value %s, expect model UserSettingsModel with id %s '
                'but it doesn\'t exist"]]'
            ) % (self.user_id, self.user_id), (
                u'[u\'fully-validated '
                'ExplorationSnapshotMetadataModel\', 2]')]
        run_job_and_check_output(self, expected_output, sort=True)
    def test_invalid_exploration_version_in_model_id(self):
        """A snapshot id whose version exceeds the exploration's version
        fails the version check.
        """
        model_with_invalid_version_in_id = (
            exp_models.ExplorationSnapshotMetadataModel(
                id='0-3', committer_id=self.owner_id, commit_type='edit',
                commit_message='msg', commit_cmds=[{}]))
        model_with_invalid_version_in_id.put()
        expected_output = [
            (
                u'[u\'failed validation check for exploration model '
                'version check of ExplorationSnapshotMetadataModel\', '
                '[u\'Entity id 0-3: Exploration model corresponding to '
                'id 0 has a version 1 which is less than the version 3 in '
                'snapshot metadata model id\']]'
            ), (
                u'[u\'fully-validated ExplorationSnapshotMetadataModel\', '
                '3]')]
        run_job_and_check_output(self, expected_output, sort=True)
    # NOTE(review): "schmea" in the method name is a typo for "schema"; left
    # as-is because renaming would change the reported test id.
    def test_model_with_invalid_commit_cmd_schmea(self):
        """Malformed commit_cmds (missing/extra attributes) fail the
        per-command schema checks.
        """
        self.model_instance_0.commit_cmds = [{
            'cmd': 'add_state'
        }, {
            'cmd': 'delete_state',
            'invalid_attribute': 'invalid'
        }]
        self.model_instance_0.put()
        expected_output = [
            (
                u'[u\'failed validation check for commit '
                'cmd delete_state check of '
                'ExplorationSnapshotMetadataModel\', '
                '[u"Entity id 0-1: Commit command domain validation '
                'for command: {u\'cmd\': u\'delete_state\', '
                'u\'invalid_attribute\': u\'invalid\'} failed with error: '
                'The following required attributes are missing: '
                'state_name, The following extra attributes are present: '
                'invalid_attribute"]]'
            ), (
                u'[u\'failed validation check for commit '
                'cmd add_state check of '
                'ExplorationSnapshotMetadataModel\', '
                '[u"Entity id 0-1: Commit command domain validation '
                'for command: {u\'cmd\': u\'add_state\'} '
                'failed with error: The following required attributes '
                'are missing: state_name"]]'
            ), u'[u\'fully-validated ExplorationSnapshotMetadataModel\', 2]']
        run_job_and_check_output(self, expected_output, sort=True)
class ExplorationSnapshotContentModelValidatorTests(test_utils.GenericTestBase):
    """Tests for the ExplorationSnapshotContentModel audit one-off job.

    setUp creates three explorations, each with a version-1 snapshot
    content model ('0-1', '1-1', '2-1').
    """
    def setUp(self):
        super(ExplorationSnapshotContentModelValidatorTests, self).setUp()
        self.signup(self.OWNER_EMAIL, self.OWNER_USERNAME)
        self.owner_id = self.get_user_id_from_email(self.OWNER_EMAIL)
        explorations = [exp_domain.Exploration.create_default_exploration(
            '%s' % i,
            title='title %d' % i,
            category='category%d' % i,
        ) for i in xrange(3)]
        for exp in explorations:
            exp_services.save_new_exploration(self.owner_id, exp)
        self.model_instance_0 = (
            exp_models.ExplorationSnapshotContentModel.get_by_id(
                '0-1'))
        self.model_instance_1 = (
            exp_models.ExplorationSnapshotContentModel.get_by_id(
                '1-1'))
        self.model_instance_2 = (
            exp_models.ExplorationSnapshotContentModel.get_by_id(
                '2-1'))
        self.job_class = (
            prod_validation_jobs_one_off
            .ExplorationSnapshotContentModelAuditOneOffJob)
    def test_standard_operation(self):
        """An extra commit yields a fourth snapshot; all four validate."""
        exp_services.update_exploration(
            self.owner_id, '0', [exp_domain.ExplorationChange({
                'cmd': 'edit_exploration_property',
                'property_name': 'title',
                'new_value': 'New title'
            })], 'Changes.')
        expected_output = [
            u'[u\'fully-validated ExplorationSnapshotContentModel\', 4]']
        run_job_and_check_output(self, expected_output)
    def test_model_with_created_on_greater_than_last_updated(self):
        """created_on later than last_updated fails the time-relation check."""
        self.model_instance_0.created_on = (
            self.model_instance_0.last_updated + datetime.timedelta(days=1))
        self.model_instance_0.put()
        expected_output = [(
            u'[u\'failed validation check for time field relation check '
            'of ExplorationSnapshotContentModel\', '
            '[u\'Entity id %s: The created_on field has a value '
            '%s which is greater than the value '
            '%s of last_updated field\']]') % (
                self.model_instance_0.id,
                self.model_instance_0.created_on,
                self.model_instance_0.last_updated
            ), (
                u'[u\'fully-validated '
                'ExplorationSnapshotContentModel\', 2]')]
        run_job_and_check_output(self, expected_output, sort=True)
    def test_model_with_last_updated_greater_than_current_time(self):
        """last_updated in the future fails the current-time check."""
        self.model_instance_1.delete()
        self.model_instance_2.delete()
        expected_output = [(
            u'[u\'failed validation check for current time check of '
            'ExplorationSnapshotContentModel\', '
            '[u\'Entity id %s: The last_updated field has a '
            'value %s which is greater than the time when the job was run\']]'
        ) % (self.model_instance_0.id, self.model_instance_0.last_updated)]
        # Mock the clock 13 hours into the past so last_updated is "future".
        with self.swap(datetime, 'datetime', MockDatetime13Hours), self.swap(
            db.DateTimeProperty, 'data_type', MockDatetime13Hours):
            update_datastore_types_for_mock_datetime()
            run_job_and_check_output(self, expected_output, sort=True)
    def test_missing_exploration_model_failure(self):
        """Deleting exploration '0' fails the exploration_ids check for both
        of its snapshots ('0-1' and the deletion-commit snapshot '0-2').
        """
        exp_models.ExplorationModel.get_by_id('0').delete(self.owner_id, '', [])
        expected_output = [
            (
                u'[u\'failed validation check for exploration_ids '
                'field check of ExplorationSnapshotContentModel\', '
                '[u"Entity id 0-1: based on field exploration_ids '
                'having value 0, expect model ExplorationModel with '
                'id 0 but it doesn\'t exist", u"Entity id 0-2: based on field '
                'exploration_ids having value 0, expect model '
                'ExplorationModel with id 0 but it doesn\'t exist"]]'
            ), (
                u'[u\'fully-validated '
                'ExplorationSnapshotContentModel\', 2]')]
        run_job_and_check_output(self, expected_output, sort=True)
    def test_invalid_exploration_version_in_model_id(self):
        """A snapshot id whose version exceeds the exploration's version
        fails the version check.
        """
        model_with_invalid_version_in_id = (
            exp_models.ExplorationSnapshotContentModel(
                id='0-3'))
        model_with_invalid_version_in_id.content = {}
        model_with_invalid_version_in_id.put()
        expected_output = [
            (
                u'[u\'failed validation check for exploration model '
                'version check of ExplorationSnapshotContentModel\', '
                '[u\'Entity id 0-3: Exploration model corresponding to '
                'id 0 has a version 1 which is less than '
                'the version 3 in snapshot content model id\']]'
            ), (
                u'[u\'fully-validated ExplorationSnapshotContentModel\', '
                '3]')]
        run_job_and_check_output(self, expected_output, sort=True)
class ExplorationRightsModelValidatorTests(test_utils.GenericTestBase):
    """Tests for the ExplorationRightsModel audit one-off job.

    setUp creates three explorations and assigns an editor to '0' and a
    viewer to '2' so that every role-related field check is exercised.
    """
    def setUp(self):
        super(ExplorationRightsModelValidatorTests, self).setUp()
        self.signup(self.OWNER_EMAIL, self.OWNER_USERNAME)
        self.signup(USER_EMAIL, USER_NAME)
        self.user_id = self.get_user_id_from_email(USER_EMAIL)
        self.owner_id = self.get_user_id_from_email(self.OWNER_EMAIL)
        self.owner = user_services.UserActionsInfo(self.owner_id)
        editor_email = '[email protected]'
        viewer_email = '[email protected]'
        self.signup(editor_email, 'editor')
        self.signup(viewer_email, 'viewer')
        self.editor_id = self.get_user_id_from_email(editor_email)
        self.viewer_id = self.get_user_id_from_email(viewer_email)
        explorations = [exp_domain.Exploration.create_default_exploration(
            '%s' % i,
            title='title %d' % i,
            category='category%d' % i,
        ) for i in xrange(3)]
        for exp in explorations:
            exp_services.save_new_exploration(self.owner_id, exp)
        rights_manager.assign_role_for_exploration(
            self.owner, '0', self.editor_id, rights_manager.ROLE_EDITOR)
        rights_manager.assign_role_for_exploration(
            self.owner, '2', self.viewer_id, rights_manager.ROLE_VIEWER)
        self.model_instance_0 = exp_models.ExplorationRightsModel.get_by_id('0')
        self.model_instance_1 = exp_models.ExplorationRightsModel.get_by_id('1')
        self.model_instance_2 = exp_models.ExplorationRightsModel.get_by_id('2')
        self.job_class = (
            prod_validation_jobs_one_off.ExplorationRightsModelAuditOneOffJob)
    def test_standard_operation(self):
        """Publishing an exploration keeps all three rights models valid."""
        rights_manager.publish_exploration(self.owner, '0')
        expected_output = [
            u'[u\'fully-validated ExplorationRightsModel\', 3]']
        run_job_and_check_output(self, expected_output)
    def test_model_with_created_on_greater_than_last_updated(self):
        """created_on later than last_updated fails the time-relation check."""
        self.model_instance_0.created_on = (
            self.model_instance_0.last_updated + datetime.timedelta(days=1))
        self.model_instance_0.commit(
            feconf.SYSTEM_COMMITTER_ID, 'created_on test', [])
        expected_output = [(
            u'[u\'failed validation check for time field relation check '
            'of ExplorationRightsModel\', '
            '[u\'Entity id %s: The created_on field has a value '
            '%s which is greater than the value '
            '%s of last_updated field\']]') % (
                self.model_instance_0.id,
                self.model_instance_0.created_on,
                self.model_instance_0.last_updated
            ), u'[u\'fully-validated ExplorationRightsModel\', 2]']
        run_job_and_check_output(self, expected_output, sort=True)
    def test_model_with_last_updated_greater_than_current_time(self):
        """last_updated in the future fails the current-time check."""
        self.model_instance_1.delete(feconf.SYSTEM_COMMITTER_ID, 'delete')
        self.model_instance_2.delete(feconf.SYSTEM_COMMITTER_ID, 'delete')
        expected_output = [(
            u'[u\'failed validation check for current time check of '
            'ExplorationRightsModel\', '
            '[u\'Entity id %s: The last_updated field has a '
            'value %s which is greater than the time when the job was run\']]'
        ) % (self.model_instance_0.id, self.model_instance_0.last_updated)]
        # Mock the clock 13 hours into the past so last_updated is "future".
        with self.swap(datetime, 'datetime', MockDatetime13Hours), self.swap(
            db.DateTimeProperty, 'data_type', MockDatetime13Hours):
            update_datastore_types_for_mock_datetime()
            run_job_and_check_output(self, expected_output, sort=True)
    # NOTE(review): method name reads oddly; presumably intended to be
    # "..._first_published_datetime_greater_than_current_time". Left as-is
    # to keep the reported test id stable.
    def test_model_with_first_published_datetime_than_current_time(self):
        """A first_published_msec in the future fails that check."""
        rights_manager.publish_exploration(self.owner, '0')
        rights_manager.publish_exploration(self.owner, '1')
        # Scale the epoch-msec value far into the future.
        self.model_instance_0.first_published_msec = (
            self.model_instance_0.first_published_msec * 1000000.0)
        self.model_instance_0.commit(feconf.SYSTEM_COMMITTER_ID, '', [])
        expected_output = [
            (
                u'[u\'failed validation check for first published msec check '
                'of ExplorationRightsModel\', '
                '[u\'Entity id 0: The first_published_msec field has a '
                'value %s which is greater than the time when the job was '
                'run\']]'
            ) % (self.model_instance_0.first_published_msec),
            u'[u\'fully-validated ExplorationRightsModel\', 2]']
        run_job_and_check_output(self, expected_output, sort=True)
    def test_missing_exploration_model_failure(self):
        """Deleting the exploration fails the exploration_ids check."""
        exp_models.ExplorationModel.get_by_id('0').delete(
            feconf.SYSTEM_COMMITTER_ID, '', [])
        expected_output = [
            (
                u'[u\'failed validation check for exploration_ids '
                'field check of ExplorationRightsModel\', '
                '[u"Entity id 0: based on field exploration_ids having '
                'value 0, expect model ExplorationModel with id 0 but '
                'it doesn\'t exist"]]'),
            u'[u\'fully-validated ExplorationRightsModel\', 2]']
        run_job_and_check_output(self, expected_output, sort=True)
    def test_missing_cloned_from_exploration_model_failure(self):
        """A dangling cloned_from id fails the cloned_from field check."""
        self.model_instance_0.cloned_from = 'invalid'
        self.model_instance_0.commit(feconf.SYSTEM_COMMITTER_ID, '', [])
        expected_output = [
            (
                u'[u\'failed validation check for '
                'cloned_from_exploration_ids '
                'field check of ExplorationRightsModel\', '
                '[u"Entity id 0: based on field cloned_from_exploration_ids '
                'having value invalid, expect model ExplorationModel with id '
                'invalid but it doesn\'t exist"]]'),
            u'[u\'fully-validated ExplorationRightsModel\', 2]']
        run_job_and_check_output(self, expected_output, sort=True)
    def test_missing_owner_user_model_failure(self):
        """Deleting an owner's settings fails the owner_user_ids check."""
        rights_manager.assign_role_for_exploration(
            self.owner, '0', self.user_id, rights_manager.ROLE_OWNER)
        user_models.UserSettingsModel.get_by_id(self.user_id).delete()
        expected_output = [
            (
                u'[u\'failed validation check for owner_user_ids '
                'field check of ExplorationRightsModel\', '
                '[u"Entity id 0: based on field owner_user_ids having '
                'value %s, expect model UserSettingsModel with id %s '
                'but it doesn\'t exist"]]') % (self.user_id, self.user_id),
            u'[u\'fully-validated ExplorationRightsModel\', 2]']
        run_job_and_check_output(self, expected_output, sort=True)
    def test_missing_editor_user_model_failure(self):
        """Deleting the editor's settings fails the editor_user_ids check."""
        user_models.UserSettingsModel.get_by_id(self.editor_id).delete()
        expected_output = [
            (
                u'[u\'failed validation check for editor_user_ids '
                'field check of ExplorationRightsModel\', '
                '[u"Entity id 0: based on field editor_user_ids having '
                'value %s, expect model UserSettingsModel with id %s but '
                'it doesn\'t exist"]]') % (
                    self.editor_id, self.editor_id),
            u'[u\'fully-validated ExplorationRightsModel\', 2]']
        run_job_and_check_output(self, expected_output, sort=True)
    def test_missing_viewer_user_model_failure(self):
        """Deleting the viewer's settings fails the viewer_user_ids check."""
        user_models.UserSettingsModel.get_by_id(self.viewer_id).delete()
        expected_output = [
            (
                u'[u\'failed validation check for viewer_user_ids '
                'field check of ExplorationRightsModel\', '
                '[u"Entity id 2: based on field viewer_user_ids having '
                'value %s, expect model UserSettingsModel with id %s but '
                'it doesn\'t exist"]]') % (
                    self.viewer_id, self.viewer_id),
            u'[u\'fully-validated ExplorationRightsModel\', 2]']
        run_job_and_check_output(self, expected_output, sort=True)
    def test_missing_snapshot_metadata_model_failure(self):
        """Deleting snapshot metadata fails the snapshot_metadata_ids check."""
        exp_models.ExplorationRightsSnapshotMetadataModel.get_by_id(
            '0-1').delete()
        expected_output = [
            (
                u'[u\'failed validation check for snapshot_metadata_ids '
                'field check of ExplorationRightsModel\', '
                '[u"Entity id 0: based on field snapshot_metadata_ids having '
                'value 0-1, expect model '
                'ExplorationRightsSnapshotMetadataModel '
                'with id 0-1 but it doesn\'t exist"]]'
            ),
            u'[u\'fully-validated ExplorationRightsModel\', 2]']
        run_job_and_check_output(self, expected_output, sort=True)
    def test_missing_snapshot_content_model_failure(self):
        """Deleting snapshot content fails the snapshot_content_ids check."""
        exp_models.ExplorationRightsSnapshotContentModel.get_by_id(
            '0-1').delete()
        expected_output = [
            (
                u'[u\'failed validation check for snapshot_content_ids '
                'field check of ExplorationRightsModel\', '
                '[u"Entity id 0: based on field snapshot_content_ids having '
                'value 0-1, expect model ExplorationRightsSnapshotContentModel '
                'with id 0-1 but it doesn\'t exist"]]'),
            u'[u\'fully-validated ExplorationRightsModel\', 2]']
        run_job_and_check_output(self, expected_output, sort=True)
class ExplorationRightsSnapshotMetadataModelValidatorTests(
        test_utils.GenericTestBase):
    """Tests for the ExplorationRightsSnapshotMetadataModel audit job.

    setUp creates three explorations ('0' committed by the plain user, the
    rest by the owner), each with a '<exp_id>-1' rights snapshot.
    """
    def setUp(self):
        super(ExplorationRightsSnapshotMetadataModelValidatorTests, self).setUp(
            )
        self.signup(self.OWNER_EMAIL, self.OWNER_USERNAME)
        self.signup(USER_EMAIL, USER_NAME)
        self.owner_id = self.get_user_id_from_email(self.OWNER_EMAIL)
        self.user_id = self.get_user_id_from_email(USER_EMAIL)
        explorations = [exp_domain.Exploration.create_default_exploration(
            '%s' % i,
            title='title %d' % i,
            category='category%d' % i,
        ) for i in xrange(3)]
        for exp in explorations:
            # '0' is committed by self.user_id so committer-related tests
            # can delete that user without touching the owner.
            if exp.id != '0':
                exp_services.save_new_exploration(self.owner_id, exp)
            else:
                exp_services.save_new_exploration(self.user_id, exp)
        self.model_instance_0 = (
            exp_models.ExplorationRightsSnapshotMetadataModel.get_by_id(
                '0-1'))
        self.model_instance_1 = (
            exp_models.ExplorationRightsSnapshotMetadataModel.get_by_id(
                '1-1'))
        self.model_instance_2 = (
            exp_models.ExplorationRightsSnapshotMetadataModel.get_by_id(
                '2-1'))
        self.job_class = (
            prod_validation_jobs_one_off
            .ExplorationRightsSnapshotMetadataModelAuditOneOffJob)
    def test_standard_operation(self):
        """All three freshly created snapshot metadata models validate."""
        expected_output = [
            u'[u\'fully-validated ExplorationRightsSnapshotMetadataModel\', 3]']
        run_job_and_check_output(self, expected_output)
    def test_model_with_created_on_greater_than_last_updated(self):
        """created_on later than last_updated fails the time-relation check."""
        self.model_instance_0.created_on = (
            self.model_instance_0.last_updated + datetime.timedelta(days=1))
        self.model_instance_0.put()
        expected_output = [(
            u'[u\'failed validation check for time field relation check '
            'of ExplorationRightsSnapshotMetadataModel\', '
            '[u\'Entity id %s: The created_on field has a value '
            '%s which is greater than the value '
            '%s of last_updated field\']]') % (
                self.model_instance_0.id,
                self.model_instance_0.created_on,
                self.model_instance_0.last_updated
            ), (
                u'[u\'fully-validated '
                'ExplorationRightsSnapshotMetadataModel\', 2]')]
        run_job_and_check_output(self, expected_output, sort=True)
    def test_model_with_last_updated_greater_than_current_time(self):
        """last_updated in the future fails the current-time check."""
        self.model_instance_1.delete()
        self.model_instance_2.delete()
        expected_output = [(
            u'[u\'failed validation check for current time check of '
            'ExplorationRightsSnapshotMetadataModel\', '
            '[u\'Entity id %s: The last_updated field has a '
            'value %s which is greater than the time when the job was run\']]'
        ) % (self.model_instance_0.id, self.model_instance_0.last_updated)]
        # Mock the clock 13 hours into the past so last_updated is "future".
        with self.swap(datetime, 'datetime', MockDatetime13Hours), self.swap(
            db.DateTimeProperty, 'data_type', MockDatetime13Hours):
            update_datastore_types_for_mock_datetime()
            run_job_and_check_output(self, expected_output, sort=True)
    def test_missing_exploration_rights_model_failure(self):
        """Deleting rights model '0' fails the exploration_rights_ids check
        for both of its snapshots ('0-1' and the deletion snapshot '0-2').
        """
        exp_models.ExplorationRightsModel.get_by_id('0').delete(
            self.user_id, '', [])
        expected_output = [
            (
                u'[u\'failed validation check for exploration_rights_ids '
                'field check of ExplorationRightsSnapshotMetadataModel\', '
                '[u"Entity id 0-1: based on field exploration_rights_ids '
                'having value 0, expect model ExplorationRightsModel with '
                'id 0 but it doesn\'t exist", u"Entity id 0-2: based on field '
                'exploration_rights_ids having value 0, expect model '
                'ExplorationRightsModel with id 0 but it doesn\'t exist"]]'
            ), (
                u'[u\'fully-validated '
                'ExplorationRightsSnapshotMetadataModel\', 2]')]
        run_job_and_check_output(self, expected_output, sort=True)
    def test_missing_committer_model_failure(self):
        """Deleting the committer's settings fails the committer_ids check."""
        user_models.UserSettingsModel.get_by_id(self.user_id).delete()
        expected_output = [
            (
                u'[u\'failed validation check for committer_ids field '
                'check of ExplorationRightsSnapshotMetadataModel\', '
                '[u"Entity id 0-1: based on field committer_ids having '
                'value %s, expect model UserSettingsModel with id %s '
                'but it doesn\'t exist"]]'
            ) % (self.user_id, self.user_id), (
                u'[u\'fully-validated '
                'ExplorationRightsSnapshotMetadataModel\', 2]')]
        run_job_and_check_output(self, expected_output, sort=True)
    def test_invalid_exploration_version_in_model_id(self):
        """A snapshot id whose version exceeds the rights model's version
        fails the version check.
        """
        model_with_invalid_version_in_id = (
            exp_models.ExplorationRightsSnapshotMetadataModel(
                id='0-3', committer_id=self.owner_id, commit_type='edit',
                commit_message='msg', commit_cmds=[{}]))
        model_with_invalid_version_in_id.put()
        expected_output = [
            (
                u'[u\'failed validation check for exploration rights model '
                'version check of ExplorationRightsSnapshotMetadataModel\', '
                '[u\'Entity id 0-3: ExplorationRights model corresponding to '
                'id 0 has a version 1 which is less than the version 3 in '
                'snapshot metadata model id\']]'
            ), (
                u'[u\'fully-validated '
                'ExplorationRightsSnapshotMetadataModel\', 3]')]
        run_job_and_check_output(self, expected_output, sort=True)
    # NOTE(review): "schmea" in the method name is a typo for "schema"; left
    # as-is because renaming would change the reported test id.
    def test_model_with_invalid_commit_cmd_schmea(self):
        """Malformed commit_cmds (missing/extra attributes) fail the
        per-command schema checks.
        """
        self.model_instance_0.commit_cmds = [{
            'cmd': 'change_exploration_status',
            'old_status': rights_manager.ACTIVITY_STATUS_PUBLIC,
        }, {
            'cmd': 'release_ownership',
            'invalid_attribute': 'invalid'
        }]
        self.model_instance_0.put()
        expected_output = [
            (
                u'[u\'failed validation check for commit cmd '
                'change_exploration_status check of '
                'ExplorationRightsSnapshotMetadataModel\', '
                '[u"Entity id 0-1: Commit command domain validation '
                'for command: {u\'old_status\': u\'public\', '
                'u\'cmd\': u\'change_exploration_status\'} '
                'failed with error: The following required '
                'attributes are missing: new_status"]]'
            ), (
                u'[u\'failed validation check for commit cmd '
                'release_ownership check of '
                'ExplorationRightsSnapshotMetadataModel\', '
                '[u"Entity id 0-1: Commit command domain validation '
                'for command: {u\'cmd\': u\'release_ownership\', '
                'u\'invalid_attribute\': u\'invalid\'} '
                'failed with error: The following extra attributes '
                'are present: invalid_attribute"]]'
            ), (
                u'[u\'fully-validated '
                'ExplorationRightsSnapshotMetadataModel\', 2]')]
        run_job_and_check_output(self, expected_output, sort=True)
class ExplorationRightsSnapshotContentModelValidatorTests(
        test_utils.GenericTestBase):
    """Tests for the one-off audit job that validates
    ExplorationRightsSnapshotContentModel instances.
    """
    def setUp(self):
        """Creates three default explorations (ids '0'..'2'), which gives one
        rights snapshot content model per exploration (ids '<exp_id>-1').
        """
        super(ExplorationRightsSnapshotContentModelValidatorTests, self).setUp(
        )
        self.signup(self.OWNER_EMAIL, self.OWNER_USERNAME)
        self.owner_id = self.get_user_id_from_email(self.OWNER_EMAIL)
        explorations = [exp_domain.Exploration.create_default_exploration(
            '%s' % i,
            title='title %d' % i,
            category='category%d' % i,
        ) for i in xrange(3)]
        for exp in explorations:
            exp_services.save_new_exploration(self.owner_id, exp)
        self.model_instance_0 = (
            exp_models.ExplorationRightsSnapshotContentModel.get_by_id(
                '0-1'))
        self.model_instance_1 = (
            exp_models.ExplorationRightsSnapshotContentModel.get_by_id(
                '1-1'))
        self.model_instance_2 = (
            exp_models.ExplorationRightsSnapshotContentModel.get_by_id(
                '2-1'))
        self.job_class = (
            prod_validation_jobs_one_off
            .ExplorationRightsSnapshotContentModelAuditOneOffJob)
    def test_standard_operation(self):
        """All three snapshot content models pass validation untouched."""
        expected_output = [
            u'[u\'fully-validated ExplorationRightsSnapshotContentModel\', 3]']
        run_job_and_check_output(self, expected_output)
    def test_model_with_created_on_greater_than_last_updated(self):
        """A model whose created_on lies after its last_updated fails the
        time field relation check.
        """
        self.model_instance_0.created_on = (
            self.model_instance_0.last_updated + datetime.timedelta(days=1))
        self.model_instance_0.put()
        expected_output = [(
            u'[u\'failed validation check for time field relation check '
            'of ExplorationRightsSnapshotContentModel\', '
            '[u\'Entity id %s: The created_on field has a value '
            '%s which is greater than the value '
            '%s of last_updated field\']]') % (
                self.model_instance_0.id,
                self.model_instance_0.created_on,
                self.model_instance_0.last_updated
            ), (
                u'[u\'fully-validated '
                'ExplorationRightsSnapshotContentModel\', 2]')]
        run_job_and_check_output(self, expected_output, sort=True)
    def test_model_with_last_updated_greater_than_current_time(self):
        """Running the job with a mocked clock 13 hours ahead makes the
        remaining model's last_updated look like it is in the future, which
        fails the current time check.
        """
        self.model_instance_1.delete()
        self.model_instance_2.delete()
        expected_output = [(
            u'[u\'failed validation check for current time check of '
            'ExplorationRightsSnapshotContentModel\', '
            '[u\'Entity id %s: The last_updated field has a '
            'value %s which is greater than the time when the job was run\']]'
        ) % (self.model_instance_0.id, self.model_instance_0.last_updated)]
        with self.swap(datetime, 'datetime', MockDatetime13Hours), self.swap(
            db.DateTimeProperty, 'data_type', MockDatetime13Hours):
            update_datastore_types_for_mock_datetime()
            run_job_and_check_output(self, expected_output, sort=True)
    def test_missing_exploration_model_failure(self):
        """Deleting the backing ExplorationRightsModel makes both of its
        snapshot content models fail the exploration_rights_ids field check.
        """
        exp_models.ExplorationRightsModel.get_by_id('0').delete(
            self.owner_id, '', [])
        expected_output = [
            (
                u'[u\'failed validation check for exploration_rights_ids '
                'field check of ExplorationRightsSnapshotContentModel\', '
                '[u"Entity id 0-1: based on field exploration_rights_ids '
                'having value 0, expect model ExplorationRightsModel with '
                'id 0 but it doesn\'t exist", u"Entity id 0-2: based on field '
                'exploration_rights_ids having value 0, expect model '
                'ExplorationRightsModel with id 0 but it doesn\'t exist"]]'
            ), (
                u'[u\'fully-validated '
                'ExplorationRightsSnapshotContentModel\', 2]')]
        run_job_and_check_output(self, expected_output, sort=True)
    def test_invalid_exploration_version_in_model_id(self):
        """A snapshot content model id '0-3' encodes version 3 while the
        rights model is only at version 1, so validation fails for it.
        """
        model_with_invalid_version_in_id = (
            exp_models.ExplorationRightsSnapshotContentModel(
                id='0-3'))
        model_with_invalid_version_in_id.content = {}
        model_with_invalid_version_in_id.put()
        expected_output = [
            (
                u'[u\'failed validation check for exploration rights model '
                'version check of ExplorationRightsSnapshotContentModel\', '
                '[u\'Entity id 0-3: ExplorationRights model corresponding to '
                'id 0 has a version 1 which is less than the version 3 in '
                'snapshot content model id\']]'
            ), (
                u'[u\'fully-validated ExplorationRightsSnapshotContentModel\', '
                '3]')]
        run_job_and_check_output(self, expected_output, sort=True)
class ExplorationCommitLogEntryModelValidatorTests(test_utils.GenericTestBase):
    """Tests for the one-off audit job that validates
    ExplorationCommitLogEntryModel instances.
    """
    def setUp(self):
        """Creates three default explorations (one commit log entry each,
        ids 'exploration-<i>-1') plus one hand-built rights commit log entry
        ('rights-1-1').
        """
        super(ExplorationCommitLogEntryModelValidatorTests, self).setUp()
        self.signup(self.OWNER_EMAIL, self.OWNER_USERNAME)
        self.signup(USER_EMAIL, USER_NAME)
        self.owner_id = self.get_user_id_from_email(self.OWNER_EMAIL)
        explorations = [exp_domain.Exploration.create_default_exploration(
            '%s' % i,
            title='title %d' % i,
            category='category%d' % i,
        ) for i in xrange(3)]
        for exp in explorations:
            exp_services.save_new_exploration(self.owner_id, exp)
        self.rights_model_instance = (
            exp_models.ExplorationCommitLogEntryModel(
                id='rights-1-1', user_id=self.owner_id,
                username=self.OWNER_USERNAME, exploration_id='1',
                commit_type='edit', commit_message='', commit_cmds=[],
                post_commit_status=constants.ACTIVITY_STATUS_PUBLIC,
                post_commit_community_owned=False,
                post_commit_is_private=False))
        self.rights_model_instance.put()
        self.model_instance_0 = (
            exp_models.ExplorationCommitLogEntryModel.get_by_id(
                'exploration-0-1'))
        self.model_instance_1 = (
            exp_models.ExplorationCommitLogEntryModel.get_by_id(
                'exploration-1-1'))
        self.model_instance_2 = (
            exp_models.ExplorationCommitLogEntryModel.get_by_id(
                'exploration-2-1'))
        self.job_class = (
            prod_validation_jobs_one_off
            .ExplorationCommitLogEntryModelAuditOneOffJob)
    def test_standard_operation(self):
        """An extra exploration update adds a fifth valid commit log entry;
        all five pass validation.
        """
        exp_services.update_exploration(
            self.owner_id, '0', [exp_domain.ExplorationChange({
                'cmd': 'edit_exploration_property',
                'property_name': 'title',
                'new_value': 'New title'
            })], 'Changes.')
        expected_output = [
            u'[u\'fully-validated ExplorationCommitLogEntryModel\', 5]']
        run_job_and_check_output(self, expected_output)
    def test_model_with_created_on_greater_than_last_updated(self):
        """A model whose created_on lies after its last_updated fails the
        time field relation check.
        """
        self.model_instance_0.created_on = (
            self.model_instance_0.last_updated + datetime.timedelta(days=1))
        self.model_instance_0.put()
        expected_output = [(
            u'[u\'failed validation check for time field relation check '
            'of ExplorationCommitLogEntryModel\', '
            '[u\'Entity id %s: The created_on field has a value '
            '%s which is greater than the value '
            '%s of last_updated field\']]') % (
                self.model_instance_0.id,
                self.model_instance_0.created_on,
                self.model_instance_0.last_updated
            ), u'[u\'fully-validated ExplorationCommitLogEntryModel\', 3]']
        run_job_and_check_output(self, expected_output, sort=True)
    def test_model_with_last_updated_greater_than_current_time(self):
        """Running the job with a mocked clock 13 hours ahead makes the
        remaining model's last_updated look like it is in the future, which
        fails the current time check.
        """
        self.model_instance_1.delete()
        self.model_instance_2.delete()
        self.rights_model_instance.delete()
        expected_output = [(
            u'[u\'failed validation check for current time check of '
            'ExplorationCommitLogEntryModel\', '
            '[u\'Entity id %s: The last_updated field has a '
            'value %s which is greater than the time when the job was run\']]'
        ) % (self.model_instance_0.id, self.model_instance_0.last_updated)]
        with self.swap(datetime, 'datetime', MockDatetime13Hours), self.swap(
            db.DateTimeProperty, 'data_type', MockDatetime13Hours):
            update_datastore_types_for_mock_datetime()
            run_job_and_check_output(self, expected_output, sort=True)
    def test_missing_exploration_model_failure(self):
        """Deleting the backing ExplorationModel makes its commit log
        entries fail the exploration_ids field check.
        """
        exp_models.ExplorationModel.get_by_id('0').delete(
            feconf.SYSTEM_COMMITTER_ID, '', [])
        expected_output = [
            (
                u'[u\'failed validation check for exploration_ids '
                'field check of ExplorationCommitLogEntryModel\', '
                '[u"Entity id exploration-0-1: based on field '
                'exploration_ids having value 0, expect model '
                'ExplorationModel with id 0 '
                'but it doesn\'t exist", u"Entity id exploration-0-2: based '
                'on field exploration_ids having value 0, expect model '
                'ExplorationModel with id 0 but it doesn\'t exist"]]'
            ), u'[u\'fully-validated ExplorationCommitLogEntryModel\', 3]']
        run_job_and_check_output(self, expected_output, sort=True)
    def test_missing_exploration_rights_model_failure(self):
        """Deleting the ExplorationRightsModel referenced by the rights
        commit log entry fails the exploration_rights_ids field check.
        """
        exp_models.ExplorationRightsModel.get_by_id('1').delete(
            feconf.SYSTEM_COMMITTER_ID, '', [])
        expected_output = [
            (
                u'[u\'failed validation check for exploration_rights_ids '
                'field check of ExplorationCommitLogEntryModel\', '
                '[u"Entity id rights-1-1: based on field '
                'exploration_rights_ids having value 1, expect model '
                'ExplorationRightsModel with id 1 but it doesn\'t exist"]]'
            ), u'[u\'fully-validated ExplorationCommitLogEntryModel\', 3]']
        run_job_and_check_output(
            self, expected_output, sort=True)
    def test_invalid_exploration_version_in_model_id(self):
        """A commit log entry id encoding version 3 while the exploration is
        only at version 1 fails the exploration model version check.
        """
        model_with_invalid_version_in_id = (
            exp_models.ExplorationCommitLogEntryModel.create(
                '0', 3, self.owner_id, self.OWNER_USERNAME, 'edit',
                'msg', [{}],
                constants.ACTIVITY_STATUS_PUBLIC, False))
        model_with_invalid_version_in_id.exploration_id = '0'
        model_with_invalid_version_in_id.put()
        expected_output = [
            (
                u'[u\'failed validation check for exploration model '
                'version check of ExplorationCommitLogEntryModel\', '
                '[u\'Entity id %s: Exploration model corresponding '
                'to id 0 has a version 1 which is less than '
                'the version 3 in commit log entry model id\']]'
            ) % (model_with_invalid_version_in_id.id),
            u'[u\'fully-validated ExplorationCommitLogEntryModel\', 4]']
        run_job_and_check_output(self, expected_output, sort=True)
    def test_model_with_invalid_id(self):
        """An id not matching the expected pattern fails the model id check,
        and its empty commit command also fails the commit cmd check.
        """
        model_with_invalid_id = (
            exp_models.ExplorationCommitLogEntryModel(
                id='invalid-0-1', user_id=self.owner_id,
                username=self.OWNER_USERNAME, commit_type='edit',
                commit_message='msg', commit_cmds=[{}],
                post_commit_status=constants.ACTIVITY_STATUS_PUBLIC,
                post_commit_is_private=False))
        model_with_invalid_id.exploration_id = '0'
        model_with_invalid_id.put()
        expected_output = [
            (
                u'[u\'failed validation check for model id check of '
                'ExplorationCommitLogEntryModel\', '
                '[u\'Entity id %s: Entity id does not match regex pattern\']]'
            ) % (model_with_invalid_id.id), (
                u'[u\'failed validation check for commit cmd check of '
                'ExplorationCommitLogEntryModel\', [u\'Entity id invalid-0-1: '
                'No commit command domain object defined for entity with '
                'commands: [{}]\']]'),
            u'[u\'fully-validated ExplorationCommitLogEntryModel\', 4]']
        run_job_and_check_output(self, expected_output, sort=True)
    def test_model_with_invalid_commit_type(self):
        """A commit_type outside the allowed set fails the commit type
        check.
        """
        self.model_instance_0.commit_type = 'invalid'
        self.model_instance_0.put()
        expected_output = [
            (
                u'[u\'failed validation check for commit type check of '
                'ExplorationCommitLogEntryModel\', '
                '[u\'Entity id exploration-0-1: Commit type invalid is '
                'not allowed\']]'
            ), u'[u\'fully-validated ExplorationCommitLogEntryModel\', 3]']
        run_job_and_check_output(self, expected_output, sort=True)
    def test_model_with_invalid_post_commit_status(self):
        """A post_commit_status outside the allowed set fails the post
        commit status check.
        """
        self.model_instance_0.post_commit_status = 'invalid'
        self.model_instance_0.put()
        expected_output = [
            (
                u'[u\'failed validation check for post commit status check '
                'of ExplorationCommitLogEntryModel\', '
                '[u\'Entity id exploration-0-1: Post commit status invalid '
                'is invalid\']]'
            ), u'[u\'fully-validated ExplorationCommitLogEntryModel\', 3]']
        run_job_and_check_output(self, expected_output, sort=True)
    def test_model_with_invalid_true_post_commit_is_private(self):
        """post_commit_is_private=True contradicts a public
        post_commit_status and fails the post commit is private check.
        """
        self.model_instance_0.post_commit_status = 'public'
        self.model_instance_0.post_commit_is_private = True
        self.model_instance_0.put()
        expected_output = [
            (
                u'[u\'failed validation check for post commit is private '
                'check of ExplorationCommitLogEntryModel\', '
                '[u\'Entity id %s: Post commit status is '
                'public but post_commit_is_private is True\']]'
            ) % self.model_instance_0.id,
            u'[u\'fully-validated ExplorationCommitLogEntryModel\', 3]']
        run_job_and_check_output(self, expected_output, sort=True)
    def test_model_with_invalid_false_post_commit_is_private(self):
        """post_commit_is_private=False contradicts a private
        post_commit_status and fails the post commit is private check.
        """
        self.model_instance_0.post_commit_status = 'private'
        self.model_instance_0.post_commit_is_private = False
        self.model_instance_0.put()
        expected_output = [
            (
                u'[u\'failed validation check for post commit is private '
                'check of ExplorationCommitLogEntryModel\', '
                '[u\'Entity id %s: Post commit status is '
                'private but post_commit_is_private is False\']]'
            ) % self.model_instance_0.id,
            u'[u\'fully-validated ExplorationCommitLogEntryModel\', 3]']
        run_job_and_check_output(self, expected_output, sort=True)
    # NOTE(review): 'schmea' in the method name is a typo for 'schema'; kept
    # as-is to avoid changing the test's public identifier.
    def test_model_with_invalid_commit_cmd_schmea(self):
        """Commit commands violating the command domain schema (missing
        required attribute and/or an extra attribute) each yield a separate
        failed-validation entry.
        """
        self.model_instance_0.commit_cmds = [{
            'cmd': 'add_state'
        }, {
            'cmd': 'delete_state',
            'invalid_attribute': 'invalid'
        }]
        self.model_instance_0.put()
        expected_output = [
            (
                u'[u\'failed validation check for commit cmd '
                'delete_state check of '
                'ExplorationCommitLogEntryModel\', '
                '[u"Entity id exploration-0-1: Commit command domain '
                'validation for command: {u\'cmd\': u\'delete_state\', '
                'u\'invalid_attribute\': u\'invalid\'} '
                'failed with error: The following required attributes '
                'are missing: state_name, '
                'The following extra attributes are present: '
                'invalid_attribute"]]'
            ), (
                u'[u\'failed validation check for commit cmd '
                'add_state check of '
                'ExplorationCommitLogEntryModel\', '
                '[u"Entity id exploration-0-1: Commit command domain '
                'validation for command: {u\'cmd\': u\'add_state\'} '
                'failed with error: The following required attributes '
                'are missing: state_name"]]'
            ), u'[u\'fully-validated ExplorationCommitLogEntryModel\', 3]']
        run_job_and_check_output(self, expected_output, sort=True)
class ExpSummaryModelValidatorTests(test_utils.GenericTestBase):
    """Tests for the one-off audit job that validates ExpSummaryModel
    instances.
    """
    def setUp(self):
        """Creates three explorations with owner/editor/viewer/contributor
        roles assigned, one edit by the contributor, and two ratings, so the
        summary models cover all user-id fields the validator checks.
        """
        super(ExpSummaryModelValidatorTests, self).setUp()
        self.signup(self.OWNER_EMAIL, self.OWNER_USERNAME)
        self.signup(USER_EMAIL, USER_NAME)
        self.user_id = self.get_user_id_from_email(USER_EMAIL)
        self.owner_id = self.get_user_id_from_email(self.OWNER_EMAIL)
        self.owner = user_services.UserActionsInfo(self.owner_id)
        editor_email = '[email protected]'
        viewer_email = '[email protected]'
        contributor_email = '[email protected]'
        self.signup(editor_email, 'editor')
        self.signup(viewer_email, 'viewer')
        self.signup(contributor_email, 'contributor')
        self.editor_id = self.get_user_id_from_email(editor_email)
        self.viewer_id = self.get_user_id_from_email(viewer_email)
        self.contributor_id = self.get_user_id_from_email(contributor_email)
        language_codes = ['ar', 'en', 'en']
        explorations = [exp_domain.Exploration.create_default_exploration(
            '%s' % i,
            title='title %d' % i,
            category='category%d' % i,
            language_code=language_codes[i]
        ) for i in xrange(3)]
        for exp in explorations:
            exp.tags = ['math', 'art']
            exp_services.save_new_exploration(self.owner_id, exp)
        rights_manager.assign_role_for_exploration(
            self.owner, '0', self.editor_id, rights_manager.ROLE_EDITOR)
        exp_services.update_exploration(
            self.contributor_id, '0', [exp_domain.ExplorationChange({
                'cmd': 'edit_exploration_property',
                'property_name': 'title',
                'new_value': 'New title'
            })], 'Changes.')
        rights_manager.assign_role_for_exploration(
            self.owner, '2', self.viewer_id, rights_manager.ROLE_VIEWER)
        rating_services.assign_rating_to_exploration(self.user_id, '0', 3)
        rating_services.assign_rating_to_exploration(self.viewer_id, '0', 4)
        self.model_instance_0 = exp_models.ExpSummaryModel.get_by_id('0')
        self.model_instance_1 = exp_models.ExpSummaryModel.get_by_id('1')
        self.model_instance_2 = exp_models.ExpSummaryModel.get_by_id('2')
        self.job_class = (
            prod_validation_jobs_one_off.ExpSummaryModelAuditOneOffJob)
    def test_standard_operation(self):
        """All three summary models pass validation after a publish and a
        further edit.
        """
        rights_manager.publish_exploration(self.owner, '0')
        exp_services.update_exploration(
            self.owner_id, '1', [exp_domain.ExplorationChange({
                'cmd': 'edit_exploration_property',
                'property_name': 'title',
                'new_value': 'New title'
            })], 'Changes.')
        expected_output = [
            u'[u\'fully-validated ExpSummaryModel\', 3]']
        run_job_and_check_output(self, expected_output)
    def test_model_with_created_on_greater_than_last_updated(self):
        """A model whose created_on lies after its last_updated fails the
        time field relation check.
        """
        self.model_instance_0.created_on = (
            self.model_instance_0.last_updated + datetime.timedelta(days=1))
        self.model_instance_0.put()
        expected_output = [(
            u'[u\'failed validation check for time field relation check '
            'of ExpSummaryModel\', '
            '[u\'Entity id %s: The created_on field has a value '
            '%s which is greater than the value '
            '%s of last_updated field\']]') % (
                self.model_instance_0.id,
                self.model_instance_0.created_on,
                self.model_instance_0.last_updated
            ), u'[u\'fully-validated ExpSummaryModel\', 2]']
        run_job_and_check_output(self, expected_output, sort=True)
    def test_model_with_last_updated_greater_than_current_time(self):
        """Running the job with a mocked clock 13 hours ahead makes the
        remaining model's last_updated look like it is in the future, which
        fails the current time check.
        """
        exp_models.ExplorationModel.get_by_id('1').delete(
            self.owner_id, '')
        exp_models.ExplorationModel.get_by_id('2').delete(
            self.owner_id, '')
        self.model_instance_1.delete()
        self.model_instance_2.delete()
        expected_output = [(
            u'[u\'failed validation check for current time check of '
            'ExpSummaryModel\', '
            '[u\'Entity id %s: The last_updated field has a '
            'value %s which is greater than the time when the job was run\']]'
        ) % (self.model_instance_0.id, self.model_instance_0.last_updated)]
        with self.swap(datetime, 'datetime', MockDatetime13Hours), self.swap(
            db.DateTimeProperty, 'data_type', MockDatetime13Hours):
            update_datastore_types_for_mock_datetime()
            run_job_and_check_output(self, expected_output, sort=True)
    def test_model_with_first_published_datetime_than_current_time(self):
        """A first_published_msec far in the future (scaled up after
        publishing) fails the first published msec check.
        """
        rights_manager.publish_exploration(self.owner, '0')
        rights_manager.publish_exploration(self.owner, '1')
        self.model_instance_0 = exp_models.ExpSummaryModel.get_by_id('0')
        self.model_instance_0.first_published_msec = (
            self.model_instance_0.first_published_msec * 1000000.0)
        self.model_instance_0.put()
        rights_model = exp_models.ExplorationRightsModel.get_by_id('0')
        rights_model.first_published_msec = (
            self.model_instance_0.first_published_msec)
        rights_model.commit(self.owner_id, '', [])
        expected_output = [
            (
                u'[u\'failed validation check for first published msec check '
                'of ExpSummaryModel\', '
                '[u\'Entity id 0: The first_published_msec field has a '
                'value %s which is greater than the time when the '
                'job was run\']]'
            ) % (self.model_instance_0.first_published_msec),
            u'[u\'fully-validated ExpSummaryModel\', 2]']
        run_job_and_check_output(self, expected_output, sort=True)
    def test_missing_exploration_model_failure(self):
        """Deleting the backing ExplorationModel fails the exploration_ids
        field check for the corresponding summary model.
        """
        exp_models.ExplorationModel.get_by_id('0').delete(
            feconf.SYSTEM_COMMITTER_ID, '', [])
        expected_output = [
            (
                u'[u\'failed validation check for exploration_ids '
                'field check of ExpSummaryModel\', '
                '[u"Entity id 0: based on field exploration_ids having '
                'value 0, expect model ExplorationModel with id 0 but '
                'it doesn\'t exist"]]'),
            u'[u\'fully-validated ExpSummaryModel\', 2]']
        run_job_and_check_output(self, expected_output, sort=True)
    def test_missing_owner_user_model_failure(self):
        """Deleting an owner's UserSettingsModel fails the owner_user_ids
        field check.
        """
        rights_manager.assign_role_for_exploration(
            self.owner, '0', self.user_id, rights_manager.ROLE_OWNER)
        user_models.UserSettingsModel.get_by_id(self.user_id).delete()
        expected_output = [
            (
                u'[u\'failed validation check for owner_user_ids '
                'field check of ExpSummaryModel\', '
                '[u"Entity id 0: based on field owner_user_ids having '
                'value %s, expect model UserSettingsModel with id %s '
                'but it doesn\'t exist"]]') % (self.user_id, self.user_id),
            u'[u\'fully-validated ExpSummaryModel\', 2]']
        run_job_and_check_output(self, expected_output, sort=True)
    def test_missing_editor_user_model_failure(self):
        """Deleting the editor's UserSettingsModel fails the editor_user_ids
        field check.
        """
        user_models.UserSettingsModel.get_by_id(self.editor_id).delete()
        expected_output = [
            (
                u'[u\'failed validation check for editor_user_ids '
                'field check of ExpSummaryModel\', '
                '[u"Entity id 0: based on field editor_user_ids having '
                'value %s, expect model UserSettingsModel with id %s but '
                'it doesn\'t exist"]]') % (
                    self.editor_id, self.editor_id),
            u'[u\'fully-validated ExpSummaryModel\', 2]']
        run_job_and_check_output(self, expected_output, sort=True)
    def test_missing_viewer_user_model_failure(self):
        """Deleting the viewer's UserSettingsModel fails the viewer_user_ids
        field check.
        """
        user_models.UserSettingsModel.get_by_id(self.viewer_id).delete()
        expected_output = [
            (
                u'[u\'failed validation check for viewer_user_ids '
                'field check of ExpSummaryModel\', '
                '[u"Entity id 2: based on field viewer_user_ids having '
                'value %s, expect model UserSettingsModel with id %s but '
                'it doesn\'t exist"]]') % (
                    self.viewer_id, self.viewer_id),
            u'[u\'fully-validated ExpSummaryModel\', 2]']
        run_job_and_check_output(self, expected_output, sort=True)
    def test_missing_contributor_user_model_failure(self):
        """Deleting the contributor's UserSettingsModel fails the
        contributor_user_ids field check.
        """
        user_models.UserSettingsModel.get_by_id(self.contributor_id).delete()
        expected_output = [
            (
                u'[u\'failed validation check for contributor_user_ids '
                'field check of ExpSummaryModel\', '
                '[u"Entity id 0: based on field contributor_user_ids having '
                'value %s, expect model UserSettingsModel with id %s but '
                'it doesn\'t exist"]]') % (
                    self.contributor_id, self.contributor_id),
            u'[u\'fully-validated ExpSummaryModel\', 2]']
        run_job_and_check_output(self, expected_output, sort=True)
    def test_model_with_invalid_exploration_model_last_updated(self):
        """An exploration_model_last_updated in the future no longer matches
        the last human commit time and fails validation.
        """
        last_human_update_time = (
            self.model_instance_0.exploration_model_last_updated)
        self.model_instance_0.exploration_model_last_updated = (
            datetime.datetime.utcnow() + datetime.timedelta(days=1))
        self.model_instance_0.put()
        expected_output = [
            (
                u'[u\'failed validation check for exploration model last '
                'updated check of ExpSummaryModel\', '
                '[u\'Entity id %s: The exploration_model_last_updated '
                'field: %s does not match the last time a commit was '
                'made by a human contributor: %s\']]'
            ) % (
                self.model_instance_0.id,
                self.model_instance_0.exploration_model_last_updated,
                last_human_update_time),
            u'[u\'fully-validated ExpSummaryModel\', 2]']
        run_job_and_check_output(self, expected_output, sort=True)
    def test_model_with_invalid_schema(self):
        """Rating keys outside 1..5 make the summary fail domain
        validation.
        """
        self.model_instance_0.ratings = {'10': 4, '5': 15}
        self.model_instance_0.put()
        expected_output = [
            (
                u'[u\'failed validation check for domain object check of '
                'ExpSummaryModel\', '
                '[u\'Entity id 0: Entity fails domain validation with '
                'the error Expected ratings to have keys: 1, 2, 3, 4, 5, '
                'received 10, 5\']]'
            ), u'[u\'fully-validated ExpSummaryModel\', 2]']
        run_job_and_check_output(self, expected_output, sort=True)
    def test_model_with_invalid_contributors_summary(self):
        """A contributors_summary whose keys do not match contributor_ids
        fails the contributors summary check.
        """
        sorted_contributor_ids = sorted(
            self.model_instance_0.contributors_summary.keys())
        self.model_instance_0.contributors_summary = {'invalid': 1}
        self.model_instance_0.put()
        expected_output = [
            (
                u'[u\'failed validation check for contributors summary '
                'check of ExpSummaryModel\', '
                '[u"Entity id 0: Contributor ids: [u\'%s\', u\'%s\'] '
                'do not match the contributor ids obtained using '
                'contributors summary: [u\'invalid\']"]]') % (
                    sorted_contributor_ids[0], sorted_contributor_ids[1]
                ),
            u'[u\'fully-validated ExpSummaryModel\', 2]']
        run_job_and_check_output(self, expected_output, sort=True)
    def test_model_with_invalid_exploration_related_property(self):
        """A summary title that disagrees with the exploration's title fails
        the title field check.
        """
        self.model_instance_0.title = 'invalid'
        self.model_instance_0.put()
        expected_output = [
            (
                u'[u\'failed validation check for title field check of '
                'ExpSummaryModel\', '
                '[u\'Entity id %s: title field in entity: invalid does not '
                'match corresponding exploration title field: New title\']]'
            ) % self.model_instance_0.id,
            u'[u\'fully-validated ExpSummaryModel\', 2]']
        run_job_and_check_output(self, expected_output, sort=True)
    def test_model_with_invalid_exploration_rights_related_property(self):
        """A summary status that disagrees with the exploration rights
        status fails the status field check.
        """
        self.model_instance_0.status = 'public'
        self.model_instance_0.put()
        expected_output = [
            (
                u'[u\'failed validation check for status field check of '
                'ExpSummaryModel\', '
                '[u\'Entity id %s: status field in entity: public does not '
                'match corresponding exploration rights status field: '
                'private\']]'
            ) % self.model_instance_0.id,
            u'[u\'fully-validated ExpSummaryModel\', 2]']
        run_job_and_check_output(self, expected_output, sort=True)
class FileMetadataModelValidatorTests(test_utils.GenericTestBase):
    """Tests for the one-off audit job that validates FileMetadataModel
    instances.
    """
    def setUp(self):
        """Creates two explorations and commits one FileMetadataModel for
        each (an image path for exp0 and an audio path for exp1).
        """
        super(FileMetadataModelValidatorTests, self).setUp()
        self.signup(self.OWNER_EMAIL, self.OWNER_USERNAME)
        self.owner_id = self.get_user_id_from_email(self.OWNER_EMAIL)
        explorations = [exp_domain.Exploration.create_default_exploration(
            'exp%s' % i,
            title='title %d' % i,
            category='category%d' % i,
        ) for i in xrange(2)]
        for exp in explorations:
            exp_services.save_new_exploration(self.owner_id, exp)
        self.model_instance_0 = file_models.FileMetadataModel.create(
            'exploration/exp0', 'assets/image/img0.png')
        self.model_instance_0.commit(self.owner_id, [])
        self.model_instance_1 = file_models.FileMetadataModel.create(
            'exploration/exp1', '/exploration/exp1/assets/audio/aud1.mp3')
        self.model_instance_1.commit(self.owner_id, [])
        self.job_class = (
            prod_validation_jobs_one_off.FileMetadataModelAuditOneOffJob)
    def test_standard_operation(self):
        """Both file metadata models pass validation untouched."""
        expected_output = [
            u'[u\'fully-validated FileMetadataModel\', 2]']
        run_job_and_check_output(self, expected_output)
    def test_model_with_created_on_greater_than_last_updated(self):
        """A model whose created_on lies after its last_updated fails the
        time field relation check.
        """
        self.model_instance_0.created_on = (
            self.model_instance_0.last_updated + datetime.timedelta(days=1))
        self.model_instance_0.commit(feconf.SYSTEM_COMMITTER_ID, [])
        expected_output = [
            (
                u'[u\'failed validation check for time field relation check '
                'of FileMetadataModel\', '
                '[u\'Entity id %s: The created_on field has a value '
                '%s which is greater than the value '
                '%s of last_updated field\']]') % (
                    self.model_instance_0.id,
                    self.model_instance_0.created_on,
                    self.model_instance_0.last_updated
                ),
            u'[u\'fully-validated FileMetadataModel\', 1]']
        run_job_and_check_output(self, expected_output, sort=True)
    def test_model_with_last_updated_greater_than_current_time(self):
        """Running the job with a mocked clock 13 hours ahead makes the
        remaining model's last_updated look like it is in the future, which
        fails the current time check.
        """
        self.model_instance_1.delete(feconf.SYSTEM_COMMITTER_ID, 'delete')
        expected_output = [(
            u'[u\'failed validation check for current time check of '
            'FileMetadataModel\', '
            '[u\'Entity id %s: The last_updated field has a '
            'value %s which is greater than the time when the job was run\']]'
        ) % (self.model_instance_0.id, self.model_instance_0.last_updated)]
        with self.swap(datetime, 'datetime', MockDatetime13Hours), self.swap(
            db.DateTimeProperty, 'data_type', MockDatetime13Hours):
            update_datastore_types_for_mock_datetime()
            run_job_and_check_output(self, expected_output, sort=True)
    def test_missing_exploration_model_failure(self):
        """Deleting the referenced ExplorationModel fails the
        exploration_ids field check.
        """
        exp_models.ExplorationModel.get_by_id('exp1').delete(
            feconf.SYSTEM_COMMITTER_ID, '', [])
        expected_output = [
            (
                u'[u\'failed validation check for exploration_ids field '
                'check of FileMetadataModel\', '
                '[u"Entity id %s: based on field exploration_ids having '
                'value exp1, expect model ExplorationModel with id exp1 but it '
                'doesn\'t exist"]]') % self.model_instance_1.id,
            u'[u\'fully-validated FileMetadataModel\', 1]']
        run_job_and_check_output(self, expected_output, sort=True)
    def test_missing_snapshot_metadata_model_failure(self):
        """Deleting the paired snapshot metadata model fails the
        snapshot_metadata_ids field check.
        """
        file_models.FileMetadataSnapshotMetadataModel.get_by_id(
            '%s-1' % self.model_instance_0.id).delete()
        expected_output = [
            (
                u'[u\'failed validation check for snapshot_metadata_ids '
                'field check of FileMetadataModel\', '
                '[u"Entity id %s: based on field snapshot_metadata_ids '
                'having value %s-1, expect model '
                'FileMetadataSnapshotMetadataModel '
                'with id %s-1 but it doesn\'t exist"]]') % (
                    self.model_instance_0.id, self.model_instance_0.id,
                    self.model_instance_0.id),
            u'[u\'fully-validated FileMetadataModel\', 1]']
        run_job_and_check_output(self, expected_output, sort=True)
    def test_missing_snapshot_content_model_failure(self):
        """Deleting the paired snapshot content model fails the
        snapshot_content_ids field check.
        """
        file_models.FileMetadataSnapshotContentModel.get_by_id(
            '%s-1' % self.model_instance_0.id).delete()
        expected_output = [
            (
                u'[u\'failed validation check for snapshot_content_ids '
                'field check of FileMetadataModel\', '
                '[u"Entity id %s: based on field snapshot_content_ids having '
                'value %s-1, expect model FileMetadataSnapshotContentModel '
                'with id %s-1 but it doesn\'t exist"]]') % (
                    self.model_instance_0.id, self.model_instance_0.id,
                    self.model_instance_0.id),
            u'[u\'fully-validated FileMetadataModel\', 1]']
        run_job_and_check_output(self, expected_output, sort=True)
class FileMetadataSnapshotMetadataModelValidatorTests(
        test_utils.GenericTestBase):
    """Tests for the one-off audit job that validates
    FileMetadataSnapshotMetadataModel instances.
    """
    def setUp(self):
        """Creates two explorations and commits one file metadata model for
        each (one committed by the owner, one by a second user), producing
        one snapshot metadata model per file metadata model.
        """
        super(FileMetadataSnapshotMetadataModelValidatorTests, self).setUp()
        self.signup(self.OWNER_EMAIL, self.OWNER_USERNAME)
        self.signup(USER_EMAIL, USER_NAME)
        self.owner_id = self.get_user_id_from_email(self.OWNER_EMAIL)
        self.user_id = self.get_user_id_from_email(USER_EMAIL)
        explorations = [exp_domain.Exploration.create_default_exploration(
            'exp%s' % i,
            title='title %d' % i,
            category='category%d' % i,
        ) for i in xrange(2)]
        for exp in explorations:
            exp_services.save_new_exploration(self.owner_id, exp)
        file_metadata_model_0 = file_models.FileMetadataModel.create(
            'exploration/exp0', 'assets/image/img0.png')
        file_metadata_model_0.commit(self.owner_id, [])
        file_metadata_model_1 = file_models.FileMetadataModel.create(
            'exploration/exp1', '/exploration/exp1/assets/audio/aud1.mp3')
        file_metadata_model_1.commit(self.user_id, [])
        self.id_0 = file_metadata_model_0.id
        self.id_1 = file_metadata_model_1.id
        self.model_instance_0 = (
            file_models.FileMetadataSnapshotMetadataModel.get_by_id(
                '%s-1' % self.id_0))
        self.model_instance_1 = (
            file_models.FileMetadataSnapshotMetadataModel.get_by_id(
                '%s-1' % self.id_1))
        self.job_class = (
            prod_validation_jobs_one_off
            .FileMetadataSnapshotMetadataModelAuditOneOffJob)
    def test_standard_operation(self):
        """Both snapshot metadata models pass validation untouched."""
        expected_output = [
            u'[u\'fully-validated FileMetadataSnapshotMetadataModel\', 2]']
        run_job_and_check_output(self, expected_output)
    def test_model_with_created_on_greater_than_last_updated(self):
        """A model whose created_on lies after its last_updated fails the
        time field relation check.
        """
        self.model_instance_0.created_on = (
            self.model_instance_0.last_updated + datetime.timedelta(days=1))
        self.model_instance_0.put()
        expected_output = [(
            u'[u\'failed validation check for time field relation check '
            'of FileMetadataSnapshotMetadataModel\', '
            '[u\'Entity id %s: The created_on field has a value '
            '%s which is greater than the value '
            '%s of last_updated field\']]') % (
                self.model_instance_0.id,
                self.model_instance_0.created_on,
                self.model_instance_0.last_updated
            ), (
                u'[u\'fully-validated '
                'FileMetadataSnapshotMetadataModel\', 1]')]
        run_job_and_check_output(self, expected_output, sort=True)
    def test_model_with_last_updated_greater_than_current_time(self):
        """Running the job with a mocked clock 13 hours ahead makes the
        remaining model's last_updated look like it is in the future, which
        fails the current time check.
        """
        self.model_instance_1.delete()
        expected_output = [(
            u'[u\'failed validation check for current time check of '
            'FileMetadataSnapshotMetadataModel\', '
            '[u\'Entity id %s: The last_updated field has a '
            'value %s which is greater than the time when the job was run\']]'
        ) % (self.model_instance_0.id, self.model_instance_0.last_updated)]
        with self.swap(datetime, 'datetime', MockDatetime13Hours), self.swap(
            db.DateTimeProperty, 'data_type', MockDatetime13Hours):
            update_datastore_types_for_mock_datetime()
            run_job_and_check_output(self, expected_output, sort=True)
    def test_missing_file_metadata_model_failure(self):
        """Deleting the backing FileMetadataModel makes both of its snapshot
        metadata models fail the file_metadata_ids field check.
        """
        file_models.FileMetadataModel.get_by_id(self.id_0).delete(
            self.owner_id, '', [])
        expected_output = [
            (
                u'[u\'failed validation check for file_metadata_ids '
                'field check of FileMetadataSnapshotMetadataModel\', '
                '[u"Entity id %s-1: based on field file_metadata_ids '
                'having value %s, expect model FileMetadataModel with '
                'id %s but it doesn\'t exist", u"Entity id %s-2: based on '
                'field file_metadata_ids having value %s, expect model '
                'FileMetadataModel with id %s but it doesn\'t exist"]]'
            ) % (
                self.id_0, self.id_0, self.id_0, self.id_0,
                self.id_0, self.id_0
            ),
            u'[u\'fully-validated FileMetadataSnapshotMetadataModel\', 1]']
        run_job_and_check_output(
            self, expected_output, literal_eval=True)
    def test_missing_committer_model_failure(self):
        """Deleting the committer's UserSettingsModel fails the
        committer_ids field check.
        """
        user_models.UserSettingsModel.get_by_id(self.user_id).delete()
        expected_output = [
            (
                u'[u\'failed validation check for committer_ids field '
                'check of FileMetadataSnapshotMetadataModel\', '
                '[u"Entity id %s-1: based on field committer_ids having '
                'value %s, expect model UserSettingsModel with id %s '
                'but it doesn\'t exist"]]'
            ) % (self.id_1, self.user_id, self.user_id), (
                u'[u\'fully-validated '
                'FileMetadataSnapshotMetadataModel\', 1]')]
        run_job_and_check_output(self, expected_output, sort=True)
    def test_invalid_file_metadata_version_in_model_id(self):
        """A snapshot metadata model id encoding version 3 while the file
        metadata model is only at version 1 fails validation.
        """
        model_with_invalid_version_in_id = (
            file_models.FileMetadataSnapshotMetadataModel(
                id='%s-3' % self.id_0, committer_id=self.owner_id,
                commit_type='edit', commit_message='msg', commit_cmds=[{}]))
        model_with_invalid_version_in_id.put()
        expected_output = [
            (
                u'[u\'failed validation check for file metadata model '
                'version check of FileMetadataSnapshotMetadataModel\', '
                '[u\'Entity id %s-3: FileMetadata model corresponding to '
                'id %s has a version 1 which is less than the version 3 in '
                'snapshot metadata model id\']]'
            ) % (self.id_0, self.id_0), (
                u'[u\'fully-validated FileMetadataSnapshotMetadataModel\', '
                '2]')]
        run_job_and_check_output(self, expected_output, sort=True)
class FileMetadataSnapshotContentModelValidatorTests(
        test_utils.GenericTestBase):
    """Tests for the audit job that validates
    FileMetadataSnapshotContentModel instances.
    """

    def setUp(self):
        """Creates two explorations, commits one FileMetadataModel for
        each, and fetches the version-1 snapshot content models for both.
        """
        super(FileMetadataSnapshotContentModelValidatorTests, self).setUp()

        self.signup(self.OWNER_EMAIL, self.OWNER_USERNAME)

        self.owner_id = self.get_user_id_from_email(self.OWNER_EMAIL)

        explorations = [exp_domain.Exploration.create_default_exploration(
            'exp%s' % i,
            title='title %d' % i,
            category='category%d' % i,
        ) for i in xrange(2)]

        for exp in explorations:
            exp_services.save_new_exploration(self.owner_id, exp)

        file_metadata_model_0 = file_models.FileMetadataModel.create(
            'exploration/exp0', 'assets/image/img0.png')
        file_metadata_model_0.commit(self.owner_id, [])

        file_metadata_model_1 = file_models.FileMetadataModel.create(
            'exploration/exp1', '/exploration/exp1/assets/audio/aud1.mp3')
        file_metadata_model_1.commit(self.owner_id, [])

        self.id_0 = file_metadata_model_0.id
        self.id_1 = file_metadata_model_1.id

        self.model_instance_0 = (
            file_models.FileMetadataSnapshotContentModel.get_by_id(
                '%s-1' % self.id_0))
        self.model_instance_1 = (
            file_models.FileMetadataSnapshotContentModel.get_by_id(
                '%s-1' % self.id_1))

        self.job_class = (
            prod_validation_jobs_one_off
            .FileMetadataSnapshotContentModelAuditOneOffJob)

    def test_standard_operation(self):
        """Unmodified models should be reported as fully validated."""
        expected_output = [
            u'[u\'fully-validated FileMetadataSnapshotContentModel\', 2]']
        run_job_and_check_output(self, expected_output)

    def test_model_with_created_on_greater_than_last_updated(self):
        """created_on set later than last_updated should fail the time
        field relation check.
        """
        self.model_instance_0.created_on = (
            self.model_instance_0.last_updated + datetime.timedelta(days=1))
        self.model_instance_0.put()
        expected_output = [(
            u'[u\'failed validation check for time field relation check '
            'of FileMetadataSnapshotContentModel\', '
            '[u\'Entity id %s: The created_on field has a value '
            '%s which is greater than the value '
            '%s of last_updated field\']]') % (
                self.model_instance_0.id,
                self.model_instance_0.created_on,
                self.model_instance_0.last_updated
            ), (
                u'[u\'fully-validated '
                'FileMetadataSnapshotContentModel\', 1]')]
        run_job_and_check_output(self, expected_output, sort=True)

    def test_model_with_last_updated_greater_than_current_time(self):
        """With the clock mocked 13 hours back, last_updated appears to
        be in the future and should fail the current time check.
        """
        self.model_instance_1.delete()
        expected_output = [(
            u'[u\'failed validation check for current time check of '
            'FileMetadataSnapshotContentModel\', '
            '[u\'Entity id %s: The last_updated field has a '
            'value %s which is greater than the time when the job was run\']]'
        ) % (self.model_instance_0.id, self.model_instance_0.last_updated)]
        with self.swap(datetime, 'datetime', MockDatetime13Hours), self.swap(
            db.DateTimeProperty, 'data_type', MockDatetime13Hours):
            update_datastore_types_for_mock_datetime()
            run_job_and_check_output(self, expected_output, sort=True)

    def test_missing_file_metadata_model_failure(self):
        """Deleting the parent FileMetadataModel should make the
        file_metadata_ids field check fail for both snapshot versions.
        """
        file_models.FileMetadataModel.get_by_id(
            self.id_0).delete(self.owner_id, '', [])
        expected_output = [
            (
                u'[u\'failed validation check for file_metadata_ids '
                'field check of FileMetadataSnapshotContentModel\', '
                '[u"Entity id %s-1: based on field file_metadata_ids '
                'having value %s, expect model FileMetadataModel with '
                'id %s but it doesn\'t exist", u"Entity id %s-2: based on '
                'field file_metadata_ids having value %s, expect model '
                'FileMetadataModel with id %s but it doesn\'t exist"]]'
            ) % (
                self.id_0, self.id_0, self.id_0, self.id_0, self.id_0,
                self.id_0),
            u'[u\'fully-validated FileMetadataSnapshotContentModel\', 1]']
        run_job_and_check_output(self, expected_output, literal_eval=True)

    def test_invalid_file_metadata_version_in_model_id(self):
        """A snapshot content model whose id encodes version 3 should
        fail the version check while the parent model is at version 1.
        """
        model_with_invalid_version_in_id = (
            file_models.FileMetadataSnapshotContentModel(
                id='%s-3' % self.id_0))
        model_with_invalid_version_in_id.content = {}
        model_with_invalid_version_in_id.put()
        expected_output = [
            (
                u'[u\'failed validation check for file metadata model '
                'version check of FileMetadataSnapshotContentModel\', '
                '[u\'Entity id %s-3: FileMetadata model corresponding to '
                'id %s has a version 1 which is less than '
                'the version 3 in snapshot content model id\']]'
            ) % (self.id_0, self.id_0), (
                u'[u\'fully-validated FileMetadataSnapshotContentModel\', '
                '2]')]
        run_job_and_check_output(self, expected_output, sort=True)
class FileModelValidatorTests(test_utils.GenericTestBase):
    """Tests for the audit job that validates FileModel instances."""

    def setUp(self):
        """Creates two explorations and commits one FileModel for each."""
        super(FileModelValidatorTests, self).setUp()

        self.signup(self.OWNER_EMAIL, self.OWNER_USERNAME)

        self.owner_id = self.get_user_id_from_email(self.OWNER_EMAIL)

        explorations = [exp_domain.Exploration.create_default_exploration(
            'exp%s' % i,
            title='title %d' % i,
            category='category%d' % i,
        ) for i in xrange(2)]

        for exp in explorations:
            exp_services.save_new_exploration(self.owner_id, exp)

        self.model_instance_0 = file_models.FileModel.create(
            'exploration/exp0', 'assets/image/img0.png')
        self.model_instance_0.commit(self.owner_id, [])

        self.model_instance_1 = file_models.FileModel.create(
            'exploration/exp1', '/exploration/exp1/assets/audio/aud1.mp3')
        self.model_instance_1.commit(self.owner_id, [])

        self.job_class = (
            prod_validation_jobs_one_off.FileModelAuditOneOffJob)

    def test_standard_operation(self):
        """Unmodified models should be reported as fully validated."""
        expected_output = [
            u'[u\'fully-validated FileModel\', 2]']
        run_job_and_check_output(self, expected_output)

    def test_model_with_created_on_greater_than_last_updated(self):
        """created_on set later than last_updated should fail the time
        field relation check.
        """
        self.model_instance_0.created_on = (
            self.model_instance_0.last_updated + datetime.timedelta(days=1))
        self.model_instance_0.commit(feconf.SYSTEM_COMMITTER_ID, [])
        expected_output = [
            (
                u'[u\'failed validation check for time field relation check '
                'of FileModel\', '
                '[u\'Entity id %s: The created_on field has a value '
                '%s which is greater than the value '
                '%s of last_updated field\']]') % (
                    self.model_instance_0.id,
                    self.model_instance_0.created_on,
                    self.model_instance_0.last_updated
                ),
            u'[u\'fully-validated FileModel\', 1]']
        run_job_and_check_output(self, expected_output, sort=True)

    def test_model_with_last_updated_greater_than_current_time(self):
        """With the clock mocked 13 hours back, last_updated appears to
        be in the future and should fail the current time check.
        """
        self.model_instance_1.delete(feconf.SYSTEM_COMMITTER_ID, 'delete')
        expected_output = [(
            u'[u\'failed validation check for current time check of '
            'FileModel\', '
            '[u\'Entity id %s: The last_updated field has a '
            'value %s which is greater than the time when the job was run\']]'
        ) % (self.model_instance_0.id, self.model_instance_0.last_updated)]
        with self.swap(datetime, 'datetime', MockDatetime13Hours), self.swap(
            db.DateTimeProperty, 'data_type', MockDatetime13Hours):
            update_datastore_types_for_mock_datetime()
            run_job_and_check_output(self, expected_output, sort=True)

    def test_missing_exploration_model_failure(self):
        """Deleting the referenced exploration should fail the
        exploration_ids field check.
        """
        exp_models.ExplorationModel.get_by_id('exp1').delete(
            feconf.SYSTEM_COMMITTER_ID, '', [])
        expected_output = [
            (
                u'[u\'failed validation check for exploration_ids field '
                'check of FileModel\', '
                '[u"Entity id %s: based on field exploration_ids having '
                'value exp1, expect model ExplorationModel with id exp1 '
                'but it doesn\'t exist"]]') % self.model_instance_1.id,
            u'[u\'fully-validated FileModel\', 1]']
        run_job_and_check_output(self, expected_output, sort=True)

    def test_missing_snapshot_metadata_model_failure(self):
        """Deleting a snapshot metadata model should fail the
        snapshot_metadata_ids field check.
        """
        file_models.FileSnapshotMetadataModel.get_by_id(
            '%s-1' % self.model_instance_0.id).delete()
        expected_output = [
            (
                u'[u\'failed validation check for snapshot_metadata_ids '
                'field check of FileModel\', '
                '[u"Entity id %s: based on field snapshot_metadata_ids '
                'having value %s-1, expect model FileSnapshotMetadataModel '
                'with id %s-1 but it doesn\'t exist"]]') % (
                    self.model_instance_0.id, self.model_instance_0.id,
                    self.model_instance_0.id),
            u'[u\'fully-validated FileModel\', 1]']
        run_job_and_check_output(self, expected_output, sort=True)

    def test_missing_snapshot_content_model_failure(self):
        """Deleting a snapshot content model should fail the
        snapshot_content_ids field check.
        """
        file_models.FileSnapshotContentModel.get_by_id(
            '%s-1' % self.model_instance_0.id).delete()
        expected_output = [
            (
                u'[u\'failed validation check for snapshot_content_ids '
                'field check of FileModel\', '
                '[u"Entity id %s: based on field snapshot_content_ids having '
                'value %s-1, expect model FileSnapshotContentModel '
                'with id %s-1 but it doesn\'t exist"]]') % (
                    self.model_instance_0.id, self.model_instance_0.id,
                    self.model_instance_0.id),
            u'[u\'fully-validated FileModel\', 1]']
        run_job_and_check_output(self, expected_output, sort=True)
class FileSnapshotMetadataModelValidatorTests(
        test_utils.GenericTestBase):
    """Tests for the audit job that validates FileSnapshotMetadataModel
    instances.
    """

    def setUp(self):
        """Creates two explorations and two FileModels (one committed by
        the owner, one by a second user) and fetches both version-1
        snapshot metadata models.
        """
        super(FileSnapshotMetadataModelValidatorTests, self).setUp()

        self.signup(self.OWNER_EMAIL, self.OWNER_USERNAME)
        self.signup(USER_EMAIL, USER_NAME)

        self.owner_id = self.get_user_id_from_email(self.OWNER_EMAIL)
        self.user_id = self.get_user_id_from_email(USER_EMAIL)

        explorations = [exp_domain.Exploration.create_default_exploration(
            'exp%s' % i,
            title='title %d' % i,
            category='category%d' % i,
        ) for i in xrange(2)]

        for exp in explorations:
            exp_services.save_new_exploration(self.owner_id, exp)

        file_model_0 = file_models.FileModel.create(
            'exploration/exp0', 'assets/image/img0.png')
        file_model_0.commit(self.owner_id, [])

        file_model_1 = file_models.FileModel.create(
            'exploration/exp1', '/exploration/exp1/assets/audio/aud1.mp3')
        file_model_1.commit(self.user_id, [])

        self.id_0 = file_model_0.id
        self.id_1 = file_model_1.id

        self.model_instance_0 = (
            file_models.FileSnapshotMetadataModel.get_by_id(
                '%s-1' % self.id_0))
        self.model_instance_1 = (
            file_models.FileSnapshotMetadataModel.get_by_id(
                '%s-1' % self.id_1))

        self.job_class = (
            prod_validation_jobs_one_off
            .FileSnapshotMetadataModelAuditOneOffJob)

    def test_standard_operation(self):
        """Unmodified models should be reported as fully validated."""
        expected_output = [
            u'[u\'fully-validated FileSnapshotMetadataModel\', 2]']
        run_job_and_check_output(self, expected_output)

    def test_model_with_created_on_greater_than_last_updated(self):
        """created_on set later than last_updated should fail the time
        field relation check.
        """
        self.model_instance_0.created_on = (
            self.model_instance_0.last_updated + datetime.timedelta(days=1))
        self.model_instance_0.put()
        expected_output = [(
            u'[u\'failed validation check for time field relation check '
            'of FileSnapshotMetadataModel\', '
            '[u\'Entity id %s: The created_on field has a value '
            '%s which is greater than the value '
            '%s of last_updated field\']]') % (
                self.model_instance_0.id,
                self.model_instance_0.created_on,
                self.model_instance_0.last_updated
            ), (
                u'[u\'fully-validated '
                'FileSnapshotMetadataModel\', 1]')]
        run_job_and_check_output(self, expected_output, sort=True)

    def test_model_with_last_updated_greater_than_current_time(self):
        """With the clock mocked 13 hours back, last_updated appears to
        be in the future and should fail the current time check.
        """
        self.model_instance_1.delete()
        expected_output = [(
            u'[u\'failed validation check for current time check of '
            'FileSnapshotMetadataModel\', '
            '[u\'Entity id %s: The last_updated field has a '
            'value %s which is greater than the time when the job was run\']]'
        ) % (self.model_instance_0.id, self.model_instance_0.last_updated)]
        with self.swap(datetime, 'datetime', MockDatetime13Hours), self.swap(
            db.DateTimeProperty, 'data_type', MockDatetime13Hours):
            update_datastore_types_for_mock_datetime()
            run_job_and_check_output(self, expected_output, sort=True)

    def test_missing_file_model_failure(self):
        """Deleting the parent FileModel should make the file_ids field
        check fail for both snapshot versions.
        """
        file_models.FileModel.get_by_id(self.id_0).delete(
            self.owner_id, '', [])
        expected_output = [
            (
                u'[u\'failed validation check for file_ids '
                'field check of FileSnapshotMetadataModel\', '
                '[u"Entity id %s-1: based on field file_ids '
                'having value %s, expect model FileModel with '
                'id %s but it doesn\'t exist", u"Entity id %s-2: based on '
                'field file_ids having value %s, expect model '
                'FileModel with id %s but it doesn\'t exist"]]'
            ) % (
                self.id_0, self.id_0, self.id_0, self.id_0,
                self.id_0, self.id_0),
            u'[u\'fully-validated FileSnapshotMetadataModel\', 1]']
        run_job_and_check_output(
            self, expected_output, literal_eval=True)

    def test_missing_committer_model_failure(self):
        """Deleting the committer's UserSettingsModel should fail the
        committer_ids field check.
        """
        user_models.UserSettingsModel.get_by_id(self.user_id).delete()
        expected_output = [
            (
                u'[u\'failed validation check for committer_ids field '
                'check of FileSnapshotMetadataModel\', '
                '[u"Entity id %s-1: based on field committer_ids having '
                'value %s, expect model UserSettingsModel with id %s '
                'but it doesn\'t exist"]]'
            ) % (self.id_1, self.user_id, self.user_id), (
                u'[u\'fully-validated '
                'FileSnapshotMetadataModel\', 1]')]
        run_job_and_check_output(self, expected_output, sort=True)

    def test_invalid_file_version_in_model_id(self):
        """A snapshot metadata model whose id encodes version 3 should
        fail the version check while the parent FileModel is at version 1.
        """
        model_with_invalid_version_in_id = (
            file_models.FileSnapshotMetadataModel(
                id='%s-3' % self.id_0, committer_id=self.owner_id,
                commit_type='edit', commit_message='msg', commit_cmds=[{}]))
        model_with_invalid_version_in_id.put()
        expected_output = [
            (
                u'[u\'failed validation check for file model '
                'version check of FileSnapshotMetadataModel\', '
                '[u\'Entity id %s-3: File model corresponding to '
                'id %s has a version 1 which is less than the version 3 in '
                'snapshot metadata model id\']]'
            ) % (self.id_0, self.id_0), (
                u'[u\'fully-validated FileSnapshotMetadataModel\', '
                '2]')]
        run_job_and_check_output(self, expected_output, sort=True)
class FileSnapshotContentModelValidatorTests(test_utils.GenericTestBase):
    """Tests for the audit job that validates FileSnapshotContentModel
    instances.
    """

    def setUp(self):
        """Creates two explorations and two committed FileModels, and
        fetches both version-1 snapshot content models.
        """
        super(FileSnapshotContentModelValidatorTests, self).setUp()

        self.signup(self.OWNER_EMAIL, self.OWNER_USERNAME)

        self.owner_id = self.get_user_id_from_email(self.OWNER_EMAIL)

        explorations = [exp_domain.Exploration.create_default_exploration(
            'exp%s' % i,
            title='title %d' % i,
            category='category%d' % i,
        ) for i in xrange(2)]

        for exp in explorations:
            exp_services.save_new_exploration(self.owner_id, exp)

        file_model_0 = file_models.FileModel.create(
            'exploration/exp0', 'assets/image/img0.png')
        file_model_0.commit(self.owner_id, [])

        file_model_1 = file_models.FileModel.create(
            'exploration/exp1', '/exploration/exp1/assets/audio/aud1.mp3')
        file_model_1.commit(self.owner_id, [])

        self.id_0 = file_model_0.id
        self.id_1 = file_model_1.id

        self.model_instance_0 = (
            file_models.FileSnapshotContentModel.get_by_id(
                '%s-1' % self.id_0))
        self.model_instance_1 = (
            file_models.FileSnapshotContentModel.get_by_id(
                '%s-1' % self.id_1))

        self.job_class = (
            prod_validation_jobs_one_off.FileSnapshotContentModelAuditOneOffJob)

    def test_standard_operation(self):
        """Unmodified models should be reported as fully validated."""
        expected_output = [
            u'[u\'fully-validated FileSnapshotContentModel\', 2]']
        run_job_and_check_output(self, expected_output)

    def test_model_with_created_on_greater_than_last_updated(self):
        """created_on set later than last_updated should fail the time
        field relation check.
        """
        self.model_instance_0.created_on = (
            self.model_instance_0.last_updated + datetime.timedelta(days=1))
        self.model_instance_0.put()
        expected_output = [(
            u'[u\'failed validation check for time field relation check '
            'of FileSnapshotContentModel\', '
            '[u\'Entity id %s: The created_on field has a value '
            '%s which is greater than the value '
            '%s of last_updated field\']]') % (
                self.model_instance_0.id,
                self.model_instance_0.created_on,
                self.model_instance_0.last_updated
            ), (
                u'[u\'fully-validated '
                'FileSnapshotContentModel\', 1]')]
        run_job_and_check_output(self, expected_output, sort=True)

    def test_model_with_last_updated_greater_than_current_time(self):
        """With the clock mocked 13 hours back, last_updated appears to
        be in the future and should fail the current time check.
        """
        self.model_instance_1.delete()
        expected_output = [(
            u'[u\'failed validation check for current time check of '
            'FileSnapshotContentModel\', '
            '[u\'Entity id %s: The last_updated field has a '
            'value %s which is greater than the time when the job was run\']]'
        ) % (self.model_instance_0.id, self.model_instance_0.last_updated)]
        with self.swap(datetime, 'datetime', MockDatetime13Hours), self.swap(
            db.DateTimeProperty, 'data_type', MockDatetime13Hours):
            update_datastore_types_for_mock_datetime()
            run_job_and_check_output(self, expected_output, sort=True)

    def test_missing_file_model_failure(self):
        """Deleting the parent FileModel should make the file_ids field
        check fail for both snapshot versions.
        """
        file_models.FileModel.get_by_id(
            self.id_0).delete(self.owner_id, '', [])
        expected_output = [
            (
                u'[u\'failed validation check for file_ids '
                'field check of FileSnapshotContentModel\', '
                '[u"Entity id %s-1: based on field file_ids '
                'having value %s, expect model FileModel with '
                'id %s but it doesn\'t exist", u"Entity id %s-2: based on '
                'field file_ids having value %s, expect model '
                'FileModel with id %s but it doesn\'t exist"]]'
            ) % (
                self.id_0, self.id_0, self.id_0, self.id_0, self.id_0,
                self.id_0),
            u'[u\'fully-validated FileSnapshotContentModel\', 1]']
        run_job_and_check_output(self, expected_output, literal_eval=True)

    def test_invalid_file_version_in_model_id(self):
        """A snapshot content model whose id encodes version 3 should
        fail the version check while the parent FileModel is at version 1.
        """
        model_with_invalid_version_in_id = (
            file_models.FileSnapshotContentModel(
                id='%s-3' % self.id_0))
        model_with_invalid_version_in_id.content = 'content'
        model_with_invalid_version_in_id.put()
        expected_output = [
            (
                u'[u\'failed validation check for file model '
                'version check of FileSnapshotContentModel\', '
                '[u\'Entity id %s-3: File model corresponding to '
                'id %s has a version 1 which is less than '
                'the version 3 in snapshot content model id\']]'
            ) % (self.id_0, self.id_0), (
                u'[u\'fully-validated FileSnapshotContentModel\', '
                '2]')]
        run_job_and_check_output(self, expected_output, sort=True)
class ExplorationRecommendationsModelValidatorTests(test_utils.GenericTestBase):
    """Tests for the audit job that validates
    ExplorationRecommendationsModel instances.
    """

    def setUp(self):
        """Creates six explorations and sets recommendations for the
        first two, then fetches both recommendation models.
        """
        super(ExplorationRecommendationsModelValidatorTests, self).setUp()

        self.signup(USER_EMAIL, USER_NAME)
        self.user_id = self.get_user_id_from_email(USER_EMAIL)

        explorations = [exp_domain.Exploration.create_default_exploration(
            '%s' % i,
            title='title %d' % i,
            category='category%d' % i,
        ) for i in xrange(6)]

        for exp in explorations:
            exp_services.save_new_exploration(self.user_id, exp)

        recommendations_services.set_recommendations('0', ['3', '4'])
        recommendations_services.set_recommendations('1', ['5'])

        self.model_instance_0 = (
            recommendations_models.ExplorationRecommendationsModel.get_by_id(
                '0'))
        self.model_instance_1 = (
            recommendations_models.ExplorationRecommendationsModel.get_by_id(
                '1'))

        self.job_class = (
            prod_validation_jobs_one_off
            .ExplorationRecommendationsModelAuditOneOffJob)

    def test_standard_model(self):
        """Unmodified models should be reported as fully validated."""
        expected_output = [(
            u'[u\'fully-validated ExplorationRecommendationsModel\', 2]')]
        run_job_and_check_output(self, expected_output)

    def test_model_with_created_on_greater_than_last_updated(self):
        """created_on set later than last_updated should fail the time
        field relation check.
        """
        self.model_instance_0.created_on = (
            self.model_instance_0.last_updated + datetime.timedelta(days=1))
        self.model_instance_0.put()
        expected_output = [
            (
                u'[u\'failed validation check for time field relation check '
                'of ExplorationRecommendationsModel\', '
                '[u\'Entity id %s: The created_on field has a value '
                '%s which is greater than the value '
                '%s of last_updated field\']]') % (
                    self.model_instance_0.id, self.model_instance_0.created_on,
                    self.model_instance_0.last_updated),
            u'[u\'fully-validated ExplorationRecommendationsModel\', 1]']
        run_job_and_check_output(self, expected_output, sort=True)

    def test_model_with_last_updated_greater_than_current_time(self):
        """With the clock mocked 13 hours back, last_updated appears to
        be in the future and should fail the current time check.
        """
        self.model_instance_1.delete()
        expected_output = [(
            u'[u\'failed validation check for current time check of '
            'ExplorationRecommendationsModel\', '
            '[u\'Entity id %s: The last_updated field has a '
            'value %s which is greater than the time when the job was run\']]'
        ) % (self.model_instance_0.id, self.model_instance_0.last_updated)]
        with self.swap(datetime, 'datetime', MockDatetime13Hours), self.swap(
            db.DateTimeProperty, 'data_type', MockDatetime13Hours):
            update_datastore_types_for_mock_datetime()
            run_job_and_check_output(self, expected_output)

    def test_model_with_missing_recommended_exploration(self):
        """Deleting a recommended exploration should fail the
        exploration_ids field check.
        """
        exp_models.ExplorationModel.get_by_id('3').delete(
            self.user_id, '', [{}])
        expected_output = [
            (
                u'[u\'failed validation check for exploration_ids field '
                'check of ExplorationRecommendationsModel\', '
                '[u"Entity id 0: based on field exploration_ids having value '
                '3, expect model ExplorationModel with '
                'id 3 but it doesn\'t exist"]]'
            ),
            u'[u\'fully-validated ExplorationRecommendationsModel\', 1]']
        run_job_and_check_output(self, expected_output, sort=True)

    def test_model_with_id_in_recommended_ids(self):
        """An exploration that recommends itself should fail the item
        exploration id check.
        """
        self.model_instance_0.recommended_exploration_ids = ['0', '4']
        self.model_instance_0.put()
        expected_output = [
            (
                u'[u\'failed validation check for item exploration id check '
                'of ExplorationRecommendationsModel\', '
                '[u\'Entity id 0: The exploration id: 0 for which the '
                'entity is created is also present in the recommended '
                'exploration ids for entity\']]'
            ),
            u'[u\'fully-validated ExplorationRecommendationsModel\', 1]']
        run_job_and_check_output(self, expected_output, sort=True)
class TopicSimilaritiesModelValidatorTests(test_utils.GenericTestBase):
    """Tests for the audit job that validates the singleton
    TopicSimilaritiesModel.
    """

    def setUp(self):
        """Stores a valid symmetric 3x3 topic-similarity matrix under the
        canonical TOPIC_SIMILARITIES_ID.
        """
        super(TopicSimilaritiesModelValidatorTests, self).setUp()

        self.model_instance = recommendations_models.TopicSimilaritiesModel(
            id=recommendations_models.TOPIC_SIMILARITIES_ID)
        self.content = {
            'Art': {'Art': '1.0', 'Biology': '0.8', 'Chemistry': '0.1'},
            'Biology': {'Art': '0.8', 'Biology': '1.0', 'Chemistry': '0.5'},
            'Chemistry': {'Art': '0.1', 'Biology': '0.5', 'Chemistry': '1.0'},
        }
        self.model_instance.content = self.content
        self.model_instance.put()

        self.job_class = (
            prod_validation_jobs_one_off.TopicSimilaritiesModelAuditOneOffJob)

    def test_standard_model(self):
        """The unmodified model should be reported as fully validated."""
        expected_output = [(
            u'[u\'fully-validated TopicSimilaritiesModel\', 1]')]
        run_job_and_check_output(self, expected_output)

    def test_model_with_created_on_greater_than_last_updated(self):
        """created_on set later than last_updated should fail the time
        field relation check.
        """
        self.model_instance.created_on = (
            self.model_instance.last_updated + datetime.timedelta(days=1))
        self.model_instance.put()
        expected_output = [
            (
                u'[u\'failed validation check for time field relation check '
                'of TopicSimilaritiesModel\', '
                '[u\'Entity id %s: The created_on field has a value '
                '%s which is greater than the value '
                '%s of last_updated field\']]') % (
                    self.model_instance.id, self.model_instance.created_on,
                    self.model_instance.last_updated)]
        run_job_and_check_output(self, expected_output, sort=True)

    def test_model_with_last_updated_greater_than_current_time(self):
        """With the clock mocked 13 hours back, last_updated appears to
        be in the future and should fail the current time check.
        """
        expected_output = [(
            u'[u\'failed validation check for current time check of '
            'TopicSimilaritiesModel\', '
            '[u\'Entity id %s: The last_updated field has a '
            'value %s which is greater than the time when the job was run\']]'
        ) % (self.model_instance.id, self.model_instance.last_updated)]
        with self.swap(datetime, 'datetime', MockDatetime13Hours), self.swap(
            db.DateTimeProperty, 'data_type', MockDatetime13Hours):
            update_datastore_types_for_mock_datetime()
            run_job_and_check_output(self, expected_output)

    def test_model_with_invalid_id(self):
        """A model stored under an id other than the canonical one should
        fail the model id regex check.
        """
        model_with_invalid_id = recommendations_models.TopicSimilaritiesModel(
            id='invalid', content=self.content)
        model_with_invalid_id.put()
        expected_output = [
            (
                u'[u\'failed validation check for model id check of '
                'TopicSimilaritiesModel\', '
                '[u\'Entity id invalid: Entity id does not match regex '
                'pattern\']]'
            ),
            u'[u\'fully-validated TopicSimilaritiesModel\', 1]']
        run_job_and_check_output(self, expected_output, sort=True)

    def test_model_with_invalid_topic_similarities_columns(self):
        """A row with fewer columns than topics should fail the topic
        similarity check.
        """
        content = {
            'Art': {'Art': '1.0', 'Biology': '0.5'},
            'Biology': {}
        }
        self.model_instance.content = content
        self.model_instance.put()
        expected_output = [(
            u'[u\'failed validation check for topic similarity check '
            'of TopicSimilaritiesModel\', '
            '[u"Entity id topics: Topic similarity validation for '
            'content: {u\'Biology\': {}, u\'Art\': {u\'Biology\': u\'0.5\', '
            'u\'Art\': u\'1.0\'}} fails with error: Length of topic '
            'similarities columns: 1 does not match length of '
            'topic list: 2."]]')]
        run_job_and_check_output(self, expected_output, sort=True)

    def test_model_with_invalid_topic(self):
        """A topic name outside the known topic list should fail the
        topic similarity check.
        """
        content = {
            'Art': {'Art': '1.0', 'invalid': '0.5'},
            'invalid': {'Art': '0.5', 'invalid': '1.0'}
        }
        self.model_instance.content = content
        self.model_instance.put()
        expected_output = [(
            u'[u\'failed validation check for topic similarity check '
            'of TopicSimilaritiesModel\', '
            '[u"Entity id topics: Topic similarity validation for '
            'content: {u\'Art\': {u\'Art\': u\'1.0\', u\'invalid\': u\'0.5\'}, '
            'u\'invalid\': {u\'Art\': u\'0.5\', u\'invalid\': u\'1.0\'}} '
            'fails with error: Topic invalid not in list of known topics."]]')]
        run_job_and_check_output(self, expected_output)

    def test_model_with_invalid_topic_similarities_rows(self):
        """Fewer rows than topics should fail the topic similarity
        check.
        """
        content = {
            'Art': {'Art': '1.0', 'Biology': '0.5'}
        }
        self.model_instance.content = content
        self.model_instance.put()
        expected_output = [(
            u'[u\'failed validation check for topic similarity check '
            'of TopicSimilaritiesModel\', [u"Entity id topics: '
            'Topic similarity validation for content: {u\'Art\': '
            '{u\'Biology\': u\'0.5\', u\'Art\': u\'1.0\'}} fails with '
            'error: Length of topic similarities rows: 2 does not match '
            'length of topic list: 1."]]')]
        run_job_and_check_output(self, expected_output)

    def test_model_with_invalid_similarity_type(self):
        """A non-numeric similarity value should fail the topic
        similarity check.
        """
        content = {
            'Art': {'Art': 'one', 'Biology': 0.5},
            'Biology': {'Art': 0.5, 'Biology': 1.0}
        }
        self.model_instance.content = content
        self.model_instance.put()
        expected_output = [(
            u'[u\'failed validation check for topic similarity '
            'check of TopicSimilaritiesModel\', '
            '[u"Entity id topics: Topic similarity validation for '
            'content: {u\'Biology\': {u\'Biology\': 1.0, u\'Art\': 0.5}, '
            'u\'Art\': {u\'Biology\': 0.5, u\'Art\': u\'one\'}} '
            'fails with error: Expected similarity to be a float, '
            'received one"]]')]
        run_job_and_check_output(self, expected_output, sort=True)

    def test_model_with_invalid_similarity_value(self):
        """A similarity value outside [0.0, 1.0] should fail the topic
        similarity check.
        """
        content = {
            'Art': {'Art': 10.0, 'Biology': 0.5},
            'Biology': {'Art': 0.5, 'Biology': 1.0}
        }
        self.model_instance.content = content
        self.model_instance.put()
        expected_output = [(
            u'[u\'failed validation check for topic similarity check '
            'of TopicSimilaritiesModel\', '
            '[u"Entity id topics: Topic similarity validation for '
            'content: {u\'Biology\': {u\'Biology\': 1.0, u\'Art\': 0.5}, '
            'u\'Art\': {u\'Biology\': 0.5, u\'Art\': 10.0}} '
            'fails with error: Expected similarity to be between '
            '0.0 and 1.0, received 10.0"]]')]
        run_job_and_check_output(self, expected_output)

    def test_model_with_assymetric_content(self):
        """An asymmetric similarity matrix should fail the topic
        similarity check.
        """
        content = {
            'Art': {'Art': 1.0, 'Biology': 0.5},
            'Biology': {'Art': 0.6, 'Biology': 1.0}
        }
        self.model_instance.content = content
        self.model_instance.put()
        expected_output = [(
            u'[u\'failed validation check for topic similarity '
            'check of TopicSimilaritiesModel\', '
            '[u"Entity id topics: Topic similarity validation for '
            'content: {u\'Biology\': {u\'Biology\': 1.0, u\'Art\': 0.6}, '
            'u\'Art\': {u\'Biology\': 0.5, u\'Art\': 1.0}} fails with error: '
            'Expected topic similarities to be symmetric."]]')]
        run_job_and_check_output(self, expected_output)
class StoryModelValidatorTests(test_utils.GenericTestBase):
def setUp(self):
super(StoryModelValidatorTests, self).setUp()
self.signup(self.OWNER_EMAIL, self.OWNER_USERNAME)
self.owner_id = self.get_user_id_from_email(self.OWNER_EMAIL)
explorations = [exp_domain.Exploration.create_default_exploration(
'%s' % i,
title='title %d' % i,
category='category%d' % i,
) for i in xrange(6)]
for exp in explorations:
exp_services.save_new_exploration(self.owner_id, exp)
topic = topic_domain.Topic.create_default_topic(
topic_id='0', name='topic')
language_codes = ['ar', 'en', 'en']
stories = [story_domain.Story.create_default_story(
'%s' % i,
title='title %d',
corresponding_topic_id='0'
) for i in xrange(3)]
for index, story in enumerate(stories):
story.language_code = language_codes[index]
story.add_node('node_1', 'Node1')
story.add_node('node_2', 'Node2')
story.update_node_destination_node_ids('node_1', ['node_2'])
story.update_node_exploration_id(
'node_1', explorations[index * 2].id)
story.update_node_exploration_id(
'node_2', explorations[index * 2 + 1].id)
topic.add_canonical_story(story.id)
story_services.save_new_story(self.owner_id, story)
topic_services.save_new_topic(self.owner_id, topic)
self.model_instance_0 = story_models.StoryModel.get_by_id('0')
self.model_instance_1 = story_models.StoryModel.get_by_id('1')
self.model_instance_2 = story_models.StoryModel.get_by_id('2')
self.job_class = (
prod_validation_jobs_one_off.StoryModelAuditOneOffJob)
def test_standard_operation(self):
story_services.update_story(
self.owner_id, '0', [story_domain.StoryChange({
'cmd': 'update_story_property',
'property_name': 'title',
'new_value': 'New title',
'old_value': 'title 0'
})], 'Changes.')
expected_output = [
u'[u\'fully-validated StoryModel\', 3]']
run_job_and_check_output(self, expected_output)
def test_model_with_created_on_greater_than_last_updated(self):
self.model_instance_0.created_on = (
self.model_instance_0.last_updated + datetime.timedelta(days=1))
self.model_instance_0.commit(
feconf.SYSTEM_COMMITTER_ID, 'created_on test', [])
expected_output = [
(
u'[u\'failed validation check for time field relation check '
'of StoryModel\', '
'[u\'Entity id %s: The created_on field has a value '
'%s which is greater than the value '
'%s of last_updated field\']]') % (
self.model_instance_0.id,
self.model_instance_0.created_on,
self.model_instance_0.last_updated
),
u'[u\'fully-validated StoryModel\', 2]']
run_job_and_check_output(self, expected_output, sort=True)
def test_model_with_last_updated_greater_than_current_time(self):
self.model_instance_1.delete(feconf.SYSTEM_COMMITTER_ID, 'delete')
self.model_instance_2.delete(feconf.SYSTEM_COMMITTER_ID, 'delete')
expected_output = [(
u'[u\'failed validation check for current time check of '
'StoryModel\', '
'[u\'Entity id %s: The last_updated field has a '
'value %s which is greater than the time when the job was run\']]'
) % (self.model_instance_0.id, self.model_instance_0.last_updated)]
with self.swap(datetime, 'datetime', MockDatetime13Hours), self.swap(
db.DateTimeProperty, 'data_type', MockDatetime13Hours):
update_datastore_types_for_mock_datetime()
run_job_and_check_output(self, expected_output, sort=True)
def test_model_with_invalid_story_schema(self):
expected_output = [
(
u'[u\'failed validation check for domain object check of '
'StoryModel\', '
'[u\'Entity id %s: Entity fails domain validation with the '
'error Invalid language code: %s\']]'
) % (self.model_instance_0.id, self.model_instance_0.language_code),
u'[u\'fully-validated StoryModel\', 2]']
with self.swap(
constants, 'ALL_LANGUAGE_CODES', [{
'code': 'en', 'description': 'English'}]):
run_job_and_check_output(self, expected_output, sort=True)
def test_missing_exploration_model_failure(self):
exp_models.ExplorationModel.get_by_id('1').delete(
feconf.SYSTEM_COMMITTER_ID, '', [])
expected_output = [
(
u'[u\'failed validation check for exploration_ids field '
'check of StoryModel\', '
'[u"Entity id 0: based on field exploration_ids having value '
'1, expect model ExplorationModel with id 1 but it '
'doesn\'t exist"]]'),
u'[u\'fully-validated StoryModel\', 2]']
run_job_and_check_output(self, expected_output, sort=True)
def test_missing_story_commit_log_entry_model_failure(self):
story_services.update_story(
self.owner_id, '0', [story_domain.StoryChange({
'cmd': 'update_story_property',
'property_name': 'title',
'new_value': 'New title',
'old_value': 'title 0'
})], 'Changes.')
story_models.StoryCommitLogEntryModel.get_by_id(
'story-0-1').delete()
expected_output = [
(
u'[u\'failed validation check for '
'story_commit_log_entry_ids field check of '
'StoryModel\', '
'[u"Entity id 0: based on field '
'story_commit_log_entry_ids having value '
'story-0-1, expect model StoryCommitLogEntryModel '
'with id story-0-1 but it doesn\'t exist"]]'),
u'[u\'fully-validated StoryModel\', 2]']
run_job_and_check_output(self, expected_output, sort=True)
def test_missing_summary_model_failure(self):
story_models.StorySummaryModel.get_by_id('0').delete()
expected_output = [
(
u'[u\'failed validation check for story_summary_ids '
'field check of StoryModel\', '
'[u"Entity id 0: based on field story_summary_ids having '
'value 0, expect model StorySummaryModel with id 0 '
'but it doesn\'t exist"]]'),
u'[u\'fully-validated StoryModel\', 2]']
run_job_and_check_output(self, expected_output, sort=True)
def test_missing_story_rights_model_failure(self):
story_models.StoryRightsModel.get_by_id(
'0').delete(feconf.SYSTEM_COMMITTER_ID, '', [])
expected_output = [
(
u'[u\'failed validation check for story_rights_ids '
'field check of StoryModel\', '
'[u"Entity id 0: based on field story_rights_ids having '
'value 0, expect model StoryRightsModel with id 0 but '
'it doesn\'t exist"]]'),
u'[u\'fully-validated StoryModel\', 2]']
run_job_and_check_output(self, expected_output, sort=True)
    def test_missing_snapshot_metadata_model_failure(self):
        """Deleting the version-1 snapshot metadata model of story '0'
        surfaces a failed snapshot_metadata_ids field check for StoryModel.
        """
        story_models.StorySnapshotMetadataModel.get_by_id(
            '0-1').delete()
        expected_output = [
            (
                u'[u\'failed validation check for snapshot_metadata_ids '
                'field check of StoryModel\', '
                '[u"Entity id 0: based on field snapshot_metadata_ids having '
                'value 0-1, expect model StorySnapshotMetadataModel '
                'with id 0-1 but it doesn\'t exist"]]'),
            u'[u\'fully-validated StoryModel\', 2]']
        run_job_and_check_output(self, expected_output, sort=True)
    def test_missing_snapshot_content_model_failure(self):
        """Deleting the version-1 snapshot content model of story '0'
        surfaces a failed snapshot_content_ids field check for StoryModel.
        """
        story_models.StorySnapshotContentModel.get_by_id(
            '0-1').delete()
        expected_output = [
            (
                u'[u\'failed validation check for snapshot_content_ids '
                'field check of StoryModel\', '
                '[u"Entity id 0: based on field snapshot_content_ids having '
                'value 0-1, expect model StorySnapshotContentModel '
                'with id 0-1 but it doesn\'t exist"]]'),
            u'[u\'fully-validated StoryModel\', 2]']
        run_job_and_check_output(self, expected_output, sort=True)
class StorySnapshotMetadataModelValidatorTests(
        test_utils.GenericTestBase):
    """Tests for the StorySnapshotMetadataModel audit one-off job."""

    def setUp(self):
        """Create a topic with three stories (ids '0'-'2') and fetch the
        version-1 snapshot metadata model for each.

        Story '0' is saved by a second user (self.user_id) so that tests
        can delete that committer's UserSettingsModel independently of
        the owner.
        """
        super(StorySnapshotMetadataModelValidatorTests, self).setUp()
        self.signup(self.OWNER_EMAIL, self.OWNER_USERNAME)
        self.signup(USER_EMAIL, USER_NAME)
        self.owner_id = self.get_user_id_from_email(self.OWNER_EMAIL)
        self.user_id = self.get_user_id_from_email(USER_EMAIL)
        topic = topic_domain.Topic.create_default_topic(
            topic_id='0', name='topic')
        stories = [story_domain.Story.create_default_story(
            '%s' % i,
            title='title %d' % i,
            corresponding_topic_id='0'
        ) for i in xrange(3)]
        for story in stories:
            if story.id != '0':
                story_services.save_new_story(self.owner_id, story)
            else:
                story_services.save_new_story(self.user_id, story)
            topic.add_canonical_story(story.id)
        topic_services.save_new_topic(self.owner_id, topic)
        self.model_instance_0 = (
            story_models.StorySnapshotMetadataModel.get_by_id(
                '0-1'))
        self.model_instance_1 = (
            story_models.StorySnapshotMetadataModel.get_by_id(
                '1-1'))
        self.model_instance_2 = (
            story_models.StorySnapshotMetadataModel.get_by_id(
                '2-1'))
        self.job_class = (
            prod_validation_jobs_one_off
            .StorySnapshotMetadataModelAuditOneOffJob)

    def test_standard_operation(self):
        """All snapshot metadata models validate cleanly; the story update
        creates a fourth snapshot, hence a count of 4.
        """
        story_services.update_story(
            self.owner_id, '0', [story_domain.StoryChange({
                'cmd': 'update_story_property',
                'property_name': 'title',
                'new_value': 'New title',
                'old_value': 'title 0'
            })], 'Changes.')
        expected_output = [
            u'[u\'fully-validated StorySnapshotMetadataModel\', 4]']
        run_job_and_check_output(self, expected_output)

    def test_model_with_created_on_greater_than_last_updated(self):
        """A created_on timestamp later than last_updated fails the time
        field relation check.
        """
        self.model_instance_0.created_on = (
            self.model_instance_0.last_updated + datetime.timedelta(days=1))
        self.model_instance_0.put()
        expected_output = [(
            u'[u\'failed validation check for time field relation check '
            'of StorySnapshotMetadataModel\', '
            '[u\'Entity id %s: The created_on field has a value '
            '%s which is greater than the value '
            '%s of last_updated field\']]') % (
                self.model_instance_0.id,
                self.model_instance_0.created_on,
                self.model_instance_0.last_updated
            ), (
                u'[u\'fully-validated '
                'StorySnapshotMetadataModel\', 2]')]
        run_job_and_check_output(self, expected_output, sort=True)

    def test_model_with_last_updated_greater_than_current_time(self):
        """With the clock mocked 13 hours back, last_updated appears to be
        in the future and fails the current time check.
        """
        # Delete the other two instances so only model_instance_0 remains.
        self.model_instance_1.delete()
        self.model_instance_2.delete()
        expected_output = [(
            u'[u\'failed validation check for current time check of '
            'StorySnapshotMetadataModel\', '
            '[u\'Entity id %s: The last_updated field has a '
            'value %s which is greater than the time when the job was run\']]'
        ) % (self.model_instance_0.id, self.model_instance_0.last_updated)]
        with self.swap(datetime, 'datetime', MockDatetime13Hours), self.swap(
            db.DateTimeProperty, 'data_type', MockDatetime13Hours):
            update_datastore_types_for_mock_datetime()
            run_job_and_check_output(self, expected_output, sort=True)

    def test_missing_story_model_failure(self):
        """Deleting story '0' fails the story_ids check for both of its
        snapshot metadata models ('0-1' and the '0-2' snapshot created by
        the deletion itself).
        """
        story_models.StoryModel.get_by_id('0').delete(
            self.user_id, '', [])
        expected_output = [
            (
                u'[u\'failed validation check for story_ids '
                'field check of StorySnapshotMetadataModel\', '
                '[u"Entity id 0-1: based on field story_ids '
                'having value 0, expect model StoryModel with '
                'id 0 but it doesn\'t exist", u"Entity id 0-2: based on field '
                'story_ids having value 0, expect model '
                'StoryModel with id 0 but it doesn\'t exist"]]'
            ), (
                u'[u\'fully-validated '
                'StorySnapshotMetadataModel\', 2]')]
        run_job_and_check_output(
            self, expected_output, literal_eval=True)

    def test_missing_committer_model_failure(self):
        """Deleting the committer's UserSettingsModel fails the
        committer_ids field check.
        """
        user_models.UserSettingsModel.get_by_id(self.user_id).delete()
        expected_output = [
            (
                u'[u\'failed validation check for committer_ids field '
                'check of StorySnapshotMetadataModel\', '
                '[u"Entity id 0-1: based on field committer_ids having '
                'value %s, expect model UserSettingsModel with id %s '
                'but it doesn\'t exist"]]'
            ) % (self.user_id, self.user_id), (
                u'[u\'fully-validated '
                'StorySnapshotMetadataModel\', 2]')]
        run_job_and_check_output(self, expected_output, sort=True)

    def test_invalid_story_version_in_model_id(self):
        """A snapshot id whose version ('0-3') exceeds the story's actual
        version fails the story model version check.
        """
        model_with_invalid_version_in_id = (
            story_models.StorySnapshotMetadataModel(
                id='0-3', committer_id=self.owner_id, commit_type='edit',
                commit_message='msg', commit_cmds=[{}]))
        model_with_invalid_version_in_id.put()
        expected_output = [
            (
                u'[u\'failed validation check for story model '
                'version check of StorySnapshotMetadataModel\', '
                '[u\'Entity id 0-3: Story model corresponding to '
                'id 0 has a version 1 which is less than the version 3 in '
                'snapshot metadata model id\']]'
            ), (
                u'[u\'fully-validated StorySnapshotMetadataModel\', '
                '3]')]
        run_job_and_check_output(self, expected_output, sort=True)

    # NOTE(review): 'schmea' in the method name is a typo for 'schema'.
    def test_model_with_invalid_commit_cmd_schmea(self):
        """Commit cmds missing required attributes or carrying extra ones
        fail the per-command domain validation checks.
        """
        self.model_instance_0.commit_cmds = [{
            'cmd': 'add_story_node'
        }, {
            'cmd': 'delete_story_node',
            'invalid_attribute': 'invalid'
        }]
        self.model_instance_0.put()
        expected_output = [
            (
                u'[u\'failed validation check for commit cmd '
                'delete_story_node check of '
                'StorySnapshotMetadataModel\', '
                '[u"Entity id 0-1: Commit command domain validation '
                'for command: {u\'cmd\': u\'delete_story_node\', '
                'u\'invalid_attribute\': u\'invalid\'} failed with error: '
                'The following required attributes are missing: '
                'node_id, The following extra attributes are present: '
                'invalid_attribute"]]'
            ), (
                u'[u\'failed validation check for commit cmd add_story_node '
                'check of StorySnapshotMetadataModel\', '
                '[u"Entity id 0-1: Commit command domain validation '
                'for command: {u\'cmd\': u\'add_story_node\'} '
                'failed with error: The following required attributes '
                'are missing: node_id, title"]]'
            ), u'[u\'fully-validated StorySnapshotMetadataModel\', 2]']
        run_job_and_check_output(self, expected_output, sort=True)
class StorySnapshotContentModelValidatorTests(test_utils.GenericTestBase):
    """Tests for the StorySnapshotContentModel audit one-off job."""

    def setUp(self):
        """Create a topic with three stories (ids '0'-'2') and fetch the
        version-1 snapshot content model for each.
        """
        super(StorySnapshotContentModelValidatorTests, self).setUp()
        self.signup(self.OWNER_EMAIL, self.OWNER_USERNAME)
        self.owner_id = self.get_user_id_from_email(self.OWNER_EMAIL)
        topic = topic_domain.Topic.create_default_topic(
            topic_id='0', name='topic')
        stories = [story_domain.Story.create_default_story(
            '%s' % i,
            title='title %d' % i,
            corresponding_topic_id='0'
        ) for i in xrange(3)]
        for story in stories:
            story_services.save_new_story(self.owner_id, story)
            topic.add_canonical_story(story.id)
        topic_services.save_new_topic(self.owner_id, topic)
        self.model_instance_0 = (
            story_models.StorySnapshotContentModel.get_by_id(
                '0-1'))
        self.model_instance_1 = (
            story_models.StorySnapshotContentModel.get_by_id(
                '1-1'))
        self.model_instance_2 = (
            story_models.StorySnapshotContentModel.get_by_id(
                '2-1'))
        self.job_class = (
            prod_validation_jobs_one_off
            .StorySnapshotContentModelAuditOneOffJob)

    def test_standard_operation(self):
        """All snapshot content models validate cleanly; the story update
        creates a fourth snapshot, hence a count of 4.
        """
        story_services.update_story(
            self.owner_id, '0', [story_domain.StoryChange({
                'cmd': 'update_story_property',
                'property_name': 'title',
                'new_value': 'New title',
                'old_value': 'title 0'
            })], 'Changes.')
        expected_output = [
            u'[u\'fully-validated StorySnapshotContentModel\', 4]']
        run_job_and_check_output(self, expected_output)

    def test_model_with_created_on_greater_than_last_updated(self):
        """A created_on timestamp later than last_updated fails the time
        field relation check.
        """
        self.model_instance_0.created_on = (
            self.model_instance_0.last_updated + datetime.timedelta(days=1))
        self.model_instance_0.put()
        expected_output = [(
            u'[u\'failed validation check for time field relation check '
            'of StorySnapshotContentModel\', '
            '[u\'Entity id %s: The created_on field has a value '
            '%s which is greater than the value '
            '%s of last_updated field\']]') % (
                self.model_instance_0.id,
                self.model_instance_0.created_on,
                self.model_instance_0.last_updated
            ), (
                u'[u\'fully-validated '
                'StorySnapshotContentModel\', 2]')]
        run_job_and_check_output(self, expected_output, sort=True)

    def test_model_with_last_updated_greater_than_current_time(self):
        """With the clock mocked 13 hours back, last_updated appears to be
        in the future and fails the current time check.
        """
        self.model_instance_1.delete()
        self.model_instance_2.delete()
        expected_output = [(
            u'[u\'failed validation check for current time check of '
            'StorySnapshotContentModel\', '
            '[u\'Entity id %s: The last_updated field has a '
            'value %s which is greater than the time when the job was run\']]'
        ) % (self.model_instance_0.id, self.model_instance_0.last_updated)]
        with self.swap(datetime, 'datetime', MockDatetime13Hours), self.swap(
            db.DateTimeProperty, 'data_type', MockDatetime13Hours):
            update_datastore_types_for_mock_datetime()
            run_job_and_check_output(self, expected_output, sort=True)

    def test_missing_story_model_failure(self):
        """Deleting story '0' fails the story_ids check for both of its
        snapshot content models ('0-1' and the '0-2' snapshot created by
        the deletion itself).
        """
        story_models.StoryModel.get_by_id('0').delete(self.owner_id, '', [])
        expected_output = [
            (
                u'[u\'failed validation check for story_ids '
                'field check of StorySnapshotContentModel\', '
                '[u"Entity id 0-1: based on field story_ids '
                'having value 0, expect model StoryModel with '
                'id 0 but it doesn\'t exist", u"Entity id 0-2: based on field '
                'story_ids having value 0, expect model '
                'StoryModel with id 0 but it doesn\'t exist"]]'
            ), (
                u'[u\'fully-validated '
                'StorySnapshotContentModel\', 2]')]
        run_job_and_check_output(self, expected_output, sort=True)

    def test_invalid_story_version_in_model_id(self):
        """A snapshot id whose version ('0-3') exceeds the story's actual
        version fails the story model version check.
        """
        model_with_invalid_version_in_id = (
            story_models.StorySnapshotContentModel(
                id='0-3'))
        model_with_invalid_version_in_id.content = {}
        model_with_invalid_version_in_id.put()
        expected_output = [
            (
                u'[u\'failed validation check for story model '
                'version check of StorySnapshotContentModel\', '
                '[u\'Entity id 0-3: Story model corresponding to '
                'id 0 has a version 1 which is less than '
                'the version 3 in snapshot content model id\']]'
            ), (
                u'[u\'fully-validated StorySnapshotContentModel\', '
                '3]')]
        run_job_and_check_output(self, expected_output, sort=True)
class StoryRightsModelValidatorTests(test_utils.GenericTestBase):
    """Tests for the StoryRightsModel audit one-off job."""

    def setUp(self):
        """Create a topic with three stories and assign two topic managers
        as story managers (manager1 on story 0; manager2 on stories 0 and
        1), then fetch the rights model for each story.
        """
        super(StoryRightsModelValidatorTests, self).setUp()
        self.signup(self.OWNER_EMAIL, self.OWNER_USERNAME)
        self.signup(self.ADMIN_EMAIL, self.ADMIN_USERNAME)
        self.owner_id = self.get_user_id_from_email(self.OWNER_EMAIL)
        self.admin_id = self.get_user_id_from_email(self.ADMIN_EMAIL)
        self.set_admins([self.ADMIN_USERNAME])
        self.admin = user_services.UserActionsInfo(self.admin_id)
        manager1_email = '[email protected]'
        manager2_email = '[email protected]'
        self.signup(manager1_email, 'manager1')
        self.signup(manager2_email, 'manager2')
        self.set_topic_managers(['manager1', 'manager2'])
        self.manager1_id = self.get_user_id_from_email(manager1_email)
        self.manager2_id = self.get_user_id_from_email(manager2_email)
        self.manager1 = user_services.UserActionsInfo(self.manager1_id)
        self.manager2 = user_services.UserActionsInfo(self.manager2_id)
        topic = topic_domain.Topic.create_default_topic(
            topic_id='0', name='topic')
        stories = [story_domain.Story.create_default_story(
            '%s' % i,
            title='title %d' % i,
            corresponding_topic_id='0'
        ) for i in xrange(3)]
        for story in stories:
            story_services.save_new_story(self.owner_id, story)
            topic.add_canonical_story(story.id)
        topic_services.save_new_topic(self.owner_id, topic)
        story_services.assign_role(
            self.admin, self.manager1, story_domain.ROLE_MANAGER, stories[0].id)
        story_services.assign_role(
            self.admin, self.manager2, story_domain.ROLE_MANAGER, stories[0].id)
        story_services.assign_role(
            self.admin, self.manager2, story_domain.ROLE_MANAGER, stories[1].id)
        self.model_instance_0 = story_models.StoryRightsModel.get_by_id('0')
        self.model_instance_1 = story_models.StoryRightsModel.get_by_id('1')
        self.model_instance_2 = story_models.StoryRightsModel.get_by_id('2')
        self.job_class = (
            prod_validation_jobs_one_off.StoryRightsModelAuditOneOffJob)

    def test_standard_operation(self):
        """All three rights models validate cleanly."""
        expected_output = [
            u'[u\'fully-validated StoryRightsModel\', 3]']
        run_job_and_check_output(self, expected_output)

    def test_model_with_created_on_greater_than_last_updated(self):
        """A created_on timestamp later than last_updated fails the time
        field relation check.
        """
        self.model_instance_0.created_on = (
            self.model_instance_0.last_updated + datetime.timedelta(days=1))
        # Rights models are versioned, so the change is persisted via
        # commit() rather than put().
        self.model_instance_0.commit(
            feconf.SYSTEM_COMMITTER_ID, 'created_on test', [])
        expected_output = [(
            u'[u\'failed validation check for time field relation check '
            'of StoryRightsModel\', '
            '[u\'Entity id %s: The created_on field has a value '
            '%s which is greater than the value '
            '%s of last_updated field\']]') % (
                self.model_instance_0.id,
                self.model_instance_0.created_on,
                self.model_instance_0.last_updated
            ), u'[u\'fully-validated StoryRightsModel\', 2]']
        run_job_and_check_output(self, expected_output, sort=True)

    def test_model_with_last_updated_greater_than_current_time(self):
        """With the clock mocked 13 hours back, last_updated appears to be
        in the future and fails the current time check.
        """
        self.model_instance_1.delete(feconf.SYSTEM_COMMITTER_ID, 'delete')
        self.model_instance_2.delete(feconf.SYSTEM_COMMITTER_ID, 'delete')
        expected_output = [(
            u'[u\'failed validation check for current time check of '
            'StoryRightsModel\', '
            '[u\'Entity id %s: The last_updated field has a '
            'value %s which is greater than the time when the job was run\']]'
        ) % (self.model_instance_0.id, self.model_instance_0.last_updated)]
        with self.swap(datetime, 'datetime', MockDatetime13Hours), self.swap(
            db.DateTimeProperty, 'data_type', MockDatetime13Hours):
            update_datastore_types_for_mock_datetime()
            run_job_and_check_output(self, expected_output, sort=True)

    def test_missing_story_model_failure(self):
        """Deleting story '0' fails the story_ids field check of its
        rights model.
        """
        story_models.StoryModel.get_by_id('0').delete(
            feconf.SYSTEM_COMMITTER_ID, '', [])
        expected_output = [
            (
                u'[u\'failed validation check for story_ids '
                'field check of StoryRightsModel\', '
                '[u"Entity id 0: based on field story_ids having '
                'value 0, expect model StoryModel with id 0 but '
                'it doesn\'t exist"]]'),
            u'[u\'fully-validated StoryRightsModel\', 2]']
        run_job_and_check_output(self, expected_output, sort=True)

    def test_missing_manager_user_model_failure(self):
        """Deleting manager1's UserSettingsModel fails the manager_user_ids
        field check on story '0' (the only story manager1 manages).
        """
        user_models.UserSettingsModel.get_by_id(self.manager1_id).delete()
        expected_output = [
            (
                u'[u\'failed validation check for manager_user_ids '
                'field check of StoryRightsModel\', '
                '[u"Entity id 0: based on field manager_user_ids having '
                'value %s, expect model UserSettingsModel with id %s '
                'but it doesn\'t exist"]]') % (
                    self.manager1_id, self.manager1_id),
            u'[u\'fully-validated StoryRightsModel\', 2]']
        run_job_and_check_output(self, expected_output, sort=True)

    def test_missing_snapshot_metadata_model_failure(self):
        """Deleting the rights snapshot metadata model fails the
        snapshot_metadata_ids field check.
        """
        story_models.StoryRightsSnapshotMetadataModel.get_by_id(
            '0-1').delete()
        expected_output = [
            (
                u'[u\'failed validation check for snapshot_metadata_ids '
                'field check of StoryRightsModel\', '
                '[u"Entity id 0: based on field snapshot_metadata_ids having '
                'value 0-1, expect model '
                'StoryRightsSnapshotMetadataModel '
                'with id 0-1 but it doesn\'t exist"]]'
            ),
            u'[u\'fully-validated StoryRightsModel\', 2]']
        run_job_and_check_output(self, expected_output, sort=True)

    def test_missing_snapshot_content_model_failure(self):
        """Deleting the rights snapshot content model fails the
        snapshot_content_ids field check.
        """
        story_models.StoryRightsSnapshotContentModel.get_by_id(
            '0-1').delete()
        expected_output = [
            (
                u'[u\'failed validation check for snapshot_content_ids '
                'field check of StoryRightsModel\', '
                '[u"Entity id 0: based on field snapshot_content_ids having '
                'value 0-1, expect model StoryRightsSnapshotContentModel '
                'with id 0-1 but it doesn\'t exist"]]'),
            u'[u\'fully-validated StoryRightsModel\', 2]']
        run_job_and_check_output(self, expected_output, sort=True)
class StoryRightsSnapshotMetadataModelValidatorTests(
        test_utils.GenericTestBase):
    """Tests for the StoryRightsSnapshotMetadataModel audit one-off job."""

    def setUp(self):
        """Create a topic with three stories and fetch the version-1
        rights snapshot metadata model for each.

        Story '0' is saved by a second user (self.user_id) so that tests
        can delete that committer's UserSettingsModel independently of
        the owner.
        """
        super(StoryRightsSnapshotMetadataModelValidatorTests, self).setUp(
            )
        self.signup(self.OWNER_EMAIL, self.OWNER_USERNAME)
        self.signup(USER_EMAIL, USER_NAME)
        self.owner_id = self.get_user_id_from_email(self.OWNER_EMAIL)
        self.user_id = self.get_user_id_from_email(USER_EMAIL)
        topic = topic_domain.Topic.create_default_topic(
            topic_id='0', name='topic')
        stories = [story_domain.Story.create_default_story(
            '%s' % i,
            title='title %d' % i,
            corresponding_topic_id='0'
        ) for i in xrange(3)]
        for story in stories:
            if story.id != '0':
                story_services.save_new_story(self.owner_id, story)
            else:
                story_services.save_new_story(self.user_id, story)
            topic.add_canonical_story(story.id)
        topic_services.save_new_topic(self.owner_id, topic)
        self.model_instance_0 = (
            story_models.StoryRightsSnapshotMetadataModel.get_by_id(
                '0-1'))
        self.model_instance_1 = (
            story_models.StoryRightsSnapshotMetadataModel.get_by_id(
                '1-1'))
        self.model_instance_2 = (
            story_models.StoryRightsSnapshotMetadataModel.get_by_id(
                '2-1'))
        self.job_class = (
            prod_validation_jobs_one_off
            .StoryRightsSnapshotMetadataModelAuditOneOffJob)

    def test_standard_operation(self):
        """All three rights snapshot metadata models validate cleanly."""
        expected_output = [
            u'[u\'fully-validated StoryRightsSnapshotMetadataModel\', 3]']
        run_job_and_check_output(self, expected_output)

    def test_model_with_created_on_greater_than_last_updated(self):
        """A created_on timestamp later than last_updated fails the time
        field relation check.
        """
        self.model_instance_0.created_on = (
            self.model_instance_0.last_updated + datetime.timedelta(days=1))
        self.model_instance_0.put()
        expected_output = [(
            u'[u\'failed validation check for time field relation check '
            'of StoryRightsSnapshotMetadataModel\', '
            '[u\'Entity id %s: The created_on field has a value '
            '%s which is greater than the value '
            '%s of last_updated field\']]') % (
                self.model_instance_0.id,
                self.model_instance_0.created_on,
                self.model_instance_0.last_updated
            ), (
                u'[u\'fully-validated '
                'StoryRightsSnapshotMetadataModel\', 2]')]
        run_job_and_check_output(self, expected_output, sort=True)

    def test_model_with_last_updated_greater_than_current_time(self):
        """With the clock mocked 13 hours back, last_updated appears to be
        in the future and fails the current time check.
        """
        self.model_instance_1.delete()
        self.model_instance_2.delete()
        expected_output = [(
            u'[u\'failed validation check for current time check of '
            'StoryRightsSnapshotMetadataModel\', '
            '[u\'Entity id %s: The last_updated field has a '
            'value %s which is greater than the time when the job was run\']]'
        ) % (self.model_instance_0.id, self.model_instance_0.last_updated)]
        with self.swap(datetime, 'datetime', MockDatetime13Hours), self.swap(
            db.DateTimeProperty, 'data_type', MockDatetime13Hours):
            update_datastore_types_for_mock_datetime()
            run_job_and_check_output(self, expected_output, sort=True)

    def test_missing_story_rights_model_failure(self):
        """Deleting the rights model for story '0' fails the
        story_rights_ids check for both of its snapshot metadata models
        ('0-1' and the '0-2' snapshot created by the deletion itself).
        """
        story_models.StoryRightsModel.get_by_id('0').delete(
            self.user_id, '', [])
        expected_output = [
            (
                u'[u\'failed validation check for story_rights_ids '
                'field check of StoryRightsSnapshotMetadataModel\', '
                '[u"Entity id 0-1: based on field story_rights_ids '
                'having value 0, expect model StoryRightsModel with '
                'id 0 but it doesn\'t exist", u"Entity id 0-2: based on field '
                'story_rights_ids having value 0, expect model '
                'StoryRightsModel with id 0 but it doesn\'t exist"]]'
            ), (
                u'[u\'fully-validated '
                'StoryRightsSnapshotMetadataModel\', 2]')]
        run_job_and_check_output(self, expected_output, sort=True)

    def test_missing_committer_model_failure(self):
        """Deleting the committer's UserSettingsModel fails the
        committer_ids field check.
        """
        user_models.UserSettingsModel.get_by_id(self.user_id).delete()
        expected_output = [
            (
                u'[u\'failed validation check for committer_ids field '
                'check of StoryRightsSnapshotMetadataModel\', '
                '[u"Entity id 0-1: based on field committer_ids having '
                'value %s, expect model UserSettingsModel with id %s '
                'but it doesn\'t exist"]]'
            ) % (self.user_id, self.user_id), (
                u'[u\'fully-validated '
                'StoryRightsSnapshotMetadataModel\', 2]')]
        run_job_and_check_output(self, expected_output, sort=True)

    def test_invalid_story_version_in_model_id(self):
        """A snapshot id whose version ('0-3') exceeds the rights model's
        actual version fails the story rights model version check.
        """
        model_with_invalid_version_in_id = (
            story_models.StoryRightsSnapshotMetadataModel(
                id='0-3', committer_id=self.owner_id, commit_type='edit',
                commit_message='msg', commit_cmds=[{}]))
        model_with_invalid_version_in_id.put()
        expected_output = [
            (
                u'[u\'failed validation check for story rights model '
                'version check of StoryRightsSnapshotMetadataModel\', '
                '[u\'Entity id 0-3: StoryRights model corresponding to '
                'id 0 has a version 1 which is less than the version 3 in '
                'snapshot metadata model id\']]'
            ), (
                u'[u\'fully-validated '
                'StoryRightsSnapshotMetadataModel\', 3]')]
        run_job_and_check_output(self, expected_output, sort=True)

    # NOTE(review): 'schmea' in the method name is a typo for 'schema'.
    def test_model_with_invalid_commit_cmd_schmea(self):
        """Commit cmds missing required attributes or carrying extra ones
        fail the per-command domain validation checks.
        """
        self.model_instance_0.commit_cmds = [{
            'cmd': 'change_role',
            'assignee_id': 'id',
            'new_role': 'manager'
        }, {
            'cmd': 'publish_story',
            'invalid_attribute': 'invalid'
        }]
        self.model_instance_0.put()
        expected_output = [
            (
                u'[u\'failed validation check for commit cmd '
                'change_role check of '
                'StoryRightsSnapshotMetadataModel\', '
                '[u"Entity id 0-1: Commit command domain validation '
                'for command: {u\'assignee_id\': u\'id\', '
                'u\'cmd\': u\'change_role\', u\'new_role\': u\'manager\'} '
                'failed with error: The following required attributes '
                'are missing: old_role"]]'
            ), (
                u'[u\'failed validation check for commit cmd publish_story '
                'check of StoryRightsSnapshotMetadataModel\', '
                '[u"Entity id 0-1: Commit command domain validation '
                'for command: {u\'cmd\': u\'publish_story\', '
                'u\'invalid_attribute\': u\'invalid\'} failed with error: '
                'The following extra attributes are present: '
                'invalid_attribute"]]'
            ), u'[u\'fully-validated StoryRightsSnapshotMetadataModel\', 2]']
        run_job_and_check_output(self, expected_output, sort=True)
class StoryRightsSnapshotContentModelValidatorTests(
        test_utils.GenericTestBase):
    """Tests for the StoryRightsSnapshotContentModel audit one-off job."""

    def setUp(self):
        """Create a topic with three stories and fetch the version-1
        rights snapshot content model for each.
        """
        super(StoryRightsSnapshotContentModelValidatorTests, self).setUp(
            )
        self.signup(self.OWNER_EMAIL, self.OWNER_USERNAME)
        self.owner_id = self.get_user_id_from_email(self.OWNER_EMAIL)
        topic = topic_domain.Topic.create_default_topic(
            topic_id='0', name='topic')
        stories = [story_domain.Story.create_default_story(
            '%s' % i,
            title='title %d' % i,
            corresponding_topic_id='0'
        ) for i in xrange(3)]
        for story in stories:
            story_services.save_new_story(self.owner_id, story)
            topic.add_canonical_story(story.id)
        topic_services.save_new_topic(self.owner_id, topic)
        self.model_instance_0 = (
            story_models.StoryRightsSnapshotContentModel.get_by_id(
                '0-1'))
        self.model_instance_1 = (
            story_models.StoryRightsSnapshotContentModel.get_by_id(
                '1-1'))
        self.model_instance_2 = (
            story_models.StoryRightsSnapshotContentModel.get_by_id(
                '2-1'))
        self.job_class = (
            prod_validation_jobs_one_off
            .StoryRightsSnapshotContentModelAuditOneOffJob)

    def test_standard_operation(self):
        """All three rights snapshot content models validate cleanly."""
        expected_output = [
            u'[u\'fully-validated StoryRightsSnapshotContentModel\', 3]']
        run_job_and_check_output(self, expected_output)

    def test_model_with_created_on_greater_than_last_updated(self):
        """A created_on timestamp later than last_updated fails the time
        field relation check.
        """
        self.model_instance_0.created_on = (
            self.model_instance_0.last_updated + datetime.timedelta(days=1))
        self.model_instance_0.put()
        expected_output = [(
            u'[u\'failed validation check for time field relation check '
            'of StoryRightsSnapshotContentModel\', '
            '[u\'Entity id %s: The created_on field has a value '
            '%s which is greater than the value '
            '%s of last_updated field\']]') % (
                self.model_instance_0.id,
                self.model_instance_0.created_on,
                self.model_instance_0.last_updated
            ), (
                u'[u\'fully-validated '
                'StoryRightsSnapshotContentModel\', 2]')]
        run_job_and_check_output(self, expected_output, sort=True)

    def test_model_with_last_updated_greater_than_current_time(self):
        """With the clock mocked 13 hours back, last_updated appears to be
        in the future and fails the current time check.
        """
        self.model_instance_1.delete()
        self.model_instance_2.delete()
        expected_output = [(
            u'[u\'failed validation check for current time check of '
            'StoryRightsSnapshotContentModel\', '
            '[u\'Entity id %s: The last_updated field has a '
            'value %s which is greater than the time when the job was run\']]'
        ) % (self.model_instance_0.id, self.model_instance_0.last_updated)]
        with self.swap(datetime, 'datetime', MockDatetime13Hours), self.swap(
            db.DateTimeProperty, 'data_type', MockDatetime13Hours):
            update_datastore_types_for_mock_datetime()
            run_job_and_check_output(self, expected_output, sort=True)

    def test_missing_story_model_failure(self):
        """Deleting the rights model for story '0' fails the
        story_rights_ids check for both of its snapshot content models
        ('0-1' and the '0-2' snapshot created by the deletion itself).
        """
        story_models.StoryRightsModel.get_by_id('0').delete(
            self.owner_id, '', [])
        expected_output = [
            (
                u'[u\'failed validation check for story_rights_ids '
                'field check of StoryRightsSnapshotContentModel\', '
                '[u"Entity id 0-1: based on field story_rights_ids '
                'having value 0, expect model StoryRightsModel with '
                'id 0 but it doesn\'t exist", u"Entity id 0-2: based on field '
                'story_rights_ids having value 0, expect model '
                'StoryRightsModel with id 0 but it doesn\'t exist"]]'
            ), (
                u'[u\'fully-validated '
                'StoryRightsSnapshotContentModel\', 2]')]
        run_job_and_check_output(self, expected_output, sort=True)

    def test_invalid_story_version_in_model_id(self):
        """A snapshot id whose version ('0-3') exceeds the rights model's
        actual version fails the story rights model version check.
        """
        model_with_invalid_version_in_id = (
            story_models.StoryRightsSnapshotContentModel(
                id='0-3'))
        model_with_invalid_version_in_id.content = {}
        model_with_invalid_version_in_id.put()
        expected_output = [
            (
                u'[u\'failed validation check for story rights model '
                'version check of StoryRightsSnapshotContentModel\', '
                '[u\'Entity id 0-3: StoryRights model corresponding to '
                'id 0 has a version 1 which is less than the version 3 in '
                'snapshot content model id\']]'
            ), (
                u'[u\'fully-validated StoryRightsSnapshotContentModel\', '
                '3]')]
        run_job_and_check_output(self, expected_output, sort=True)
class StoryCommitLogEntryModelValidatorTests(test_utils.GenericTestBase):
    """Tests for the StoryCommitLogEntryModel audit one-off job."""

    def setUp(self):
        """Create a topic with three stories and fetch the version-1
        commit log entry model ('story-<id>-1') for each.
        """
        super(StoryCommitLogEntryModelValidatorTests, self).setUp()
        self.signup(self.OWNER_EMAIL, self.OWNER_USERNAME)
        self.owner_id = self.get_user_id_from_email(self.OWNER_EMAIL)
        topic = topic_domain.Topic.create_default_topic(
            topic_id='0', name='topic')
        stories = [story_domain.Story.create_default_story(
            '%s' % i,
            title='title %d' % i,
            corresponding_topic_id='0'
        ) for i in xrange(3)]
        for story in stories:
            story_services.save_new_story(self.owner_id, story)
            topic.add_canonical_story(story.id)
        topic_services.save_new_topic(self.owner_id, topic)
        self.model_instance_0 = (
            story_models.StoryCommitLogEntryModel.get_by_id(
                'story-0-1'))
        self.model_instance_1 = (
            story_models.StoryCommitLogEntryModel.get_by_id(
                'story-1-1'))
        self.model_instance_2 = (
            story_models.StoryCommitLogEntryModel.get_by_id(
                'story-2-1'))
        self.job_class = (
            prod_validation_jobs_one_off
            .StoryCommitLogEntryModelAuditOneOffJob)

    def test_standard_operation(self):
        """All commit log entries validate cleanly; the story update
        creates a fourth entry, hence a count of 4.
        """
        story_services.update_story(
            self.owner_id, '0', [story_domain.StoryChange({
                'cmd': 'update_story_property',
                'property_name': 'title',
                'new_value': 'New title',
                'old_value': 'title 0'
            })], 'Changes.')
        expected_output = [
            u'[u\'fully-validated StoryCommitLogEntryModel\', 4]']
        run_job_and_check_output(self, expected_output)

    def test_model_with_created_on_greater_than_last_updated(self):
        """A created_on timestamp later than last_updated fails the time
        field relation check.
        """
        self.model_instance_0.created_on = (
            self.model_instance_0.last_updated + datetime.timedelta(days=1))
        self.model_instance_0.put()
        expected_output = [(
            u'[u\'failed validation check for time field relation check '
            'of StoryCommitLogEntryModel\', '
            '[u\'Entity id %s: The created_on field has a value '
            '%s which is greater than the value '
            '%s of last_updated field\']]') % (
                self.model_instance_0.id,
                self.model_instance_0.created_on,
                self.model_instance_0.last_updated
            ), u'[u\'fully-validated StoryCommitLogEntryModel\', 2]']
        run_job_and_check_output(self, expected_output, sort=True)

    def test_model_with_last_updated_greater_than_current_time(self):
        """With the clock mocked 13 hours back, last_updated appears to be
        in the future and fails the current time check.
        """
        self.model_instance_1.delete()
        self.model_instance_2.delete()
        expected_output = [(
            u'[u\'failed validation check for current time check of '
            'StoryCommitLogEntryModel\', '
            '[u\'Entity id %s: The last_updated field has a '
            'value %s which is greater than the time when the job was run\']]'
        ) % (self.model_instance_0.id, self.model_instance_0.last_updated)]
        with self.swap(datetime, 'datetime', MockDatetime13Hours), self.swap(
            db.DateTimeProperty, 'data_type', MockDatetime13Hours):
            update_datastore_types_for_mock_datetime()
            run_job_and_check_output(self, expected_output, sort=True)

    def test_missing_story_model_failure(self):
        """Deleting story '0' fails the story_ids check for both of its
        commit log entries ('story-0-1' and the 'story-0-2' entry created
        by the deletion itself).
        """
        story_models.StoryModel.get_by_id('0').delete(
            feconf.SYSTEM_COMMITTER_ID, '', [])
        expected_output = [
            (
                u'[u\'failed validation check for story_ids '
                'field check of StoryCommitLogEntryModel\', '
                '[u"Entity id story-0-1: based on field story_ids '
                'having value 0, expect model StoryModel with id 0 '
                'but it doesn\'t exist", u"Entity id story-0-2: based '
                'on field story_ids having value 0, expect model '
                'StoryModel with id 0 but it doesn\'t exist"]]'
            ), u'[u\'fully-validated StoryCommitLogEntryModel\', 2]']
        run_job_and_check_output(self, expected_output, literal_eval=True)

    def test_invalid_story_version_in_model_id(self):
        """A commit log id whose version (3) exceeds the story's actual
        version fails the story model version check.
        """
        model_with_invalid_version_in_id = (
            story_models.StoryCommitLogEntryModel.create(
                '0', 3, self.owner_id, self.OWNER_USERNAME, 'edit',
                'msg', [{}],
                constants.ACTIVITY_STATUS_PUBLIC, False))
        model_with_invalid_version_in_id.story_id = '0'
        model_with_invalid_version_in_id.put()
        expected_output = [
            (
                u'[u\'failed validation check for story model '
                'version check of StoryCommitLogEntryModel\', '
                '[u\'Entity id %s: Story model corresponding '
                'to id 0 has a version 1 which is less than '
                'the version 3 in commit log entry model id\']]'
            ) % (model_with_invalid_version_in_id.id),
            u'[u\'fully-validated StoryCommitLogEntryModel\', 3]']
        run_job_and_check_output(self, expected_output, sort=True)

    def test_model_with_invalid_id(self):
        """An id that does not match the expected 'story-<id>-<version>'
        pattern fails the model id regex check (and, since no commit cmd
        domain object can be derived from it, the commit cmd check too).
        """
        model_with_invalid_id = (
            story_models.StoryCommitLogEntryModel(
                id='invalid-0-1', user_id=self.owner_id,
                username=self.OWNER_USERNAME, commit_type='edit',
                commit_message='msg', commit_cmds=[{}],
                post_commit_status=constants.ACTIVITY_STATUS_PUBLIC,
                post_commit_is_private=False))
        model_with_invalid_id.story_id = '0'
        model_with_invalid_id.put()
        expected_output = [
            (
                u'[u\'failed validation check for model id check of '
                'StoryCommitLogEntryModel\', '
                '[u\'Entity id %s: Entity id does not match regex pattern\']]'
            ) % (model_with_invalid_id.id), (
                u'[u\'failed validation check for commit cmd check of '
                'StoryCommitLogEntryModel\', [u\'Entity id invalid-0-1: '
                'No commit command domain object defined for entity with '
                'commands: [{}]\']]'),
            u'[u\'fully-validated StoryCommitLogEntryModel\', 3]']
        run_job_and_check_output(self, expected_output, sort=True)

    def test_model_with_invalid_commit_type(self):
        """A commit_type outside the allowed set fails the commit type
        check.
        """
        self.model_instance_0.commit_type = 'invalid'
        self.model_instance_0.put()
        expected_output = [
            (
                u'[u\'failed validation check for commit type check of '
                'StoryCommitLogEntryModel\', '
                '[u\'Entity id story-0-1: Commit type invalid is '
                'not allowed\']]'
            ), u'[u\'fully-validated StoryCommitLogEntryModel\', 2]']
        run_job_and_check_output(self, expected_output, sort=True)

    def test_model_with_invalid_post_commit_status(self):
        """A post_commit_status outside the allowed set fails the post
        commit status check.
        """
        self.model_instance_0.post_commit_status = 'invalid'
        self.model_instance_0.put()
        expected_output = [
            (
                u'[u\'failed validation check for post commit status check '
                'of StoryCommitLogEntryModel\', '
                '[u\'Entity id story-0-1: Post commit status invalid '
                'is invalid\']]'
            ), u'[u\'fully-validated StoryCommitLogEntryModel\', 2]']
        run_job_and_check_output(self, expected_output, sort=True)

    def test_model_with_invalid_true_post_commit_is_private(self):
        """post_commit_is_private must be False when the post commit
        status is 'public'.
        """
        self.model_instance_0.post_commit_status = 'public'
        self.model_instance_0.post_commit_is_private = True
        self.model_instance_0.put()
        expected_output = [
            (
                u'[u\'failed validation check for post commit is private '
                'check of StoryCommitLogEntryModel\', '
                '[u\'Entity id %s: Post commit status is '
                'public but post_commit_is_private is True\']]'
            ) % self.model_instance_0.id,
            u'[u\'fully-validated StoryCommitLogEntryModel\', 2]']
        run_job_and_check_output(self, expected_output, sort=True)

    def test_model_with_invalid_false_post_commit_is_private(self):
        """post_commit_is_private must be True when the post commit
        status is 'private'.
        """
        self.model_instance_0.post_commit_status = 'private'
        self.model_instance_0.post_commit_is_private = False
        self.model_instance_0.put()
        expected_output = [
            (
                u'[u\'failed validation check for post commit is private '
                'check of StoryCommitLogEntryModel\', '
                '[u\'Entity id %s: Post commit status is '
                'private but post_commit_is_private is False\']]'
            ) % self.model_instance_0.id,
            u'[u\'fully-validated StoryCommitLogEntryModel\', 2]']
        run_job_and_check_output(self, expected_output, sort=True)

    # NOTE(review): 'schmea' in the method name is a typo for 'schema'.
    def test_model_with_invalid_commit_cmd_schmea(self):
        """Commit cmds missing required attributes or carrying extra ones
        fail the per-command domain validation checks.
        """
        self.model_instance_0.commit_cmds = [{
            'cmd': 'add_story_node'
        }, {
            'cmd': 'delete_story_node',
            'invalid_attribute': 'invalid'
        }]
        self.model_instance_0.put()
        expected_output = [
            (
                u'[u\'failed validation check for commit cmd '
                'delete_story_node check of '
                'StoryCommitLogEntryModel\', '
                '[u"Entity id story-0-1: Commit command domain '
                'validation for command: {u\'cmd\': u\'delete_story_node\', '
                'u\'invalid_attribute\': u\'invalid\'} failed with error: '
                'The following required attributes are missing: node_id, '
                'The following extra attributes are present: '
                'invalid_attribute"]]'
            ), (
                u'[u\'failed validation check for commit cmd '
                'add_story_node check of StoryCommitLogEntryModel\', '
                '[u"Entity id story-0-1: Commit command domain validation '
                'for command: {u\'cmd\': u\'add_story_node\'} '
                'failed with error: The following required attributes '
                'are missing: node_id, title"]]'
            ), u'[u\'fully-validated StoryCommitLogEntryModel\', 2]']
        run_job_and_check_output(self, expected_output, sort=True)
class StorySummaryModelValidatorTests(test_utils.GenericTestBase):
    """Tests for the StorySummaryModel audit one-off job."""
    def setUp(self):
        """Create one topic with three stories and cache their summaries."""
        super(StorySummaryModelValidatorTests, self).setUp()
        self.signup(self.OWNER_EMAIL, self.OWNER_USERNAME)
        self.owner_id = self.get_user_id_from_email(self.OWNER_EMAIL)
        language_codes = ['ar', 'en', 'en']
        topic = topic_domain.Topic.create_default_topic(
            topic_id='0', name='topic')
        stories = [story_domain.Story.create_default_story(
            '%s' % i,
            title='title %d' % i,
            corresponding_topic_id='0'
        ) for i in xrange(3)]
        for index, story in enumerate(stories):
            story.description = 'story-test'
            story.language_code = language_codes[index]
            story_services.save_new_story(self.owner_id, story)
            topic.add_canonical_story(story.id)
        topic_services.save_new_topic(self.owner_id, topic)
        # Saving the stories above also creates their summary models.
        self.model_instance_0 = story_models.StorySummaryModel.get_by_id('0')
        self.model_instance_1 = story_models.StorySummaryModel.get_by_id('1')
        self.model_instance_2 = story_models.StorySummaryModel.get_by_id('2')
        self.job_class = (
            prod_validation_jobs_one_off.StorySummaryModelAuditOneOffJob)
    def test_standard_operation(self):
        """All three summaries validate after a normal title update."""
        # NOTE(review): this updates story '1' but passes old_value
        # 'title 0' (story '1' was created with 'title 1') — presumably
        # old_value is not checked by the change object; confirm.
        story_services.update_story(
            self.owner_id, '1', [story_domain.StoryChange({
                'cmd': 'update_story_property',
                'property_name': 'title',
                'new_value': 'New title',
                'old_value': 'title 0'
            })], 'Changes.')
        expected_output = [
            u'[u\'fully-validated StorySummaryModel\', 3]']
        run_job_and_check_output(self, expected_output)
    def test_model_with_created_on_greater_than_last_updated(self):
        """created_on after last_updated fails the time relation check."""
        self.model_instance_0.created_on = (
            self.model_instance_0.last_updated + datetime.timedelta(days=1))
        self.model_instance_0.put()
        expected_output = [(
            u'[u\'failed validation check for time field relation check '
            'of StorySummaryModel\', '
            '[u\'Entity id %s: The created_on field has a value '
            '%s which is greater than the value '
            '%s of last_updated field\']]') % (
                self.model_instance_0.id,
                self.model_instance_0.created_on,
                self.model_instance_0.last_updated
            ), u'[u\'fully-validated StorySummaryModel\', 2]']
        run_job_and_check_output(self, expected_output, sort=True)
    def test_model_with_last_updated_greater_than_current_time(self):
        """A future last_updated fails the current-time check.

        The job is run under a mocked datetime 13 hours behind the real
        clock so that the model's timestamp lies in the 'future'.
        """
        story_services.delete_story(self.owner_id, '1')
        story_services.delete_story(self.owner_id, '2')
        expected_output = [(
            u'[u\'failed validation check for current time check of '
            'StorySummaryModel\', '
            '[u\'Entity id %s: The last_updated field has a '
            'value %s which is greater than the time when the job was run\']]'
        ) % (self.model_instance_0.id, self.model_instance_0.last_updated)]
        with self.swap(datetime, 'datetime', MockDatetime13Hours), self.swap(
            db.DateTimeProperty, 'data_type', MockDatetime13Hours):
            update_datastore_types_for_mock_datetime()
            run_job_and_check_output(self, expected_output, sort=True)
    def test_missing_story_model_failure(self):
        """Deleting the backing StoryModel breaks the story_ids reference."""
        story_model = story_models.StoryModel.get_by_id('0')
        story_model.delete(feconf.SYSTEM_COMMITTER_ID, '', [])
        self.model_instance_0.story_model_last_updated = (
            story_model.last_updated)
        self.model_instance_0.put()
        expected_output = [
            (
                u'[u\'failed validation check for story_ids '
                'field check of StorySummaryModel\', '
                '[u"Entity id 0: based on field story_ids having '
                'value 0, expect model StoryModel with id 0 but '
                'it doesn\'t exist"]]'),
            u'[u\'fully-validated StorySummaryModel\', 2]']
        run_job_and_check_output(self, expected_output, sort=True)
    def test_model_with_invalid_node_count(self):
        """node_count must match the number of nodes in story_contents."""
        self.model_instance_0.node_count = 10
        self.model_instance_0.put()
        expected_output = [
            (
                u'[u\'failed validation check for node count check of '
                'StorySummaryModel\', [u\'Entity id 0: Node count: 10 does '
                'not match the number of nodes in story_contents dict: []\']]'
            ), u'[u\'fully-validated StorySummaryModel\', 2]']
        run_job_and_check_output(self, expected_output, sort=True)
    def test_model_with_invalid_story_related_property(self):
        """Summary fields mirrored from the story must stay in sync."""
        self.model_instance_0.title = 'invalid'
        self.model_instance_0.put()
        expected_output = [
            (
                u'[u\'failed validation check for title field check of '
                'StorySummaryModel\', '
                '[u\'Entity id %s: title field in entity: invalid does not '
                'match corresponding story title field: title 0\']]'
            ) % self.model_instance_0.id,
            u'[u\'fully-validated StorySummaryModel\', 2]']
        run_job_and_check_output(self, expected_output, sort=True)
class UserSubscriptionsModelValidatorTests(test_utils.GenericTestBase):
    """Tests for the UserSubscriptionsModel audit one-off job."""
    def setUp(self):
        """Subscribe a user to explorations, collections, a feedback
        thread and a creator, so both users own subscription models."""
        super(UserSubscriptionsModelValidatorTests, self).setUp()
        self.signup(self.OWNER_EMAIL, self.OWNER_USERNAME)
        self.signup(USER_EMAIL, USER_NAME)
        self.owner_id = self.get_user_id_from_email(self.OWNER_EMAIL)
        self.user_id = self.get_user_id_from_email(USER_EMAIL)
        self.owner = user_services.UserActionsInfo(self.owner_id)
        # Explorations get ids '0'-'2', collections ids '3'-'5'.
        explorations = [exp_domain.Exploration.create_default_exploration(
            '%s' % i,
            title='title %d' % i,
            category='category%d' % i
        ) for i in xrange(3)]
        for exp in explorations:
            exp_services.save_new_exploration(self.owner_id, exp)
            rights_manager.publish_exploration(self.owner, exp.id)
        collections = [collection_domain.Collection.create_default_collection(
            '%s' % i,
            title='title %d' % i,
            category='category%d' % i
        ) for i in xrange(3, 6)]
        for collection in collections:
            collection_services.save_new_collection(self.owner_id, collection)
            rights_manager.publish_collection(self.owner, collection.id)
        thread_id = feedback_services.create_thread(
            'exploration', 'exp_id', None, 'a subject', 'some text')
        subscription_services.subscribe_to_thread(
            self.user_id, thread_id)
        subscription_services.subscribe_to_creator(self.user_id, self.owner_id)
        for exp in explorations:
            subscription_services.subscribe_to_exploration(
                self.user_id, exp.id)
        for collection in collections:
            subscription_services.subscribe_to_collection(
                self.user_id, collection.id)
        self.process_and_flush_pending_tasks()
        self.job_class = (
            prod_validation_jobs_one_off.UserSubscriptionsModelAuditOneOffJob)
    def test_standard_operation(self):
        """Both users' subscription models validate cleanly."""
        expected_output = [
            u'[u\'fully-validated UserSubscriptionsModel\', 2]']
        run_job_and_check_output(self, expected_output)
    def test_get_external_id_relationship_failure(self):
        """A subscription to a nonexistent thread id fails validation."""
        nonexist_thread_id = 'nonexist_thread_id'
        subscription_services.subscribe_to_thread(
            self.user_id, nonexist_thread_id)
        # NOTE(review): the entity id below is a hard-coded user id that
        # presumably comes from deterministic id generation in the test
        # environment — confirm it stays stable across test runs.
        expected_output = [
            (
                u'[u\'failed validation check for general_feedback_thread_ids '
                'field check of UserSubscriptionsModel\', '
                '[u"Entity id 110211048197157141232: based on '
                'field general_feedback_thread_ids having value '
                'nonexist_thread_id, expect model GeneralFeedbackThreadModel '
                'with id nonexist_thread_id but it doesn\'t exist"]]'),
            u'[u\'fully-validated UserSubscriptionsModel\', 1]']
        run_job_and_check_output(self, expected_output, sort=True)
| 45.437904 | 80 | 0.623512 | [
"Apache-2.0"
] | cclauss/oppia | core/domain/prod_validation_jobs_one_off_test.py | 287,940 | Python |
from django.db import models
from django.conf import settings
# Create your models here.
class Message(models.Model):
    # A direct message from one user to another.
    id = models.AutoField(primary_key=True)
    # NOTE(review): these are ForeignKeys, so the "_id" suffix in the field
    # name is misleading — Django exposes `sender_id` as the related object
    # and adds a `sender_id_id` raw column. Renaming would require a
    # migration and touching all call sites.
    sender_id = models.ForeignKey(settings.AUTH_USER_MODEL, related_name="message_sender", on_delete=models.DO_NOTHING, null=True)
    receiver_id = models.ForeignKey(settings.AUTH_USER_MODEL, related_name="message_receiver", on_delete=models.DO_NOTHING, null=True)
    content = models.TextField(null=True)
    # NOTE(review): auto_now=True rewrites this timestamp on *every* save,
    # so it cannot record when the receiver actually read the message —
    # confirm intent; a nullable plain DateTimeField seems more likely.
    read_at = models.DateTimeField(auto_now=True)
    created_at = models.DateTimeField(auto_now_add=True)
    updated_at = models.DateTimeField(auto_now=True)
    class Meta:
        # Newest messages first.
        ordering = ['-created_at']
class Follower(models.Model):
    # Follower relationships: one row per followed user, with the set of
    # users following them held in a many-to-many field.
    id = models.AutoField(primary_key=True)
    user_id = models.ForeignKey(settings.AUTH_USER_MODEL, related_name="following", on_delete=models.CASCADE, null=True)
    follower_id = models.ManyToManyField(settings.AUTH_USER_MODEL, related_name="follower")
    created_at = models.DateTimeField(auto_now_add=True)
    updated_at = models.DateTimeField(auto_now=True)
    def __str__(self) -> str:
        # Display the followed user's username.
        return self.user_id.username
    class Meta:
        ordering = ['user_id']
class PostCategory(models.Model):
    # A named category a Post can belong to.
    id = models.AutoField(primary_key=True)
    name = models.CharField(max_length=50, null=True)
    created_at = models.DateTimeField(auto_now_add=True)
    updated_at = models.DateTimeField(auto_now=True)
    class Meta:
        verbose_name_plural = "Post Categories"
        ordering = ['name']
    def __str__(self):
        return self.name
class Post(models.Model):
    # A user-authored post with an optional category and like tracking.
    id = models.AutoField(primary_key=True)
    user_id = models.ForeignKey(settings.AUTH_USER_MODEL, on_delete=models.SET(None))
    title = models.CharField(max_length=50, null=True)
    post_category_id = models.ForeignKey(PostCategory, null=True, on_delete=models.SET_NULL, blank=True)
    content = models.TextField(null=True)
    # NOTE(review): likes are tracked both here (M2M) and in the separate
    # PostLike model below — confirm which one is authoritative.
    likes = models.ManyToManyField(settings.AUTH_USER_MODEL, blank=True, related_name="likes")
    created_at = models.DateTimeField(auto_now_add=True)
    updated_at = models.DateTimeField(auto_now=True)
    class Meta:
        # Newest posts first.
        ordering = ['-created_at']
    def __str__(self):
        return str(self.title)
class PostComment(models.Model):
    # A comment left by a user on a Post.
    id = models.AutoField(primary_key=True)
    post_id = models.ForeignKey(Post, on_delete=models.CASCADE)
    commenter_id = models.ForeignKey(settings.AUTH_USER_MODEL, on_delete=models.CASCADE)
    content = models.TextField(null=True)
    created_at = models.DateTimeField(auto_now_add=True)
    updated_at = models.DateTimeField(auto_now=True)
    class Meta:
        verbose_name_plural = "Post Comments"
        ordering = ['-created_at']
class PostLike(models.Model):
    # A single user's like on a single Post (see also Post.likes M2M).
    id = models.AutoField(primary_key=True)
    liker_id = models.ForeignKey(settings.AUTH_USER_MODEL, on_delete=models.CASCADE)
    post_id = models.ForeignKey(Post, on_delete=models.CASCADE)
    created_at = models.DateTimeField(auto_now_add=True)
    updated_at = models.DateTimeField(auto_now=True)
    class Meta:
        verbose_name_plural = "Post Likes"
| 35.247191 | 134 | 0.739241 | [
"MIT"
] | KayT77/SMA-Team-2 | Details/models.py | 3,137 | Python |
import customSocket
import sys, pygame
#constants
windowSize = width, height = 800, 600
#displayed in the window t ogive directiosn to the driver
instructionTextLines = open('commands.txt').readlines()
activeColor = (0,175,0)
inactiveColor = (255,0,0)
textColor = (0,0,0)
screen = pygame.display.set_mode(windowSize)
################window initialization#################################
#makes hte window, sets color, displays text etc.
def initializeWindow():
    """Create the driver status window: init pygame, paint the background
    with activeColor (green), set the caption and show the command list.

    pygame.init() must run before any display/font call below.
    """
    pygame.init()
    setBackgorundColor(activeColor)
    pygame.display.set_caption('CWRU NASA RMC 2015-2016')
    displayIntructionText()
def displayIntructionText():
    """Render every line read from commands.txt onto the window, one row
    per line of the file."""
    for row_index, command_line in enumerate(instructionTextLines):
        displayText(command_line, row_index)
#creating the text object, putting it in the window, updating
#takes in a string
def displayText(text, lineNumber):
    """Draw one line of text, centered horizontally, 25 px per row
    starting 10 px from the top, then refresh the display."""
    mono_font = pygame.font.SysFont("monospace", 20)
    surface, bounding_rect = getTextObject(text, mono_font)
    bounding_rect.center = (width/2, 10 + 25 * lineNumber)
    screen.blit(surface, bounding_rect)
    pygame.display.update()
pygame.display.update()
#getting the font, text rectangle etc.
#takes in the string of fonts and a pygame Font
def getTextObject(text, font):
    """Render *text* with *font* in textColor; return the surface and its
    bounding rectangle."""
    rendered = font.render(text, True, textColor)
    bounding_rect = rendered.get_rect()
    return rendered, bounding_rect
def setBackgorundColor(colorTuple):
    """Fill the whole window with *colorTuple* (RGB) and refresh.

    NOTE(review): the name carries a typo ("Backgorund"); renaming would
    require updating every call site in this file.
    """
    screen.fill(colorTuple)
    pygame.display.update()
################################# Gettting Keyboard state ################################
#gets the currently pressed keys and sends them over the socket
def sendKeyPresses():
    """Event loop: forward each key-down to the robot until ESC is hit.

    Blocks on pygame events, extracts the numeric key code from each
    KeyDown event, translates it to the robot command byte and sends it.
    When ESC (key code '27') arrives, shuts pygame down and returns.
    """
    while True:
        # str(event) yields the event's repr; splitting on ', ' then ' '
        # extracts the key code. This is fragile — pygame's event.key
        # attribute would be more robust.
        nextEvent = str(pygame.event.wait())
        if 'KeyDown' in nextEvent:
            key = nextEvent.split(', ')[1].split(' ')[1]
            # NOTE(review): sendCommand is not defined in this file;
            # presumably it should come from customSocket — confirm.
            sendCommand(translateToHex(key))
            if key == '27':  # ESC terminates the control loop
                pygame.quit()
                return
def translateToHex(key):
    """Map a pygame key-code string to the robot command byte.

    Returns 0 for any key that has no mapping.
    """
    command_for_key = {
        '273': 76,
        '274': 77,
        '275': 78,
        '276': 79,
        '46': 57,
        '47': 58,
        '115': 33,
        '119': 17,
        '100': 34,
        '97': 32,
        '102': 35,
        '114': 19,
        '104': 37,
        '32': 64,
        '111': 24,
        '27': 69,
    }
    return command_for_key.get(key, 0)
#Waits for a keyboard event, determines which keys are pressed after each keyboard event,
#returns the list of currently pressed keys
def getNextKeys():
    """Wait for the next keyboard event and return the pressed-key state
    (thin wrapper around getCurrentKeys)."""
    return getCurrentKeys()
#def sendKeys(keys):
#socket.send(keys)
def getCurrentKeys():
    """Block until the next pygame event; if it is a key-down, return
    pygame's pressed-key state sequence, otherwise return None.

    Fix: the original called `pygameEvent.event.event_name()` and
    `pygame.key.getPressed()` — neither exists in the pygame API
    (Event objects expose `.type`; the key module method is
    `get_pressed`), so this function always raised AttributeError.
    """
    pygameEvent = pygame.event.wait()
    if pygameEvent.type == pygame.KEYDOWN:
        return pygame.key.get_pressed()
############### Main program ####################################
# Program entry: bring up the UI, open the socket, then pump key events.
initializeWindow()
# NOTE(review): initializeSocket is not defined in this file and is not
# imported by name (only `import customSocket` above), so this line raises
# NameError as written — confirm it should be customSocket.initializeSocket().
initializeSocket()
sendKeyPresses()
| 25.910891 | 92 | 0.670615 | [
"Apache-2.0"
] | sturzl/keyboardControlSocket | commandWindow.py | 2,617 | Python |
import os
from gym import utils
from gym.envs.robotics import fetch_env
import numpy as np
from goal_prox.envs.holdout_sampler import HoldoutSampler, LineHoldoutSampler
from goal_prox.envs.old_holdout_sampler import OldHoldoutSampler
# Ensure we get the path separator correct on windows
MODEL_XML_PATH = os.path.join('fetch', 'push.xml')
Y_NOISE = 0.02
X_NOISE = 0.05
OBJ_X_NOISE = 0.05
OFFSET = 0.10
class FetchPushEnvCustom(fetch_env.FetchEnv, utils.EzPickle):
    """Fetch push task with custom holdout-based object/goal sampling.

    The object spawns OFFSET in front of the gripper and the goal OFFSET
    behind it; both positions are perturbed by holdout samplers whose
    extent is controlled by set_noise_ratio and self.coverage.
    """
    def __init__(self, reward_type='dense'):
        initial_qpos = {
            'robot0:slide0': 0.405,
            'robot0:slide1': 0.48,
            'robot0:slide2': 0.0,
            'object0:joint': [1.25, 0.53, 0.4, 1., 0., 0., 0.],
        }
        self.coverage = 1.0
        self.goal_noise = True
        self.rnd_gen = False
        self.set_noise_ratio(1.0, 1.0)
        fetch_env.FetchEnv.__init__(
            self, MODEL_XML_PATH, has_object=True, block_gripper=True, n_substeps=20,
            gripper_extra_height=0.0, target_in_the_air=False, target_offset=0,
            # The ranges shouldn't matter because we sample ourselves
            obj_range=0.1, target_range=0, distance_threshold=0.05,
            initial_qpos=initial_qpos, reward_type=reward_type)
        utils.EzPickle.__init__(self)
    def set_noise_ratio(self, noise_ratio, goal_noise_ratio):
        """Rebuild the object/goal samplers scaled by the given ratios.

        Object noise spans only positive y offsets, goal noise only
        negative ones (they start OFFSET apart in opposite directions).
        """
        self.obj_sampler = OldHoldoutSampler([-noise_ratio * OBJ_X_NOISE, 0],
                [noise_ratio * OBJ_X_NOISE, noise_ratio * Y_NOISE * 2], 4)
        self.goal_sampler = OldHoldoutSampler(
                [-goal_noise_ratio*X_NOISE, -goal_noise_ratio*Y_NOISE * 2],
                [goal_noise_ratio*X_NOISE, 0], 4)
        # self.obj_sampler = OldHoldoutSampler([-noise_ratio * OBJ_X_NOISE, -noise_ratio * Y_NOISE],
        #         [noise_ratio * OBJ_X_NOISE, noise_ratio * Y_NOISE], 4)
        # self.goal_sampler = OldHoldoutSampler(
        #         [-goal_noise_ratio*X_NOISE, -goal_noise_ratio*Y_NOISE],
        #         [goal_noise_ratio*X_NOISE, goal_noise_ratio*Y_NOISE], 4)
    def _get_obs(self):
        # Append the desired goal to the flat observation vector so
        # policies see a goal-conditioned input.
        obs = super()._get_obs()
        obs['observation'] = np.concatenate([obs['observation'],
            obs['desired_goal']])
        return obs
    def relabel_ob(self, ob_current, ob_future):
        """Hindsight relabeling: replace the goal (last 3 entries) of
        ob_current with the achieved state of ob_future."""
        import torch
        if isinstance(ob_current, torch.Tensor):
            return torch.cat([ob_current[:-3], ob_future[-3:]])
        return np.concatenate([ob_current[:-3], ob_future[-3:]])
    def is_reached(self, ob):
        """Return 1.0 if the observation's achieved position is within
        distance_threshold of its appended goal, else 0.0."""
        import torch
        if isinstance(ob, torch.Tensor):
            ob = ob.cpu()
        # NOTE(review): assumes ob[3:6] is the object position in the
        # flattened observation layout from _get_obs — confirm indices.
        dist = np.linalg.norm(ob[-3:] - ob[3:6])
        return float(dist < self.distance_threshold)
    def _reset_sim(self):
        """Reset MuJoCo state and place the object at its sampled start."""
        self.sim.set_state(self.initial_state)
        # Randomize start position of object.
        if self.has_object:
            object_xpos = self.initial_gripper_xpos[:2] + np.array([0.0, OFFSET])
            object_xpos += self.obj_sampler.sample(self.coverage,
                    self.np_random)
            object_qpos = self.sim.data.get_joint_qpos('object0:joint')
            assert object_qpos.shape == (7,)
            object_qpos[:2] = object_xpos
            self.sim.data.set_joint_qpos('object0:joint', object_qpos)
        self.sim.forward()
        return True
    def _sample_goal(self):
        """Sample a goal OFFSET behind the gripper on the table surface."""
        goal = self.initial_gripper_xpos[:3] + np.array([0.0, -1*OFFSET, 0.0])
        if self.goal_noise:
            goal[:2]+= self.goal_sampler.sample(self.coverage, self.np_random)
        goal += self.target_offset
        goal[2] = self.height_offset  # keep the goal on the table plane
        return goal.copy()
    def _viewer_setup(self):
        # Fixed camera pose for rendering (lookat is hard-coded, the
        # gripper-based value is computed then discarded).
        body_id = self.sim.model.body_name2id('robot0:gripper_link')
        lookat = self.sim.data.body_xpos[body_id]
        lookat = [1.34193362, 0.74910034, 0.55472272]
        for idx, value in enumerate(lookat):
            self.viewer.cam.lookat[idx] = value
        self.viewer.cam.distance = 1.3
        self.viewer.cam.azimuth = 132
        self.viewer.cam.elevation = -14.
    def _render_callback(self):
        # Visualize target.
        sites_offset = (self.sim.data.site_xpos - self.sim.model.site_pos).copy()
        site_id = self.sim.model.site_name2id('target0')
        self.sim.model.site_pos[site_id] = self.goal - sites_offset[0]
        self.sim.forward()
class FetchDebugPushEnv(FetchPushEnvCustom):
    """Debug variant: object positions drawn from a line sampler with a
    fixed y extent, goal from a symmetric holdout sampler."""
    def set_noise_ratio(self, noise_ratio, goal_noise_ratio):
        noise_ratio *= 1  # no-op; leftover scaling knob
        # y_noise_scale is chosen so y_noise_scale * noise_ratio * Y_NOISE
        # == 0.15, i.e. the object's y extent is fixed at +/-0.15
        # regardless of noise_ratio.
        y_noise_scale = 0.15 / (noise_ratio * Y_NOISE)
        #y_noise_scale = 1.0
        self.obj_sampler = LineHoldoutSampler(
                [-noise_ratio * OBJ_X_NOISE, -y_noise_scale*noise_ratio * Y_NOISE],
                [noise_ratio * OBJ_X_NOISE, y_noise_scale*noise_ratio * Y_NOISE])
        self.goal_sampler = HoldoutSampler(
                [-goal_noise_ratio*X_NOISE, -goal_noise_ratio*Y_NOISE],
                [goal_noise_ratio*X_NOISE, goal_noise_ratio*Y_NOISE], 1, True)
| 38.8125 | 100 | 0.636675 | [
"MIT"
] | clvrai/goal_prox_il | goal_prox/envs/fetch/custom_push.py | 4,968 | Python |
from ._tracing import Beam
from ._sbt import trace_surfaces | 29.5 | 32 | 0.847458 | [
"MIT"
] | draustin/otk | otk/asbt1/__init__.py | 59 | Python |
#!/usr/bin/env python
# -*- coding: utf-8 -*-
#
# Copyright 2017 Guenter Bartsch, Heiko Schaefer
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
import unittest
import logging
import codecs
from nltools import misc
from sqlalchemy.orm import sessionmaker
from zamiaai import model
from zamiaprolog.logicdb import LogicDB
from aiprolog.runtime import AIPrologRuntime
from aiprolog.parser import AIPrologParser
UNITTEST_MODULE = 'unittests'
UNITTEST_CONTEXT = 'unittests'
class TestAIProlog (unittest.TestCase):
    """Exercises the aiprolog runtime's built-in predicates against a
    fresh logic DB (the unittests module is cleared per test)."""
    def setUp(self):
        # NOTE(review): `config` is loaded but never used here —
        # presumably load_config has required side effects; confirm.
        config = misc.load_config('.airc')
        #
        # logic DB
        #
        self.db = LogicDB(model.url)
        #
        # aiprolog environment setup
        #
        self.prolog_rt = AIPrologRuntime(self.db)
        self.parser    = AIPrologParser(self.db)
        self.prolog_rt.set_trace(True)
        self.db.clear_module(UNITTEST_MODULE)
    # @unittest.skip("temporarily disabled")
    def test_tokenize(self):
        """tokenize/3 splits a German string into two word tokens."""
        clause = self.parser.parse_line_clause_body("tokenize (de, 'hallo, welt!', X)")
        logging.debug('clause: %s' % clause)
        solutions = self.prolog_rt.search(clause)
        logging.debug('solutions: %s' % repr(solutions))
        self.assertEqual (len(solutions), 1)
        self.assertEqual (len(solutions[0]['X'].l), 2)
    # @unittest.skip("temporarily disabled")
    def test_edit_distance(self):
        """edit_distance/3 on token lists differing in one token is 1.0."""
        clause = self.parser.parse_line_clause_body("edit_distance (['hallo', 'welt'], ['hallo', 'springfield'], X)")
        logging.debug('clause: %s' % clause)
        solutions = self.prolog_rt.search(clause)
        logging.debug('solutions: %s' % repr(solutions))
        self.assertEqual (len(solutions), 1)
        self.assertEqual (solutions[0]['X'].f, 1.0)
# class TestMacroEngine (unittest.TestCase):
#
# def setUp(self):
# Session = sessionmaker(bind=model.engine)
# self.session = Session()
#
# def testLocalMacros(self):
#
# me = NLPMacroEngine(self.session)
# discourses = me.macro_expand('de', u'(HAL,|Computer,|Du,|) (Ich bin|Ich fühle mich|Man bin ich|Da bin ich) (zufrieden|so zufrieden|glücklich|so glücklich|froh|so froh)', u'', None)
#
# self.assertEqual(len(discourses), 96)
#
# def testMacroTokens(self):
#
# me = NLPMacroEngine(self.session)
# discourses = me.macro_expand('de', u'hallo (HAL|Computer|Du|lieber computer|) wie geht es dir (heute|)',
# u'foo @MACRO_0:TSTART_W_0 bar @MACRO_0:TEND_W_0 @MACRO_0:W baz @MACRO_1:TEND_W_0?', None)
#
# self.assertEqual(len(discourses), 10)
# self.assertEqual(discourses[0][1], u'foo 1 bar 2 HAL baz 7?')
#
# discourses = me.macro_expand('de', u'foobar what is the full name of (foo|donald trump)',
# u'foo @MACRO_0:TSTART_W_0 bar @MACRO_0:TEND_W_0', None)
#
# self.assertEqual(len(discourses), 2)
# self.assertEqual(discourses[0][1], u'foo 7 bar 8')
# self.assertEqual(discourses[1][1], u'foo 7 bar 9')
if __name__ == "__main__":
    # Verbose test logging, but silence SQLAlchemy's per-statement output.
    logging.basicConfig(level=logging.DEBUG)
    logging.getLogger('sqlalchemy.engine').setLevel(logging.WARNING)
    unittest.main()
| 32.752137 | 190 | 0.640919 | [
"Apache-2.0"
] | 0zAND1z/zamia-ai | tests/test_aiprolog.py | 3,835 | Python |
from __future__ import absolute_import
# --------------------------------------------------------
# Faster R-CNN
# Copyright (c) 2015 Microsoft
# Licensed under The MIT License [see LICENSE for details]
# Written by Ross Girshick and Sean Bell
# --------------------------------------------------------
# --------------------------------------------------------
# Reorganized and modified by Jianwei Yang and Jiasen Lu
# --------------------------------------------------------
import torch
import torch.nn as nn
import numpy as np
import math
import yaml
from model.utils.config import cfg
from .generate_anchors import generate_anchors
from .bbox_transform import bbox_transform_inv, clip_boxes, clip_boxes_batch
from model.nms.nms_wrapper import nms
import pdb
DEBUG = False
class _ProposalLayer(nn.Module):
    """
    Outputs object detection proposals by applying estimated bounding-box
    transformations to a set of regular boxes (called "anchors").
    """

    def __init__(self, feat_stride, scales, ratios):
        super(_ProposalLayer, self).__init__()
        # Stride of the backbone feature map relative to the input image;
        # used to place one group of anchors at every feature-map cell.
        self._feat_stride = feat_stride
        self._anchors = torch.from_numpy(generate_anchors(scales=np.array(scales),
            ratios=np.array(ratios))).float()
        self._num_anchors = self._anchors.size(0)

        # rois blob: holds R regions of interest, each is a 5-tuple
        # (n, x1, y1, x2, y2) specifying an image batch index n and a
        # rectangle (x1, y1, x2, y2)
        # top[0].reshape(1, 5)
        #
        # # scores blob: holds scores for R regions of interest
        # if len(top) > 1:
        #     top[1].reshape(1, 1, 1, 1)

    def forward(self, input):
        """Turn RPN outputs into at most post_nms_topN proposals per image.

        input is (rpn_cls_prob, rpn_bbox_pred, im_info, cfg_key); returns
        a (batch, post_nms_topN, 5) tensor of (batch_idx, x1, y1, x2, y2)
        rows, zero-padded when NMS keeps fewer than post_nms_topN boxes.
        """
        # Algorithm:
        #
        # for each (H, W) location i
        #   generate A anchor boxes centered on cell i
        #   apply predicted bbox deltas at cell i to each of the A anchors
        # clip predicted boxes to image
        # remove predicted boxes with either height or width < threshold
        # sort all (proposal, score) pairs by score from highest to lowest
        # take top pre_nms_topN proposals before NMS
        # apply NMS with threshold 0.7 to remaining proposals
        # take after_nms_topN proposals after NMS
        # return the top proposals (-> RoIs top, scores top)

        # the first set of _num_anchors channels are bg probs
        # the second set are the fg probs
        scores = input[0][:, self._num_anchors:, :, :]
        bbox_deltas = input[1]
        im_info = input[2]
        cfg_key = input[3]

        pre_nms_topN  = cfg[cfg_key].RPN_PRE_NMS_TOP_N
        post_nms_topN = cfg[cfg_key].RPN_POST_NMS_TOP_N
        nms_thresh    = cfg[cfg_key].RPN_NMS_THRESH
        min_size      = cfg[cfg_key].RPN_MIN_SIZE

        batch_size = bbox_deltas.size(0)

        # Build the (K, 4) grid of per-cell shifts, K = feat_h * feat_w.
        feat_height, feat_width = scores.size(2), scores.size(3)
        shift_x = np.arange(0, feat_width) * self._feat_stride
        shift_y = np.arange(0, feat_height) * self._feat_stride
        shift_x, shift_y = np.meshgrid(shift_x, shift_y)
        shifts = torch.from_numpy(np.vstack((shift_x.ravel(), shift_y.ravel(),
                                  shift_x.ravel(), shift_y.ravel())).transpose())
        shifts = shifts.contiguous().type_as(scores).float()

        A = self._num_anchors
        K = shifts.size(0)

        self._anchors = self._anchors.type_as(scores)
        # anchors = self._anchors.view(1, A, 4) + shifts.view(1, K, 4).permute(1, 0, 2).contiguous()
        # Broadcast base anchors over all K cells: (batch, K*A, 4).
        anchors = self._anchors.view(1, A, 4) + shifts.view(K, 1, 4)
        anchors = anchors.view(1, K * A, 4).expand(batch_size, K * A, 4)

        # Transpose and reshape predicted bbox transformations to get them
        # into the same order as the anchors:
        bbox_deltas = bbox_deltas.permute(0, 2, 3, 1).contiguous()
        bbox_deltas = bbox_deltas.view(batch_size, -1, 4)

        # Same story for the scores:
        scores = scores.permute(0, 2, 3, 1).contiguous()
        scores = scores.view(batch_size, -1)

        # Convert anchors into proposals via bbox transformations
        proposals = bbox_transform_inv(anchors, bbox_deltas, batch_size)

        # 2. clip predicted boxes to image
        proposals = clip_boxes(proposals, im_info, batch_size)
        # proposals = clip_boxes_batch(proposals, im_info, batch_size)

        # NOTE(review): min-size filtering is disabled (commented out
        # below), so min_size is read but unused here.
        # assign the score to 0 if it's non keep.
        # keep = self._filter_boxes(proposals, min_size * im_info[:, 2])

        # trim keep index to make it euqal over batch
        # keep_idx = torch.cat(tuple(keep_idx), 0)

        # scores_keep = scores.view(-1)[keep_idx].view(batch_size, trim_size)
        # proposals_keep = proposals.view(-1, 4)[keep_idx, :].contiguous().view(batch_size, trim_size, 4)

        # _, order = torch.sort(scores_keep, 1, True)

        scores_keep = scores
        proposals_keep = proposals
        _, order = torch.sort(scores_keep, 1, True)

        output = scores.new(batch_size, post_nms_topN, 5).zero_()
        for i in range(batch_size):
            # # 3. remove predicted boxes with either height or width < threshold
            # # (NOTE: convert min_size to input image scale stored in im_info[2])
            proposals_single = proposals_keep[i]
            scores_single = scores_keep[i]

            # # 4. sort all (proposal, score) pairs by score from highest to lowest
            # # 5. take top pre_nms_topN (e.g. 6000)
            order_single = order[i]

            if pre_nms_topN > 0 and pre_nms_topN < scores_keep.numel():
                order_single = order_single[:pre_nms_topN]

            proposals_single = proposals_single[order_single, :]
            scores_single = scores_single[order_single].view(-1, 1)

            # 6. apply nms (e.g. threshold = 0.7)
            # 7. take after_nms_topN (e.g. 300)
            # 8. return the top proposals (-> RoIs top)

            keep_idx_i = nms(torch.cat((proposals_single, scores_single), 1), nms_thresh, force_cpu=not cfg.USE_GPU_NMS)
            keep_idx_i = keep_idx_i.long().view(-1)

            if post_nms_topN > 0:
                keep_idx_i = keep_idx_i[:post_nms_topN]
            proposals_single = proposals_single[keep_idx_i, :]
            scores_single = scores_single[keep_idx_i, :]

            # padding 0 at the end.
            num_proposal = proposals_single.size(0)
            output[i, :, 0] = i
            output[i, :num_proposal, 1:] = proposals_single

        return output

    def backward(self, top, propagate_down, bottom):
        """This layer does not propagate gradients."""
        pass

    def reshape(self, bottom, top):
        """Reshaping happens during the call to forward."""
        pass

    def _filter_boxes(self, boxes, min_size):
        """Remove all boxes with any side smaller than min_size."""
        ws = boxes[:, :, 2] - boxes[:, :, 0] + 1
        hs = boxes[:, :, 3] - boxes[:, :, 1] + 1
        keep = ((ws >= min_size.view(-1, 1).expand_as(ws)) & (hs >= min_size.view(-1, 1).expand_as(hs)))
        return keep
| 39.943503 | 120 | 0.603112 | [
"MIT"
] | busyboxs/pytorch-faster-rcnn | lib/model/rpn/proposal_layer.py | 7,070 | Python |
"""
Use lldb Python API to verify that expression evaluation for property references uses the correct getters and setters
"""
from __future__ import print_function
import os
import time
import re
import lldb
from lldbsuite.test.decorators import *
from lldbsuite.test.lldbtest import *
from lldbsuite.test import lldbutil
class ObjCPropertyTestCase(TestBase):
    """Verifies that lldb expression evaluation routes Objective-C
    property references through the correct getter/setter methods.

    Improvement over the original: exact-value checks now use
    assertEqual/assertNotEqual instead of assertTrue(a == b), so
    failures report the mismatching values instead of just "False".
    """

    mydir = TestBase.compute_mydir(__file__)

    def setUp(self):
        # Call super's setUp().
        TestBase.setUp(self)
        # Source file containing the breakpoint marker.
        self.source_name = 'main.m'

    @skipUnlessDarwin
    @add_test_categories(['pyapi'])
    def test_objc_properties(self):
        """Test that expr uses the correct property getters and setters"""
        if self.getArchitecture() == 'i386':
            self.skipTest("requires modern objc runtime")

        self.build()
        exe = os.path.join(os.getcwd(), "a.out")

        # Create a target from the debugger.
        target = self.dbg.CreateTarget(exe)
        self.assertTrue(target, VALID_TARGET)

        # Set up our breakpoints:
        main_bkpt = target.BreakpointCreateBySourceRegex(
            "Set a breakpoint here.", lldb.SBFileSpec(self.source_name))
        self.assertTrue(main_bkpt, VALID_BREAKPOINT)
        self.assertEqual(main_bkpt.GetNumLocations(), 1, VALID_BREAKPOINT)

        # Now launch the process, and do not stop at the entry point.
        process = target.LaunchSimple(
            None, None, self.get_process_working_directory())

        self.assertEqual(process.GetState(), lldb.eStateStopped,
                         PROCESS_STOPPED)

        threads = lldbutil.get_threads_stopped_at_breakpoint(
            process, main_bkpt)
        self.assertEqual(len(threads), 1)
        thread = threads[0]
        frame = thread.GetFrameAtIndex(0)

        mine = frame.FindVariable("mine")
        self.assertTrue(mine.IsValid())
        access_count = mine.GetChildMemberWithName("_access_count")
        self.assertTrue(access_count.IsValid())
        start_access_count = access_count.GetValueAsUnsigned(123456)
        # 123456 is GetValueAsUnsigned's fail value; a real read differs.
        self.assertNotEqual(start_access_count, 123456)

        #
        # The first set of tests test calling the getter & setter of
        # a property that actually only has a getter & setter and no
        # @property.
        #
        nonexistant_value = frame.EvaluateExpression(
            "mine.nonexistantInt", False)
        nonexistant_error = nonexistant_value.GetError()
        self.assertTrue(nonexistant_error.Success())
        nonexistant_int = nonexistant_value.GetValueAsUnsigned(123456)
        self.assertEqual(nonexistant_int, 6)

        # Calling the getter function would up the access count, so make sure
        # that happened.
        new_access_count = access_count.GetValueAsUnsigned(123456)
        self.assertEqual(new_access_count - start_access_count, 1)
        start_access_count = new_access_count

        #
        # Now call the setter, then make sure that
        nonexistant_change = frame.EvaluateExpression(
            "mine.nonexistantInt = 10", False)
        nonexistant_error = nonexistant_change.GetError()
        self.assertTrue(nonexistant_error.Success())

        # Calling the setter function would up the access count, so make sure
        # that happened.
        new_access_count = access_count.GetValueAsUnsigned(123456)
        self.assertEqual(new_access_count - start_access_count, 1)
        start_access_count = new_access_count

        #
        # Now we call the getter of a property that is backed by an ivar,
        # make sure it works and that we actually update the backing ivar.
        #
        backed_value = frame.EvaluateExpression("mine.backedInt", False)
        backed_error = backed_value.GetError()
        self.assertTrue(backed_error.Success())
        backing_value = mine.GetChildMemberWithName("_backedInt")
        self.assertTrue(backing_value.IsValid())
        self.assertEqual(backed_value.GetValueAsUnsigned(12345),
                         backing_value.GetValueAsUnsigned(23456))

        unbacked_value = frame.EvaluateExpression("mine.unbackedInt", False)
        unbacked_error = unbacked_value.GetError()
        self.assertTrue(unbacked_error.Success())

        idWithProtocol_value = frame.EvaluateExpression(
            "mine.idWithProtocol", False)
        idWithProtocol_error = idWithProtocol_value.GetError()
        self.assertTrue(idWithProtocol_error.Success())
        self.assertEqual(idWithProtocol_value.GetTypeName(), "id")

        # Make sure that class property getter works as expected
        value = frame.EvaluateExpression("BaseClass.classInt", False)
        self.assertTrue(value.GetError().Success())
        self.assertEqual(value.GetValueAsUnsigned(11111), 123)

        # Make sure that class property setter works as expected
        value = frame.EvaluateExpression("BaseClass.classInt = 234", False)
        self.assertTrue(value.GetError().Success())

        # Verify that setter above actually worked
        value = frame.EvaluateExpression("BaseClass.classInt", False)
        self.assertTrue(value.GetError().Success())
        self.assertEqual(value.GetValueAsUnsigned(11111), 234)
| 37.6 | 117 | 0.671922 | [
"Apache-2.0"
] | Polidea/SiriusObfuscator | SymbolExtractorAndRenamer/lldb/packages/Python/lldbsuite/test/lang/objc/objc-property/TestObjCProperty.py | 5,264 | Python |
# -*- coding: utf8 -*-
# Copyright (c) 2021 Niklas Rosenstein
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to
# deal in the Software without restriction, including without limitation the
# rights to use, copy, modify, merge, publish, distribute, sublicense, and/or
# sell copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in
# all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
# FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS
# IN THE SOFTWARE.
import logging
import re
from typing import Optional, Union
from databind.core import Converter, Context, Direction
from databind.core.mapper.objectmapper import ObjectMapper
from nr.utils.git import Git
from packaging.version import Version as _Version
logger = logging.getLogger(__name__)
class Version(_Version):
    """ An extension of #packaging.version.Version which supports a
    commit-distance and commit SHA suffix in the format of `-X-gY` (where
    X is the distance and Y is the lowercase 7-character SHA sum). """

    # Parsed suffix components; both are None when the string had no suffix.
    commit_distance: Optional[int]
    sha: Optional[str]

    def __init__(self, s: Union['Version', str]):
        if isinstance(s, Version):
            s = str(s)
        elif not isinstance(s, str):
            raise TypeError('expected Version or str, got {}'.format(type(s).__name__))
        commit_distance: Optional[int]
        sha: Optional[str]
        match = re.match(r'(.*)-(\d+)-g([0-9a-f]{7})', s)
        if match:
            s = match.group(1)
            commit_distance = int(match.group(2))
            sha = match.group(3)
        else:
            commit_distance = None
            sha = None
        super().__init__(s)
        self.commit_distance = commit_distance
        self.sha = sha

    def __str__(self):
        s = super().__str__()
        if self.commit_distance and self.sha:
            s += '-{}-g{}'.format(self.commit_distance, self.sha)
        return s

    def __lt__(self, other):
        # Base-version ordering first; the commit distance breaks ties.
        if super().__lt__(other):
            return True
        if super().__eq__(other):
            return (self.commit_distance or 0) < (other.commit_distance or 0)
        return False

    def __gt__(self, other):
        return other < self and other != self

    def __eq__(self, other):
        # Equal only when base version AND suffix agree.
        # NOTE(review): assumes *other* has commit_distance/sha attributes,
        # i.e. is a Version; comparing against a plain packaging Version
        # raises AttributeError -- confirm callers never do that.
        if super().__eq__(other) is True:
            return (self.commit_distance, self.sha) == (other.commit_distance, other.sha)
        return False

    def __ne__(self, other):
        return not (self == other)

    def __hash__(self):
        # Fix: defining __eq__ implicitly set __hash__ to None, making
        # Version unhashable (unusable as dict key / set member) although
        # the base class is hashable. Reusing the base hash is consistent
        # with the refined __eq__: objects equal under it are also equal
        # (hence equal-hashed) for the base class.
        return super().__hash__()

    @property
    def pep440_compliant(self):
        # The `-X-gY` suffix is not valid PEP 440, so only suffix-free
        # versions are compliant.
        return self.sha is None
def parse_version(version_string: str) -> Version:
    """Parse *version_string* into a :class:`Version` instance."""
    return Version(version_string)
def bump_version(version: Version, kind: str) -> Version:
    """Return a new Version with the requested component bumped.

    kind: one of ``'major'``, ``'minor'``, ``'patch'``, ``'post'``.
    Bumping a component resets the less-significant ones; any
    commit-distance suffix is dropped (the new version is rebuilt from
    major/minor/patch/post only).

    Raises ValueError for an unknown *kind*.
    """
    major, minor, patch, post = version.major, version.minor, version.micro, version.post
    if kind == 'post':
        # Fix: modern packaging exposes Version.post as an int (or None),
        # and the old code rendered a ('post', n) tuple straight into the
        # version string ("...post('post', 2)"), producing an invalid
        # version. Normalize to an int and support the legacy tuple form.
        if post is None:
            post = 1
        elif isinstance(post, tuple):
            post = post[1] + 1
        else:
            post = post + 1
    elif kind == 'patch':
        post = None
        patch += 1
    elif kind == 'minor':
        post = None
        patch = 0
        minor += 1
    elif kind == 'major':
        post = None
        patch = minor = 0
        major += 1
    else:
        raise ValueError('invalid kind: {!r}'.format(kind))
    string = '%s.%s.%s' % (major, minor, patch)
    if post:
        string += '.post' + str(post)
    return Version(string)
def get_commit_distance_version(repo_dir: str, version: Version, latest_tag: str) -> Optional[Version]:
    """
    This function creates a string which describes the version of the
    monorepo or package that includes the commit distance and SHA revision
    number.
    For a mono repository, the full commit distance is used. The same is true
    for a single package. For a package inside a mono repository that does not
    apply mono versioning, the packages' local commit distance is used.
    This is close to what `git describe --tags` does. An example version number
    generated by this function is: `0.1.0+24.gd9ade3f`. If the working state is
    dirty, `.dirty` will be appended to the local version.
    Notes:
    - If there is no commit distance from the *latest_tag* to the current
      state of the repository, this function returns None.
    - The version returned by this function is a PEP440 local version that
      cannot be used for packages when submitting them to PyPI.
    - If the tag for the version of *subject* does not exist on the repository,
      it will fall back to 0.0.0 as the version number which is treated as
      "the beginning of the repository", even if no tag for this version exists.
      Todo: We could try to find the previous tag for this subject and use that.
    """
    git = Git(repo_dir)
    dirty = git.has_diff()
    if git.rev_parse(latest_tag):
        # Tag exists: distance is the number of commits since the tag.
        distance = len(git.rev_list(latest_tag + '..HEAD'))
    else:
        # Tag missing: fall back to 0.0.0 and count from the first commit.
        logger.warning('tag "%s" does not exist', latest_tag)
        version = Version('0.0.0')
        distance = len(git.rev_list('HEAD'))
    if distance == 0:
        # Exactly at the tag; only a dirty work tree yields a new version.
        if dirty:
            return parse_version(str(version) + '+dirty')
        return None
    rev = git.rev_parse('HEAD')
    assert rev, git
    # PEP 440 local version segment, e.g. "+24.gd9ade3f" or "+24.gd9ade3f.dirty".
    local = '+{}.g{}{}'.format(distance, rev[:7], '.dirty' if dirty else '')
    return parse_version(str(version) + local)
class VersionConverter(Converter):
    """databind converter that (de)serializes a Version as a plain string."""

    def convert(self, ctx: Context) -> object:
        if ctx.direction != Direction.serialize:
            return parse_version(ctx.value)
        return str(ctx.value)
from .utils import StringConverter
from . import mapper

# Register a module-level (de)serializer so Version round-trips as a plain
# string wherever this mapper is used.
mapper.add_converter_for_type(Version, StringConverter(parse_version))  # type: ignore
| 32.955307 | 103 | 0.698423 | [
"MIT"
] | NiklasRosenstein/shut | src/shut/model/version.py | 5,899 | Python |
"""
Off Multipage Cheatsheet
https://github.com/daniellewisDL/streamlit-cheat-sheet
@daniellewisDL : https://github.com/daniellewisDL
"""
import streamlit as st
from pathlib import Path
import base64
from modules.toc import *
# Initial page config
st.set_page_config(
page_title='Code Compendium Intro Page',
layout="wide",
# initial_sidebar_state="expanded",
)
# col2.title("Table of contents")
# col2.write("http://localhost:8502/#display-progress-and-status")
# toc.header("Header 1")
# toc.header("Header 2")
# toc.subheader("Subheader 1")
# toc.subheader("Subheader 2")
# toc.generate()
# Thanks to streamlitopedia for the following code snippet
def img_to_bytes(img_path):
    """Read the file at *img_path* and return its base64 text encoding."""
    raw = Path(img_path).read_bytes()
    return base64.b64encode(raw).decode()
# sidebar
# def cs_sidebar():
# st.sidebar.markdown('''[<img src='data:image/png;base64,{}' class='img-fluid' width=32 height=32>](https://streamlit.io/)'''.format(img_to_bytes("logomark_website.png")), unsafe_allow_html=True)
# st.sidebar.header('Streamlit cheat sheet')
# st.sidebar.markdown('''
# <small>Summary of the [docs](https://docs.streamlit.io/en/stable/api.html), as of [Streamlit v1.0.0](https://www.streamlit.io/).</small>
# ''', unsafe_allow_html=True)
# st.sidebar.markdown('__How to install and import__')
# st.sidebar.code('$ pip install streamlit')
# st.sidebar.markdown('Import convention')
# st.sidebar.code('>>> import streamlit as st')
# st.sidebar.markdown('__Add widgets to sidebar__')
# st.sidebar.code('''
# st.sidebar.<widget>
# >>> a = st.sidebar.radio(\'R:\',[1,2])
# ''')
# st.sidebar.markdown('__Command line__')
# st.sidebar.code('''
# $ streamlit --help
# $ streamlit run your_script.py
# $ streamlit hello
# $ streamlit config show
# $ streamlit cache clear
# $ streamlit docs
# $ streamlit --version
# ''')
# st.sidebar.markdown('__Pre-release features__')
# st.sidebar.markdown('[Beta and experimental features](https://docs.streamlit.io/en/stable/api.html#beta-and-experimental-features)')
# st.sidebar.code('''
# pip uninstall streamlit
# pip install streamlit-nightly --upgrade
# ''')
# st.sidebar.markdown('''<small>[st.cheat_sheet v1.0.0](https://github.com/daniellewisDL/streamlit-cheat-sheet) | Oct 2021</small>''', unsafe_allow_html=True)
# return None
##########################
# Main body of cheat sheet
##########################
def div():
    """Leftover stub from an editing session; intentionally does nothing."""
    # NOTE(review): in the original snapshot this `def` had no body (a
    # syntax error); `pass` makes the module importable again.
    pass


def cs_body():
    """Render the main two-column body of the intro page.

    Column 1 carries the intro/bio markdown; column 2 is currently unused
    (the cheat-sheet snippets below are kept commented out for reference).
    """
    col1, col2 = st.columns(2)
    col1.title('Ryan Paik Coding Compendium')
    # Fix: original called the non-existent `col1.markdwon` (typo) which
    # raises AttributeError at render time.
    col1.markdown('''
“You don't learn to walk by following rules. You learn by doing, and by falling over.”
-Richard Branson
-----
''')
    col1.subheader("Welcome to my Code Compendium.")
    col1.markdown('''
This website/webapp is my personal cheatsheet for of all the code snippets that I have needed over the past 2 years. This ended up being a quick detour into Streamlit that I fell in love with while I was building flask api's.
-----
**Programming is only as deep as you want to dive in.**
This webapp features the basic code snippets from all the "googling" from programming I have done.
I have taken the plunge and have created my own markdown notebooks organizing information from quick solution tidbits to documentation for programming languages.
Please visit my github for practical code and my research notebooks:
*[rypaik (Ryan Paik) · GitHub](https://github.com/rypaik)*
If you would like access to my Gist please email me.
[email protected]
-----
**Bio:**
Currently a Sophomore at University of Illinois at Urbana-Champaign
Working Nights on my degree from the System Engineering Program
**Hobbies:**
Trying to become a real guitar hero minus the game system, playing Valorant with the St Mark's crew, getting interesting eats no matter where I am, and playing toss with my baseball field rat of a cousin.
The newest hobby is figuring out what I can build with all the new breakthroughs in technology.
**Currently Working On**
Frameworks and Languages:
- Flask, Django, FastAPI, PyTorch, Streamlit, OpenCV, shell scripting, Python, C++
Databases:
- Postgres, Redis, MongoDB, and applicable ORMs
When I can get up for Air:
- React, swift(ios), Rust, GO!!
- Find a team to get a paper In Arxiv
**This site will be constantly updated as long as I program. Feel free to pass on the URL.**
''')
    # ------------------------------------------------------------------
    # Cheat-sheet snippets kept for reference (disabled).
    # ------------------------------------------------------------------
    # col2.subheader('Display interactive widgets')
    # col2.code('''
    # st.button('Hit me')
    # st.download_button('On the dl', data)
    # st.checkbox('Check me out')
    # st.radio('Radio', [1,2,3])
    # st.selectbox('Select', [1,2,3])
    # st.multiselect('Multiselect', [1,2,3])
    # st.slider('Slide me', min_value=0, max_value=10)
    # st.select_slider('Slide to select', options=[1,'2'])
    # st.text_input('Enter some text')
    # st.number_input('Enter a number')
    # st.text_area('Area for textual entry')
    # st.date_input('Date input')
    # st.time_input('Time entry')
    # st.file_uploader('File uploader')
    # st.color_picker('Pick a color')
    # ''')
    # col2.write('Use widgets\' returned values in variables:')
    # col2.code('''
    # >>> for i in range(int(st.number_input('Num:'))): foo()
    # >>> if st.sidebar.selectbox('I:',['f']) == 'f': b()
    # >>> my_slider_val = st.slider('Quinn Mallory', 1, 88)
    # >>> st.write(slider_val)
    # ''')
    # # Control flow
    # col2.subheader('Control flow')
    # col2.code('''
    # st.stop()
    # ''')
    # # Lay out your app
    # col2.subheader('Lay out your app')
    # col2.code('''
    # st.form('my_form_identifier')
    # st.form_submit_button('Submit to me')
    # st.container()
    # st.columns(spec)
    # >>> col1, col2 = st.columns(2)
    # >>> col1.subheader('Columnisation')
    # st.expander('Expander')
    # >>> with st.expander('Expand'):
    # >>>     st.write('Juicy deets')
    # ''')
    # col2.write('Batch widgets together in a form:')
    # col2.code('''
    # >>> with st.form(key='my_form'):
    # >>>   text_input = st.text_input(label='Enter some text')
    # >>>   submit_button = st.form_submit_button(label='Submit')
    # ''')
    # # Display code
    # col2.subheader('Display code')
    # col2.code('''
    # st.echo()
    # >>> with st.echo():
    # >>>     st.write('Code will be executed and printed')
    # ''')
    # # Display progress and status
    # col2.subheader('Display progress and status')
    # col2.code('''
    # st.progress(progress_variable_1_to_100)
    # st.spinner()
    # >>> with st.spinner(text='In progress'):
    # >>>   time.sleep(5)
    # >>>   st.success('Done')
    # st.balloons()
    # st.error('Error message')
    # st.warning('Warning message')
    # st.info('Info message')
    # st.success('Success message')
    # st.exception(e)
    # ''')
    # # Placeholders, help, and options
    # col2.subheader('Placeholders, help, and options')
    # col2.code('''
    # st.empty()
    # >>> my_placeholder = st.empty()
    # >>> my_placeholder.text('Replaced!')
    # st.help(pandas.DataFrame)
    # st.get_option(key)
    # st.set_option(key, value)
    # st.set_page_config(layout='wide')
    # ''')
    # # Mutate data
    # col2.subheader('Mutate data')
    # col2.code('''
    # DeltaGenerator.add_rows(data)
    # >>> my_table = st.table(df1)
    # >>> my_table.add_rows(df2)
    # >>> my_chart = st.line_chart(df1)
    # >>> my_chart.add_rows(df2)
    # ''')
    # # Optimize performance
    # col2.subheader('Optimize performance')
    # col2.code('''
    # @st.cache
    # >>> @st.cache
    # ... def fetch_and_clean_data(url):
    # ...     # Mutate data at url
    # ...     return data
    # >>> # Executes d1 as first time
    # >>> d1 = fetch_and_clean_data(ref1)
    # >>> # Does not execute d1; returns cached value, d1==d2
    # >>> d2 = fetch_and_clean_data(ref1)
    # >>> # Different arg, so function d1 executes
    # >>> d3 = fetch_and_clean_data(ref2)
    # ''')
    # col2.subheader('Other key parts of the API')
    # col2.markdown('''
    # <small>[State API](https://docs.streamlit.io/en/stable/session_state_api.html)</small><br>
    # <small>[Theme option reference](https://docs.streamlit.io/en/stable/theme_options.html)</small><br>
    # <small>[Components API reference](https://docs.streamlit.io/en/stable/develop_streamlit_components.html)</small><br>
    # <small>[API cheat sheet](https://share.streamlit.io/daniellewisdl/streamlit-cheat-sheet/app.py)</small><br>
    # ''', unsafe_allow_html=True)
    # Column 3 TOC Generator
    # col3.subheader('test')
    # toc = Toc(col3)
    # # col2.title("Table of contents")
    # col3.write("http://localhost:8502/#display-progress-and-status", unsafe_allow_html=True)
    # toc.header("Header 1")
    # toc.header("Header 2")
    # toc.generate()
    # toc.subheader("Subheader 1")
    # toc.subheader("Subheader 2")
    # toc.generate()
    # return None
# Run main()
# if __name__ == '__main__':
# main()
# def main():
def app():
    """Page entry point used by the multipage router: draw the body."""
    # cs_sidebar()
    cs_body()
    return None
| 26.845921 | 228 | 0.658226 | [
"MIT"
] | rypaik/Streamlit_Ref | .history/pages/intro_20220303160531.py | 8,907 | Python |
import glob
import os
from collections import defaultdict
import numpy as np
from yt.data_objects.static_output import ParticleDataset, ParticleFile
from yt.frontends.gadget.data_structures import _fix_unit_ordering
from yt.funcs import only_on_root, setdefaultattr
from yt.geometry.particle_geometry_handler import ParticleIndex
from yt.utilities.exceptions import YTException
from yt.utilities.logger import ytLogger as mylog
from yt.utilities.on_demand_imports import _h5py as h5py
from .fields import OWLSSubfindFieldInfo
class OWLSSubfindParticleIndex(ParticleIndex):
    """Particle index for OWLS-SUBFIND halo catalogs.

    Halos ("FOF"/"SUBFIND" particle types) are spread over several files;
    this index computes per-file start indices and the map of files that
    hold each file's redistributed member data.
    """

    def __init__(self, ds, dataset_type):
        super(OWLSSubfindParticleIndex, self).__init__(ds, dataset_type)

    def _calculate_particle_index_starts(self):
        # Halo indices are not saved in the file, so we must count by hand.
        # File 0 has halos 0 to N_0 - 1, file 1 has halos N_0 to N_0 + N_1 - 1, etc.
        particle_count = defaultdict(int)
        offset_count = 0
        for data_file in self.data_files:
            # Global index of the first halo of each particle type in this file.
            data_file.index_start = dict(
                [(ptype, particle_count[ptype]) for ptype in data_file.total_particles]
            )
            data_file.offset_start = offset_count
            for ptype in data_file.total_particles:
                particle_count[ptype] += data_file.total_particles[ptype]
            offset_count += data_file.total_offset

    def _calculate_file_offset_map(self):
        # After the FOF is performed, a load-balancing step redistributes halos
        # and then writes more fields. Here, for each file, we create a list of
        # files which contain the rest of the redistributed particles.
        ifof = np.array(
            [data_file.total_particles["FOF"] for data_file in self.data_files]
        )
        isub = np.array([data_file.total_offset for data_file in self.data_files])
        subend = isub.cumsum()
        fofend = ifof.cumsum()
        # For each file, bracket the range of files holding its member data.
        istart = np.digitize(fofend - ifof, subend - isub) - 1
        iend = np.clip(np.digitize(fofend, subend), 0, ifof.size - 2)
        for i, data_file in enumerate(self.data_files):
            data_file.offset_files = self.data_files[istart[i] : iend[i] + 1]

    def _detect_output_fields(self):
        # TODO: Add additional fields
        self._setup_filenames()
        self._calculate_particle_index_starts()
        self._calculate_file_offset_map()
        dsl = []          # ordered, de-duplicated (ptype, fname) field list
        units = {}
        for dom in self.data_files:
            fl, _units = self.io._identify_fields(dom)
            units.update(_units)
            dom._calculate_offsets(fl)
            for f in fl:
                if f not in dsl:
                    dsl.append(f)
        self.field_list = dsl
        ds = self.dataset
        ds.particle_types = tuple(set(pt for pt, ds in dsl))
        # This is an attribute that means these particle types *actually*
        # exist.  As in, they are real, in the dataset.
        ds.field_units.update(units)
        ds.particle_types_raw = ds.particle_types
class OWLSSubfindHDF5File(ParticleFile):
    """A single OWLS-SUBFIND HDF5 catalog file; caches its root attributes."""

    def __init__(self, ds, io, filename, file_id, bounds):
        super(OWLSSubfindHDF5File, self).__init__(ds, io, filename, file_id, bounds)
        with h5py.File(filename, mode="r") as handle:
            self.header = {key: handle.attrs[key] for key in handle.attrs.keys()}
class OWLSSubfindDataset(ParticleDataset):
    """Dataset for OWLS-SUBFIND halo catalogs stored as HDF5 files."""

    _index_class = OWLSSubfindParticleIndex
    _file_class = OWLSSubfindHDF5File
    _field_info_class = OWLSSubfindFieldInfo
    _suffix = ".hdf5"

    def __init__(
        self,
        filename,
        dataset_type="subfind_hdf5",
        index_order=None,
        index_filename=None,
        units_override=None,
        unit_system="cgs",
    ):
        super(OWLSSubfindDataset, self).__init__(
            filename,
            dataset_type,
            index_order=index_order,
            index_filename=index_filename,
            units_override=units_override,
            unit_system=unit_system,
        )

    def _parse_parameter_file(self):
        # Read simulation metadata from /Header and unit data from /Units.
        handle = h5py.File(self.parameter_filename, mode="r")
        hvals = {}
        hvals.update((str(k), v) for k, v in handle["/Header"].attrs.items())
        hvals["NumFiles"] = hvals["NumFilesPerSnapshot"]
        hvals["Massarr"] = hvals["MassTable"]
        self.dimensionality = 3
        self.refine_by = 2
        # Set standard values
        self.current_time = self.quan(hvals["Time_GYR"], "Gyr")
        self.domain_left_edge = np.zeros(3, "float64")
        self.domain_right_edge = np.ones(3, "float64") * hvals["BoxSize"]
        self.domain_dimensions = np.ones(3, "int32")
        self.cosmological_simulation = 1
        self.periodicity = (True, True, True)
        self.current_redshift = hvals["Redshift"]
        self.omega_lambda = hvals["OmegaLambda"]
        self.omega_matter = hvals["Omega0"]
        self.hubble_constant = hvals["HubbleParam"]
        self.parameters = hvals
        # Derive the multi-file template ("prefix.%(num)i.suffix") and count
        # the files that share this snapshot's prefix.
        prefix = os.path.abspath(
            os.path.join(
                os.path.dirname(self.parameter_filename),
                os.path.basename(self.parameter_filename).split(".", 1)[0],
            )
        )
        suffix = self.parameter_filename.rsplit(".", 1)[-1]
        self.filename_template = f"{prefix}.%(num)i.{suffix}"
        self.file_count = len(glob.glob(prefix + "*" + self._suffix))
        if self.file_count == 0:
            raise YTException(message="No data files found.", ds=self)
        self.particle_types = ("FOF", "SUBFIND")
        self.particle_types_raw = ("FOF", "SUBFIND")
        # To avoid having to open files twice
        self._unit_base = {}
        self._unit_base.update((str(k), v) for k, v in handle["/Units"].attrs.items())
        handle.close()

    def _set_code_unit_attributes(self):
        # Set a sane default for cosmological simulations.
        if self._unit_base is None and self.cosmological_simulation == 1:
            only_on_root(mylog.info, "Assuming length units are in Mpc/h (comoving)")
            self._unit_base = dict(length=(1.0, "Mpccm/h"))
        # The other same defaults we will use from the standard Gadget
        # defaults.
        unit_base = self._unit_base or {}
        if "length" in unit_base:
            length_unit = unit_base["length"]
        elif "UnitLength_in_cm" in unit_base:
            # Gadget-style CGS length; comoving ("cmcm/h") when cosmological.
            if self.cosmological_simulation == 0:
                length_unit = (unit_base["UnitLength_in_cm"], "cm")
            else:
                length_unit = (unit_base["UnitLength_in_cm"], "cmcm/h")
        else:
            raise RuntimeError
        length_unit = _fix_unit_ordering(length_unit)
        setdefaultattr(self, "length_unit", self.quan(length_unit[0], length_unit[1]))
        if "velocity" in unit_base:
            velocity_unit = unit_base["velocity"]
        elif "UnitVelocity_in_cm_per_s" in unit_base:
            velocity_unit = (unit_base["UnitVelocity_in_cm_per_s"], "cm/s")
        else:
            # Gadget default: 1 km/s with the sqrt(a) comoving factor.
            velocity_unit = (1e5, "cm/s * sqrt(a)")
        velocity_unit = _fix_unit_ordering(velocity_unit)
        setdefaultattr(
            self, "velocity_unit", self.quan(velocity_unit[0], velocity_unit[1])
        )
        # We set hubble_constant = 1.0 for non-cosmology, so this is safe.
        # Default to 1e10 Msun/h if mass is not specified.
        if "mass" in unit_base:
            mass_unit = unit_base["mass"]
        elif "UnitMass_in_g" in unit_base:
            if self.cosmological_simulation == 0:
                mass_unit = (unit_base["UnitMass_in_g"], "g")
            else:
                mass_unit = (unit_base["UnitMass_in_g"], "g/h")
        else:
            # Sane default
            mass_unit = (1.0, "1e10*Msun/h")
        mass_unit = _fix_unit_ordering(mass_unit)
        setdefaultattr(self, "mass_unit", self.quan(mass_unit[0], mass_unit[1]))
        if "time" in unit_base:
            time_unit = unit_base["time"]
        elif "UnitTime_in_s" in unit_base:
            time_unit = (unit_base["UnitTime_in_s"], "s")
        else:
            # Fall back to length/velocity, expressed in yr/h.
            tu = (self.length_unit / self.velocity_unit).to("yr/h")
            time_unit = (tu.d, tu.units)
        setdefaultattr(self, "time_unit", self.quan(time_unit[0], time_unit[1]))

    @classmethod
    def _is_valid(self, *args, **kwargs):
        # A valid file has all of the required HDF5 groups and none of the
        # vetoed ones (veto list currently empty).
        need_groups = ["Constants", "Header", "Parameters", "Units", "FOF"]
        veto_groups = []
        valid = True
        try:
            fh = h5py.File(args[0], mode="r")
            valid = all(ng in fh["/"] for ng in need_groups) and not any(
                vg in fh["/"] for vg in veto_groups
            )
            fh.close()
        except Exception:
            valid = False
            pass
        return valid
| 40 | 87 | 0.620642 | [
"BSD-3-Clause-Clear"
] | neutrinoceros2/yt | yt/frontends/owls_subfind/data_structures.py | 8,720 | Python |
# -*- coding: utf-8 -*-
from devp2p import crypto
from quarkchain.rlp.utils import decode_hex
import random
import pytest
def get_ecc(secret=b''):
    """Return an ECCx instance whose private key is derived from *secret*."""
    privkey = crypto.mk_privkey(secret)
    return crypto.ECCx(raw_privkey=privkey)
def test_valid_ecc():
    """Freshly generated keys validate; an all-zero pubkey does not."""
    for i in range(100):
        e = get_ecc()
        assert len(e.raw_pubkey) == 64
        assert e.is_valid_key(e.raw_pubkey)
        assert e.is_valid_key(e.raw_pubkey, e.raw_privkey)
        # Fix: raw public keys are byte strings throughout this module;
        # the original passed a 64-char *str*, which never exercises the
        # intended invalid-bytes path on Python 3.
        pubkey = b'\x00' * 64
        assert not e.is_valid_key(pubkey)
def test_asymetric():
    """Data encrypted to Bob's pubkey decrypts with Bob's private key."""
    bob = get_ecc(b'secret2')
    plaintext = b"Hello Bob"
    ciphertext = crypto.encrypt(plaintext, bob.raw_pubkey)
    assert bob.decrypt(ciphertext) == plaintext
def test_signature():
    """A signature verifies against the signed digest and no other."""
    signer = get_ecc(b'secret2')
    digest = crypto.sha3(b"Hello Alice")
    sig = signer.sign(digest)
    verifier = crypto.ECCx(raw_pubkey=signer.raw_pubkey)
    assert crypto.verify(signer.raw_pubkey, sig, digest) is True
    assert verifier.verify(sig, digest) is True
    # One character changed -> different digest -> verification fails.
    wrong = crypto.sha3(b"Hello Alicf")
    assert verifier.verify(sig, wrong) is False
    assert crypto.verify(signer.raw_pubkey, sig, wrong) is False
def test_recover():
    """The signer's public key can be recovered from (digest, signature)."""
    alice = get_ecc(b'secret1')
    digest = crypto.sha3(b'hello bob')
    sig = alice.sign(digest)
    assert len(sig) == 65
    assert crypto.verify(alice.raw_pubkey, sig, digest) is True
    recovered = crypto.ecdsa_recover(digest, sig)
    assert len(recovered) == 64
    assert recovered == alice.raw_pubkey
def test_get_ecdh_key():
    """ECDH key agreement against a known test vector."""
    privkey = decode_hex("332143e9629eedff7d142d741f896258f5a1bfab54dab2121d3ec5000093d74b")
    remote_pubkey = decode_hex("f0d2b97981bd0d415a843b5dfe8ab77a30300daab3658c578f2340308a2da1a07f0821367332598b6aa4e180a41e92f4ebbae3518da847f0b1c0bbfe20bcf4e1")
    expected = decode_hex("ee1418607c2fcfb57fda40380e885a707f49000a5dda056d828b7d9bd1f29a08")
    local = crypto.ECCx(raw_privkey=privkey)
    assert local.get_ecdh_key(remote_pubkey) == expected
def test_en_decrypt():
    """Round-trip encrypt/decrypt between two fresh key pairs."""
    sender, receiver = crypto.ECCx(), crypto.ECCx()
    payload = b'test'
    ciphertext = sender.encrypt(payload, receiver.raw_pubkey)
    assert receiver.decrypt(ciphertext) == payload
def test_en_decrypt_shared_mac_data():
    """Decryption succeeds when both sides supply the same shared MAC data.

    Fix: plaintext and MAC data are now bytes, matching the rest of this
    module (the original passed str to encrypt() but compared the decrypted
    result against b'test').
    """
    alice, bob = crypto.ECCx(), crypto.ECCx()
    ciphertext = alice.encrypt(b'test', bob.raw_pubkey, shared_mac_data=b'shared mac data')
    assert bob.decrypt(ciphertext, shared_mac_data=b'shared mac data') == b'test'
@pytest.mark.xfail(raises=crypto.ECIESDecryptionError)
def test_en_decrypt_shared_mac_data_fail():
    """Decryption with mismatched shared MAC data raises ECIESDecryptionError.

    Fix: plaintext and MAC data are now bytes, consistent with the rest of
    this module (the original passed str to encrypt()).
    """
    alice, bob = crypto.ECCx(), crypto.ECCx()
    ciphertext = alice.encrypt(b'test', bob.raw_pubkey, shared_mac_data=b'shared mac data')
    bob.decrypt(ciphertext, shared_mac_data=b'wrong')
def test_privtopub():
    """privtopub agrees with the public key ECCx derives itself."""
    priv = crypto.mk_privkey(b'test')
    derived = crypto.ECCx(raw_privkey=priv).raw_pubkey
    assert crypto.privtopub(priv) == derived
def recover_1kb(times=1000):
    """Sign one random 1 KB message and recover the pubkey *times* times."""
    alice = get_ecc(b'secret1')
    raw = ''.join(chr(random.randrange(0, 256)) for _ in range(1024))
    digest = crypto.sha3(raw.encode('utf-8'))
    sig = alice.sign(digest)
    for _ in range(times):
        assert crypto.ecdsa_recover(digest, sig) == alice.raw_pubkey
def test_recover2():
    """Single-iteration smoke test of recover_1kb."""
    recover_1kb(times=1)
if __name__ == '__main__':
    # Ad-hoc benchmark: average wall-clock time per pubkey recovery.
    import time
    st = time.time()
    times = 100
    recover_1kb(times=times)
    print('took %.5f per recovery' % ((time.time() - st) / times))
| 30.694915 | 162 | 0.717835 | [
"MIT"
] | Kaushalop/pyquarkchain | devp2p/tests/test_crypto.py | 3,622 | Python |
import pytest
from django.urls import resolve, reverse
from car_rental.users.models import User
pytestmark = pytest.mark.django_db
def test_user_detail(user: User):
    """URL name <-> path round trip for the user detail endpoint."""
    expected = f"/api/users/{user.username}/"
    assert reverse("api:user-detail", kwargs={"username": user.username}) == expected
    assert resolve(expected).view_name == "api:user-detail"
def test_user_list():
    """URL name <-> path round trip for the user list endpoint."""
    path = "/api/users/"
    assert reverse("api:user-list") == path
    assert resolve(path).view_name == "api:user-list"
def test_user_me():
    """URL name <-> path round trip for the current-user endpoint."""
    path = "/api/users/me/"
    assert reverse("api:user-me") == path
    assert resolve(path).view_name == "api:user-me"
| 26.52 | 81 | 0.669683 | [
"MIT"
] | TheHaRyPL/Car-rental | car_rental/car_rental/users/tests/test_drf_urls.py | 663 | Python |
#! -*- coding: utf-8 -*-
# 主要模型
import numpy as np
from bert4keras.layers import *
from bert4keras.snippets import insert_arguments
from bert4keras.snippets import delete_arguments
from bert4keras.snippets import is_string
from keras.models import Model
import json
class Transformer(object):
"""模型基类
"""
def __init__(
self,
vocab_size, # 词表大小
hidden_size, # 编码维度
num_hidden_layers, # Transformer总层数
num_attention_heads, # Attention的头数
intermediate_size, # FeedForward的隐层维度
hidden_act, # FeedForward隐层的激活函数
dropout_rate=None, # Dropout比例
embedding_size=None, # 是否指定embedding_size
attention_head_size=None, # Attention中V的head_size
attention_key_size=None, # Attention中Q,K的head_size
sequence_length=None, # 是否固定序列长度
keep_tokens=None, # 要保留的词ID列表
compound_tokens=None, # 扩展Embedding
residual_attention_scores=False, # Attention矩阵加残差
layers=None, # 外部传入的Keras层
prefix=None, # 层名前缀
name=None, # 模型名称
**kwargs
):
if keep_tokens is not None:
vocab_size = len(keep_tokens)
if compound_tokens is not None:
vocab_size += len(compound_tokens)
self.vocab_size = vocab_size
self.hidden_size = hidden_size
self.num_hidden_layers = num_hidden_layers
self.num_attention_heads = num_attention_heads
self.attention_head_size = attention_head_size or hidden_size // num_attention_heads
self.attention_key_size = attention_key_size or self.attention_head_size
self.intermediate_size = intermediate_size
self.dropout_rate = dropout_rate or 0
self.hidden_act = hidden_act
self.embedding_size = embedding_size or hidden_size
self.sequence_length = sequence_length
self.keep_tokens = keep_tokens
self.compound_tokens = compound_tokens
self.attention_bias = None
self.position_bias = None
self.attention_scores = None
self.residual_attention_scores = residual_attention_scores
self.layers = {} if layers is None else layers
self.prefix = prefix or ''
self.name = name
self.built = False
def build(
self,
attention_caches=None,
layer_norm_cond=None,
layer_norm_cond_hidden_size=None,
layer_norm_cond_hidden_act=None,
additional_input_layers=None,
**kwargs
):
"""模型构建函数
attention_caches:为Attention的K,V的缓存序列字典,格式为
{Attention层名: [K缓存, V缓存]};
layer_norm_*系列参数:实现Conditional Layer Normalization时使用,
用来实现以“固定长度向量”为条件的条件Bert。
"""
if self.built:
return None
# Input
inputs = self.get_inputs()
self.set_inputs(inputs, additional_input_layers)
# Other
self.attention_caches = attention_caches or {}
self.layer_norm_conds = [
layer_norm_cond,
layer_norm_cond_hidden_size,
layer_norm_cond_hidden_act or 'linear',
]
# Call
outputs = self.call(inputs)
self.set_outputs(outputs)
# Model
self.model = Model(self.inputs, self.outputs, name=self.name)
self.built = True
def call(self, inputs):
"""定义模型的执行流程
"""
# Embedding
outputs = self.apply_embeddings(inputs)
# Main
for i in range(self.num_hidden_layers):
outputs = self.apply_main_layers(outputs, i)
# Final
outputs = self.apply_final_layers(outputs)
return outputs
def prefixed(self, name):
"""给名字加前缀
"""
if name is not None:
return self.prefix + name
def apply(self, inputs=None, layer=None, arguments=None, **kwargs):
"""通过apply调用层会自动重用同名层
inputs: 上一层的输出;
layer: 要调用的层类名;
arguments: 传递给layer.call的参数;
kwargs: 传递给层初始化的参数。
"""
if layer is Dropout and self.dropout_rate == 0:
return inputs
if layer is MultiHeadAttention and self.residual_attention_scores:
kwargs['return_attention_scores'] = True
arguments = arguments or {}
name = self.prefixed(kwargs.get('name'))
kwargs['name'] = name
if name not in self.layers:
layer = layer(**kwargs)
name = layer.name
self.layers[name] = layer
if inputs is None:
return self.layers[name]
else:
if isinstance(self.layers[name], MultiHeadAttention):
if name in self.attention_caches:
# 如果检测到Cache的传入,那么自动在Key,Value处拼接起来
k_cache, v_cache = self.attention_caches[name]
k_name, v_name = name + '-Cached-Key', name + '-Cached-Value'
k = Concatenate1D(name=k_name)([k_cache, inputs[1]])
v = Concatenate1D(name=v_name)([v_cache, inputs[2]])
inputs = inputs[:1] + [k, v] + inputs[3:]
if self.residual_attention_scores:
# 如果使用残差Attention矩阵,则给每个Attention矩阵加上前上一层的Attention
# 矩阵,这对应RealFormer设计(https://arxiv.org/abs/2012.11747)。目前
# 该实现还相对粗糙,可能欠缺通用性。
if self.attention_scores is not None:
if arguments.get('a_bias'):
a_bias = Add(name=name + '-Attention-Bias'
)([inputs[3], self.attention_scores])
else:
a_bias = self.attention_scores
inputs = inputs[:3] + [a_bias] + inputs[4:]
arguments['a_bias'] = True
o, a = self.layers[name](inputs, **arguments)
self.attention_scores = a
return o
return self.layers[name](inputs, **arguments)
def get_inputs(self):
raise NotImplementedError
def apply_embeddings(self, inputs):
raise NotImplementedError
def apply_main_layers(self, inputs, index):
raise NotImplementedError
def apply_final_layers(self, inputs):
raise NotImplementedError
def compute_attention_bias(self, inputs=None):
"""定义每一层的Attention Bias
"""
return self.attention_bias
def compute_position_bias(self, inputs=None):
"""定义每一层的Position Bias(一般相对位置编码用)
"""
return self.position_bias
def set_inputs(self, inputs, additional_input_layers=None):
"""设置input和inputs属性
"""
if inputs is None:
inputs = []
elif not isinstance(inputs, list):
inputs = [inputs]
inputs = inputs[:]
if additional_input_layers is not None:
if not isinstance(additional_input_layers, list):
additional_input_layers = [additional_input_layers]
inputs.extend(additional_input_layers)
self.inputs = inputs
if len(inputs) > 1:
self.input = inputs
else:
self.input = inputs[0]
def set_outputs(self, outputs):
"""设置output和oututs属性
"""
if not isinstance(outputs, list):
outputs = [outputs]
outputs = outputs[:]
self.outputs = outputs
if len(outputs) > 1:
self.output = outputs
else:
self.output = outputs[0]
@property
def initializer(self):
"""默认使用截断正态分布初始化
"""
return keras.initializers.TruncatedNormal(stddev=0.02)
def simplify(self, inputs):
"""将list中的None过滤掉
"""
inputs = [i for i in inputs if i is not None]
if len(inputs) == 1:
inputs = inputs[0]
return inputs
def load_embeddings(self, embeddings):
"""处理Embedding层权重
"""
if self.keep_tokens is not None:
embeddings = embeddings[self.keep_tokens]
if self.compound_tokens is not None:
ext_embeddings = []
for item in self.compound_tokens:
if isinstance(item, list):
item = (item, [1] * len(item))
ext_embeddings.append(
np.average(embeddings[item[0]], 0, item[1])
)
embeddings = np.concatenate([embeddings, ext_embeddings], 0)
return embeddings
def load_variable(self, checkpoint, name):
"""加载单个变量的函数
"""
if isinstance(checkpoint, dict):
return checkpoint[name]
else:
return tf.train.load_variable(checkpoint, name)
def create_variable(self, name, value, dtype=None):
"""创建一个变量
"""
dtype = dtype or K.floatx()
return K.variable(
self.initializer(value.shape, dtype), dtype, name=name
), value
def variable_mapping(self):
"""构建keras层与checkpoint的变量名之间的映射表
"""
return {}
def load_weights_from_checkpoint(self, checkpoint, mapping=None):
"""根据mapping从checkpoint加载权重
"""
mapping = mapping or self.variable_mapping()
mapping = {self.prefixed(k): v for k, v in mapping.items()}
mapping = {k: v for k, v in mapping.items() if k in self.layers}
weight_value_pairs = []
for layer, variables in mapping.items():
layer = self.layers[layer]
weights = layer.trainable_weights
values = [self.load_variable(checkpoint, v) for v in variables]
if isinstance(layer, MultiHeadAttention):
"""如果key_size不等于head_size,则可以通过
正交矩阵将相应的权重投影到合适的shape。
"""
count = 2
if layer.use_bias:
count += 2
heads = self.num_attention_heads
head_size = self.attention_head_size
key_size = self.attention_key_size
W = np.linalg.qr(np.random.randn(key_size, head_size))[0].T
if layer.attention_scale:
W = W * key_size**0.25 / head_size**0.25
for i in range(count):
w, v = weights[i], values[i]
w_shape, v_shape = K.int_shape(w), v.shape
if w_shape[-1] != v_shape[-1]:
pre_shape = w_shape[:-1]
v = v.reshape(pre_shape + (heads, head_size))
v = np.dot(v, W)
v = v.reshape(pre_shape + (heads * key_size,))
values[i] = v
weight_value_pairs.extend(zip(weights, values))
K.batch_set_value(weight_value_pairs)
    def save_weights_as_checkpoint(self, filename, mapping=None, dtype=None):
        """Save the model's weights in TF checkpoint format via ``mapping``.

        Inverse of ``load_weights_from_checkpoint``: each trainable weight
        is written under the checkpoint variable name given by ``mapping``
        (defaults to ``self.variable_mapping()``).
        """
        mapping = mapping or self.variable_mapping()
        mapping = {self.prefixed(k): v for k, v in mapping.items()}
        mapping = {k: v for k, v in mapping.items() if k in self.layers}
        # build the variables in a fresh graph so only mapped weights are saved
        with tf.Graph().as_default():
            all_variables, all_values = [], []
            for layer, variables in mapping.items():
                layer = self.layers[layer]
                values = K.batch_get_value(layer.trainable_weights)
                for name, value in zip(variables, values):
                    variable, value = self.create_variable(name, value, dtype)
                    all_variables.append(variable)
                    all_values.append(value)
            with tf.Session() as sess:
                K.batch_set_value(zip(all_variables, all_values))
                saver = tf.train.Saver()
                saver.save(sess, filename)
class LM_Mask(object):
    """Lower-triangular (causal) attention mask, for language modelling.

    Mixin: expects the host class to provide ``self.apply``,
    ``self.inputs`` and an ``attention_bias`` cache attribute.
    """
    def compute_attention_bias(self, inputs=None):
        """Build (and cache) the causal mask by comparing position indices.

        Each position may attend only to itself and earlier positions;
        disallowed entries receive a large negative bias (-1e12).
        """
        if self.attention_bias is None:
            def lm_mask(s):
                length = K.shape(s)[1]
                positions = K.arange(0, length)
                allowed = K.cast(
                    positions[None, :] <= positions[:, None], K.floatx()
                )
                # broadcastable (1, 1, len, len); 1 -> 0, 0 -> -1e12
                return (allowed[None, None] - 1) * 1e12
            self.attention_bias = self.apply(
                inputs=self.inputs[0],
                layer=Lambda,
                function=lm_mask,
                name='Attention-LM-Mask'
            )
        return self.attention_bias
class UniLM_Mask(object):
    """UniLM attention mask (for Seq2Seq models).

    The source/target split is given by the segment ids: source tokens
    attend bidirectionally while target tokens attend causally.
    UniLM: https://arxiv.org/abs/1905.03197
    """
    def compute_attention_bias(self, inputs=None):
        """Build (and cache) the mask from cumulative sums of segment ids."""
        if self.attention_bias is None:
            def unilm_mask(s):
                positions = K.cumsum(s, axis=1)
                allowed = K.cast(
                    positions[:, None, :] <= positions[:, :, None], K.floatx()
                )
                # (batch, 1, len, len); 1 -> 0, 0 -> -1e12
                return (allowed[:, None] - 1) * 1e12
            self.attention_bias = self.apply(
                inputs=self.inputs[1],
                layer=Lambda,
                function=unilm_mask,
                name='Attention-UniLM-Mask'
            )
        return self.attention_bias
class BERT(Transformer):
    """BERT model (https://arxiv.org/abs/1810.04805).

    Token/segment/position embeddings, a stack of post-LN self-attention
    blocks, and optional Pooler / NSP / MLM output heads.
    """
    def __init__(
        self,
        max_position,  # maximum sequence length
        segment_vocab_size=2,  # number of segment (token-type) ids
        with_pool=False,  # whether to include the Pooler head
        with_nsp=False,  # whether to include the NSP head
        with_mlm=False,  # whether to include the MLM head
        hierarchical_position=None,  # hierarchical position-embedding decomposition
        custom_position_ids=False,  # whether position ids are passed in as an input
        shared_segment_embeddings=False,  # if True, segments reuse the token embedding
        **kwargs  # remaining Transformer arguments
    ):
        super(BERT, self).__init__(**kwargs)
        self.max_position = max_position
        self.segment_vocab_size = segment_vocab_size
        self.with_pool = with_pool
        self.with_nsp = with_nsp
        self.with_mlm = with_mlm
        self.hierarchical_position = hierarchical_position
        self.custom_position_ids = custom_position_ids
        self.shared_segment_embeddings = shared_segment_embeddings
        if self.with_nsp and not self.with_pool:
            # NSP is predicted from the pooled CLS vector, so it needs Pooler
            self.with_pool = True
    def get_inputs(self):
        """BERT's inputs are token_ids and segment_ids.

        (Position ids may also be passed in explicitly to support
        special use cases.)
        """
        x_in = self.apply(
            layer=Input, shape=(self.sequence_length,), name='Input-Token'
        )
        inputs = [x_in]
        if self.segment_vocab_size > 0:
            s_in = self.apply(
                layer=Input,
                shape=(self.sequence_length,),
                name='Input-Segment'
            )
            inputs.append(s_in)
        if self.custom_position_ids:
            p_in = self.apply(
                layer=Input,
                shape=(self.sequence_length,),
                name='Input-Position'
            )
            inputs.append(p_in)
        return inputs
    def apply_embeddings(self, inputs):
        """BERT's embedding is the sum of token, position and segment
        embeddings, followed by LayerNorm and dropout.
        """
        inputs = inputs[:]
        x = inputs.pop(0)
        if self.segment_vocab_size > 0:
            s = inputs.pop(0)
        if self.custom_position_ids:
            p = inputs.pop(0)
        else:
            p = None
        # optional condition tensor for conditional LayerNorm
        z = self.layer_norm_conds[0]
        x = self.apply(
            inputs=x,
            layer=Embedding,
            input_dim=self.vocab_size,
            output_dim=self.embedding_size,
            embeddings_initializer=self.initializer,
            mask_zero=True,
            name='Embedding-Token'
        )
        if self.segment_vocab_size > 0:
            if self.shared_segment_embeddings:
                name = 'Embedding-Token'
            else:
                name = 'Embedding-Segment'
            s = self.apply(
                inputs=s,
                layer=Embedding,
                input_dim=self.segment_vocab_size,
                output_dim=self.embedding_size,
                embeddings_initializer=self.initializer,
                name=name
            )
            x = self.apply(
                inputs=[x, s], layer=Add, name='Embedding-Token-Segment'
            )
        x = self.apply(
            inputs=self.simplify([x, p]),
            layer=PositionEmbedding,
            input_dim=self.max_position,
            output_dim=self.embedding_size,
            merge_mode='add',
            hierarchical=self.hierarchical_position,
            embeddings_initializer=self.initializer,
            custom_position_ids=self.custom_position_ids,
            name='Embedding-Position'
        )
        x = self.apply(
            inputs=self.simplify([x, z]),
            layer=LayerNormalization,
            conditional=(z is not None),
            hidden_units=self.layer_norm_conds[1],
            hidden_activation=self.layer_norm_conds[2],
            hidden_initializer=self.initializer,
            name='Embedding-Norm'
        )
        x = self.apply(
            inputs=x,
            layer=Dropout,
            rate=self.dropout_rate,
            name='Embedding-Dropout'
        )
        if self.embedding_size != self.hidden_size:
            # project embeddings up/down to the hidden size (ALBERT-style)
            x = self.apply(
                inputs=x,
                layer=Dense,
                units=self.hidden_size,
                kernel_initializer=self.initializer,
                name='Embedding-Mapping'
            )
        return x
    def apply_main_layers(self, inputs, index):
        """BERT's body is a self-attention block (post-LN).

        Order: Att --> Add --> LN --> FFN --> Add --> LN
        """
        x = inputs
        z = self.layer_norm_conds[0]
        attention_name = 'Transformer-%d-MultiHeadSelfAttention' % index
        feed_forward_name = 'Transformer-%d-FeedForward' % index
        attention_mask = self.compute_attention_bias(index)
        # Self Attention
        xi, x, arguments = x, [x, x, x], {'a_bias': None}
        if attention_mask is not None:
            arguments['a_bias'] = True
            x.append(attention_mask)
        x = self.apply(
            inputs=x,
            layer=MultiHeadAttention,
            arguments=arguments,
            heads=self.num_attention_heads,
            head_size=self.attention_head_size,
            out_dim=self.hidden_size,
            key_size=self.attention_key_size,
            kernel_initializer=self.initializer,
            name=attention_name
        )
        x = self.apply(
            inputs=x,
            layer=Dropout,
            rate=self.dropout_rate,
            name='%s-Dropout' % attention_name
        )
        x = self.apply(
            inputs=[xi, x], layer=Add, name='%s-Add' % attention_name
        )
        x = self.apply(
            inputs=self.simplify([x, z]),
            layer=LayerNormalization,
            conditional=(z is not None),
            hidden_units=self.layer_norm_conds[1],
            hidden_activation=self.layer_norm_conds[2],
            hidden_initializer=self.initializer,
            name='%s-Norm' % attention_name
        )
        # Feed Forward
        xi = x
        x = self.apply(
            inputs=x,
            layer=FeedForward,
            units=self.intermediate_size,
            activation=self.hidden_act,
            kernel_initializer=self.initializer,
            name=feed_forward_name
        )
        x = self.apply(
            inputs=x,
            layer=Dropout,
            rate=self.dropout_rate,
            name='%s-Dropout' % feed_forward_name
        )
        x = self.apply(
            inputs=[xi, x], layer=Add, name='%s-Add' % feed_forward_name
        )
        x = self.apply(
            inputs=self.simplify([x, z]),
            layer=LayerNormalization,
            conditional=(z is not None),
            hidden_units=self.layer_norm_conds[1],
            hidden_activation=self.layer_norm_conds[2],
            hidden_initializer=self.initializer,
            name='%s-Norm' % feed_forward_name
        )
        return x
    def apply_final_layers(self, inputs):
        """Build the output heads requested by the constructor flags."""
        x = inputs
        z = self.layer_norm_conds[0]
        outputs = [x]
        if self.with_pool:
            # Pooler head (extracts the CLS vector)
            x = outputs[0]
            x = self.apply(
                inputs=x,
                layer=Lambda,
                function=lambda x: x[:, 0],
                name='Pooler'
            )
            # with_pool may be True (tanh) or an activation name/function
            pool_activation = 'tanh' if self.with_pool is True else self.with_pool
            x = self.apply(
                inputs=x,
                layer=Dense,
                units=self.hidden_size,
                activation=pool_activation,
                kernel_initializer=self.initializer,
                name='Pooler-Dense'
            )
            if self.with_nsp:
                # Next Sentence Prediction head
                x = self.apply(
                    inputs=x,
                    layer=Dense,
                    units=2,
                    activation='softmax',
                    kernel_initializer=self.initializer,
                    name='NSP-Proba'
                )
            outputs.append(x)
        if self.with_mlm:
            # Masked Language Model head (ties weights with Embedding-Token)
            x = outputs[0]
            x = self.apply(
                inputs=x,
                layer=Dense,
                units=self.embedding_size,
                activation=self.hidden_act,
                kernel_initializer=self.initializer,
                name='MLM-Dense'
            )
            x = self.apply(
                inputs=self.simplify([x, z]),
                layer=LayerNormalization,
                conditional=(z is not None),
                hidden_units=self.layer_norm_conds[1],
                hidden_activation=self.layer_norm_conds[2],
                hidden_initializer=self.initializer,
                name='MLM-Norm'
            )
            x = self.apply(
                inputs=x,
                layer=Embedding,
                arguments={'mode': 'dense'},
                name='Embedding-Token'
            )
            x = self.apply(inputs=x, layer=BiasAdd, name='MLM-Bias')
            # with_mlm may be True (softmax) or an activation name/function
            mlm_activation = 'softmax' if self.with_mlm is True else self.with_mlm
            x = self.apply(
                inputs=x,
                layer=Activation,
                activation=mlm_activation,
                name='MLM-Activation'
            )
            outputs.append(x)
        # single head -> tensor; two -> the head; more -> list of heads
        if len(outputs) == 1:
            outputs = outputs[0]
        elif len(outputs) == 2:
            outputs = outputs[1]
        else:
            outputs = outputs[1:]
        return outputs
    def load_variable(self, checkpoint, name):
        """Load a single variable, adapting it to this model's layout."""
        variable = super(BERT, self).load_variable(checkpoint, name)
        if name in [
            'bert/embeddings/word_embeddings',
            'cls/predictions/output_bias',
        ]:
            # apply keep_tokens / compound_tokens vocabulary surgery
            return self.load_embeddings(variable)
        elif name == 'cls/seq_relationship/output_weights':
            # official checkpoint stores the NSP kernel transposed
            return variable.T
        else:
            return variable
    def create_variable(self, name, value, dtype=None):
        """Create a TF variable, undoing the NSP-kernel transpose."""
        if name == 'cls/seq_relationship/output_weights':
            value = value.T
        return super(BERT, self).create_variable(name, value, dtype)
    def variable_mapping(self):
        """Map layer names to the official BERT checkpoint layout."""
        mapping = {
            'Embedding-Token': ['bert/embeddings/word_embeddings'],
            'Embedding-Segment': ['bert/embeddings/token_type_embeddings'],
            'Embedding-Position': ['bert/embeddings/position_embeddings'],
            'Embedding-Norm': [
                'bert/embeddings/LayerNorm/beta',
                'bert/embeddings/LayerNorm/gamma',
            ],
            'Embedding-Mapping': [
                'bert/encoder/embedding_hidden_mapping_in/kernel',
                'bert/encoder/embedding_hidden_mapping_in/bias',
            ],
            'Pooler-Dense': [
                'bert/pooler/dense/kernel',
                'bert/pooler/dense/bias',
            ],
            'NSP-Proba': [
                'cls/seq_relationship/output_weights',
                'cls/seq_relationship/output_bias',
            ],
            'MLM-Dense': [
                'cls/predictions/transform/dense/kernel',
                'cls/predictions/transform/dense/bias',
            ],
            'MLM-Norm': [
                'cls/predictions/transform/LayerNorm/beta',
                'cls/predictions/transform/LayerNorm/gamma',
            ],
            'MLM-Bias': ['cls/predictions/output_bias'],
        }
        for i in range(self.num_hidden_layers):
            prefix = 'bert/encoder/layer_%d/' % i
            mapping.update({
                'Transformer-%d-MultiHeadSelfAttention' % i: [
                    prefix + 'attention/self/query/kernel',
                    prefix + 'attention/self/query/bias',
                    prefix + 'attention/self/key/kernel',
                    prefix + 'attention/self/key/bias',
                    prefix + 'attention/self/value/kernel',
                    prefix + 'attention/self/value/bias',
                    prefix + 'attention/output/dense/kernel',
                    prefix + 'attention/output/dense/bias',
                ],
                'Transformer-%d-MultiHeadSelfAttention-Norm' % i: [
                    prefix + 'attention/output/LayerNorm/beta',
                    prefix + 'attention/output/LayerNorm/gamma',
                ],
                'Transformer-%d-FeedForward' % i: [
                    prefix + 'intermediate/dense/kernel',
                    prefix + 'intermediate/dense/bias',
                    prefix + 'output/dense/kernel',
                    prefix + 'output/dense/bias',
                ],
                'Transformer-%d-FeedForward-Norm' % i: [
                    prefix + 'output/LayerNorm/beta',
                    prefix + 'output/LayerNorm/gamma',
                ],
            })
        return mapping
class ALBERT(BERT):
    """ALBERT model (https://arxiv.org/abs/1909.11942).

    Like BERT, but all transformer blocks share one set of weights —
    note the layer names below carry no per-layer index.
    """
    def apply_main_layers(self, inputs, index):
        """ALBERT's body is a shared self-attention block.

        Order: Att --> Add --> LN --> FFN --> Add --> LN
        The same layer names are reused for every ``index``, which is how
        weight sharing is realized.
        """
        x = inputs
        z = self.layer_norm_conds[0]
        attention_name = 'Transformer-MultiHeadSelfAttention'
        feed_forward_name = 'Transformer-FeedForward'
        attention_mask = self.compute_attention_bias(index)
        # Self Attention
        xi, x, arguments = x, [x, x, x], {'a_bias': None}
        if attention_mask is not None:
            arguments['a_bias'] = True
            x.append(attention_mask)
        x = self.apply(
            inputs=x,
            layer=MultiHeadAttention,
            arguments=arguments,
            heads=self.num_attention_heads,
            head_size=self.attention_head_size,
            out_dim=self.hidden_size,
            key_size=self.attention_key_size,
            kernel_initializer=self.initializer,
            name=attention_name
        )
        x = self.apply(
            inputs=x,
            layer=Dropout,
            rate=self.dropout_rate,
            name='%s-Dropout' % attention_name
        )
        x = self.apply(
            inputs=[xi, x], layer=Add, name='%s-Add' % attention_name
        )
        x = self.apply(
            inputs=self.simplify([x, z]),
            layer=LayerNormalization,
            conditional=(z is not None),
            hidden_units=self.layer_norm_conds[1],
            hidden_activation=self.layer_norm_conds[2],
            hidden_initializer=self.initializer,
            name='%s-Norm' % attention_name
        )
        # Feed Forward
        xi = x
        x = self.apply(
            inputs=x,
            layer=FeedForward,
            units=self.intermediate_size,
            activation=self.hidden_act,
            kernel_initializer=self.initializer,
            name=feed_forward_name
        )
        x = self.apply(
            inputs=x,
            layer=Dropout,
            rate=self.dropout_rate,
            name='%s-Dropout' % feed_forward_name
        )
        x = self.apply(
            inputs=[xi, x], layer=Add, name='%s-Add' % feed_forward_name
        )
        x = self.apply(
            inputs=self.simplify([x, z]),
            layer=LayerNormalization,
            conditional=(z is not None),
            hidden_units=self.layer_norm_conds[1],
            hidden_activation=self.layer_norm_conds[2],
            hidden_initializer=self.initializer,
            name='%s-Norm' % feed_forward_name
        )
        return x
    def variable_mapping(self):
        """Map layer names to the official ALBERT checkpoint layout."""
        mapping = super(ALBERT, self).variable_mapping()
        prefix = 'bert/encoder/transformer/group_0/inner_group_0/'
        mapping.update({
            'Transformer-MultiHeadSelfAttention': [
                prefix + 'attention_1/self/query/kernel',
                prefix + 'attention_1/self/query/bias',
                prefix + 'attention_1/self/key/kernel',
                prefix + 'attention_1/self/key/bias',
                prefix + 'attention_1/self/value/kernel',
                prefix + 'attention_1/self/value/bias',
                prefix + 'attention_1/output/dense/kernel',
                prefix + 'attention_1/output/dense/bias',
            ],
            'Transformer-MultiHeadSelfAttention-Norm': [
                prefix + 'LayerNorm/beta',
                prefix + 'LayerNorm/gamma',
            ],
            'Transformer-FeedForward': [
                prefix + 'ffn_1/intermediate/dense/kernel',
                prefix + 'ffn_1/intermediate/dense/bias',
                prefix + 'ffn_1/intermediate/output/dense/kernel',
                prefix + 'ffn_1/intermediate/output/dense/bias',
            ],
            'Transformer-FeedForward-Norm': [
                prefix + 'LayerNorm_1/beta',
                prefix + 'LayerNorm_1/gamma',
            ],
        })
        return mapping
class ALBERT_Unshared(BERT):
    """ALBERT with the weight-sharing constraint removed, used like BERT.

    Every transformer layer is mapped to the single shared ALBERT block
    in the checkpoint, so each (now independent) layer starts from the
    same pretrained weights.
    """
    def variable_mapping(self):
        """Map layer names to the official ALBERT checkpoint layout."""
        mapping = super(ALBERT_Unshared, self).variable_mapping()
        prefix = 'bert/encoder/transformer/group_0/inner_group_0/'
        # the shared ALBERT block's variables, reused for every layer
        attention_vars = [
            prefix + 'attention_1/self/query/kernel',
            prefix + 'attention_1/self/query/bias',
            prefix + 'attention_1/self/key/kernel',
            prefix + 'attention_1/self/key/bias',
            prefix + 'attention_1/self/value/kernel',
            prefix + 'attention_1/self/value/bias',
            prefix + 'attention_1/output/dense/kernel',
            prefix + 'attention_1/output/dense/bias',
        ]
        attention_norm_vars = [
            prefix + 'LayerNorm/beta',
            prefix + 'LayerNorm/gamma',
        ]
        feed_forward_vars = [
            prefix + 'ffn_1/intermediate/dense/kernel',
            prefix + 'ffn_1/intermediate/dense/bias',
            prefix + 'ffn_1/intermediate/output/dense/kernel',
            prefix + 'ffn_1/intermediate/output/dense/bias',
        ]
        feed_forward_norm_vars = [
            prefix + 'LayerNorm_1/beta',
            prefix + 'LayerNorm_1/gamma',
        ]
        for i in range(self.num_hidden_layers):
            mapping['Transformer-%d-MultiHeadSelfAttention' % i] = \
                list(attention_vars)
            mapping['Transformer-%d-MultiHeadSelfAttention-Norm' % i] = \
                list(attention_norm_vars)
            mapping['Transformer-%d-FeedForward' % i] = \
                list(feed_forward_vars)
            mapping['Transformer-%d-FeedForward-Norm' % i] = \
                list(feed_forward_norm_vars)
        return mapping
class NEZHA(BERT):
    """NEZHA model by Huawei.

    Link: https://arxiv.org/abs/1909.00204
    Same as BERT except that absolute position embeddings are replaced by
    classic (sinusoidal, non-trainable) relative position encodings fed
    directly into self-attention.
    """
    def apply_embeddings(self, inputs):
        """NEZHA's embedding is the sum of token and segment embeddings.

        There is no absolute position embedding here; positional
        information enters through relative positions in the attention
        layers (see ``compute_position_bias``).
        """
        inputs = inputs[:]
        x = inputs.pop(0)
        if self.segment_vocab_size > 0:
            s = inputs.pop(0)
        z = self.layer_norm_conds[0]
        x = self.apply(
            inputs=x,
            layer=Embedding,
            input_dim=self.vocab_size,
            output_dim=self.embedding_size,
            embeddings_initializer=self.initializer,
            mask_zero=True,
            name='Embedding-Token'
        )
        if self.segment_vocab_size > 0:
            if self.shared_segment_embeddings:
                name = 'Embedding-Token'
            else:
                name = 'Embedding-Segment'
            s = self.apply(
                inputs=s,
                layer=Embedding,
                # Fix: honour segment_vocab_size instead of hard-coding 2,
                # consistent with BERT.apply_embeddings (default remains 2,
                # so existing callers are unaffected).
                input_dim=self.segment_vocab_size,
                output_dim=self.embedding_size,
                embeddings_initializer=self.initializer,
                name=name
            )
            x = self.apply(
                inputs=[x, s], layer=Add, name='Embedding-Token-Segment'
            )
        x = self.apply(
            inputs=self.simplify([x, z]),
            layer=LayerNormalization,
            conditional=(z is not None),
            hidden_units=self.layer_norm_conds[1],
            hidden_activation=self.layer_norm_conds[2],
            hidden_initializer=self.initializer,
            name='Embedding-Norm'
        )
        x = self.apply(
            inputs=x,
            layer=Dropout,
            rate=self.dropout_rate,
            name='Embedding-Dropout'
        )
        if self.embedding_size != self.hidden_size:
            x = self.apply(
                inputs=x,
                layer=Dense,
                units=self.hidden_size,
                kernel_initializer=self.initializer,
                name='Embedding-Mapping'
            )
        return x
    def apply_main_layers(self, inputs, index):
        """NEZHA's body is a self-attention block with relative positions.

        Order: Att --> Add --> LN --> FFN --> Add --> LN
        """
        x = inputs
        z = self.layer_norm_conds[0]
        attention_name = 'Transformer-%d-MultiHeadSelfAttention' % index
        feed_forward_name = 'Transformer-%d-FeedForward' % index
        attention_mask = self.compute_attention_bias(index)
        position_bias = self.compute_position_bias(x)
        # Self Attention (relative positions passed as an extra input)
        xi, x = x, [x, x, x, position_bias]
        arguments = {'a_bias': None, 'p_bias': 'typical_relative'}
        if attention_mask is not None:
            arguments['a_bias'] = True
            # attention mask goes before the position bias input
            x.insert(3, attention_mask)
        x = self.apply(
            inputs=x,
            layer=MultiHeadAttention,
            arguments=arguments,
            heads=self.num_attention_heads,
            head_size=self.attention_head_size,
            out_dim=self.hidden_size,
            key_size=self.attention_key_size,
            kernel_initializer=self.initializer,
            name=attention_name
        )
        x = self.apply(
            inputs=x,
            layer=Dropout,
            rate=self.dropout_rate,
            name='%s-Dropout' % attention_name
        )
        x = self.apply(
            inputs=[xi, x], layer=Add, name='%s-Add' % attention_name
        )
        x = self.apply(
            inputs=self.simplify([x, z]),
            layer=LayerNormalization,
            conditional=(z is not None),
            hidden_units=self.layer_norm_conds[1],
            hidden_activation=self.layer_norm_conds[2],
            hidden_initializer=self.initializer,
            name='%s-Norm' % attention_name
        )
        # Feed Forward
        xi = x
        x = self.apply(
            inputs=x,
            layer=FeedForward,
            units=self.intermediate_size,
            activation=self.hidden_act,
            kernel_initializer=self.initializer,
            name=feed_forward_name
        )
        x = self.apply(
            inputs=x,
            layer=Dropout,
            rate=self.dropout_rate,
            name='%s-Dropout' % feed_forward_name
        )
        x = self.apply(
            inputs=[xi, x], layer=Add, name='%s-Add' % feed_forward_name
        )
        x = self.apply(
            inputs=self.simplify([x, z]),
            layer=LayerNormalization,
            conditional=(z is not None),
            hidden_units=self.layer_norm_conds[1],
            hidden_activation=self.layer_norm_conds[2],
            hidden_initializer=self.initializer,
            name='%s-Norm' % feed_forward_name
        )
        return x
    def compute_position_bias(self, inputs=None):
        """Classic relative position encoding (cached after first build).

        input_dim = 2 * 64 + 1 corresponds to a maximum relative
        distance of 64 in either direction.
        """
        if self.position_bias is None:
            x = inputs
            self.position_bias = self.apply(
                inputs=[x, x],
                layer=RelativePositionEmbedding,
                input_dim=2 * 64 + 1,
                output_dim=self.attention_head_size,
                embeddings_initializer='Sinusoidal',
                name='Embedding-Relative-Position',
                trainable=False
            )
        return self.position_bias
class ELECTRA(BERT):
    """ELECTRA model by Google.

    Link: https://arxiv.org/abs/2003.10555
    A BERT-style encoder with an optional discriminator head in place of
    the Pooler/MLM heads (which are removed from the constructor).
    """
    @insert_arguments(with_discriminator=False)
    @delete_arguments('with_pool', 'with_mlm')
    def __init__(
        self,
        max_position,  # maximum sequence length
        **kwargs  # remaining BERT arguments
    ):
        super(ELECTRA, self).__init__(max_position, **kwargs)
    def apply_final_layers(self, inputs):
        """Optionally append the replaced-token-detection head."""
        x = inputs
        if self.with_discriminator:
            # with_discriminator may be True (sigmoid) or an activation
            if self.with_discriminator is True:
                final_activation = 'sigmoid'
            else:
                final_activation = self.with_discriminator
            x = self.apply(
                inputs=x,
                layer=Dense,
                units=self.hidden_size,
                activation=self.hidden_act,
                kernel_initializer=self.initializer,
                name='Discriminator-Dense'
            )
            x = self.apply(
                inputs=x,
                layer=Dense,
                units=1,
                activation=final_activation,
                kernel_initializer=self.initializer,
                name='Discriminator-Prediction'
            )
        return x
    def load_variable(self, checkpoint, name):
        """Load a single variable, applying vocabulary surgery to the
        word embeddings.
        """
        variable = super(ELECTRA, self).load_variable(checkpoint, name)
        if name == 'electra/embeddings/word_embeddings':
            return self.load_embeddings(variable)
        else:
            return variable
    def variable_mapping(self):
        # reuse BERT's mapping with the 'electra/' naming scheme
        mapping = super(ELECTRA, self).variable_mapping()
        mapping['Embedding-Mapping'] = [
            'electra/embeddings_project/kernel',
            'electra/embeddings_project/bias',
        ]
        mapping = {
            k: [i.replace('bert/', 'electra/') for i in v]
            for k, v in mapping.items()
        }
        mapping['Discriminator-Dense'] = [
            'discriminator_predictions/dense/kernel',
            'discriminator_predictions/dense/bias',
        ]
        mapping['Discriminator-Prediction'] = [
            'discriminator_predictions/dense_1/kernel',
            'discriminator_predictions/dense_1/bias',
        ]
        return mapping
class GPT(LM_Mask, BERT):
    """GPT model.

    Link: https://github.com/openai/finetune-transformer-lm
    A BERT-style stack with a causal (LM) mask and a tied-embedding
    language-model head; Pooler/MLM constructor flags are removed.
    """
    @insert_arguments(final_activation='softmax')
    @delete_arguments('with_pool', 'with_mlm')
    def __init__(self, **kwargs):
        super(GPT, self).__init__(**kwargs)
    def apply_embeddings(self, inputs):
        """GPT's embedding is the sum of token, position and segment
        embeddings.

        Main difference from BERT: no LayerNormalization after the sum.
        """
        inputs = inputs[:]
        x = inputs.pop(0)
        if self.segment_vocab_size > 0:
            s = inputs.pop(0)
        if self.custom_position_ids:
            p = inputs.pop(0)
        else:
            p = None
        x = self.apply(
            inputs=x,
            layer=Embedding,
            input_dim=self.vocab_size,
            output_dim=self.embedding_size,
            embeddings_initializer=self.initializer,
            mask_zero=True,
            name='Embedding-Token'
        )
        if self.segment_vocab_size > 0:
            if self.shared_segment_embeddings:
                name = 'Embedding-Token'
            else:
                name = 'Embedding-Segment'
            s = self.apply(
                inputs=s,
                layer=Embedding,
                input_dim=self.segment_vocab_size,
                output_dim=self.embedding_size,
                embeddings_initializer=self.initializer,
                name=name
            )
            x = self.apply(
                inputs=[x, s], layer=Add, name='Embedding-Token-Segment'
            )
        x = self.apply(
            inputs=self.simplify([x, p]),
            layer=PositionEmbedding,
            input_dim=self.max_position,
            output_dim=self.embedding_size,
            merge_mode='add',
            hierarchical=self.hierarchical_position,
            embeddings_initializer=self.initializer,
            custom_position_ids=self.custom_position_ids,
            name='Embedding-Position'
        )
        x = self.apply(
            inputs=x,
            layer=Dropout,
            rate=self.dropout_rate,
            name='Embedding-Dropout'
        )
        if self.embedding_size != self.hidden_size:
            x = self.apply(
                inputs=x,
                layer=Dense,
                units=self.hidden_size,
                kernel_initializer=self.initializer,
                name='Embedding-Mapping'
            )
        return x
    def apply_final_layers(self, inputs):
        """Language-model head tied to the token embedding."""
        x = inputs
        # Language Model head
        x = self.apply(
            inputs=x,
            layer=Embedding,
            arguments={'mode': 'dense'},
            name='Embedding-Token'
        )
        x = self.apply(
            inputs=x,
            layer=Activation,
            activation=self.final_activation,
            name='LM-Activation'
        )
        return x
    def load_variable(self, checkpoint, name):
        """Load a single variable, applying vocabulary surgery to the
        word embeddings.
        """
        variable = super(GPT, self).load_variable(checkpoint, name)
        if name == 'gpt/embeddings/word_embeddings':
            return self.load_embeddings(variable)
        else:
            return variable
    def variable_mapping(self):
        """Map layer names to the TF GPT checkpoint layout."""
        mapping = super(GPT, self).variable_mapping()
        mapping = {
            k: [
                i.replace('bert/', 'gpt/').replace('encoder', 'transformer')
                for i in v
            ]
            for k, v in mapping.items()
        }
        return mapping
class GPT2(GPT):
    """GPT2 model.

    Link: https://github.com/openai/gpt-2
    Differs from GPT in taking only token_ids as input and in using the
    pre-LN block layout (LayerNorm before attention/FFN, epsilon=1e-5).
    """
    def get_inputs(self):
        """GPT2's only input is token_ids."""
        x_in = self.apply(
            layer=Input, shape=(self.sequence_length,), name='Input-Token'
        )
        return x_in
    def apply_embeddings(self, inputs):
        """GPT2's embedding is the sum of token and position embeddings."""
        x = inputs
        x = self.apply(
            inputs=x,
            layer=Embedding,
            input_dim=self.vocab_size,
            output_dim=self.embedding_size,
            embeddings_initializer=self.initializer,
            mask_zero=True,
            name='Embedding-Token'
        )
        x = self.apply(
            inputs=x,
            layer=PositionEmbedding,
            input_dim=self.max_position,
            output_dim=self.embedding_size,
            merge_mode='add',
            hierarchical=self.hierarchical_position,
            embeddings_initializer=self.initializer,
            name='Embedding-Position'
        )
        if self.embedding_size != self.hidden_size:
            x = self.apply(
                inputs=x,
                layer=Dense,
                units=self.hidden_size,
                kernel_initializer=self.initializer,
                name='Embedding-Mapping'
            )
        return x
    def apply_main_layers(self, inputs, index):
        """GPT2's body is a pre-LN self-attention block.

        Order: LN --> Att --> Add --> LN --> FFN --> Add
        """
        x = inputs
        z = self.layer_norm_conds[0]
        attention_name = 'Transformer-%d-MultiHeadSelfAttention' % index
        feed_forward_name = 'Transformer-%d-FeedForward' % index
        attention_mask = self.compute_attention_bias(index)
        # Self Attention
        xi = x
        x = self.apply(
            inputs=self.simplify([x, z]),
            layer=LayerNormalization,
            epsilon=1e-5,
            conditional=(z is not None),
            hidden_units=self.layer_norm_conds[1],
            hidden_activation=self.layer_norm_conds[2],
            hidden_initializer=self.initializer,
            name='%s-Norm' % attention_name
        )
        x = self.apply(
            inputs=[x, x, x, attention_mask],
            layer=MultiHeadAttention,
            arguments={'a_bias': True},
            heads=self.num_attention_heads,
            head_size=self.attention_head_size,
            out_dim=self.hidden_size,
            key_size=self.attention_key_size,
            kernel_initializer=self.initializer,
            name=attention_name
        )
        x = self.apply(
            inputs=x,
            layer=Dropout,
            rate=self.dropout_rate,
            name='%s-Dropout' % attention_name
        )
        x = self.apply(
            inputs=[xi, x], layer=Add, name='%s-Add' % attention_name
        )
        # Feed Forward
        xi = x
        x = self.apply(
            inputs=self.simplify([x, z]),
            layer=LayerNormalization,
            epsilon=1e-5,
            conditional=(z is not None),
            hidden_units=self.layer_norm_conds[1],
            hidden_activation=self.layer_norm_conds[2],
            hidden_initializer=self.initializer,
            name='%s-Norm' % feed_forward_name
        )
        x = self.apply(
            inputs=x,
            layer=FeedForward,
            units=self.intermediate_size,
            activation=self.hidden_act,
            kernel_initializer=self.initializer,
            name=feed_forward_name
        )
        x = self.apply(
            inputs=x,
            layer=Dropout,
            rate=self.dropout_rate,
            name='%s-Dropout' % feed_forward_name
        )
        x = self.apply(
            inputs=[xi, x], layer=Add, name='%s-Add' % feed_forward_name
        )
        return x
    def apply_final_layers(self, inputs):
        """Final LayerNorm and dropout, then GPT's tied LM head."""
        x = inputs
        z = self.layer_norm_conds[0]
        x = self.apply(
            inputs=self.simplify([x, z]),
            layer=LayerNormalization,
            epsilon=1e-5,
            conditional=(z is not None),
            hidden_units=self.layer_norm_conds[1],
            hidden_activation=self.layer_norm_conds[2],
            hidden_initializer=self.initializer,
            name='Output-Norm'
        )
        x = self.apply(
            inputs=x,
            layer=Dropout,
            rate=self.dropout_rate,
            name='Output-Dropout'
        )
        x = super(GPT2, self).apply_final_layers(x)
        return x
    def variable_mapping(self):
        """Map layer names to the TF GPT2 checkpoint layout."""
        mapping = super(GPT2, self).variable_mapping()
        # pre-LN: the norms live on the block inputs in the checkpoint
        mapping = {
            k: [i.replace('output/LayerNorm', 'input/LayerNorm') for i in v]
            for k, v in mapping.items()
        }
        mapping['Output-Norm'] = [
            'gpt/output/LayerNorm/beta',
            'gpt/output/LayerNorm/gamma',
        ]
        return mapping
class GPT2_ML(GPT):
    """GPT2_ML model.

    Link: https://github.com/imcaspar/gpt2-ml
    Note: despite the name, GPT2_ML's architecture is closer to GPT than
    to GPT2; it presumably calls itself GPT2 because its released model
    reaches GPT2's 1.5B parameter scale.
    """
    def get_inputs(self):
        """GPT2_ML's only input is token_ids."""
        x_in = self.apply(
            layer=Input, shape=(self.sequence_length,), name='Input-Token'
        )
        return x_in
    def apply_embeddings(self, inputs):
        """GPT2_ML's embedding is the sum of token and position embeddings,
        followed by LayerNorm (epsilon=1e-5).
        """
        x = inputs
        z = self.layer_norm_conds[0]
        x = self.apply(
            inputs=x,
            layer=Embedding,
            input_dim=self.vocab_size,
            output_dim=self.embedding_size,
            embeddings_initializer=self.initializer,
            mask_zero=True,
            name='Embedding-Token'
        )
        x = self.apply(
            inputs=x,
            layer=PositionEmbedding,
            input_dim=self.max_position,
            output_dim=self.embedding_size,
            merge_mode='add',
            hierarchical=self.hierarchical_position,
            embeddings_initializer=self.initializer,
            name='Embedding-Position'
        )
        x = self.apply(
            inputs=self.simplify([x, z]),
            layer=LayerNormalization,
            epsilon=1e-5,
            conditional=(z is not None),
            hidden_units=self.layer_norm_conds[1],
            hidden_activation=self.layer_norm_conds[2],
            hidden_initializer=self.initializer,
            name='Embedding-Norm'
        )
        if self.embedding_size != self.hidden_size:
            x = self.apply(
                inputs=x,
                layer=Dense,
                units=self.hidden_size,
                kernel_initializer=self.initializer,
                name='Embedding-Mapping'
            )
        return x
    def apply_main_layers(self, inputs, index):
        """GPT2_ML's body is a self-attention block.

        Order: Att --> Add --> LN --> FFN --> Add --> LN
        (both norms carry the FeedForward name, suffixed -0 and -1).
        """
        x = inputs
        z = self.layer_norm_conds[0]
        attention_name = 'Transformer-%d-MultiHeadSelfAttention' % index
        feed_forward_name = 'Transformer-%d-FeedForward' % index
        attention_mask = self.compute_attention_bias(index)
        # Self Attention
        xi, x, arguments = x, [x, x, x, attention_mask], {'a_bias': True}
        x = self.apply(
            inputs=x,
            layer=MultiHeadAttention,
            arguments=arguments,
            heads=self.num_attention_heads,
            head_size=self.attention_head_size,
            out_dim=self.hidden_size,
            key_size=self.attention_key_size,
            kernel_initializer=self.initializer,
            name=attention_name
        )
        x = self.apply(
            inputs=x,
            layer=Dropout,
            rate=self.dropout_rate,
            name='%s-Dropout' % attention_name
        )
        x = self.apply(
            inputs=[xi, x], layer=Add, name='%s-Add' % attention_name
        )
        # Feed Forward
        xi = x
        x = self.apply(
            inputs=self.simplify([x, z]),
            layer=LayerNormalization,
            epsilon=1e-5,
            conditional=(z is not None),
            hidden_units=self.layer_norm_conds[1],
            hidden_activation=self.layer_norm_conds[2],
            hidden_initializer=self.initializer,
            name='%s-Norm-0' % feed_forward_name
        )
        x = self.apply(
            inputs=x,
            layer=FeedForward,
            units=self.intermediate_size,
            activation=self.hidden_act,
            kernel_initializer=self.initializer,
            name=feed_forward_name
        )
        x = self.apply(
            inputs=x,
            layer=Dropout,
            rate=self.dropout_rate,
            name='%s-Dropout' % feed_forward_name
        )
        x = self.apply(
            inputs=[xi, x], layer=Add, name='%s-Add' % feed_forward_name
        )
        x = self.apply(
            inputs=self.simplify([x, z]),
            layer=LayerNormalization,
            epsilon=1e-5,
            conditional=(z is not None),
            hidden_units=self.layer_norm_conds[1],
            hidden_activation=self.layer_norm_conds[2],
            hidden_initializer=self.initializer,
            name='%s-Norm-1' % feed_forward_name
        )
        return x
    def load_variable(self, checkpoint, name):
        """Load a single variable, applying vocabulary surgery to the
        word embeddings.
        """
        variable = super(GPT2_ML, self).load_variable(checkpoint, name)
        if name == 'newslm/embeddings/word_embed':
            return self.load_embeddings(variable)
        else:
            return variable
    def variable_mapping(self):
        """Map layer names to the official GPT2_ML checkpoint layout."""
        mapping = {
            'Embedding-Token': ['newslm/embeddings/word_embed'],
            'Embedding-Position': ['newslm/embeddings/pos_embed'],
            'Embedding-Norm': [
                'newslm/embeddings/LayerNorm_embed_norm/beta',
                'newslm/embeddings/LayerNorm_embed_norm/gamma',
            ],
        }
        for i in range(self.num_hidden_layers):
            prefix = 'newslm/layer%02d/' % i
            mapping.update({
                'Transformer-%d-MultiHeadSelfAttention' % i: [
                    prefix + 'query_layer/kernel',
                    prefix + 'query_layer/bias',
                    prefix + 'key_layer/kernel',
                    prefix + 'key_layer/bias',
                    prefix + 'value_layer/kernel',
                    prefix + 'value_layer/bias',
                    prefix + 'context_projection_layer/kernel',
                    prefix + 'context_projection_layer/bias',
                ],
                'Transformer-%d-FeedForward-Norm-0' % i: [
                    prefix + 'LayerNorm_mlp_ln0/beta',
                    prefix + 'LayerNorm_mlp_ln0/gamma',
                ],
                'Transformer-%d-FeedForward' % i: [
                    prefix + 'intermediate/kernel',
                    prefix + 'intermediate/bias',
                    prefix + 'output/kernel',
                    prefix + 'output/bias',
                ],
                'Transformer-%d-FeedForward-Norm-1' % i: [
                    prefix + 'LayerNorm_mlp_ln1/beta',
                    prefix + 'LayerNorm_mlp_ln1/gamma',
                ],
            })
        return mapping
class T5_Base(Transformer):
    """Google's T5 model (base class).

    Note that T5 has two released variants: the original release is
    referred to as t5.1.0 and a later upgraded release as t5.1.1; the two
    differ slightly in structure.  The multilingual T5 released later also
    uses the t5.1.1 structure.
    t5.1.0: https://github.com/google-research/text-to-text-transfer-transformer
    t5.1.1: https://github.com/google-research/text-to-text-transfer-transformer/blob/master/released_checkpoints.md#t511
    multilingual-t5: https://github.com/google-research/multilingual-t5
    """
    @insert_arguments(version='t5.1.0')
    def __init__(self, **kwargs):
        super(T5_Base, self).__init__(**kwargs)
    def load_variable(self, checkpoint, name):
        """Load a single variable from the checkpoint.

        Token embeddings and the tied LM head are routed through
        ``load_embeddings``; relative-attention bias matrices are
        transposed to match this implementation's layout (see the mirror
        transpose in ``create_variable``).
        """
        variable = super(T5_Base, self).load_variable(checkpoint, name)
        if name == 'shared/embedding':
            return self.load_embeddings(variable)
        elif name == 'decoder/logits/kernel':
            # The LM head is tied to the embeddings; transpose, filter,
            # transpose back.
            return self.load_embeddings(variable.T).T
        elif 'relative_attention_bias' in name:
            return variable.T
        else:
            return variable
    def create_variable(self, name, value, dtype=None):
        """Create a variable in TensorFlow.

        Relative-attention biases are transposed back to the official
        checkpoint layout before being written.
        """
        if 'relative_attention_bias' in name:
            value = value.T
        return super(T5_Base, self).create_variable(name, value, dtype)
    def variable_mapping(self):
        """Map this model's layer names to the official T5 weight names."""
        mapping = {
            'Embedding-Token': ['shared/embedding'],
            'Encoder-Embedding-Relative-Position': [
                'encoder/block_000/layer_000/SelfAttention/relative_attention_bias'
            ],
            'Encoder-Output-Norm': ['encoder/final_layer_norm/scale'],
            'Decoder-Embedding-Relative-Position': [
                'decoder/block_000/layer_000/SelfAttention/relative_attention_bias',
            ],
            'Decoder-Output-Norm': ['decoder/final_layer_norm/scale'],
        }
        for i in range(self.num_hidden_layers):
            # Encoder body
            prefix = 'encoder/block_%03d/' % i
            mapping.update({
                'Encoder-Transformer-%d-MultiHeadSelfAttention' % i: [
                    prefix + 'layer_000/SelfAttention/q',
                    prefix + 'layer_000/SelfAttention/k',
                    prefix + 'layer_000/SelfAttention/v',
                    prefix + 'layer_000/SelfAttention/o',
                ],
                'Encoder-Transformer-%d-MultiHeadSelfAttention-Norm' % i: [
                    prefix + 'layer_000/layer_norm/scale',
                ],
                'Encoder-Transformer-%d-FeedForward' % i: [
                    prefix + 'layer_001/DenseReluDense/wi/kernel',
                    prefix + 'layer_001/DenseReluDense/wo/kernel',
                ],
                'Encoder-Transformer-%d-FeedForward-Norm' % i: [
                    prefix + 'layer_001/layer_norm/scale',
                ],
            })
            # Decoder body
            prefix = 'decoder/block_%03d/' % i
            mapping.update({
                'Decoder-Transformer-%d-MultiHeadSelfAttention' % i: [
                    prefix + 'layer_000/SelfAttention/q',
                    prefix + 'layer_000/SelfAttention/k',
                    prefix + 'layer_000/SelfAttention/v',
                    prefix + 'layer_000/SelfAttention/o',
                ],
                'Decoder-Transformer-%d-MultiHeadSelfAttention-Norm' % i: [
                    prefix + 'layer_000/layer_norm/scale',
                ],
                'Decoder-Transformer-%d-MultiHeadCrossAttention' % i: [
                    prefix + 'layer_001/EncDecAttention/q',
                    prefix + 'layer_001/EncDecAttention/k',
                    prefix + 'layer_001/EncDecAttention/v',
                    prefix + 'layer_001/EncDecAttention/o',
                ],
                'Decoder-Transformer-%d-MultiHeadCrossAttention-Norm' % i: [
                    prefix + 'layer_001/layer_norm/scale',
                ],
                'Decoder-Transformer-%d-FeedForward' % i: [
                    prefix + 'layer_002/DenseReluDense/wi/kernel',
                    prefix + 'layer_002/DenseReluDense/wo/kernel',
                ],
                'Decoder-Transformer-%d-FeedForward-Norm' % i: [
                    prefix + 'layer_002/layer_norm/scale',
                ],
            })
        if self.version == 't5.1.1':
            # t5.1.1 renames layer_norm -> rms_norm, has an untied LM head,
            # and splits the feed-forward input kernel into wi_0 / wi_1.
            mapping['Encoder-Output-Norm'] = ['encoder/rms_norm/scale']
            mapping['Decoder-Output-Norm'] = ['decoder/rms_norm/scale']
            mapping['Decoder-Output-LM'] = ['decoder/logits/kernel']
            mapping = {
                k: [i.replace('layer_norm', 'rms_norm') for i in v]
                for k, v in mapping.items()
            }
            for i in range(self.num_hidden_layers):
                for layer in [
                    'Encoder-Transformer-%d-FeedForward' % i,
                    'Decoder-Transformer-%d-FeedForward' % i
                ]:
                    # Rewrite '.../wi/kernel' into '.../wi_0/kernel' and
                    # '.../wi_1/kernel' (insert before the '/kernel' suffix).
                    mapping[layer] = [
                        mapping[layer][0][:-7] + '_0' + mapping[layer][0][-7:],
                        mapping[layer][0][:-7] + '_1' + mapping[layer][0][-7:],
                        mapping[layer][1]
                    ]
        return mapping
class T5_Encoder(T5_Base):
    """Google's T5 model (Encoder).
    """
    def get_inputs(self):
        """The T5 encoder's only input is token_ids.
        """
        x_in = self.apply(
            layer=Input,
            shape=(self.sequence_length,),
            name='Encoder-Input-Token'
        )
        return x_in
    def apply_embeddings(self, inputs):
        """T5 uses only a token embedding; the relative position
        embedding is prepared separately for the attention layers to use.
        """
        x = inputs
        x = self.apply(
            inputs=x,
            layer=Embedding,
            input_dim=self.vocab_size,
            output_dim=self.embedding_size,
            embeddings_initializer=self.initializer,
            mask_zero=True,
            name='Embedding-Token'
        )
        x = self.apply(
            inputs=x,
            layer=Dropout,
            rate=self.dropout_rate,
            name='Encoder-Embedding-Dropout'
        )
        # Project into the hidden size when the embedding is factorized.
        if self.embedding_size != self.hidden_size:
            x = self.apply(
                inputs=x,
                layer=Dense,
                units=self.hidden_size,
                kernel_initializer=self.initializer,
                name='Encoder-Embedding-Mapping'
            )
        return x
    def apply_main_layers(self, inputs, index):
        """The T5 encoder body is a self-attention block.
        Order: LN --> Att --> Add --> LN --> FFN --> Add (pre-norm).
        """
        x = inputs
        z = self.layer_norm_conds[0]
        attention_name = 'Encoder-Transformer-%d-MultiHeadSelfAttention' % index
        feed_forward_name = 'Encoder-Transformer-%d-FeedForward' % index
        attention_mask = self.compute_attention_bias(index)
        position_bias = self.compute_position_bias(x)
        # Self Attention
        xi = x
        x = self.apply(
            inputs=self.simplify([x, z]),
            layer=LayerNormalization,
            center=False,
            epsilon=1e-6,
            conditional=(z is not None),
            hidden_units=self.layer_norm_conds[1],
            hidden_activation=self.layer_norm_conds[2],
            hidden_initializer=self.initializer,
            name='%s-Norm' % attention_name
        )
        x = self.apply(
            inputs=[x, x, x, position_bias],
            layer=MultiHeadAttention,
            arguments={'p_bias': 't5_relative'},
            heads=self.num_attention_heads,
            head_size=self.attention_head_size,
            out_dim=self.hidden_size,
            key_size=self.attention_key_size,
            use_bias=False,
            attention_scale=False,
            kernel_initializer=self.initializer,
            name=attention_name
        )
        x = self.apply(
            inputs=x,
            layer=Dropout,
            rate=self.dropout_rate,
            name='%s-Dropout' % attention_name
        )
        x = self.apply(
            inputs=[xi, x], layer=Add, name='%s-Add' % attention_name
        )
        # Feed Forward
        xi = x
        x = self.apply(
            inputs=self.simplify([x, z]),
            layer=LayerNormalization,
            center=False,
            epsilon=1e-6,
            conditional=(z is not None),
            hidden_units=self.layer_norm_conds[1],
            hidden_activation=self.layer_norm_conds[2],
            hidden_initializer=self.initializer,
            name='%s-Norm' % feed_forward_name
        )
        x = self.apply(
            inputs=x,
            layer=FeedForward,
            units=self.intermediate_size,
            activation=self.hidden_act,
            use_bias=False,
            kernel_initializer=self.initializer,
            name=feed_forward_name
        )
        x = self.apply(
            inputs=x,
            layer=Dropout,
            rate=self.dropout_rate,
            name='%s-Dropout' % feed_forward_name
        )
        x = self.apply(
            inputs=[xi, x], layer=Add, name='%s-Add' % feed_forward_name
        )
        return x
    def apply_final_layers(self, inputs):
        """Remaining output layers (final norm + dropout).
        """
        x = inputs
        z = self.layer_norm_conds[0]
        x = self.apply(
            inputs=self.simplify([x, z]),
            layer=LayerNormalization,
            center=False,
            epsilon=1e-6,
            conditional=(z is not None),
            hidden_units=self.layer_norm_conds[1],
            hidden_activation=self.layer_norm_conds[2],
            hidden_initializer=self.initializer,
            name='Encoder-Output-Norm'
        )
        x = self.apply(
            inputs=x,
            layer=Dropout,
            rate=self.dropout_rate,
            name='Encoder-Output-Dropout'
        )
        return x
    def compute_position_bias(self, inputs=None):
        """T5 relative position encoding (computed once, then cached).
        """
        if self.position_bias is None:
            x = inputs
            p = self.apply(
                inputs=[x, x],
                layer=RelativePositionEmbeddingT5,
                input_dim=32,
                output_dim=self.num_attention_heads,
                bidirectional=True,
                embeddings_initializer=self.initializer,
                name='Encoder-Embedding-Relative-Position'
            )
            self.position_bias = p
        return self.position_bias
class T5_Decoder(LM_Mask, T5_Base):
    """Google's T5 model (Decoder).
    """
    def __init__(self, with_lm=True, **kwargs):
        # with_lm: True for a softmax LM head, or an activation name/fn.
        super(T5_Decoder, self).__init__(**kwargs)
        self.with_lm = with_lm
    def get_inputs(self):
        """The T5 decoder's inputs are the encoded context sequence
        and the decoder token_ids.
        """
        c_in = self.apply(
            layer=Input,
            shape=(self.sequence_length, self.hidden_size),
            name='Input-Context'
        )
        x_in = self.apply(
            layer=Input,
            shape=(self.sequence_length,),
            name='Decoder-Input-Token'
        )
        return [c_in, x_in]
    def apply_embeddings(self, inputs):
        """T5 uses only a token embedding; the relative position
        embedding is prepared separately for the attention layers to use.
        """
        c, x = inputs
        x = self.apply(
            inputs=x,
            layer=Embedding,
            input_dim=self.vocab_size,
            output_dim=self.embedding_size,
            embeddings_initializer=self.initializer,
            mask_zero=True,
            name='Embedding-Token'
        )
        x = self.apply(
            inputs=x,
            layer=Dropout,
            rate=self.dropout_rate,
            name='Decoder-Embedding-Dropout'
        )
        if self.embedding_size != self.hidden_size:
            x = self.apply(
                inputs=x,
                layer=Dense,
                units=self.hidden_size,
                kernel_initializer=self.initializer,
                name='Decoder-Embedding-Mapping'
            )
        return [c, x]
    def apply_main_layers(self, inputs, index):
        """The T5 decoder body is a self-attention + cross-attention block.
        Order: LN --> Att1 --> Add --> LN --> Att2 --> Add --> LN --> FFN --> Add
        """
        c, x = inputs
        z = self.layer_norm_conds[0]
        self_attention_name = 'Decoder-Transformer-%d-MultiHeadSelfAttention' % index
        cross_attention_name = 'Decoder-Transformer-%d-MultiHeadCrossAttention' % index
        feed_forward_name = 'Decoder-Transformer-%d-FeedForward' % index
        attention_mask = self.compute_attention_bias(index)
        position_bias = self.compute_position_bias([x, c])
        # Self Attention (causal, via attention_mask from LM_Mask)
        xi = x
        x = self.apply(
            inputs=self.simplify([x, z]),
            layer=LayerNormalization,
            center=False,
            epsilon=1e-6,
            conditional=(z is not None),
            hidden_units=self.layer_norm_conds[1],
            hidden_activation=self.layer_norm_conds[2],
            hidden_initializer=self.initializer,
            name='%s-Norm' % self_attention_name
        )
        x = self.apply(
            inputs=[x, x, x, attention_mask, position_bias[0]],
            layer=MultiHeadAttention,
            arguments={
                'a_bias': True,
                'p_bias': 't5_relative'
            },
            heads=self.num_attention_heads,
            head_size=self.attention_head_size,
            out_dim=self.hidden_size,
            key_size=self.attention_key_size,
            use_bias=False,
            attention_scale=False,
            kernel_initializer=self.initializer,
            name=self_attention_name
        )
        x = self.apply(
            inputs=x,
            layer=Dropout,
            rate=self.dropout_rate,
            name='%s-Dropout' % self_attention_name
        )
        x = self.apply(
            inputs=[xi, x], layer=Add, name='%s-Add' % self_attention_name
        )
        # Cross Attention (queries from decoder, keys/values from context)
        xi = x
        x = self.apply(
            inputs=self.simplify([x, z]),
            layer=LayerNormalization,
            center=False,
            epsilon=1e-6,
            conditional=(z is not None),
            hidden_units=self.layer_norm_conds[1],
            hidden_activation=self.layer_norm_conds[2],
            hidden_initializer=self.initializer,
            name='%s-Norm' % cross_attention_name
        )
        x = self.apply(
            inputs=[x, c, c, position_bias[1]],
            layer=MultiHeadAttention,
            arguments={
                'a_bias': None,
                'p_bias': 't5_relative'
            },
            heads=self.num_attention_heads,
            head_size=self.attention_head_size,
            out_dim=self.hidden_size,
            key_size=self.attention_key_size,
            use_bias=False,
            attention_scale=False,
            kernel_initializer=self.initializer,
            name=cross_attention_name
        )
        x = self.apply(
            inputs=x,
            layer=Dropout,
            rate=self.dropout_rate,
            name='%s-Dropout' % cross_attention_name
        )
        x = self.apply(
            inputs=[xi, x], layer=Add, name='%s-Add' % cross_attention_name
        )
        # Feed Forward
        xi = x
        x = self.apply(
            inputs=self.simplify([x, z]),
            layer=LayerNormalization,
            center=False,
            epsilon=1e-6,
            conditional=(z is not None),
            hidden_units=self.layer_norm_conds[1],
            hidden_activation=self.layer_norm_conds[2],
            hidden_initializer=self.initializer,
            name='%s-Norm' % feed_forward_name
        )
        x = self.apply(
            inputs=x,
            layer=FeedForward,
            units=self.intermediate_size,
            activation=self.hidden_act,
            use_bias=False,
            kernel_initializer=self.initializer,
            name=feed_forward_name
        )
        x = self.apply(
            inputs=x,
            layer=Dropout,
            rate=self.dropout_rate,
            name='%s-Dropout' % feed_forward_name
        )
        x = self.apply(
            inputs=[xi, x], layer=Add, name='%s-Add' % feed_forward_name
        )
        return [c, x]
    def apply_final_layers(self, inputs):
        """Remaining output layers (final norm, scaling, optional LM head).
        """
        c, x = inputs
        z = self.layer_norm_conds[0]
        x = self.apply(
            inputs=self.simplify([x, z]),
            layer=LayerNormalization,
            center=False,
            epsilon=1e-6,
            conditional=(z is not None),
            hidden_units=self.layer_norm_conds[1],
            hidden_activation=self.layer_norm_conds[2],
            hidden_initializer=self.initializer,
            name='Decoder-Output-Norm'
        )
        x = self.apply(
            inputs=x,
            layer=Dropout,
            rate=self.dropout_rate,
            name='Decoder-Output-Dropout'
        )
        # Official T5 scales the decoder output by 1/sqrt(hidden_size)
        # before the (tied) LM head.
        x = self.apply(
            inputs=x,
            layer=Lambda,
            function=lambda x: x / np.sqrt(self.hidden_size),
            mask=lambda i, m: m,
            name='Decoder-Output-Scale'
        )
        if self.with_lm:
            # Token-probability prediction head
            if self.embedding_size != self.hidden_size:
                x = self.apply(
                    inputs=x,
                    layer=Dense,
                    units=self.embedding_size,
                    kernel_initializer=self.initializer,
                    name='Decoder-Output-Mapping'
                )
            lm_activation = 'softmax' if self.with_lm is True else self.with_lm
            if self.version == 't5.1.0':
                # t5.1.0 ties the LM head to the token embedding.
                x = self.apply(
                    inputs=x,
                    layer=Embedding,
                    arguments={'mode': 'dense'},
                    name='Embedding-Token'
                )
                # NOTE(review): 'Dencoder' is a typo, but the layer name is
                # kept as-is for backward compatibility — confirm before
                # renaming.
                x = self.apply(
                    inputs=x,
                    layer=Activation,
                    activation=lm_activation,
                    name='Dencoder-Output-LM-Activation'
                )
            else:
                # t5.1.1 uses an independent (untied) LM projection.
                x = self.apply(
                    inputs=x,
                    layer=Dense,
                    units=self.vocab_size,
                    activation=lm_activation,
                    use_bias=False,
                    kernel_initializer=self.initializer,
                    name='Decoder-Output-LM'
                )
        return x
    def compute_attention_bias(self, inputs=None):
        """Adjust the LM Mask's sequence length (use self.inputs[1],
        the decoder tokens, instead of self.inputs[0], the context).
        """
        old_inputs = self.inputs[:]
        self.inputs = [old_inputs[1]]
        mask = super(T5_Decoder, self).compute_attention_bias(inputs)
        self.inputs = old_inputs
        return mask
    def compute_position_bias(self, inputs=None):
        """T5 relative position encoding, one bias for self-attention and
        one for cross-attention (computed once, then cached).
        """
        if self.position_bias is None:
            x, c = inputs
            p1 = self.apply(
                inputs=[x, x],
                layer=RelativePositionEmbeddingT5,
                input_dim=32,
                output_dim=self.num_attention_heads,
                bidirectional=False,
                embeddings_initializer=self.initializer,
                name='Decoder-Embedding-Relative-Position'
            )
            # NOTE(review): same layer name as p1 — presumably apply()
            # caches layers by name so both biases share one embedding
            # layer; confirm against the Transformer base class.
            p2 = self.apply(
                inputs=[x, c],
                layer=RelativePositionEmbeddingT5,
                input_dim=32,
                output_dim=self.num_attention_heads,
                bidirectional=False,
                embeddings_initializer=self.initializer,
                name='Decoder-Embedding-Relative-Position'
            )
            self.position_bias = (p1, p2)
        return self.position_bias
class T5(T5_Base):
    """Google's T5 model (Encoder-Decoder).
    """
    def __init__(self, **kwargs):
        super(T5, self).__init__(**kwargs)
        # Share this instance's layer cache so encoder and decoder reuse
        # common layers (e.g. the token embedding).
        kwargs['layers'] = self.layers
        e_name, d_name = 'Encoder', 'Decoder'
        if 'name' in kwargs:
            e_name = '%s_%s' % (kwargs['name'], e_name)
            d_name = '%s_%s' % (kwargs['name'], d_name)
            del kwargs['name']  # avoid passing 'name' twice
        self._encoder = T5_Encoder(name=e_name, **kwargs)
        self._decoder = T5_Decoder(name=d_name, **kwargs)
    def build(self, **kwargs):
        """Build the Encoder and Decoder together and join them into a
        single end-to-end Keras model.
        """
        self._encoder.build(**kwargs)
        self._decoder.build(**kwargs)
        self.encoder = self._encoder.model
        self.decoder = self._decoder.model
        # Combined inputs: encoder tokens + decoder tokens (the decoder's
        # first input is the encoder output, fed internally).
        self.inputs = self.encoder.inputs + self.decoder.inputs[1:]
        self.outputs = self.decoder(
            self.encoder.outputs + self.decoder.inputs[1:]
        )
        self.model = Model(self.inputs, self.outputs)
def extend_with_language_model(BaseModel):
    """Add a lower-triangular attention mask (for language modelling).
    """
    class LanguageModel(LM_Mask, BaseModel):
        """Derived model with a lower-triangular attention mask.
        """
        def __init__(self, *args, **kwargs):
            super(LanguageModel, self).__init__(*args, **kwargs)
            # Force an LM head: keep a caller-supplied truthy with_mlm
            # (e.g. an activation name), otherwise default to True.
            self.with_mlm = self.with_mlm or True
    return LanguageModel
def extend_with_unified_language_model(BaseModel):
    """Add UniLM's attention mask (for Seq2Seq models).
    """
    class UnifiedLanguageModel(UniLM_Mask, BaseModel):
        """Derived model with UniLM's attention mask.
        UniLM: https://arxiv.org/abs/1905.03197
        """
        def __init__(self, *args, **kwargs):
            super(UnifiedLanguageModel, self).__init__(*args, **kwargs)
            # Force an LM head: keep a caller-supplied truthy with_mlm,
            # otherwise default to True.
            self.with_mlm = self.with_mlm or True
    return UnifiedLanguageModel
def build_transformer_model(
    config_path=None,
    checkpoint_path=None,
    model='bert',
    application='encoder',
    return_keras_model=True,
    **kwargs
):
    """Build a transformer model from a config file, optionally loading
    checkpoint weights.

    :param config_path: path to a JSON config file (optional).
    :param checkpoint_path: path to a TF checkpoint to load (optional).
    :param model: a model-name string (key of ``models`` below) or a
        Transformer subclass.
    :param application: 'encoder', 'lm' or 'unilm'.
    :param return_keras_model: if True return the Keras model, otherwise
        the Transformer wrapper instance.
    """
    configs = {}
    if config_path is not None:
        # Use a context manager so the config file handle is closed
        # (the previous json.load(open(...)) leaked it).
        with open(config_path) as f:
            configs.update(json.load(f))
    configs.update(kwargs)
    # Normalize config keys that differ between checkpoint releases.
    if 'max_position' not in configs:
        configs['max_position'] = configs.get('max_position_embeddings', 512)
    if 'dropout_rate' not in configs:
        configs['dropout_rate'] = configs.get('hidden_dropout_prob')
    if 'segment_vocab_size' not in configs:
        configs['segment_vocab_size'] = configs.get('type_vocab_size', 2)
    models = {
        'bert': BERT,
        'albert': ALBERT,
        'albert_unshared': ALBERT_Unshared,
        'roberta': BERT,
        'nezha': NEZHA,
        'electra': ELECTRA,
        'gpt': GPT,
        'gpt2': GPT2,
        'gpt2_ml': GPT2_ML,
        't5': T5,
        't5_encoder': T5_Encoder,
        't5_decoder': T5_Decoder,
        't5.1.0': T5,
        't5.1.0_encoder': T5_Encoder,
        't5.1.0_decoder': T5_Decoder,
        't5.1.1': T5,
        't5.1.1_encoder': T5_Encoder,
        't5.1.1_decoder': T5_Decoder,
    }
    if is_string(model):
        model = model.lower()
        MODEL = models[model]
    else:
        MODEL = model
    application = application.lower()
    if application in ['lm', 'unilm'] and model in ['electra', 't5']:
        raise ValueError(
            '"%s" model can not be used as "%s" application.\n' %
            (model, application)
        )
    if application == 'lm':
        MODEL = extend_with_language_model(MODEL)
    elif application == 'unilm':
        MODEL = extend_with_unified_language_model(MODEL)
    # BUGFIX: guard with is_string — when a class is passed as `model`,
    # calling model.startswith() raised AttributeError.
    if is_string(model) and model.startswith('t5.1.1'):
        configs['version'] = 't5.1.1'
    transformer = MODEL(**configs)
    # Build the model via the Transformer class's build() method.
    transformer.build(**configs)
    if checkpoint_path is not None:
        transformer.load_weights_from_checkpoint(checkpoint_path)
    if return_keras_model:
        return transformer.model
    else:
        return transformer
| 33.316331 | 121 | 0.536853 | [
"Apache-2.0"
] | CurisZhou/bert4keras | bert4keras/models.py | 80,151 | Python |
from mahotas import cwatershed
from mala.losses import ultrametric_loss_op
from scipy.ndimage.filters import gaussian_filter
from scipy.ndimage.filters import maximum_filter
from scipy.ndimage.morphology import distance_transform_edt
import gunpowder as gp
import json
import numpy as np
import skelerator
import tensorflow as tf
import logging
# Configure root logging once at import time.
logging.basicConfig(level=logging.INFO)
# Mapping of logical tensor names -> TF graph tensor names, loaded at
# import time (NOTE(review): presumably written by the net-construction
# script — confirm).
with open("tensor_names.json", "r") as f:
    tensor_names = json.load(f)
class Synthetic2DSource(gp.BatchProvider):
    """Gunpowder source that synthesizes 2D label/raw image pairs.

    Labels are generated by drawing random skeletons, seeding a noisy
    distance transform with them, and running a watershed; the raw image
    assigns each label a distinct gray value.
    """
    def __init__(self, raw, gt, smoothness=1.0, n_objects=3, points_per_skeleton=10):
        # raw, gt: gp.ArrayKey outputs this provider serves.
        # smoothness: gaussian sigma applied to the noise field.
        # n_objects: number of watershed objects (first acts as background).
        # points_per_skeleton: skeleton control points per non-background object.
        self.raw = raw
        self.gt = gt
        self.smoothness = smoothness
        self.n_objects = n_objects
        self.points_per_skeleton = points_per_skeleton
    def setup(self):
        """Declare the provided arrays over a fixed 1000x1000 ROI."""
        self.provides(
            self.raw,
            gp.ArraySpec(
                roi=gp.Roi((0, 0), (1000, 1000)),
                dtype=np.uint8,
                interpolatable=True,
                voxel_size=(1, 1),
            ),
        )
        self.provides(
            self.gt,
            gp.ArraySpec(
                roi=gp.Roi((0, 0), (1000, 1000)),
                dtype=np.uint64,
                interpolatable=False,
                voxel_size=(1, 1),
            ),
        )
    def provide(self, request):
        """Synthesize one batch matching the requested ROIs."""
        voxel_size = self.spec[self.raw].voxel_size
        # Work in 3D with a singleton leading axis (skelerator draws in 3D).
        shape = gp.Coordinate((1,) + request[self.raw].roi.get_shape())
        noise = np.abs(np.random.randn(*shape))
        smoothed_noise = gaussian_filter(noise, sigma=self.smoothness)
        seeds = np.zeros(shape, dtype=int)
        for i in range(self.n_objects):
            # Object 0 gets many more points: it becomes the "background".
            if i == 0:
                num_points = 100
            else:
                num_points = self.points_per_skeleton
            points = np.stack(
                [np.random.randint(0, shape[dim], num_points) for dim in range(3)],
                axis=1,
            )
            tree = skelerator.Tree(points)
            skeleton = skelerator.Skeleton(
                tree, [1, 1, 1], "linear", generate_graph=False
            )
            seeds = skeleton.draw(seeds, np.array([0, 0, 0]), i + 1)
        # Thin the seeds: keep only local maxima of the label image.
        seeds[maximum_filter(seeds, size=4) != seeds] = 0
        seeds_dt = distance_transform_edt(seeds == 0) + 5.0 * smoothed_noise
        # Watershed on the noisy distance transform; shift labels to 0-based.
        gt_data = cwatershed(seeds_dt, seeds).astype(np.uint64)[0] - 1
        labels = np.unique(gt_data)
        raw_data = np.zeros_like(gt_data, dtype=np.uint8)
        value = 0
        for label in labels:
            # Distinct gray level per object.
            raw_data[gt_data == label] = value
            value += 255.0 / self.n_objects
        spec = request[self.raw].copy()
        spec.voxel_size = (1, 1)
        raw = gp.Array(raw_data, spec)
        spec = request[self.gt].copy()
        spec.voxel_size = (1, 1)
        # Crop gt to its (possibly smaller) requested ROI, relative to raw.
        gt_crop = (
            request[self.gt].roi - request[self.raw].roi.get_begin()
        ) / voxel_size
        gt_crop = gt_crop.to_slices()
        gt = gp.Array(gt_data[gt_crop], spec)
        batch = gp.Batch()
        batch[self.raw] = raw
        batch[self.gt] = gt
        return batch
# Graph tensor names produced inside ultrametric_loss_op; asserted below so
# the training pipeline can fetch them by name.
emst_name = "PyFuncStateless:0"
edges_u_name = "Gather:0"
edges_v_name = "Gather_1:0"
def add_loss(graph):
    """Attach the ultrametric + foreground loss and an Adam optimizer to
    the imported graph; returns (loss, optimizer) for gunpowder's Train node.
    """
    # k, h, w
    embedding = graph.get_tensor_by_name(tensor_names["embedding"])
    # h, w
    fg = graph.get_tensor_by_name(tensor_names["fg"])
    # h, w
    gt_labels = graph.get_tensor_by_name(tensor_names["gt_labels"])
    # h, w
    gt_fg = tf.greater(gt_labels, 0, name="gt_fg")
    # h, w
    shape = tuple(fg.get_shape().as_list())
    # 1, 1, h, w — 10x10 max-pool used to detect local maxima of fg.
    maxima = tf.nn.pool(
        tf.reshape(fg, (1, 1) + shape),
        [10, 10],
        "MAX",
        "SAME",
        strides=[1, 1],
        data_format="NCHW",
    )
    # h, w — a pixel is a maximum iff it equals its neighborhood max.
    maxima = tf.reshape(tf.equal(fg, maxima), shape, name="maxima")
    # 1, k, h, w
    embedding = tf.reshape(embedding, (1,) + tuple(embedding.get_shape().as_list()))
    # k, 1, h, w
    embedding = tf.transpose(embedding, perm=[1, 0, 2, 3])
    um_loss, emst, edges_u, edges_v, _ = ultrametric_loss_op(
        embedding, gt_labels, mask=maxima, coordinate_scale=0.01
    )
    # The pipeline fetches these tensors by hard-coded name; fail fast if
    # the op implementation changes.
    assert emst.name == emst_name
    assert edges_u.name == edges_u_name
    assert edges_v.name == edges_v_name
    fg_loss = tf.losses.mean_squared_error(gt_fg, fg)
    # higher learning rate for fg network
    loss = um_loss + 10 * fg_loss
    opt = tf.train.AdamOptimizer(
        learning_rate=0.5e-5, beta1=0.95, beta2=0.999, epsilon=1e-8
    )
    optimizer = opt.minimize(loss)
    return (loss, optimizer)
def train(n_iterations):
    """Run the gunpowder training pipeline for n_iterations batches.

    Wires the synthetic source into a TF Train node (using ``add_loss``)
    and writes an HDF snapshot of all tensors every 100 iterations.
    """
    # Array keys flowing through the pipeline.
    raw = gp.ArrayKey("RAW")
    gt = gp.ArrayKey("GT")
    gt_fg = gp.ArrayKey("GT_FP")
    embedding = gp.ArrayKey("EMBEDDING")
    fg = gp.ArrayKey("FG")
    maxima = gp.ArrayKey("MAXIMA")
    gradient_embedding = gp.ArrayKey("GRADIENT_EMBEDDING")
    gradient_fg = gp.ArrayKey("GRADIENT_FG")
    emst = gp.ArrayKey("EMST")
    edges_u = gp.ArrayKey("EDGES_U")
    edges_v = gp.ArrayKey("EDGES_V")
    # Per-batch request: raw is larger than gt (network context margin).
    request = gp.BatchRequest()
    request.add(raw, (200, 200))
    request.add(gt, (160, 160))
    # Extra arrays only materialized when a snapshot is written.
    snapshot_request = gp.BatchRequest()
    snapshot_request[embedding] = request[gt]
    snapshot_request[fg] = request[gt]
    snapshot_request[gt_fg] = request[gt]
    snapshot_request[maxima] = request[gt]
    snapshot_request[gradient_embedding] = request[gt]
    snapshot_request[gradient_fg] = request[gt]
    snapshot_request[emst] = gp.ArraySpec()
    snapshot_request[edges_u] = gp.ArraySpec()
    snapshot_request[edges_v] = gp.ArraySpec()
    pipeline = (
        Synthetic2DSource(raw, gt)
        + gp.Normalize(raw)
        + gp.tensorflow.Train(
            "train_net",
            optimizer=add_loss,
            loss=None,
            inputs={tensor_names["raw"]: raw, tensor_names["gt_labels"]: gt},
            outputs={
                tensor_names["embedding"]: embedding,
                tensor_names["fg"]: fg,
                "maxima:0": maxima,
                "gt_fg:0": gt_fg,
                emst_name: emst,
                edges_u_name: edges_u,
                edges_v_name: edges_v,
            },
            gradients={
                tensor_names["embedding"]: gradient_embedding,
                tensor_names["fg"]: gradient_fg,
            },
        )
        + gp.Snapshot(
            output_filename="{iteration}.hdf",
            dataset_names={
                raw: "volumes/raw",
                gt: "volumes/gt",
                embedding: "volumes/embedding",
                fg: "volumes/fg",
                maxima: "volumes/maxima",
                gt_fg: "volumes/gt_fg",
                gradient_embedding: "volumes/gradient_embedding",
                gradient_fg: "volumes/gradient_fg",
                emst: "emst",
                edges_u: "edges_u",
                edges_v: "edges_v",
            },
            dataset_dtypes={maxima: np.float32, gt_fg: np.float32},
            every=100,
            additional_request=snapshot_request,
        )
    )
    with gp.build(pipeline):
        for i in range(n_iterations):
            pipeline.request_batch(request)
if __name__ == "__main__":
    # Effectively "train forever"; snapshots are written every 100
    # iterations by the Snapshot node inside train().
    train(1000000)
| 29.364754 | 85 | 0.573203 | [
"MIT"
] | pattonw/mouselight | synthetic/blobs/train.py | 7,165 | Python |
from collections import defaultdict
from django.conf import settings
from prices import Price
from satchless.process import ProcessManager
from .steps import ShippingAddressStep, ShippingMethodStep, SummaryStep, DetailsStep
from ..cart import Cart
from ..core import analytics
from ..order.models import Order
STORAGE_SESSION_KEY = 'checkout_storage'
class CheckoutStorage(defaultdict):
    """Session-backed checkout state: a ``defaultdict`` whose missing keys
    materialize as empty dicts (one sub-dict per checkout step).
    """
    # Dirty flag; False until a step marks the storage as changed.
    modified = False

    def __init__(self, *args, **kwargs):
        # First positional argument of defaultdict is the default factory.
        super(CheckoutStorage, self).__init__(dict, *args, **kwargs)
class Checkout(ProcessManager):
    """Multi-step checkout process backed by session storage.

    Builds the ordered list of steps (shipping address/method only when
    the cart needs shipping, then summary) and exposes totals, order
    creation and step validation.
    """
    steps = None
    def __init__(self, request):
        self.request = request
        self.steps = []
        # Restore per-step state from the session, or start fresh.
        try:
            self.storage = CheckoutStorage(
                request.session[STORAGE_SESSION_KEY])
        except KeyError:
            self.storage = CheckoutStorage()
        self.cart = Cart.for_session_cart(request.cart,
                                          discounts=request.discounts)
        self.generate_steps(self.cart)
    def __iter__(self):
        return iter(self.steps)
    def generate_steps(self, cart):
        """Populate self.steps for the given cart (shipping steps are
        added only when the cart requires shipping)."""
        self.cart = cart
        # NOTE(review): details_step is created but never appended to
        # self.steps here — confirm whether that is intentional.
        self.details_step = DetailsStep(
            self.request, self.storage['details'], checkout=self)
        if self.is_shipping_required():
            self.shipping_address_step = ShippingAddressStep(
                self.request, self.storage['shipping_address'], checkout=self)
            shipping_address = self.shipping_address_step.address
            self.steps.append(self.shipping_address_step)
            self.shipping_method_step = ShippingMethodStep(
                self.request, self.storage['shipping_method'],
                shipping_address, self.cart, checkout=self)
            self.steps.append(self.shipping_method_step)
        else:
            shipping_address = None
            self.shipping_address_step = None
            self.shipping_method_step = None
        summary_step = SummaryStep(self.request, self.storage['summary'],
                                   shipping_address, checkout=self)
        self.steps.append(summary_step)
    def get_total(self, **kwargs):
        """Return the grand total (items + shipping) over all deliveries."""
        zero = Price(0, currency=settings.DEFAULT_CURRENCY)
        cost_iterator = (total_with_shipping
                         for shipping, shipping_cost, total_with_shipping
                         in self.get_deliveries(**kwargs))
        total = sum(cost_iterator, zero)
        return total
    def save(self):
        # Persist step state back into the session.
        self.request.session[STORAGE_SESSION_KEY] = dict(self.storage)
    def clear_storage(self):
        """Drop session checkout state and empty the cart."""
        try:
            del self.request.session[STORAGE_SESSION_KEY]
        except KeyError:
            pass
        self.cart.clear()
    def is_shipping_required(self):
        return self.cart.is_shipping_required()
    def get_deliveries(self, **kwargs):
        """Yield (partition, shipping_cost, total_with_shipping) per
        cart partition; shipping cost is zero until a method is chosen."""
        for partition in self.cart.partition():
            if (self.shipping_address_step and
                    self.shipping_method_step.shipping_method):
                shipping_method = self.shipping_method_step.shipping_method
                shipping_cost = shipping_method.get_delivery_total(partition)
            else:
                shipping_cost = Price(0, currency=settings.DEFAULT_CURRENCY)
            total_with_shipping = partition.get_total(**kwargs) + shipping_cost
            yield partition, shipping_cost, total_with_shipping
    def create_order(self):
        """Materialize an Order from the completed steps and save it."""
        order = Order()
        if self.request.user.is_authenticated():
            order.user = self.request.user
        for step in self.steps:
            step.add_to_order(order)
        if self.request.user.is_authenticated():
            # Authenticated users do not need the anonymous contact email.
            order.anonymous_user_email = ''
        order.tracking_client_id = analytics.get_client_id(self.request)
        order.total = self.get_total()
        order.save()
        return order
    def available_steps(self):
        """Return the prefix of steps the user may access: everything up
        to and including the first invalid step."""
        available = []
        for step in self:
            step.is_step_available = True
            available.append(step)
            if not self.validate_step(step):
                break
            step.is_step_valid = True
        return available
| 35.307692 | 84 | 0.635924 | [
"BSD-3-Clause"
] | spartonia/saleor | saleor/checkout/core.py | 4,131 | Python |
import abc
import collections.abc
import contextlib
import dataclasses
import itertools
import math
import operator
import re
import sys
import time
from collections import defaultdict
from datetime import datetime, timedelta
from functools import lru_cache
from hashlib import md5
from typing import Any, Optional
import dateutil.tz
import msgpack
import orjson
import pydantic
from fastapi import Depends, HTTPException, Query, Request, Response
from starlette.responses import JSONResponse, Send, StreamingResponse
# These modules are not directly used, but they register things on import.
from .. import queries
from ..media_type_registration import (
serialization_registry as default_serialization_registry,
)
from ..queries import KeyLookup, QueryValueError
from ..query_registration import query_registry as default_query_registry
from ..trees.in_memory import Tree as TreeInMemory
from ..utils import (
APACHE_ARROW_FILE_MIME_TYPE,
SerializationError,
UnsupportedShape,
modules_available,
)
from . import models
from .authentication import get_current_user
from .etag import tokenize
del queries
if modules_available("numpy", "dask.array"):
from ..structures import array as _array # noqa: F401
del _array
if modules_available("pandas", "pyarrow", "dask.dataframe"):
from ..structures import dataframe as _dataframe # noqa: F401
del _dataframe
if modules_available("xarray"):
from ..structures import xarray as _xarray # noqa: F401
del _xarray
# Matches filter query parameters of the form "filter___<name>___<field>",
# capturing the query name and field (field must be a valid identifier).
_FILTER_PARAM_PATTERN = re.compile(r"filter___(?P<name>.*)___(?P<field>[^\d\W][\w\d]+)")
# The server's local timezone (NOTE(review): presumably used for datetime
# handling elsewhere in this module — confirm).
_LOCAL_TZINFO = dateutil.tz.gettz()
@lru_cache(1)
def get_query_registry():
    """Return the process-wide query registry (cached).

    This may be overridden via dependency_overrides.
    """
    return default_query_registry
@lru_cache(1)
def get_serialization_registry():
    """Return the process-wide serialization registry (cached).

    This may be overridden via dependency_overrides.
    """
    return default_serialization_registry
def get_root_tree():
    """Placeholder dependency for the root tree.

    Always raises: the real provider is injected at startup through
    FastAPI's ``dependency_overrides`` mechanism.
    """
    raise NotImplementedError(
        "This should be overridden via dependency_overrides. "
        "See tiled.server.app.serve_tree()."
    )
def entry(
    path: str,
    request: Request,
    current_user: str = Depends(get_current_user),
    root_tree: pydantic.BaseSettings = Depends(get_root_tree),
):
    """FastAPI dependency: resolve *path* to an entry in the root tree,
    applying the current user's access control at each tree level.

    Raises HTTPException 404 when any path segment is missing.
    """
    path_parts = [segment for segment in path.split("/") if segment]
    entry = root_tree.authenticated_as(current_user)
    try:
        # Traverse into sub-tree(s).
        for segment in path_parts:
            try:
                with record_timing(request.state.metrics, "acl"):
                    unauthenticated_entry = entry[segment]
            except (KeyError, TypeError):
                raise NoEntry(path_parts)
            # TODO Update this when Tree has structure_family == "tree".
            if not hasattr(unauthenticated_entry, "structure_family"):
                # Sub-trees must be re-authenticated; leaf readers need not be.
                with record_timing(request.state.metrics, "acl"):
                    entry = unauthenticated_entry.authenticated_as(current_user)
            else:
                entry = unauthenticated_entry
        return entry
    except NoEntry:
        raise HTTPException(status_code=404, detail=f"No such entry: {path_parts}")
def reader(
    entry: Any = Depends(entry),
):
    "Specify a path parameter and use it to look up a reader."
    # Duck-typed check: anything exposing the Reader protocol passes.
    if not isinstance(entry, DuckReader):
        raise HTTPException(status_code=404, detail="This is not a Reader.")
    return entry
def block(
    # Ellipsis as the "default" tells FastAPI to make this parameter required.
    block: str = Query(..., regex="^[0-9]*(,[0-9]+)*$"),
):
    """Parse a comma-separated block index query parameter into a tuple
    of ints; an empty string yields an empty tuple."""
    if block:
        return tuple(int(piece) for piece in block.split(","))
    return ()
def expected_shape(
    expected_shape: Optional[str] = Query(
        None, min_length=1, regex="^[0-9]+(,[0-9]+)*$|^scalar$"
    ),
):
    """Parse an expected_shape query parameter.

    Returns None when absent, () for "scalar", otherwise a tuple of ints.
    """
    if expected_shape is None:
        return None
    if expected_shape == "scalar":
        return ()
    return tuple(int(dim) for dim in expected_shape.split(","))
def slice_(
    slice: str = Query(None, regex="^[0-9,:]*$"),
):
    "Specify and parse a slice query parameter (e.g. '0:10,::2') into numpy slices."
    import numpy
    # IMPORTANT We are eval-ing a user-provider string here so we need to be
    # very careful about locking down what can be in it. The regex above
    # excludes any letters or operators, so it is not possible to execute
    # functions or expensive arithmetic.
    return tuple(
        [
            # numpy.s_ turns each "start:stop:step" fragment into a slice
            # object (or a plain int index).
            eval(f"numpy.s_[{dim!s}]", {"numpy": numpy})
            for dim in (slice or "").split(",")
            if dim
        ]
    )
def len_or_approx(tree):
    """Return a cheap length for *tree*.

    Prefer approximate length if implemented. (It's cheaper.) Falls back
    to an exact ``len()`` when no length hint is available.
    """
    try:
        count = operator.length_hint(tree)
    except TypeError:
        count = len(tree)
    return count
def pagination_links(route, path_parts, offset, limit, length_hint):
path_str = "/".join(path_parts)
links = {
"self": f"{route}/{path_str}?page[offset]={offset}&page[limit]={limit}",
# These are conditionally overwritten below.
"first": None,
"last": None,
"next": None,
"prev": None,
}
if limit:
last_page = math.floor(length_hint / limit) * limit
links.update(
{
"first": f"{route}/{path_str}?page[offset]={0}&page[limit]={limit}",
"last": f"{route}/{path_str}?page[offset]={last_page}&page[limit]={limit}",
}
)
if offset + limit < length_hint:
links[
"next"
] = f"{route}/{path_str}?page[offset]={offset + limit}&page[limit]={limit}"
if offset > 0:
links[
"prev"
] = f"{route}/{path_str}?page[offset]={max(0, offset - limit)}&page[limit]={limit}"
return links
class DuckReader(metaclass=abc.ABCMeta):
    """Virtual base class used purely for ``isinstance(obj, DuckReader)``:
    any object exposing the Reader protocol passes, regardless of its
    actual base classes.
    """
    @classmethod
    def __subclasshook__(cls, candidate):
        # A candidate "quacks" like a Reader when it exposes all of the
        # protocol attributes below.
        required = ("read", "macrostructure", "microstructure")
        return all(hasattr(candidate, attr) for attr in required)
class DuckTree(metaclass=abc.ABCMeta):
    """Virtual base class used purely for ``isinstance(obj, DuckTree)``:
    any object exposing the Tree protocol passes, regardless of its
    actual base classes.
    """
    @classmethod
    def __subclasshook__(cls, candidate):
        # A candidate "quacks" like a Tree when it supports item access
        # and iteration.
        required = ("__getitem__", "__iter__")
        return all(hasattr(candidate, attr) for attr in required)
def construct_entries_response(
query_registry, tree, route, path, offset, limit, fields, filters, sort, base_url
):
path_parts = [segment for segment in path.split("/") if segment]
if not isinstance(tree, DuckTree):
raise WrongTypeForRoute("This is not a Tree.")
queries = defaultdict(
dict
) # e.g. {"text": {"text": "dog"}, "lookup": {"key": "..."}}
# Group the parameters by query type.
for key, value in filters.items():
if value is None:
continue
name, field = _FILTER_PARAM_PATTERN.match(key).groups()
queries[name][field] = value
sorting = []
if sort is not None:
for item in sort.split(","):
if item:
if item.startswith("-"):
sorting.append((item[1:], -1))
else:
sorting.append((item, 1))
if sorting:
if not hasattr(tree, "sort"):
raise HTTPException(
status_code=400, detail="This Tree does not support sorting."
)
tree = tree.sort(sorting)
# Apply the queries and obtain a narrowed tree.
key_lookups = []
for query_name, parameters_dict_of_lists in queries.items():
for i in itertools.count(0):
try:
parameters = {
field_name: parameters_list[i]
for field_name, parameters_list in parameters_dict_of_lists.items()
}
except IndexError:
break
query_class = query_registry.name_to_query_type[query_name]
# Special case:
# List fields are serialized as comma-separated strings.
for field in dataclasses.fields(query_class):
if getattr(field.type, "__origin__", None) is list:
(inner_type,) = field.type.__args__
parameters[field.name] = [
inner_type(item) for item in parameters[field.name].split(",")
]
try:
query = query_class(**parameters)
# Special case: Do key-lookups at the end after all other filtering.
# We do not require trees to implement this query; we implement it
# directly here by just calling __getitem__.
if isinstance(query, KeyLookup):
key_lookups.append(query.key)
continue
tree = tree.search(query)
except QueryValueError as err:
raise HTTPException(status_code=400, detail=err.args[0])
if key_lookups:
# Duplicates are technically legal because *any* query can be given
# with multiple parameters.
unique_key_lookups = set(key_lookups)
(key_lookup), *others = unique_key_lookups
if others:
# Two non-equal KeyLookup queries must return no results.
tree = TreeInMemory({})
else:
try:
tree = TreeInMemory(
{key_lookup: tree[key_lookup]}, must_revalidate=False
)
except KeyError:
tree = TreeInMemory({})
count = len_or_approx(tree)
links = pagination_links(route, path_parts, offset, limit, count)
data = []
if fields != [models.EntryFields.none]:
# Pull a page of items into memory.
items = tree.items_indexer[offset : offset + limit] # noqa: E203
else:
# Pull a page of just the keys, which is cheaper.
items = (
(key, None)
for key in tree.keys_indexer[offset : offset + limit] # noqa: E203
)
# This value will not leak out. It just used to seed comparisons.
metadata_stale_at = datetime.utcnow() + timedelta(days=1_000_000)
must_revalidate = getattr(tree, "must_revalidate", True)
for key, entry in items:
resource = construct_resource(base_url, path_parts + [key], entry, fields)
data.append(resource)
# If any entry has emtry.metadata_stale_at = None, then there will
# be no 'Expires' header. We will pessimistically assume the values
# are immediately stale.
if metadata_stale_at is not None:
if getattr(entry, "metadata_stale_at", None) is None:
metadata_stale_at = None
else:
metadata_stale_at = min(metadata_stale_at, entry.metadata_stale_at)
return (
models.Response(data=data, links=links, meta={"count": count}),
metadata_stale_at,
must_revalidate,
)
DEFAULT_MEDIA_TYPES = {
"array": "application/octet-stream",
"dataframe": APACHE_ARROW_FILE_MIME_TYPE,
"structured_array_tabular": "application/octet-stream",
"structured_array_generic": "application/octet-stream",
"variable": "application/octet-stream",
"data_array": "application/octet-stream",
"dataset": "application/netcdf",
}
def construct_data_response(
structure_family,
serialization_registry,
payload,
metadata,
request,
format=None,
specs=None,
expires=None,
):
request.state.endpoint = "data"
if specs is None:
specs = []
default_media_type = DEFAULT_MEDIA_TYPES[structure_family]
# Give priority to the `format` query parameter. Otherwise, consult Accept
# header.
if format is not None:
media_types_or_aliases = format.split(",")
# Resolve aliases, like "csv" -> "text/csv".
media_types = [
serialization_registry.resolve_alias(t) for t in media_types_or_aliases
]
else:
# The HTTP spec says these should be separated by ", " but some
# browsers separate with just "," (no space).
# https://developer.mozilla.org/en-US/docs/Web/HTTP/Content_negotiation/List_of_default_Accept_values#default_values # noqa
# That variation is what we are handling below with lstrip.
media_types = [
s.lstrip(" ")
for s in request.headers.get("Accept", default_media_type).split(",")
]
# The client may give us a choice of media types. Find the first one
# that we support.
supported = set()
for media_type in media_types:
if media_type == "*/*":
media_type = default_media_type
# fall back to generic dataframe serializer if no specs present
for spec in specs + [structure_family]:
media_types_for_spec = serialization_registry.media_types(spec)
if media_type in media_types_for_spec:
break
supported.update(media_types_for_spec)
else:
# None of the specs or the structure_family can serialize to this
# media_type. Try the next one.
continue
# We found a match above. We have our media_type.
break
else:
# We have checked each of the media_types, and we cannot serialize
# to any of them.
raise UnsupportedMediaTypes(
f"None of the media types requested by the client are supported. "
f"Supported: {', '.join(supported)}. Requested: {', '.join(media_types)}.",
)
with record_timing(request.state.metrics, "tok"):
# Create an ETag that uniquely identifies this content and the media
# type that it will be encoded as.
etag = tokenize((payload, media_type))
headers = {"ETag": etag}
if expires is not None:
headers["Expires"] = expires.strftime(HTTP_EXPIRES_HEADER_FORMAT)
if request.headers.get("If-None-Match", "") == etag:
# If the client already has this content, confirm that.
return Response(status_code=304, headers=headers)
# This is the expensive step: actually serialize.
try:
content = serialization_registry(
structure_family, media_type, payload, metadata
)
except UnsupportedShape as err:
raise UnsupportedMediaTypes(
f"The shape of this data {err.args[0]} is incompatible with the requested format ({media_type}). "
f"Slice it or choose a different format.",
)
except SerializationError:
raise UnsupportedMediaTypes(
"This type is supported in general but there was an unknown error packing this specific data.",
)
return PatchedResponse(
content=content,
media_type=media_type,
headers=headers,
)
def construct_resource(base_url, path_parts, entry, fields):
path_str = "/".join(path_parts)
attributes = {}
if models.EntryFields.metadata in fields:
attributes["metadata"] = entry.metadata
if models.EntryFields.specs in fields:
attributes["specs"] = getattr(entry, "specs", None)
if isinstance(entry, DuckTree):
if models.EntryFields.count in fields:
attributes["count"] = len_or_approx(entry)
if hasattr(entry, "sorting"):
attributes["sorting"] = entry.sorting
resource = models.TreeResource(
**{
"id": path_parts[-1] if path_parts else "",
"attributes": models.TreeAttributes(**attributes),
"type": models.EntryType.tree,
"links": {
"self": f"{base_url}metadata/{path_str}",
"search": f"{base_url}search/{path_str}",
},
}
)
else:
links = {"self": f"{base_url}metadata/{path_str}"}
structure = {}
if entry is not None:
# entry is None when we are pulling just *keys* from the
# Tree and not values.
links.update(
{
link: template.format(base_url=base_url, path=path_str)
for link, template in FULL_LINKS[entry.structure_family].items()
}
)
if models.EntryFields.structure_family in fields:
attributes["structure_family"] = entry.structure_family
if models.EntryFields.macrostructure in fields:
macrostructure = entry.macrostructure()
if macrostructure is not None:
structure["macro"] = dataclasses.asdict(macrostructure)
if models.EntryFields.microstructure in fields:
if entry.structure_family == "dataframe":
# Special case: its microstructure is cannot be JSON-serialized
# and is therefore available from separate routes. Sends links
# instead of the actual payload.
structure["micro"] = {
"links": {
"meta": f"{base_url}dataframe/meta/{path_str}",
"divisions": f"{base_url}dataframe/divisions/{path_str}",
}
}
else:
microstructure = entry.microstructure()
if microstructure is not None:
structure["micro"] = dataclasses.asdict(microstructure)
if entry.structure_family == "array":
block_template = ",".join(
f"{{index_{index}}}"
for index in range(len(structure["macro"]["shape"]))
)
links[
"block"
] = f"{base_url}array/block/{path_str}?block={block_template}"
elif entry.structure_family == "dataframe":
links[
"partition"
] = f"{base_url}dataframe/partition/{path_str}?partition={{index}}"
elif entry.structure_family == "variable":
block_template = ",".join(
f"{{index_{index}}}"
for index in range(
len(structure["macro"]["data"]["macro"]["shape"])
)
)
links[
"block"
] = f"{base_url}variable/block/{path_str}?block={block_template}"
elif entry.structure_family == "data_array":
block_template = ",".join(
f"{{index_{index}}}"
for index in range(
len(structure["macro"]["variable"]["macro"]["data"])
)
)
links[
"block"
] = f"{base_url}data_array/block/{path_str}?block={block_template}"
elif entry.structure_family == "dataset":
links[
"block"
] = f"{base_url}dataset/block/{path_str}?variable={{variable}}&block={{block_indexes}}"
microstructure = entry.microstructure()
attributes["structure"] = structure
resource = models.ReaderResource(
**{
"id": path_parts[-1],
"attributes": models.ReaderAttributes(**attributes),
"type": models.EntryType.reader,
"links": links,
}
)
return resource
class PatchedResponse(Response):
"Patch the render method to accept memoryview."
def render(self, content: Any) -> bytes:
if isinstance(content, memoryview):
return content.cast("B")
return super().render(content)
class PatchedStreamingResponse(StreamingResponse):
"Patch the stream_response method to accept memoryview."
async def stream_response(self, send: Send) -> None:
await send(
{
"type": "http.response.start",
"status": self.status_code,
"headers": self.raw_headers,
}
)
async for chunk in self.body_iterator:
# BEGIN ALTERATION
if not isinstance(chunk, (bytes, memoryview)):
# END ALTERATION
chunk = chunk.encode(self.charset)
await send({"type": "http.response.body", "body": chunk, "more_body": True})
await send({"type": "http.response.body", "body": b"", "more_body": False})
class NumpySafeJSONResponse(JSONResponse):
def __init__(self, *args, metrics, **kwargs):
self.__metrics = metrics
super().__init__(*args, **kwargs)
def render(self, content: Any) -> bytes:
with record_timing(self.__metrics, "pack"):
return orjson.dumps(content, option=orjson.OPT_SERIALIZE_NUMPY)
def _numpy_safe_msgpack_encoder(obj):
# If numpy has not been imported yet, then we can be sure that obj
# is not a numpy object, and we want to avoid triggering a numpy
# import. (The server does not have a hard numpy dependency.)
if "numpy" in sys.modules:
import numpy
if isinstance(obj, (numpy.generic, numpy.ndarray)):
if numpy.isscalar(obj):
return obj.item()
return obj.tolist()
return obj
def _patch_naive_datetimes(obj):
"""
If a naive datetime is found, attach local time.
Msgpack can only serialize datetimes with tzinfo.
"""
if hasattr(obj, "items"):
patched_obj = {}
for k, v in obj.items():
patched_obj[k] = _patch_naive_datetimes(v)
elif (not isinstance(obj, str)) and isinstance(obj, collections.abc.Iterable):
patched_obj = []
for item in obj:
patched_obj.append(_patch_naive_datetimes(item))
elif isinstance(obj, datetime) and obj.tzinfo is None:
patched_obj = obj.astimezone(_LOCAL_TZINFO)
else:
patched_obj = obj
return patched_obj
class MsgpackResponse(Response):
media_type = "application/x-msgpack"
def __init__(self, *args, metrics, **kwargs):
self.__metrics = metrics
super().__init__(*args, **kwargs)
def render(self, content: Any, _reentered=False) -> bytes:
try:
with record_timing(self.__metrics, "pack"):
return msgpack.packb(
content, default=_numpy_safe_msgpack_encoder, datetime=True
)
except TypeError as err:
# msgpack tries to handle all datetimes, but if it
# received a naive one (tzinfo=None) then it fails.
# We cannot use the default hook to handle this because
# it is not called.
if err.args == ("can not serialize 'datetime.datetime' object",) and (
not _reentered
):
patched_content = _patch_naive_datetimes(content)
return self.render(patched_content, _reentered=True)
raise
JSON_MIME_TYPE = "application/json"
MSGPACK_MIME_TYPE = "application/x-msgpack"
# This is a silly time format, but it is the HTTP standard.
HTTP_EXPIRES_HEADER_FORMAT = "%a, %d %b %Y %H:%M:%S GMT"
def json_or_msgpack(request, content, expires=None, headers=None):
media_types = request.headers.get("Accept", JSON_MIME_TYPE).split(", ")
for media_type in media_types:
if media_type == "*/*":
media_type = JSON_MIME_TYPE
break
if media_type == MSGPACK_MIME_TYPE:
break
if media_type == JSON_MIME_TYPE:
break
else:
# It is commmon in HTTP to fall back on a default representation if
# none of the requested ones are available. We do not do this for
# data payloads, but it makes some sense to do it for these metadata
# messages.
media_type = JSON_MIME_TYPE
assert media_type in {JSON_MIME_TYPE, MSGPACK_MIME_TYPE}
content_as_dict = content.dict()
with record_timing(request.state.metrics, "tok"):
etag = md5(str(content_as_dict).encode()).hexdigest()
headers = headers or {}
headers["ETag"] = etag
if expires is not None:
headers["Expires"] = expires.strftime(HTTP_EXPIRES_HEADER_FORMAT)
if request.headers.get("If-None-Match", "") == etag:
# If the client already has this content, confirm that.
return Response(status_code=304, headers=headers)
if media_type == "application/x-msgpack":
return MsgpackResponse(
content_as_dict, headers=headers, metrics=request.state.metrics
)
return NumpySafeJSONResponse(
content_as_dict, headers=headers, metrics=request.state.metrics
)
class UnsupportedMediaTypes(Exception):
pass
class NoEntry(KeyError):
pass
class WrongTypeForRoute(Exception):
pass
FULL_LINKS = {
"array": {"full": "{base_url}array/full/{path}"},
"structured_array_generic": {
"full": "{base_url}structured_array_generic/full/{path}"
},
"structured_array_tabular": {
"full": "{base_url}structured_array_tabular/full/{path}"
},
"dataframe": {"full": "{base_url}dataframe/full/{path}"},
"variable": {"full": "{base_url}variable/full/{path}"},
"data_array": {"full_variable": "{base_url}data_array/variable/full/{path}"},
"dataset": {
"full_variable": "{base_url}dataset/data_var/full/{path}?variable={{variable}}",
"full_coordinate": "{base_url}dataset/coord/full/{path}?variable={{variable}}",
"full_dataset": "{base_url}dataset/full/{path}",
},
}
@contextlib.contextmanager
def record_timing(metrics, key):
"""
Set timings[key] equal to the run time (in milliseconds) of the context body.
"""
t0 = time.perf_counter()
yield
metrics[key]["dur"] += time.perf_counter() - t0 # Units: seconds
| 36.556485 | 132 | 0.602877 | [
"BSD-3-Clause"
] | martindurant/tiled | tiled/server/core.py | 26,211 | Python |
# coding: utf-8
"""
Eclipse Kapua REST API
No description provided (generated by Swagger Codegen https://github.com/swagger-api/swagger-codegen) # noqa: E501
OpenAPI spec version: 1.0
Generated by: https://github.com/swagger-api/swagger-codegen.git
"""
from __future__ import absolute_import
import re # noqa: F401
# python 2 and python 3 compatibility library
import six
from swagger_client.api_client import ApiClient
class DevicesApi(object):
"""NOTE: This class is auto generated by the swagger code generator program.
Do not edit the class manually.
Ref: https://github.com/swagger-api/swagger-codegen
"""
def __init__(self, api_client=None):
if api_client is None:
api_client = ApiClient()
self.api_client = api_client
def device_asset_filtered_get(self, scope_id, device_id, **kwargs):  # noqa: E501
    """Gets a list of assets  # noqa: E501

    Returns the list of all the Assets installed on the device.  # noqa: E501
    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please pass async_req=True
    >>> thread = api.device_asset_filtered_get(scope_id, device_id, async_req=True)
    >>> result = thread.get()

    :param async_req bool
    :param str scope_id: The ScopeId of the device. (required)
    :param str device_id: The id of the device (required)
    :param int timeout: The timeout of the operation in milliseconds
    :param DeviceAssets body: The filter of the request
    :return: DeviceAssets
             If the method is called asynchronously,
             returns the request thread.
    """
    # This convenience wrapper always unwraps the payload rather than
    # returning the full (data, status, headers) tuple.
    kwargs['_return_http_data_only'] = True
    # The *_with_http_info variant already yields the request thread when
    # async_req is set and the deserialized data otherwise, so it can be
    # delegated to unconditionally.
    return self.device_asset_filtered_get_with_http_info(
        scope_id, device_id, **kwargs)  # noqa: E501
def device_asset_filtered_get_with_http_info(self, scope_id, device_id, **kwargs):  # noqa: E501
    """Gets a list of assets  # noqa: E501

    Returns the list of all the Assets installed on the device.  # noqa: E501
    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please pass async_req=True
    >>> thread = api.device_asset_filtered_get_with_http_info(scope_id, device_id, async_req=True)
    >>> result = thread.get()

    :param async_req bool
    :param str scope_id: The ScopeId of the device. (required)
    :param str device_id: The id of the device (required)
    :param int timeout: The timeout of the operation in milliseconds
    :param DeviceAssets body: The filter of the request
    :return: DeviceAssets
             If the method is called asynchronously,
             returns the request thread.
    """
    # Every keyword argument accepted here: the documented endpoint
    # parameters plus the generic transport options of ApiClient.call_api.
    all_params = ['scope_id', 'device_id', 'timeout', 'body']  # noqa: E501
    all_params.append('async_req')
    all_params.append('_return_http_data_only')
    all_params.append('_preload_content')
    all_params.append('_request_timeout')

    # NOTE: locals() snapshots scope_id/device_id and the kwargs dict; the
    # kwargs entries are then flattened into `params` so every argument can
    # be looked up uniformly by name below. Renaming the locals in this
    # method would therefore change behavior.
    params = locals()
    for key, val in six.iteritems(params['kwargs']):
        if key not in all_params:
            # Fail fast on typos or unsupported options.
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method device_asset_filtered_get" % key
            )
        params[key] = val
    del params['kwargs']
    # verify the required parameter 'scope_id' is set
    if ('scope_id' not in params or
            params['scope_id'] is None):
        raise ValueError("Missing the required parameter `scope_id` when calling `device_asset_filtered_get`")  # noqa: E501
    # verify the required parameter 'device_id' is set
    if ('device_id' not in params or
            params['device_id'] is None):
        raise ValueError("Missing the required parameter `device_id` when calling `device_asset_filtered_get`")  # noqa: E501

    collection_formats = {}

    # Substituted into the {scopeId}/{deviceId} placeholders of the route.
    path_params = {}
    if 'scope_id' in params:
        path_params['scopeId'] = params['scope_id']  # noqa: E501
    if 'device_id' in params:
        path_params['deviceId'] = params['device_id']  # noqa: E501

    query_params = []
    if 'timeout' in params:
        query_params.append(('timeout', params['timeout']))  # noqa: E501

    header_params = {}

    form_params = []
    local_var_files = {}

    # The DeviceAssets filter travels in the request body.
    body_params = None
    if 'body' in params:
        body_params = params['body']
    # HTTP header `Accept`
    header_params['Accept'] = self.api_client.select_header_accept(
        ['application/json', 'application/xml'])  # noqa: E501

    # Authentication setting
    auth_settings = ['kapuaAccessToken']  # noqa: E501

    return self.api_client.call_api(
        '/{scopeId}/devices/{deviceId}/assets', 'POST',
        path_params,
        query_params,
        header_params,
        body=body_params,
        post_params=form_params,
        files=local_var_files,
        response_type='DeviceAssets',  # noqa: E501
        auth_settings=auth_settings,
        async_req=params.get('async_req'),
        _return_http_data_only=params.get('_return_http_data_only'),
        _preload_content=params.get('_preload_content', True),
        _request_timeout=params.get('_request_timeout'),
        collection_formats=collection_formats)
def device_asset_get(self, scope_id, device_id, **kwargs):  # noqa: E501
    """Gets a list of assets  # noqa: E501

    Returns the list of all the Assets installed on the device.  # noqa: E501
    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please pass async_req=True
    >>> thread = api.device_asset_get(scope_id, device_id, async_req=True)
    >>> result = thread.get()

    :param async_req bool
    :param str scope_id: The ScopeId of the device. (required)
    :param str device_id: The id of the device (required)
    :param int timeout: The timeout of the operation in milliseconds
    :return: DeviceAssets
             If the method is called asynchronously,
             returns the request thread.
    """
    # Unwrap the payload only; callers wanting status/headers should use
    # the *_with_http_info variant directly.
    kwargs['_return_http_data_only'] = True
    # Delegation is identical for sync and async callers: the variant
    # below returns a request thread when async_req is set, data otherwise.
    return self.device_asset_get_with_http_info(
        scope_id, device_id, **kwargs)  # noqa: E501
def device_asset_get_with_http_info(self, scope_id, device_id, **kwargs):  # noqa: E501
    """Gets a list of assets  # noqa: E501

    Returns the list of all the Assets installed on the device.  # noqa: E501
    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please pass async_req=True
    >>> thread = api.device_asset_get_with_http_info(scope_id, device_id, async_req=True)
    >>> result = thread.get()

    :param async_req bool
    :param str scope_id: The ScopeId of the device. (required)
    :param str device_id: The id of the device (required)
    :param int timeout: The timeout of the operation in milliseconds
    :return: DeviceAssets
             If the method is called asynchronously,
             returns the request thread.
    """
    # Accepted keyword arguments: endpoint parameters plus the generic
    # transport options understood by ApiClient.call_api.
    all_params = ['scope_id', 'device_id', 'timeout']  # noqa: E501
    all_params.append('async_req')
    all_params.append('_return_http_data_only')
    all_params.append('_preload_content')
    all_params.append('_request_timeout')

    # NOTE: locals() snapshots the positional args and kwargs dict; kwargs
    # entries are flattened into `params` so all arguments are looked up
    # uniformly by name. Renaming locals here would change behavior.
    params = locals()
    for key, val in six.iteritems(params['kwargs']):
        if key not in all_params:
            # Fail fast on typos or unsupported options.
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method device_asset_get" % key
            )
        params[key] = val
    del params['kwargs']
    # verify the required parameter 'scope_id' is set
    if ('scope_id' not in params or
            params['scope_id'] is None):
        raise ValueError("Missing the required parameter `scope_id` when calling `device_asset_get`")  # noqa: E501
    # verify the required parameter 'device_id' is set
    if ('device_id' not in params or
            params['device_id'] is None):
        raise ValueError("Missing the required parameter `device_id` when calling `device_asset_get`")  # noqa: E501

    collection_formats = {}

    # Substituted into the {scopeId}/{deviceId} placeholders of the route.
    path_params = {}
    if 'scope_id' in params:
        path_params['scopeId'] = params['scope_id']  # noqa: E501
    if 'device_id' in params:
        path_params['deviceId'] = params['device_id']  # noqa: E501

    query_params = []
    if 'timeout' in params:
        query_params.append(('timeout', params['timeout']))  # noqa: E501

    header_params = {}

    form_params = []
    local_var_files = {}

    # Plain GET: this endpoint carries no request body.
    body_params = None
    # HTTP header `Accept`
    header_params['Accept'] = self.api_client.select_header_accept(
        ['application/json', 'application/xml'])  # noqa: E501

    # Authentication setting
    auth_settings = ['kapuaAccessToken']  # noqa: E501

    return self.api_client.call_api(
        '/{scopeId}/devices/{deviceId}/assets', 'GET',
        path_params,
        query_params,
        header_params,
        body=body_params,
        post_params=form_params,
        files=local_var_files,
        response_type='DeviceAssets',  # noqa: E501
        auth_settings=auth_settings,
        async_req=params.get('async_req'),
        _return_http_data_only=params.get('_return_http_data_only'),
        _preload_content=params.get('_preload_content', True),
        _request_timeout=params.get('_request_timeout'),
        collection_formats=collection_formats)
def device_asset_read(self, scope_id, device_id, **kwargs):  # noqa: E501
    """Reads asset channel values  # noqa: E501

    Returns the value read from the asset channel  # noqa: E501
    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please pass async_req=True
    >>> thread = api.device_asset_read(scope_id, device_id, async_req=True)
    >>> result = thread.get()

    :param async_req bool
    :param str scope_id: The ScopeId of the device. (required)
    :param str device_id: The id of the device (required)
    :param int timeout: The timeout of the operation in milliseconds
    :param DeviceAssets body: The filter of the read request
    :return: DeviceAssets
             If the method is called asynchronously,
             returns the request thread.
    """
    # The wrapper always unwraps the payload from the HTTP response.
    kwargs['_return_http_data_only'] = True
    # Sync and async callers take the same path: the variant below returns
    # a request thread when async_req is set and the data otherwise.
    return self.device_asset_read_with_http_info(
        scope_id, device_id, **kwargs)  # noqa: E501
def device_asset_read_with_http_info(self, scope_id, device_id, **kwargs):  # noqa: E501
    """Reads asset channel values  # noqa: E501

    Returns the value read from the asset channel  # noqa: E501
    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please pass async_req=True
    >>> thread = api.device_asset_read_with_http_info(scope_id, device_id, async_req=True)
    >>> result = thread.get()

    :param async_req bool
    :param str scope_id: The ScopeId of the device. (required)
    :param str device_id: The id of the device (required)
    :param int timeout: The timeout of the operation in milliseconds
    :param DeviceAssets body: The filter of the read request
    :return: DeviceAssets
             If the method is called asynchronously,
             returns the request thread.
    """
    # Accepted keyword arguments: endpoint parameters plus the generic
    # transport options understood by ApiClient.call_api.
    all_params = ['scope_id', 'device_id', 'timeout', 'body']  # noqa: E501
    all_params.append('async_req')
    all_params.append('_return_http_data_only')
    all_params.append('_preload_content')
    all_params.append('_request_timeout')

    # NOTE: locals() snapshots the positional args and kwargs dict; kwargs
    # entries are flattened into `params` so all arguments are looked up
    # uniformly by name. Renaming locals here would change behavior.
    params = locals()
    for key, val in six.iteritems(params['kwargs']):
        if key not in all_params:
            # Fail fast on typos or unsupported options.
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method device_asset_read" % key
            )
        params[key] = val
    del params['kwargs']
    # verify the required parameter 'scope_id' is set
    if ('scope_id' not in params or
            params['scope_id'] is None):
        raise ValueError("Missing the required parameter `scope_id` when calling `device_asset_read`")  # noqa: E501
    # verify the required parameter 'device_id' is set
    if ('device_id' not in params or
            params['device_id'] is None):
        raise ValueError("Missing the required parameter `device_id` when calling `device_asset_read`")  # noqa: E501

    collection_formats = {}

    # Substituted into the {scopeId}/{deviceId} placeholders of the route.
    path_params = {}
    if 'scope_id' in params:
        path_params['scopeId'] = params['scope_id']  # noqa: E501
    if 'device_id' in params:
        path_params['deviceId'] = params['device_id']  # noqa: E501

    query_params = []
    if 'timeout' in params:
        query_params.append(('timeout', params['timeout']))  # noqa: E501

    header_params = {}

    form_params = []
    local_var_files = {}

    # The DeviceAssets read filter travels in the request body.
    body_params = None
    if 'body' in params:
        body_params = params['body']
    # HTTP header `Accept`
    header_params['Accept'] = self.api_client.select_header_accept(
        ['application/json', 'application/xml'])  # noqa: E501

    # Authentication setting
    auth_settings = ['kapuaAccessToken']  # noqa: E501

    return self.api_client.call_api(
        '/{scopeId}/devices/{deviceId}/assets/_read', 'POST',
        path_params,
        query_params,
        header_params,
        body=body_params,
        post_params=form_params,
        files=local_var_files,
        response_type='DeviceAssets',  # noqa: E501
        auth_settings=auth_settings,
        async_req=params.get('async_req'),
        _return_http_data_only=params.get('_return_http_data_only'),
        _preload_content=params.get('_preload_content', True),
        _request_timeout=params.get('_request_timeout'),
        collection_formats=collection_formats)
def device_asset_write(self, scope_id, device_id, **kwargs):  # noqa: E501
    """Writes asset channel values  # noqa: E501

    Writes the supplied values to the asset channels of the device and
    returns the resulting DeviceAssets.  # noqa: E501
    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please pass async_req=True
    >>> thread = api.device_asset_write(scope_id, device_id, async_req=True)
    >>> result = thread.get()

    :param async_req bool
    :param str scope_id: The ScopeId of the device. (required)
    :param str device_id: The id of the device (required)
    :param int timeout: The timeout of the operation in milliseconds
    :param DeviceAssets body: The values to write to the asset channels
    :return: DeviceAssets
             If the method is called asynchronously,
             returns the request thread.
    """
    # The wrapper always unwraps the payload from the HTTP response.
    kwargs['_return_http_data_only'] = True
    # Sync and async callers take the same path: the variant below returns
    # a request thread when async_req is set and the data otherwise.
    return self.device_asset_write_with_http_info(
        scope_id, device_id, **kwargs)  # noqa: E501
def device_asset_write_with_http_info(self, scope_id, device_id, **kwargs):  # noqa: E501
    """Writes asset channel values  # noqa: E501

    Writes the supplied values to the asset channels of the device and
    returns the resulting DeviceAssets.  # noqa: E501
    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please pass async_req=True
    >>> thread = api.device_asset_write_with_http_info(scope_id, device_id, async_req=True)
    >>> result = thread.get()

    :param async_req bool
    :param str scope_id: The ScopeId of the device. (required)
    :param str device_id: The id of the device (required)
    :param int timeout: The timeout of the operation in milliseconds
    :param DeviceAssets body: The values to write to the asset channels
    :return: DeviceAssets
             If the method is called asynchronously,
             returns the request thread.
    """
    # Accepted keyword arguments: endpoint parameters plus the generic
    # transport options understood by ApiClient.call_api.
    all_params = ['scope_id', 'device_id', 'timeout', 'body']  # noqa: E501
    all_params.append('async_req')
    all_params.append('_return_http_data_only')
    all_params.append('_preload_content')
    all_params.append('_request_timeout')

    # NOTE: locals() snapshots the positional args and kwargs dict; kwargs
    # entries are flattened into `params` so all arguments are looked up
    # uniformly by name. Renaming locals here would change behavior.
    params = locals()
    for key, val in six.iteritems(params['kwargs']):
        if key not in all_params:
            # Fail fast on typos or unsupported options.
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method device_asset_write" % key
            )
        params[key] = val
    del params['kwargs']
    # verify the required parameter 'scope_id' is set
    if ('scope_id' not in params or
            params['scope_id'] is None):
        raise ValueError("Missing the required parameter `scope_id` when calling `device_asset_write`")  # noqa: E501
    # verify the required parameter 'device_id' is set
    if ('device_id' not in params or
            params['device_id'] is None):
        raise ValueError("Missing the required parameter `device_id` when calling `device_asset_write`")  # noqa: E501

    collection_formats = {}

    # Substituted into the {scopeId}/{deviceId} placeholders of the route.
    path_params = {}
    if 'scope_id' in params:
        path_params['scopeId'] = params['scope_id']  # noqa: E501
    if 'device_id' in params:
        path_params['deviceId'] = params['device_id']  # noqa: E501

    query_params = []
    if 'timeout' in params:
        query_params.append(('timeout', params['timeout']))  # noqa: E501

    header_params = {}

    form_params = []
    local_var_files = {}

    # The DeviceAssets values to write travel in the request body.
    body_params = None
    if 'body' in params:
        body_params = params['body']
    # HTTP header `Accept`
    header_params['Accept'] = self.api_client.select_header_accept(
        ['application/json', 'application/xml'])  # noqa: E501

    # Authentication setting
    auth_settings = ['kapuaAccessToken']  # noqa: E501

    return self.api_client.call_api(
        '/{scopeId}/devices/{deviceId}/assets/_write', 'POST',
        path_params,
        query_params,
        header_params,
        body=body_params,
        post_params=form_params,
        files=local_var_files,
        response_type='DeviceAssets',  # noqa: E501
        auth_settings=auth_settings,
        async_req=params.get('async_req'),
        _return_http_data_only=params.get('_return_http_data_only'),
        _preload_content=params.get('_preload_content', True),
        _request_timeout=params.get('_request_timeout'),
        collection_formats=collection_formats)
def device_bundle_get(self, scope_id, device_id, **kwargs):  # noqa: E501
    """Gets a list of bundles  # noqa: E501

    Returns the list of all the Bundles installed on the device.  # noqa: E501
    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please pass async_req=True
    >>> thread = api.device_bundle_get(scope_id, device_id, async_req=True)
    >>> result = thread.get()

    :param async_req bool
    :param str scope_id: The ScopeId of the device. (required)
    :param str device_id: The id of the device (required)
    :param int timeout: The timeout of the operation in milliseconds
    :return: DeviceBundles
             If the method is called asynchronously,
             returns the request thread.
    """
    # The wrapper always unwraps the payload from the HTTP response.
    kwargs['_return_http_data_only'] = True
    # Sync and async callers take the same path: the variant below returns
    # a request thread when async_req is set and the data otherwise.
    return self.device_bundle_get_with_http_info(
        scope_id, device_id, **kwargs)  # noqa: E501
def device_bundle_get_with_http_info(self, scope_id, device_id, **kwargs): # noqa: E501
"""Gets a list of bundles # noqa: E501
Returns the list of all the Bundles installed on the device. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.device_bundle_get_with_http_info(scope_id, device_id, async_req=True)
>>> result = thread.get()
:param async_req bool
:param str scope_id: The ScopeId of the device. (required)
:param str device_id: The id of the device (required)
:param int timeout: The timeout of the operation in milliseconds
:return: DeviceBundles
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['scope_id', 'device_id', 'timeout'] # noqa: E501
all_params.append('async_req')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in six.iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method device_bundle_get" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'scope_id' is set
if ('scope_id' not in params or
params['scope_id'] is None):
raise ValueError("Missing the required parameter `scope_id` when calling `device_bundle_get`") # noqa: E501
# verify the required parameter 'device_id' is set
if ('device_id' not in params or
params['device_id'] is None):
raise ValueError("Missing the required parameter `device_id` when calling `device_bundle_get`") # noqa: E501
collection_formats = {}
path_params = {}
if 'scope_id' in params:
path_params['scopeId'] = params['scope_id'] # noqa: E501
if 'device_id' in params:
path_params['deviceId'] = params['device_id'] # noqa: E501
query_params = []
if 'timeout' in params:
query_params.append(('timeout', params['timeout'])) # noqa: E501
header_params = {}
form_params = []
local_var_files = {}
body_params = None
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept(
['application/json', 'application/xml']) # noqa: E501
# Authentication setting
auth_settings = ['kapuaAccessToken'] # noqa: E501
return self.api_client.call_api(
'/{scopeId}/devices/{deviceId}/bundles', 'GET',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='DeviceBundles', # noqa: E501
auth_settings=auth_settings,
async_req=params.get('async_req'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
def device_bundle_start(self, scope_id, device_id, bundle_id, **kwargs): # noqa: E501
"""Start a bundle # noqa: E501
Starts the specified bundle # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.device_bundle_start(scope_id, device_id, bundle_id, async_req=True)
>>> result = thread.get()
:param async_req bool
:param str scope_id: The ScopeId of the device. (required)
:param str device_id: The id of the device (required)
:param str bundle_id: the ID of the bundle to start (required)
:param int timeout: The timeout of the operation in milliseconds
:return: None
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async_req'):
return self.device_bundle_start_with_http_info(scope_id, device_id, bundle_id, **kwargs) # noqa: E501
else:
(data) = self.device_bundle_start_with_http_info(scope_id, device_id, bundle_id, **kwargs) # noqa: E501
return data
def device_bundle_start_with_http_info(self, scope_id, device_id, bundle_id, **kwargs): # noqa: E501
"""Start a bundle # noqa: E501
Starts the specified bundle # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.device_bundle_start_with_http_info(scope_id, device_id, bundle_id, async_req=True)
>>> result = thread.get()
:param async_req bool
:param str scope_id: The ScopeId of the device. (required)
:param str device_id: The id of the device (required)
:param str bundle_id: the ID of the bundle to start (required)
:param int timeout: The timeout of the operation in milliseconds
:return: None
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['scope_id', 'device_id', 'bundle_id', 'timeout'] # noqa: E501
all_params.append('async_req')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in six.iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method device_bundle_start" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'scope_id' is set
if ('scope_id' not in params or
params['scope_id'] is None):
raise ValueError("Missing the required parameter `scope_id` when calling `device_bundle_start`") # noqa: E501
# verify the required parameter 'device_id' is set
if ('device_id' not in params or
params['device_id'] is None):
raise ValueError("Missing the required parameter `device_id` when calling `device_bundle_start`") # noqa: E501
# verify the required parameter 'bundle_id' is set
if ('bundle_id' not in params or
params['bundle_id'] is None):
raise ValueError("Missing the required parameter `bundle_id` when calling `device_bundle_start`") # noqa: E501
collection_formats = {}
path_params = {}
if 'scope_id' in params:
path_params['scopeId'] = params['scope_id'] # noqa: E501
if 'device_id' in params:
path_params['deviceId'] = params['device_id'] # noqa: E501
if 'bundle_id' in params:
path_params['bundleId'] = params['bundle_id'] # noqa: E501
query_params = []
if 'timeout' in params:
query_params.append(('timeout', params['timeout'])) # noqa: E501
header_params = {}
form_params = []
local_var_files = {}
body_params = None
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept(
['application/xml', 'application/json']) # noqa: E501
# Authentication setting
auth_settings = ['kapuaAccessToken'] # noqa: E501
return self.api_client.call_api(
'/{scopeId}/devices/{deviceId}/bundles/{bundleId}/_start', 'POST',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type=None, # noqa: E501
auth_settings=auth_settings,
async_req=params.get('async_req'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
def device_bundle_stop(self, scope_id, device_id, bundle_id, **kwargs): # noqa: E501
"""Stop a bundle # noqa: E501
Stops the specified bundle # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.device_bundle_stop(scope_id, device_id, bundle_id, async_req=True)
>>> result = thread.get()
:param async_req bool
:param str scope_id: The ScopeId of the device. (required)
:param str device_id: The id of the device (required)
:param str bundle_id: the ID of the bundle to stop (required)
:param int timeout: The timeout of the operation in milliseconds
:return: None
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async_req'):
return self.device_bundle_stop_with_http_info(scope_id, device_id, bundle_id, **kwargs) # noqa: E501
else:
(data) = self.device_bundle_stop_with_http_info(scope_id, device_id, bundle_id, **kwargs) # noqa: E501
return data
def device_bundle_stop_with_http_info(self, scope_id, device_id, bundle_id, **kwargs): # noqa: E501
"""Stop a bundle # noqa: E501
Stops the specified bundle # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.device_bundle_stop_with_http_info(scope_id, device_id, bundle_id, async_req=True)
>>> result = thread.get()
:param async_req bool
:param str scope_id: The ScopeId of the device. (required)
:param str device_id: The id of the device (required)
:param str bundle_id: the ID of the bundle to stop (required)
:param int timeout: The timeout of the operation in milliseconds
:return: None
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['scope_id', 'device_id', 'bundle_id', 'timeout'] # noqa: E501
all_params.append('async_req')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in six.iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method device_bundle_stop" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'scope_id' is set
if ('scope_id' not in params or
params['scope_id'] is None):
raise ValueError("Missing the required parameter `scope_id` when calling `device_bundle_stop`") # noqa: E501
# verify the required parameter 'device_id' is set
if ('device_id' not in params or
params['device_id'] is None):
raise ValueError("Missing the required parameter `device_id` when calling `device_bundle_stop`") # noqa: E501
# verify the required parameter 'bundle_id' is set
if ('bundle_id' not in params or
params['bundle_id'] is None):
raise ValueError("Missing the required parameter `bundle_id` when calling `device_bundle_stop`") # noqa: E501
collection_formats = {}
path_params = {}
if 'scope_id' in params:
path_params['scopeId'] = params['scope_id'] # noqa: E501
if 'device_id' in params:
path_params['deviceId'] = params['device_id'] # noqa: E501
if 'bundle_id' in params:
path_params['bundleId'] = params['bundle_id'] # noqa: E501
query_params = []
if 'timeout' in params:
query_params.append(('timeout', params['timeout'])) # noqa: E501
header_params = {}
form_params = []
local_var_files = {}
body_params = None
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept(
['application/xml', 'application/json']) # noqa: E501
# Authentication setting
auth_settings = ['kapuaAccessToken'] # noqa: E501
return self.api_client.call_api(
'/{scopeId}/devices/{deviceId}/bundles/{bundleId}/_stop', 'POST',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type=None, # noqa: E501
auth_settings=auth_settings,
async_req=params.get('async_req'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
def device_command_execute(self, scope_id, device_id, body, **kwargs): # noqa: E501
"""Executes a command # noqa: E501
Executes a remote command on a device and return the command output. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.device_command_execute(scope_id, device_id, body, async_req=True)
>>> result = thread.get()
:param async_req bool
:param str scope_id: The ScopeId of the device (required)
:param str device_id: The id of the device (required)
:param DeviceCommandInput body: The input command (required)
:param int timeout: The timeout of the command execution
:return: DeviceCommandOutput
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async_req'):
return self.device_command_execute_with_http_info(scope_id, device_id, body, **kwargs) # noqa: E501
else:
(data) = self.device_command_execute_with_http_info(scope_id, device_id, body, **kwargs) # noqa: E501
return data
def device_command_execute_with_http_info(self, scope_id, device_id, body, **kwargs): # noqa: E501
"""Executes a command # noqa: E501
Executes a remote command on a device and return the command output. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.device_command_execute_with_http_info(scope_id, device_id, body, async_req=True)
>>> result = thread.get()
:param async_req bool
:param str scope_id: The ScopeId of the device (required)
:param str device_id: The id of the device (required)
:param DeviceCommandInput body: The input command (required)
:param int timeout: The timeout of the command execution
:return: DeviceCommandOutput
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['scope_id', 'device_id', 'body', 'timeout'] # noqa: E501
all_params.append('async_req')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in six.iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method device_command_execute" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'scope_id' is set
if ('scope_id' not in params or
params['scope_id'] is None):
raise ValueError("Missing the required parameter `scope_id` when calling `device_command_execute`") # noqa: E501
# verify the required parameter 'device_id' is set
if ('device_id' not in params or
params['device_id'] is None):
raise ValueError("Missing the required parameter `device_id` when calling `device_command_execute`") # noqa: E501
# verify the required parameter 'body' is set
if ('body' not in params or
params['body'] is None):
raise ValueError("Missing the required parameter `body` when calling `device_command_execute`") # noqa: E501
collection_formats = {}
path_params = {}
if 'scope_id' in params:
path_params['scopeId'] = params['scope_id'] # noqa: E501
if 'device_id' in params:
path_params['deviceId'] = params['device_id'] # noqa: E501
query_params = []
if 'timeout' in params:
query_params.append(('timeout', params['timeout'])) # noqa: E501
header_params = {}
form_params = []
local_var_files = {}
body_params = None
if 'body' in params:
body_params = params['body']
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept(
['application/xml', 'application/json']) # noqa: E501
# HTTP header `Content-Type`
header_params['Content-Type'] = self.api_client.select_header_content_type( # noqa: E501
['application/xml', 'application/json']) # noqa: E501
# Authentication setting
auth_settings = ['kapuaAccessToken'] # noqa: E501
return self.api_client.call_api(
'/{scopeId}/devices/{deviceId}/commands/_execute', 'POST',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='DeviceCommandOutput', # noqa: E501
auth_settings=auth_settings,
async_req=params.get('async_req'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
def device_configuration_component_get(self, scope_id, device_id, component_id, **kwargs): # noqa: E501
"""Gets the configuration of a component on a device # noqa: E501
Returns the configuration of a device or the configuration of the OSGi component identified with specified PID (service's persistent identity). In the OSGi framework, the service's persistent identity is defined as the name attribute of the Component Descriptor XML file; at runtime, the same value is also available in the component.name and in the service.pid attributes of the Component Configuration. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.device_configuration_component_get(scope_id, device_id, component_id, async_req=True)
>>> result = thread.get()
:param async_req bool
:param str scope_id: The ScopeId of the Device. (required)
:param str device_id: The id of the device (required)
:param str component_id: An optional id of the component to get the configuration for (required)
:param int timeout: The timeout of the operation in milliseconds
:return: DeviceConfiguration
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async_req'):
return self.device_configuration_component_get_with_http_info(scope_id, device_id, component_id, **kwargs) # noqa: E501
else:
(data) = self.device_configuration_component_get_with_http_info(scope_id, device_id, component_id, **kwargs) # noqa: E501
return data
def device_configuration_component_get_with_http_info(self, scope_id, device_id, component_id, **kwargs): # noqa: E501
"""Gets the configuration of a component on a device # noqa: E501
Returns the configuration of a device or the configuration of the OSGi component identified with specified PID (service's persistent identity). In the OSGi framework, the service's persistent identity is defined as the name attribute of the Component Descriptor XML file; at runtime, the same value is also available in the component.name and in the service.pid attributes of the Component Configuration. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.device_configuration_component_get_with_http_info(scope_id, device_id, component_id, async_req=True)
>>> result = thread.get()
:param async_req bool
:param str scope_id: The ScopeId of the Device. (required)
:param str device_id: The id of the device (required)
:param str component_id: An optional id of the component to get the configuration for (required)
:param int timeout: The timeout of the operation in milliseconds
:return: DeviceConfiguration
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['scope_id', 'device_id', 'component_id', 'timeout'] # noqa: E501
all_params.append('async_req')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in six.iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method device_configuration_component_get" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'scope_id' is set
if ('scope_id' not in params or
params['scope_id'] is None):
raise ValueError("Missing the required parameter `scope_id` when calling `device_configuration_component_get`") # noqa: E501
# verify the required parameter 'device_id' is set
if ('device_id' not in params or
params['device_id'] is None):
raise ValueError("Missing the required parameter `device_id` when calling `device_configuration_component_get`") # noqa: E501
# verify the required parameter 'component_id' is set
if ('component_id' not in params or
params['component_id'] is None):
raise ValueError("Missing the required parameter `component_id` when calling `device_configuration_component_get`") # noqa: E501
collection_formats = {}
path_params = {}
if 'scope_id' in params:
path_params['scopeId'] = params['scope_id'] # noqa: E501
if 'device_id' in params:
path_params['deviceId'] = params['device_id'] # noqa: E501
if 'component_id' in params:
path_params['componentId'] = params['component_id'] # noqa: E501
query_params = []
if 'timeout' in params:
query_params.append(('timeout', params['timeout'])) # noqa: E501
header_params = {}
form_params = []
local_var_files = {}
body_params = None
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept(
['application/json', 'application/xml']) # noqa: E501
# Authentication setting
auth_settings = ['kapuaAccessToken'] # noqa: E501
return self.api_client.call_api(
'/{scopeId}/devices/{deviceId}/configurations/{componentId}', 'GET',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='DeviceConfiguration', # noqa: E501
auth_settings=auth_settings,
async_req=params.get('async_req'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
def device_configuration_component_update(self, scope_id, device_id, component_id, body, **kwargs): # noqa: E501
"""Updates the configuration of a component on a device # noqa: E501
Updates a device component configuration # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.device_configuration_component_update(scope_id, device_id, component_id, body, async_req=True)
>>> result = thread.get()
:param async_req bool
:param str scope_id: The ScopeId of the Device. (required)
:param str device_id: The id of the device (required)
:param str component_id: The component id to update (required)
:param DeviceComponentConfiguration body: The component configuration to send to the device (required)
:param int timeout: The timeout of the operation in milliseconds
:return: DeviceConfiguration
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async_req'):
return self.device_configuration_component_update_with_http_info(scope_id, device_id, component_id, body, **kwargs) # noqa: E501
else:
(data) = self.device_configuration_component_update_with_http_info(scope_id, device_id, component_id, body, **kwargs) # noqa: E501
return data
def device_configuration_component_update_with_http_info(self, scope_id, device_id, component_id, body, **kwargs): # noqa: E501
"""Updates the configuration of a component on a device # noqa: E501
Updates a device component configuration # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.device_configuration_component_update_with_http_info(scope_id, device_id, component_id, body, async_req=True)
>>> result = thread.get()
:param async_req bool
:param str scope_id: The ScopeId of the Device. (required)
:param str device_id: The id of the device (required)
:param str component_id: The component id to update (required)
:param DeviceComponentConfiguration body: The component configuration to send to the device (required)
:param int timeout: The timeout of the operation in milliseconds
:return: DeviceConfiguration
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['scope_id', 'device_id', 'component_id', 'body', 'timeout'] # noqa: E501
all_params.append('async_req')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in six.iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method device_configuration_component_update" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'scope_id' is set
if ('scope_id' not in params or
params['scope_id'] is None):
raise ValueError("Missing the required parameter `scope_id` when calling `device_configuration_component_update`") # noqa: E501
# verify the required parameter 'device_id' is set
if ('device_id' not in params or
params['device_id'] is None):
raise ValueError("Missing the required parameter `device_id` when calling `device_configuration_component_update`") # noqa: E501
# verify the required parameter 'component_id' is set
if ('component_id' not in params or
params['component_id'] is None):
raise ValueError("Missing the required parameter `component_id` when calling `device_configuration_component_update`") # noqa: E501
# verify the required parameter 'body' is set
if ('body' not in params or
params['body'] is None):
raise ValueError("Missing the required parameter `body` when calling `device_configuration_component_update`") # noqa: E501
collection_formats = {}
path_params = {}
if 'scope_id' in params:
path_params['scopeId'] = params['scope_id'] # noqa: E501
if 'device_id' in params:
path_params['deviceId'] = params['device_id'] # noqa: E501
if 'component_id' in params:
path_params['componentId'] = params['component_id'] # noqa: E501
query_params = []
if 'timeout' in params:
query_params.append(('timeout', params['timeout'])) # noqa: E501
header_params = {}
form_params = []
local_var_files = {}
body_params = None
if 'body' in params:
body_params = params['body']
# HTTP header `Content-Type`
header_params['Content-Type'] = self.api_client.select_header_content_type( # noqa: E501
['application/json', 'application/xml']) # noqa: E501
# Authentication setting
auth_settings = ['kapuaAccessToken'] # noqa: E501
return self.api_client.call_api(
'/{scopeId}/devices/{deviceId}/configurations/{componentId}', 'PUT',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='DeviceConfiguration', # noqa: E501
auth_settings=auth_settings,
async_req=params.get('async_req'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
def device_configuration_get(self, scope_id, device_id, **kwargs): # noqa: E501
"""Gets the device configurations # noqa: E501
Returns the current configuration of a device # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.device_configuration_get(scope_id, device_id, async_req=True)
>>> result = thread.get()
:param async_req bool
:param str scope_id: The ScopeId of the Device. (required)
:param str device_id: The id of the device (required)
:param int timeout: The timeout of the operation in milliseconds
:return: DeviceConfiguration
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async_req'):
return self.device_configuration_get_with_http_info(scope_id, device_id, **kwargs) # noqa: E501
else:
(data) = self.device_configuration_get_with_http_info(scope_id, device_id, **kwargs) # noqa: E501
return data
def device_configuration_get_with_http_info(self, scope_id, device_id, **kwargs): # noqa: E501
"""Gets the device configurations # noqa: E501
Returns the current configuration of a device # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.device_configuration_get_with_http_info(scope_id, device_id, async_req=True)
>>> result = thread.get()
:param async_req bool
:param str scope_id: The ScopeId of the Device. (required)
:param str device_id: The id of the device (required)
:param int timeout: The timeout of the operation in milliseconds
:return: DeviceConfiguration
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['scope_id', 'device_id', 'timeout'] # noqa: E501
all_params.append('async_req')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in six.iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method device_configuration_get" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'scope_id' is set
if ('scope_id' not in params or
params['scope_id'] is None):
raise ValueError("Missing the required parameter `scope_id` when calling `device_configuration_get`") # noqa: E501
# verify the required parameter 'device_id' is set
if ('device_id' not in params or
params['device_id'] is None):
raise ValueError("Missing the required parameter `device_id` when calling `device_configuration_get`") # noqa: E501
collection_formats = {}
path_params = {}
if 'scope_id' in params:
path_params['scopeId'] = params['scope_id'] # noqa: E501
if 'device_id' in params:
path_params['deviceId'] = params['device_id'] # noqa: E501
query_params = []
if 'timeout' in params:
query_params.append(('timeout', params['timeout'])) # noqa: E501
header_params = {}
form_params = []
local_var_files = {}
body_params = None
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept(
['application/json', 'application/xml']) # noqa: E501
# Authentication setting
auth_settings = ['kapuaAccessToken'] # noqa: E501
return self.api_client.call_api(
'/{scopeId}/devices/{deviceId}/configurations', 'GET',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='DeviceConfiguration', # noqa: E501
auth_settings=auth_settings,
async_req=params.get('async_req'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
def device_configuration_update(self, scope_id, device_id, body, **kwargs): # noqa: E501
"""Updates a device configuration # noqa: E501
Updates a device configuration # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.device_configuration_update(scope_id, device_id, body, async_req=True)
>>> result = thread.get()
:param async_req bool
:param str scope_id: The ScopeId of the Device. (required)
:param str device_id: The id of the device (required)
:param DeviceConfiguration body: The configuration to send to the device (required)
:param int timeout: The timeout of the operation in milliseconds
:return: DeviceConfiguration
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async_req'):
return self.device_configuration_update_with_http_info(scope_id, device_id, body, **kwargs) # noqa: E501
else:
(data) = self.device_configuration_update_with_http_info(scope_id, device_id, body, **kwargs) # noqa: E501
return data
    def device_configuration_update_with_http_info(self, scope_id, device_id, body, **kwargs):  # noqa: E501
        """Updates a device configuration  # noqa: E501

        Updates a device configuration  # noqa: E501

        This method makes a synchronous HTTP request by default. To make an
        asynchronous HTTP request, please pass async_req=True
        >>> thread = api.device_configuration_update_with_http_info(scope_id, device_id, body, async_req=True)
        >>> result = thread.get()

        :param async_req bool
        :param str scope_id: The ScopeId of the Device. (required)
        :param str device_id: The id of the device (required)
        :param DeviceConfiguration body: The configuration to send to the device (required)
        :param int timeout: The timeout of the operation in milliseconds
        :return: DeviceConfiguration
                 If the method is called asynchronously,
                 returns the request thread.
        :raises TypeError: if an unexpected keyword argument is passed
        :raises ValueError: if a required parameter is missing or ``None``
        """
        # Keyword arguments accepted by this endpoint, plus the generic
        # request-control options shared by every generated method.
        all_params = ['scope_id', 'device_id', 'body', 'timeout']  # noqa: E501
        all_params.append('async_req')
        all_params.append('_return_http_data_only')
        all_params.append('_preload_content')
        all_params.append('_request_timeout')
        # Snapshot of the local namespace taken at this point; it holds
        # 'self', the positional arguments, 'kwargs' and 'all_params'.
        # Validated keyword arguments are merged into it below.
        params = locals()
        for key, val in six.iteritems(params['kwargs']):
            if key not in all_params:
                raise TypeError(
                    "Got an unexpected keyword argument '%s'"
                    " to method device_configuration_update" % key
                )
            params[key] = val
        del params['kwargs']
        # verify the required parameter 'scope_id' is set
        if ('scope_id' not in params or
                params['scope_id'] is None):
            raise ValueError("Missing the required parameter `scope_id` when calling `device_configuration_update`")  # noqa: E501
        # verify the required parameter 'device_id' is set
        if ('device_id' not in params or
                params['device_id'] is None):
            raise ValueError("Missing the required parameter `device_id` when calling `device_configuration_update`")  # noqa: E501
        # verify the required parameter 'body' is set
        if ('body' not in params or
                params['body'] is None):
            raise ValueError("Missing the required parameter `body` when calling `device_configuration_update`")  # noqa: E501
        collection_formats = {}
        path_params = {}
        if 'scope_id' in params:
            path_params['scopeId'] = params['scope_id']  # noqa: E501
        if 'device_id' in params:
            path_params['deviceId'] = params['device_id']  # noqa: E501
        query_params = []
        if 'timeout' in params:
            query_params.append(('timeout', params['timeout']))  # noqa: E501
        header_params = {}
        form_params = []
        local_var_files = {}
        body_params = None
        if 'body' in params:
            body_params = params['body']
        # HTTP header `Content-Type`
        header_params['Content-Type'] = self.api_client.select_header_content_type(  # noqa: E501
            ['application/json', 'application/xml'])  # noqa: E501
        # Authentication setting
        auth_settings = ['kapuaAccessToken']  # noqa: E501
        # Delegate the HTTP round-trip and response deserialization to the
        # shared ApiClient.
        return self.api_client.call_api(
            '/{scopeId}/devices/{deviceId}/configurations', 'PUT',
            path_params,
            query_params,
            header_params,
            body=body_params,
            post_params=form_params,
            files=local_var_files,
            response_type='DeviceConfiguration',  # noqa: E501
            auth_settings=auth_settings,
            async_req=params.get('async_req'),
            _return_http_data_only=params.get('_return_http_data_only'),
            _preload_content=params.get('_preload_content', True),
            _request_timeout=params.get('_request_timeout'),
            collection_formats=collection_formats)
def device_count(self, scope_id, body, **kwargs): # noqa: E501
"""Counts the Devices # noqa: E501
Counts the Devices with the given DeviceQuery parameter returning the number of matching Devices # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.device_count(scope_id, body, async_req=True)
>>> result = thread.get()
:param async_req bool
:param str scope_id: The ScopeId in which to count results (required)
:param DeviceQuery body: The DeviceQuery to use to filter count results (required)
:return: CountResult
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async_req'):
return self.device_count_with_http_info(scope_id, body, **kwargs) # noqa: E501
else:
(data) = self.device_count_with_http_info(scope_id, body, **kwargs) # noqa: E501
return data
    def device_count_with_http_info(self, scope_id, body, **kwargs):  # noqa: E501
        """Counts the Devices  # noqa: E501

        Counts the Devices with the given DeviceQuery parameter returning the number of matching Devices  # noqa: E501

        This method makes a synchronous HTTP request by default. To make an
        asynchronous HTTP request, please pass async_req=True
        >>> thread = api.device_count_with_http_info(scope_id, body, async_req=True)
        >>> result = thread.get()

        :param async_req bool
        :param str scope_id: The ScopeId in which to count results (required)
        :param DeviceQuery body: The DeviceQuery to use to filter count results (required)
        :return: CountResult
                 If the method is called asynchronously,
                 returns the request thread.
        :raises TypeError: if an unexpected keyword argument is passed
        :raises ValueError: if a required parameter is missing or ``None``
        """
        # Keyword arguments accepted by this endpoint, plus the generic
        # request-control options shared by every generated method.
        all_params = ['scope_id', 'body']  # noqa: E501
        all_params.append('async_req')
        all_params.append('_return_http_data_only')
        all_params.append('_preload_content')
        all_params.append('_request_timeout')
        # Snapshot of the local namespace taken at this point; validated
        # keyword arguments are merged into it below.
        params = locals()
        for key, val in six.iteritems(params['kwargs']):
            if key not in all_params:
                raise TypeError(
                    "Got an unexpected keyword argument '%s'"
                    " to method device_count" % key
                )
            params[key] = val
        del params['kwargs']
        # verify the required parameter 'scope_id' is set
        if ('scope_id' not in params or
                params['scope_id'] is None):
            raise ValueError("Missing the required parameter `scope_id` when calling `device_count`")  # noqa: E501
        # verify the required parameter 'body' is set
        if ('body' not in params or
                params['body'] is None):
            raise ValueError("Missing the required parameter `body` when calling `device_count`")  # noqa: E501
        collection_formats = {}
        path_params = {}
        if 'scope_id' in params:
            path_params['scopeId'] = params['scope_id']  # noqa: E501
        query_params = []
        header_params = {}
        form_params = []
        local_var_files = {}
        body_params = None
        if 'body' in params:
            body_params = params['body']
        # HTTP header `Accept`
        header_params['Accept'] = self.api_client.select_header_accept(
            ['application/xml', 'application/json'])  # noqa: E501
        # HTTP header `Content-Type`
        header_params['Content-Type'] = self.api_client.select_header_content_type(  # noqa: E501
            ['application/xml', 'application/json'])  # noqa: E501
        # Authentication setting
        auth_settings = ['kapuaAccessToken']  # noqa: E501
        # Delegate the HTTP round-trip and response deserialization to the
        # shared ApiClient.
        return self.api_client.call_api(
            '/{scopeId}/devices/_count', 'POST',
            path_params,
            query_params,
            header_params,
            body=body_params,
            post_params=form_params,
            files=local_var_files,
            response_type='CountResult',  # noqa: E501
            auth_settings=auth_settings,
            async_req=params.get('async_req'),
            _return_http_data_only=params.get('_return_http_data_only'),
            _preload_content=params.get('_preload_content', True),
            _request_timeout=params.get('_request_timeout'),
            collection_formats=collection_formats)
def device_create(self, scope_id, body, **kwargs): # noqa: E501
"""Create an Device # noqa: E501
Creates a new Device based on the information provided in DeviceCreator parameter. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.device_create(scope_id, body, async_req=True)
>>> result = thread.get()
:param async_req bool
:param str scope_id: The ScopeId in which to create the Device. (required)
:param DeviceCreator body: Provides the information for the new Device to be created (required)
:return: Device
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async_req'):
return self.device_create_with_http_info(scope_id, body, **kwargs) # noqa: E501
else:
(data) = self.device_create_with_http_info(scope_id, body, **kwargs) # noqa: E501
return data
    def device_create_with_http_info(self, scope_id, body, **kwargs):  # noqa: E501
        """Create a Device  # noqa: E501

        Creates a new Device based on the information provided in DeviceCreator parameter.  # noqa: E501

        This method makes a synchronous HTTP request by default. To make an
        asynchronous HTTP request, please pass async_req=True
        >>> thread = api.device_create_with_http_info(scope_id, body, async_req=True)
        >>> result = thread.get()

        :param async_req bool
        :param str scope_id: The ScopeId in which to create the Device. (required)
        :param DeviceCreator body: Provides the information for the new Device to be created (required)
        :return: Device
                 If the method is called asynchronously,
                 returns the request thread.
        :raises TypeError: if an unexpected keyword argument is passed
        :raises ValueError: if a required parameter is missing or ``None``
        """
        # Keyword arguments accepted by this endpoint, plus the generic
        # request-control options shared by every generated method.
        all_params = ['scope_id', 'body']  # noqa: E501
        all_params.append('async_req')
        all_params.append('_return_http_data_only')
        all_params.append('_preload_content')
        all_params.append('_request_timeout')
        # Snapshot of the local namespace taken at this point; validated
        # keyword arguments are merged into it below.
        params = locals()
        for key, val in six.iteritems(params['kwargs']):
            if key not in all_params:
                raise TypeError(
                    "Got an unexpected keyword argument '%s'"
                    " to method device_create" % key
                )
            params[key] = val
        del params['kwargs']
        # verify the required parameter 'scope_id' is set
        if ('scope_id' not in params or
                params['scope_id'] is None):
            raise ValueError("Missing the required parameter `scope_id` when calling `device_create`")  # noqa: E501
        # verify the required parameter 'body' is set
        if ('body' not in params or
                params['body'] is None):
            raise ValueError("Missing the required parameter `body` when calling `device_create`")  # noqa: E501
        collection_formats = {}
        path_params = {}
        if 'scope_id' in params:
            path_params['scopeId'] = params['scope_id']  # noqa: E501
        query_params = []
        header_params = {}
        form_params = []
        local_var_files = {}
        body_params = None
        if 'body' in params:
            body_params = params['body']
        # HTTP header `Accept`
        header_params['Accept'] = self.api_client.select_header_accept(
            ['application/xml', 'application/json'])  # noqa: E501
        # HTTP header `Content-Type`
        header_params['Content-Type'] = self.api_client.select_header_content_type(  # noqa: E501
            ['application/xml', 'application/json'])  # noqa: E501
        # Authentication setting
        auth_settings = ['kapuaAccessToken']  # noqa: E501
        # Delegate the HTTP round-trip and response deserialization to the
        # shared ApiClient.
        return self.api_client.call_api(
            '/{scopeId}/devices', 'POST',
            path_params,
            query_params,
            header_params,
            body=body_params,
            post_params=form_params,
            files=local_var_files,
            response_type='Device',  # noqa: E501
            auth_settings=auth_settings,
            async_req=params.get('async_req'),
            _return_http_data_only=params.get('_return_http_data_only'),
            _preload_content=params.get('_preload_content', True),
            _request_timeout=params.get('_request_timeout'),
            collection_formats=collection_formats)
def device_delete(self, scope_id, device_id, **kwargs): # noqa: E501
"""Delete a Device # noqa: E501
Deletes the Device specified by the \"deviceId\" path parameter. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.device_delete(scope_id, device_id, async_req=True)
>>> result = thread.get()
:param async_req bool
:param str scope_id: The ScopeId of the Device to delete. (required)
:param str device_id: The id of the Device to be deleted (required)
:return: None
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async_req'):
return self.device_delete_with_http_info(scope_id, device_id, **kwargs) # noqa: E501
else:
(data) = self.device_delete_with_http_info(scope_id, device_id, **kwargs) # noqa: E501
return data
    def device_delete_with_http_info(self, scope_id, device_id, **kwargs):  # noqa: E501
        """Delete a Device  # noqa: E501

        Deletes the Device specified by the \"deviceId\" path parameter.  # noqa: E501

        This method makes a synchronous HTTP request by default. To make an
        asynchronous HTTP request, please pass async_req=True
        >>> thread = api.device_delete_with_http_info(scope_id, device_id, async_req=True)
        >>> result = thread.get()

        :param async_req bool
        :param str scope_id: The ScopeId of the Device to delete. (required)
        :param str device_id: The id of the Device to be deleted (required)
        :return: None
                 If the method is called asynchronously,
                 returns the request thread.
        :raises TypeError: if an unexpected keyword argument is passed
        :raises ValueError: if a required parameter is missing or ``None``
        """
        # Keyword arguments accepted by this endpoint, plus the generic
        # request-control options shared by every generated method.
        all_params = ['scope_id', 'device_id']  # noqa: E501
        all_params.append('async_req')
        all_params.append('_return_http_data_only')
        all_params.append('_preload_content')
        all_params.append('_request_timeout')
        # Snapshot of the local namespace taken at this point; validated
        # keyword arguments are merged into it below.
        params = locals()
        for key, val in six.iteritems(params['kwargs']):
            if key not in all_params:
                raise TypeError(
                    "Got an unexpected keyword argument '%s'"
                    " to method device_delete" % key
                )
            params[key] = val
        del params['kwargs']
        # verify the required parameter 'scope_id' is set
        if ('scope_id' not in params or
                params['scope_id'] is None):
            raise ValueError("Missing the required parameter `scope_id` when calling `device_delete`")  # noqa: E501
        # verify the required parameter 'device_id' is set
        if ('device_id' not in params or
                params['device_id'] is None):
            raise ValueError("Missing the required parameter `device_id` when calling `device_delete`")  # noqa: E501
        collection_formats = {}
        path_params = {}
        if 'scope_id' in params:
            path_params['scopeId'] = params['scope_id']  # noqa: E501
        if 'device_id' in params:
            path_params['deviceId'] = params['device_id']  # noqa: E501
        query_params = []
        header_params = {}
        form_params = []
        local_var_files = {}
        body_params = None
        # Authentication setting
        auth_settings = ['kapuaAccessToken']  # noqa: E501
        # Delegate the HTTP round-trip to the shared ApiClient; DELETE has
        # no response body, hence response_type=None.
        return self.api_client.call_api(
            '/{scopeId}/devices/{deviceId}', 'DELETE',
            path_params,
            query_params,
            header_params,
            body=body_params,
            post_params=form_params,
            files=local_var_files,
            response_type=None,  # noqa: E501
            auth_settings=auth_settings,
            async_req=params.get('async_req'),
            _return_http_data_only=params.get('_return_http_data_only'),
            _preload_content=params.get('_preload_content', True),
            _request_timeout=params.get('_request_timeout'),
            collection_formats=collection_formats)
def device_event_count(self, scope_id, device_id, body, **kwargs): # noqa: E501
"""Counts the DeviceEvents # noqa: E501
Counts the DeviceEvents with the given DeviceEventQuery parameter returning the number of matching DeviceEvents # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.device_event_count(scope_id, device_id, body, async_req=True)
>>> result = thread.get()
:param async_req bool
:param str scope_id: The ScopeId in which to count results. (required)
:param str device_id: The id of the Device in which to count results (required)
:param DeviceEventQuery body: The DeviceEventQuery to use to filter count results (required)
:return: CountResult
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async_req'):
return self.device_event_count_with_http_info(scope_id, device_id, body, **kwargs) # noqa: E501
else:
(data) = self.device_event_count_with_http_info(scope_id, device_id, body, **kwargs) # noqa: E501
return data
    def device_event_count_with_http_info(self, scope_id, device_id, body, **kwargs):  # noqa: E501
        """Counts the DeviceEvents  # noqa: E501

        Counts the DeviceEvents with the given DeviceEventQuery parameter returning the number of matching DeviceEvents  # noqa: E501

        This method makes a synchronous HTTP request by default. To make an
        asynchronous HTTP request, please pass async_req=True
        >>> thread = api.device_event_count_with_http_info(scope_id, device_id, body, async_req=True)
        >>> result = thread.get()

        :param async_req bool
        :param str scope_id: The ScopeId in which to count results. (required)
        :param str device_id: The id of the Device in which to count results (required)
        :param DeviceEventQuery body: The DeviceEventQuery to use to filter count results (required)
        :return: CountResult
                 If the method is called asynchronously,
                 returns the request thread.
        :raises TypeError: if an unexpected keyword argument is passed
        :raises ValueError: if a required parameter is missing or ``None``
        """
        # Keyword arguments accepted by this endpoint, plus the generic
        # request-control options shared by every generated method.
        all_params = ['scope_id', 'device_id', 'body']  # noqa: E501
        all_params.append('async_req')
        all_params.append('_return_http_data_only')
        all_params.append('_preload_content')
        all_params.append('_request_timeout')
        # Snapshot of the local namespace taken at this point; validated
        # keyword arguments are merged into it below.
        params = locals()
        for key, val in six.iteritems(params['kwargs']):
            if key not in all_params:
                raise TypeError(
                    "Got an unexpected keyword argument '%s'"
                    " to method device_event_count" % key
                )
            params[key] = val
        del params['kwargs']
        # verify the required parameter 'scope_id' is set
        if ('scope_id' not in params or
                params['scope_id'] is None):
            raise ValueError("Missing the required parameter `scope_id` when calling `device_event_count`")  # noqa: E501
        # verify the required parameter 'device_id' is set
        if ('device_id' not in params or
                params['device_id'] is None):
            raise ValueError("Missing the required parameter `device_id` when calling `device_event_count`")  # noqa: E501
        # verify the required parameter 'body' is set
        if ('body' not in params or
                params['body'] is None):
            raise ValueError("Missing the required parameter `body` when calling `device_event_count`")  # noqa: E501
        collection_formats = {}
        path_params = {}
        if 'scope_id' in params:
            path_params['scopeId'] = params['scope_id']  # noqa: E501
        if 'device_id' in params:
            path_params['deviceId'] = params['device_id']  # noqa: E501
        query_params = []
        header_params = {}
        form_params = []
        local_var_files = {}
        body_params = None
        if 'body' in params:
            body_params = params['body']
        # HTTP header `Accept`
        header_params['Accept'] = self.api_client.select_header_accept(
            ['application/xml', 'application/json'])  # noqa: E501
        # HTTP header `Content-Type`
        header_params['Content-Type'] = self.api_client.select_header_content_type(  # noqa: E501
            ['application/xml', 'application/json'])  # noqa: E501
        # Authentication setting
        auth_settings = ['kapuaAccessToken']  # noqa: E501
        # Delegate the HTTP round-trip and response deserialization to the
        # shared ApiClient.
        return self.api_client.call_api(
            '/{scopeId}/devices/{deviceId}/events/_count', 'POST',
            path_params,
            query_params,
            header_params,
            body=body_params,
            post_params=form_params,
            files=local_var_files,
            response_type='CountResult',  # noqa: E501
            auth_settings=auth_settings,
            async_req=params.get('async_req'),
            _return_http_data_only=params.get('_return_http_data_only'),
            _preload_content=params.get('_preload_content', True),
            _request_timeout=params.get('_request_timeout'),
            collection_formats=collection_formats)
def device_event_delete(self, scope_id, device_id, device_event_id, **kwargs): # noqa: E501
"""Delete a DeviceEvent # noqa: E501
Deletes the DeviceEvent specified by the \"deviceEventId\" path parameter. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.device_event_delete(scope_id, device_id, device_event_id, async_req=True)
>>> result = thread.get()
:param async_req bool
:param str scope_id: (required)
:param str device_id: The id of the Device in which to delete the event. (required)
:param str device_event_id: The id of the DeviceEvent to be deleted (required)
:return: None
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async_req'):
return self.device_event_delete_with_http_info(scope_id, device_id, device_event_id, **kwargs) # noqa: E501
else:
(data) = self.device_event_delete_with_http_info(scope_id, device_id, device_event_id, **kwargs) # noqa: E501
return data
    def device_event_delete_with_http_info(self, scope_id, device_id, device_event_id, **kwargs):  # noqa: E501
        """Delete a DeviceEvent  # noqa: E501

        Deletes the DeviceEvent specified by the \"deviceEventId\" path parameter.  # noqa: E501

        This method makes a synchronous HTTP request by default. To make an
        asynchronous HTTP request, please pass async_req=True
        >>> thread = api.device_event_delete_with_http_info(scope_id, device_id, device_event_id, async_req=True)
        >>> result = thread.get()

        :param async_req bool
        :param str scope_id: (required)
        :param str device_id: The id of the Device in which to delete the event. (required)
        :param str device_event_id: The id of the DeviceEvent to be deleted (required)
        :return: None
                 If the method is called asynchronously,
                 returns the request thread.
        :raises TypeError: if an unexpected keyword argument is passed
        :raises ValueError: if a required parameter is missing or ``None``
        """
        # Keyword arguments accepted by this endpoint, plus the generic
        # request-control options shared by every generated method.
        all_params = ['scope_id', 'device_id', 'device_event_id']  # noqa: E501
        all_params.append('async_req')
        all_params.append('_return_http_data_only')
        all_params.append('_preload_content')
        all_params.append('_request_timeout')
        # Snapshot of the local namespace taken at this point; validated
        # keyword arguments are merged into it below.
        params = locals()
        for key, val in six.iteritems(params['kwargs']):
            if key not in all_params:
                raise TypeError(
                    "Got an unexpected keyword argument '%s'"
                    " to method device_event_delete" % key
                )
            params[key] = val
        del params['kwargs']
        # verify the required parameter 'scope_id' is set
        if ('scope_id' not in params or
                params['scope_id'] is None):
            raise ValueError("Missing the required parameter `scope_id` when calling `device_event_delete`")  # noqa: E501
        # verify the required parameter 'device_id' is set
        if ('device_id' not in params or
                params['device_id'] is None):
            raise ValueError("Missing the required parameter `device_id` when calling `device_event_delete`")  # noqa: E501
        # verify the required parameter 'device_event_id' is set
        if ('device_event_id' not in params or
                params['device_event_id'] is None):
            raise ValueError("Missing the required parameter `device_event_id` when calling `device_event_delete`")  # noqa: E501
        collection_formats = {}
        path_params = {}
        if 'scope_id' in params:
            path_params['scopeId'] = params['scope_id']  # noqa: E501
        if 'device_id' in params:
            path_params['deviceId'] = params['device_id']  # noqa: E501
        if 'device_event_id' in params:
            path_params['deviceEventId'] = params['device_event_id']  # noqa: E501
        query_params = []
        header_params = {}
        form_params = []
        local_var_files = {}
        body_params = None
        # Authentication setting
        auth_settings = ['kapuaAccessToken']  # noqa: E501
        # Delegate the HTTP round-trip to the shared ApiClient; DELETE has
        # no response body, hence response_type=None.
        return self.api_client.call_api(
            '/{scopeId}/devices/{deviceId}/events/{deviceEventId}', 'DELETE',
            path_params,
            query_params,
            header_params,
            body=body_params,
            post_params=form_params,
            files=local_var_files,
            response_type=None,  # noqa: E501
            auth_settings=auth_settings,
            async_req=params.get('async_req'),
            _return_http_data_only=params.get('_return_http_data_only'),
            _preload_content=params.get('_preload_content', True),
            _request_timeout=params.get('_request_timeout'),
            collection_formats=collection_formats)
def device_event_find(self, scope_id, device_id, device_event_id, **kwargs): # noqa: E501
"""Get an DeviceEvent # noqa: E501
Returns the DeviceEvent specified by the \"deviceEventId\" path parameter. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.device_event_find(scope_id, device_id, device_event_id, async_req=True)
>>> result = thread.get()
:param async_req bool
:param str scope_id: The ScopeId of the requested DeviceEvent. (required)
:param str device_id: The id of the requested Device (required)
:param str device_event_id: The id of the requested DeviceEvent (required)
:return: DeviceEvent
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async_req'):
return self.device_event_find_with_http_info(scope_id, device_id, device_event_id, **kwargs) # noqa: E501
else:
(data) = self.device_event_find_with_http_info(scope_id, device_id, device_event_id, **kwargs) # noqa: E501
return data
    def device_event_find_with_http_info(self, scope_id, device_id, device_event_id, **kwargs):  # noqa: E501
        """Get a DeviceEvent  # noqa: E501

        Returns the DeviceEvent specified by the \"deviceEventId\" path parameter.  # noqa: E501

        This method makes a synchronous HTTP request by default. To make an
        asynchronous HTTP request, please pass async_req=True
        >>> thread = api.device_event_find_with_http_info(scope_id, device_id, device_event_id, async_req=True)
        >>> result = thread.get()

        :param async_req bool
        :param str scope_id: The ScopeId of the requested DeviceEvent. (required)
        :param str device_id: The id of the requested Device (required)
        :param str device_event_id: The id of the requested DeviceEvent (required)
        :return: DeviceEvent
                 If the method is called asynchronously,
                 returns the request thread.
        :raises TypeError: if an unexpected keyword argument is passed
        :raises ValueError: if a required parameter is missing or ``None``
        """
        # Keyword arguments accepted by this endpoint, plus the generic
        # request-control options shared by every generated method.
        all_params = ['scope_id', 'device_id', 'device_event_id']  # noqa: E501
        all_params.append('async_req')
        all_params.append('_return_http_data_only')
        all_params.append('_preload_content')
        all_params.append('_request_timeout')
        # Snapshot of the local namespace taken at this point; validated
        # keyword arguments are merged into it below.
        params = locals()
        for key, val in six.iteritems(params['kwargs']):
            if key not in all_params:
                raise TypeError(
                    "Got an unexpected keyword argument '%s'"
                    " to method device_event_find" % key
                )
            params[key] = val
        del params['kwargs']
        # verify the required parameter 'scope_id' is set
        if ('scope_id' not in params or
                params['scope_id'] is None):
            raise ValueError("Missing the required parameter `scope_id` when calling `device_event_find`")  # noqa: E501
        # verify the required parameter 'device_id' is set
        if ('device_id' not in params or
                params['device_id'] is None):
            raise ValueError("Missing the required parameter `device_id` when calling `device_event_find`")  # noqa: E501
        # verify the required parameter 'device_event_id' is set
        if ('device_event_id' not in params or
                params['device_event_id'] is None):
            raise ValueError("Missing the required parameter `device_event_id` when calling `device_event_find`")  # noqa: E501
        collection_formats = {}
        path_params = {}
        if 'scope_id' in params:
            path_params['scopeId'] = params['scope_id']  # noqa: E501
        if 'device_id' in params:
            path_params['deviceId'] = params['device_id']  # noqa: E501
        if 'device_event_id' in params:
            path_params['deviceEventId'] = params['device_event_id']  # noqa: E501
        query_params = []
        header_params = {}
        form_params = []
        local_var_files = {}
        body_params = None
        # HTTP header `Accept`
        header_params['Accept'] = self.api_client.select_header_accept(
            ['application/json', 'application/xml'])  # noqa: E501
        # Authentication setting
        auth_settings = ['kapuaAccessToken']  # noqa: E501
        # Delegate the HTTP round-trip and response deserialization to the
        # shared ApiClient.
        return self.api_client.call_api(
            '/{scopeId}/devices/{deviceId}/events/{deviceEventId}', 'GET',
            path_params,
            query_params,
            header_params,
            body=body_params,
            post_params=form_params,
            files=local_var_files,
            response_type='DeviceEvent',  # noqa: E501
            auth_settings=auth_settings,
            async_req=params.get('async_req'),
            _return_http_data_only=params.get('_return_http_data_only'),
            _preload_content=params.get('_preload_content', True),
            _request_timeout=params.get('_request_timeout'),
            collection_formats=collection_formats)
def device_event_query(self, scope_id, device_id, body, **kwargs): # noqa: E501
"""Queries the DeviceEvents # noqa: E501
Queries the DeviceEvents with the given DeviceEvents parameter returning all matching DeviceEvents # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.device_event_query(scope_id, device_id, body, async_req=True)
>>> result = thread.get()
:param async_req bool
:param str scope_id: The ScopeId in which to search results. (required)
:param str device_id: The id of the Device in which to search results (required)
:param DeviceEventQuery body: The DeviceEventQuery to use to filter results. (required)
:return: DeviceEventListResult
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async_req'):
return self.device_event_query_with_http_info(scope_id, device_id, body, **kwargs) # noqa: E501
else:
(data) = self.device_event_query_with_http_info(scope_id, device_id, body, **kwargs) # noqa: E501
return data
    def device_event_query_with_http_info(self, scope_id, device_id, body, **kwargs):  # noqa: E501
        """Queries the DeviceEvents  # noqa: E501

        Queries the DeviceEvents with the given DeviceEvents parameter returning all matching DeviceEvents  # noqa: E501

        This method makes a synchronous HTTP request by default. To make an
        asynchronous HTTP request, please pass async_req=True
        >>> thread = api.device_event_query_with_http_info(scope_id, device_id, body, async_req=True)
        >>> result = thread.get()

        :param async_req bool
        :param str scope_id: The ScopeId in which to search results. (required)
        :param str device_id: The id of the Device in which to search results (required)
        :param DeviceEventQuery body: The DeviceEventQuery to use to filter results. (required)
        :return: DeviceEventListResult
                 If the method is called asynchronously,
                 returns the request thread.
        :raises TypeError: if an unexpected keyword argument is passed
        :raises ValueError: if a required parameter is missing or ``None``
        """
        # Keyword arguments accepted by this endpoint, plus the generic
        # request-control options shared by every generated method.
        all_params = ['scope_id', 'device_id', 'body']  # noqa: E501
        all_params.append('async_req')
        all_params.append('_return_http_data_only')
        all_params.append('_preload_content')
        all_params.append('_request_timeout')
        # Snapshot of the local namespace taken at this point; validated
        # keyword arguments are merged into it below.
        params = locals()
        for key, val in six.iteritems(params['kwargs']):
            if key not in all_params:
                raise TypeError(
                    "Got an unexpected keyword argument '%s'"
                    " to method device_event_query" % key
                )
            params[key] = val
        del params['kwargs']
        # verify the required parameter 'scope_id' is set
        if ('scope_id' not in params or
                params['scope_id'] is None):
            raise ValueError("Missing the required parameter `scope_id` when calling `device_event_query`")  # noqa: E501
        # verify the required parameter 'device_id' is set
        if ('device_id' not in params or
                params['device_id'] is None):
            raise ValueError("Missing the required parameter `device_id` when calling `device_event_query`")  # noqa: E501
        # verify the required parameter 'body' is set
        if ('body' not in params or
                params['body'] is None):
            raise ValueError("Missing the required parameter `body` when calling `device_event_query`")  # noqa: E501
        collection_formats = {}
        path_params = {}
        if 'scope_id' in params:
            path_params['scopeId'] = params['scope_id']  # noqa: E501
        if 'device_id' in params:
            path_params['deviceId'] = params['device_id']  # noqa: E501
        query_params = []
        header_params = {}
        form_params = []
        local_var_files = {}
        body_params = None
        if 'body' in params:
            body_params = params['body']
        # HTTP header `Accept`
        header_params['Accept'] = self.api_client.select_header_accept(
            ['application/xml', 'application/json'])  # noqa: E501
        # HTTP header `Content-Type`
        header_params['Content-Type'] = self.api_client.select_header_content_type(  # noqa: E501
            ['application/xml', 'application/json'])  # noqa: E501
        # Authentication setting
        auth_settings = ['kapuaAccessToken']  # noqa: E501
        # Delegate the HTTP round-trip and response deserialization to the
        # shared ApiClient.
        return self.api_client.call_api(
            '/{scopeId}/devices/{deviceId}/events/_query', 'POST',
            path_params,
            query_params,
            header_params,
            body=body_params,
            post_params=form_params,
            files=local_var_files,
            response_type='DeviceEventListResult',  # noqa: E501
            auth_settings=auth_settings,
            async_req=params.get('async_req'),
            _return_http_data_only=params.get('_return_http_data_only'),
            _preload_content=params.get('_preload_content', True),
            _request_timeout=params.get('_request_timeout'),
            collection_formats=collection_formats)
def device_event_simple_query(self, scope_id, device_id, **kwargs): # noqa: E501
"""Gets the DeviceEvent list in the scope # noqa: E501
Returns the list of all the deviceEvents associated to the current selected scope. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.device_event_simple_query(scope_id, device_id, async_req=True)
>>> result = thread.get()
:param async_req bool
:param str scope_id: The ScopeId in which to search results. (required)
:param str device_id: The client id to filter results. (required)
:param str resource: The resource of the DeviceEvent in which to search results
:param int offset: The result set offset.
:param int limit: The result set limit.
:return: DeviceEventListResult
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async_req'):
return self.device_event_simple_query_with_http_info(scope_id, device_id, **kwargs) # noqa: E501
else:
(data) = self.device_event_simple_query_with_http_info(scope_id, device_id, **kwargs) # noqa: E501
return data
def device_event_simple_query_with_http_info(self, scope_id, device_id, **kwargs): # noqa: E501
"""Gets the DeviceEvent list in the scope # noqa: E501
Returns the list of all the deviceEvents associated to the current selected scope. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.device_event_simple_query_with_http_info(scope_id, device_id, async_req=True)
>>> result = thread.get()
:param async_req bool
:param str scope_id: The ScopeId in which to search results. (required)
:param str device_id: The client id to filter results. (required)
:param str resource: The resource of the DeviceEvent in which to search results
:param int offset: The result set offset.
:param int limit: The result set limit.
:return: DeviceEventListResult
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['scope_id', 'device_id', 'resource', 'offset', 'limit'] # noqa: E501
all_params.append('async_req')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in six.iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method device_event_simple_query" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'scope_id' is set
if ('scope_id' not in params or
params['scope_id'] is None):
raise ValueError("Missing the required parameter `scope_id` when calling `device_event_simple_query`") # noqa: E501
# verify the required parameter 'device_id' is set
if ('device_id' not in params or
params['device_id'] is None):
raise ValueError("Missing the required parameter `device_id` when calling `device_event_simple_query`") # noqa: E501
collection_formats = {}
path_params = {}
if 'scope_id' in params:
path_params['scopeId'] = params['scope_id'] # noqa: E501
if 'device_id' in params:
path_params['deviceId'] = params['device_id'] # noqa: E501
query_params = []
if 'resource' in params:
query_params.append(('resource', params['resource'])) # noqa: E501
if 'offset' in params:
query_params.append(('offset', params['offset'])) # noqa: E501
if 'limit' in params:
query_params.append(('limit', params['limit'])) # noqa: E501
header_params = {}
form_params = []
local_var_files = {}
body_params = None
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept(
['application/json', 'application/xml']) # noqa: E501
# Authentication setting
auth_settings = ['kapuaAccessToken'] # noqa: E501
return self.api_client.call_api(
'/{scopeId}/devices/{deviceId}/events', 'GET',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='DeviceEventListResult', # noqa: E501
auth_settings=auth_settings,
async_req=params.get('async_req'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
def device_find(self, scope_id, device_id, **kwargs): # noqa: E501
"""Get a Device # noqa: E501
Returns the Device specified by the \"deviceId\" path parameter. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.device_find(scope_id, device_id, async_req=True)
>>> result = thread.get()
:param async_req bool
:param str scope_id: The ScopeId of the requested Device (required)
:param str device_id: The id of the requested Device (required)
:return: Device
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async_req'):
return self.device_find_with_http_info(scope_id, device_id, **kwargs) # noqa: E501
else:
(data) = self.device_find_with_http_info(scope_id, device_id, **kwargs) # noqa: E501
return data
def device_find_with_http_info(self, scope_id, device_id, **kwargs): # noqa: E501
"""Get a Device # noqa: E501
Returns the Device specified by the \"deviceId\" path parameter. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.device_find_with_http_info(scope_id, device_id, async_req=True)
>>> result = thread.get()
:param async_req bool
:param str scope_id: The ScopeId of the requested Device (required)
:param str device_id: The id of the requested Device (required)
:return: Device
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['scope_id', 'device_id'] # noqa: E501
all_params.append('async_req')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in six.iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method device_find" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'scope_id' is set
if ('scope_id' not in params or
params['scope_id'] is None):
raise ValueError("Missing the required parameter `scope_id` when calling `device_find`") # noqa: E501
# verify the required parameter 'device_id' is set
if ('device_id' not in params or
params['device_id'] is None):
raise ValueError("Missing the required parameter `device_id` when calling `device_find`") # noqa: E501
collection_formats = {}
path_params = {}
if 'scope_id' in params:
path_params['scopeId'] = params['scope_id'] # noqa: E501
if 'device_id' in params:
path_params['deviceId'] = params['device_id'] # noqa: E501
query_params = []
header_params = {}
form_params = []
local_var_files = {}
body_params = None
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept(
['application/json', 'application/xml']) # noqa: E501
# Authentication setting
auth_settings = ['kapuaAccessToken'] # noqa: E501
return self.api_client.call_api(
'/{scopeId}/devices/{deviceId}', 'GET',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='Device', # noqa: E501
auth_settings=auth_settings,
async_req=params.get('async_req'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
def device_package_download(self, scope_id, device_id, body, **kwargs): # noqa: E501
"""Installs a package # noqa: E501
Installs a package into the device. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.device_package_download(scope_id, device_id, body, async_req=True)
>>> result = thread.get()
:param async_req bool
:param str scope_id: The ScopeId of the Device (required)
:param str device_id: The id of the device (required)
:param DevicePackageDownloadRequest body: Mandatory object with all the informations needed to download and install a package (required)
:param int timeout: The timeout of the operation
:return: None
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async_req'):
return self.device_package_download_with_http_info(scope_id, device_id, body, **kwargs) # noqa: E501
else:
(data) = self.device_package_download_with_http_info(scope_id, device_id, body, **kwargs) # noqa: E501
return data
def device_package_download_with_http_info(self, scope_id, device_id, body, **kwargs): # noqa: E501
"""Installs a package # noqa: E501
Installs a package into the device. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.device_package_download_with_http_info(scope_id, device_id, body, async_req=True)
>>> result = thread.get()
:param async_req bool
:param str scope_id: The ScopeId of the Device (required)
:param str device_id: The id of the device (required)
:param DevicePackageDownloadRequest body: Mandatory object with all the informations needed to download and install a package (required)
:param int timeout: The timeout of the operation
:return: None
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['scope_id', 'device_id', 'body', 'timeout'] # noqa: E501
all_params.append('async_req')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in six.iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method device_package_download" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'scope_id' is set
if ('scope_id' not in params or
params['scope_id'] is None):
raise ValueError("Missing the required parameter `scope_id` when calling `device_package_download`") # noqa: E501
# verify the required parameter 'device_id' is set
if ('device_id' not in params or
params['device_id'] is None):
raise ValueError("Missing the required parameter `device_id` when calling `device_package_download`") # noqa: E501
# verify the required parameter 'body' is set
if ('body' not in params or
params['body'] is None):
raise ValueError("Missing the required parameter `body` when calling `device_package_download`") # noqa: E501
collection_formats = {}
path_params = {}
if 'scope_id' in params:
path_params['scopeId'] = params['scope_id'] # noqa: E501
if 'device_id' in params:
path_params['deviceId'] = params['device_id'] # noqa: E501
query_params = []
if 'timeout' in params:
query_params.append(('timeout', params['timeout'])) # noqa: E501
header_params = {}
form_params = []
local_var_files = {}
body_params = None
if 'body' in params:
body_params = params['body']
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept(
['application/xml', 'application/json']) # noqa: E501
# Authentication setting
auth_settings = ['kapuaAccessToken'] # noqa: E501
return self.api_client.call_api(
'/{scopeId}/devices/{deviceId}/packages/_download', 'POST',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type=None, # noqa: E501
auth_settings=auth_settings,
async_req=params.get('async_req'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
def device_package_get(self, scope_id, device_id, **kwargs): # noqa: E501
"""Gets a list of packages # noqa: E501
Returns the list of all the packages installed on the device. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.device_package_get(scope_id, device_id, async_req=True)
>>> result = thread.get()
:param async_req bool
:param str scope_id: The ScopeId of the Device (required)
:param str device_id: The id of the device (required)
:param int timeout: The timeout of the operation
:return: DevicePackages
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async_req'):
return self.device_package_get_with_http_info(scope_id, device_id, **kwargs) # noqa: E501
else:
(data) = self.device_package_get_with_http_info(scope_id, device_id, **kwargs) # noqa: E501
return data
def device_package_get_with_http_info(self, scope_id, device_id, **kwargs): # noqa: E501
"""Gets a list of packages # noqa: E501
Returns the list of all the packages installed on the device. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.device_package_get_with_http_info(scope_id, device_id, async_req=True)
>>> result = thread.get()
:param async_req bool
:param str scope_id: The ScopeId of the Device (required)
:param str device_id: The id of the device (required)
:param int timeout: The timeout of the operation
:return: DevicePackages
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['scope_id', 'device_id', 'timeout'] # noqa: E501
all_params.append('async_req')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in six.iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method device_package_get" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'scope_id' is set
if ('scope_id' not in params or
params['scope_id'] is None):
raise ValueError("Missing the required parameter `scope_id` when calling `device_package_get`") # noqa: E501
# verify the required parameter 'device_id' is set
if ('device_id' not in params or
params['device_id'] is None):
raise ValueError("Missing the required parameter `device_id` when calling `device_package_get`") # noqa: E501
collection_formats = {}
path_params = {}
if 'scope_id' in params:
path_params['scopeId'] = params['scope_id'] # noqa: E501
if 'device_id' in params:
path_params['deviceId'] = params['device_id'] # noqa: E501
query_params = []
if 'timeout' in params:
query_params.append(('timeout', params['timeout'])) # noqa: E501
header_params = {}
form_params = []
local_var_files = {}
body_params = None
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept(
['application/json', 'application/xml']) # noqa: E501
# Authentication setting
auth_settings = ['kapuaAccessToken'] # noqa: E501
return self.api_client.call_api(
'/{scopeId}/devices/{deviceId}/packages', 'GET',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='DevicePackages', # noqa: E501
auth_settings=auth_settings,
async_req=params.get('async_req'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
def device_package_uninstall(self, scope_id, device_id, body, **kwargs): # noqa: E501
"""Uninstalls a package # noqa: E501
Uninstalls a package into the device. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.device_package_uninstall(scope_id, device_id, body, async_req=True)
>>> result = thread.get()
:param async_req bool
:param str scope_id: The ScopeId of the Device (required)
:param str device_id: The id of the device (required)
:param DevicePackageUninstallRequest body: Mandatory object with all the informations needed to uninstall a package (required)
:param int timeout: The timeout of the operation
:return: None
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async_req'):
return self.device_package_uninstall_with_http_info(scope_id, device_id, body, **kwargs) # noqa: E501
else:
(data) = self.device_package_uninstall_with_http_info(scope_id, device_id, body, **kwargs) # noqa: E501
return data
def device_package_uninstall_with_http_info(self, scope_id, device_id, body, **kwargs): # noqa: E501
"""Uninstalls a package # noqa: E501
Uninstalls a package into the device. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.device_package_uninstall_with_http_info(scope_id, device_id, body, async_req=True)
>>> result = thread.get()
:param async_req bool
:param str scope_id: The ScopeId of the Device (required)
:param str device_id: The id of the device (required)
:param DevicePackageUninstallRequest body: Mandatory object with all the informations needed to uninstall a package (required)
:param int timeout: The timeout of the operation
:return: None
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['scope_id', 'device_id', 'body', 'timeout'] # noqa: E501
all_params.append('async_req')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in six.iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method device_package_uninstall" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'scope_id' is set
if ('scope_id' not in params or
params['scope_id'] is None):
raise ValueError("Missing the required parameter `scope_id` when calling `device_package_uninstall`") # noqa: E501
# verify the required parameter 'device_id' is set
if ('device_id' not in params or
params['device_id'] is None):
raise ValueError("Missing the required parameter `device_id` when calling `device_package_uninstall`") # noqa: E501
# verify the required parameter 'body' is set
if ('body' not in params or
params['body'] is None):
raise ValueError("Missing the required parameter `body` when calling `device_package_uninstall`") # noqa: E501
collection_formats = {}
path_params = {}
if 'scope_id' in params:
path_params['scopeId'] = params['scope_id'] # noqa: E501
if 'device_id' in params:
path_params['deviceId'] = params['device_id'] # noqa: E501
query_params = []
if 'timeout' in params:
query_params.append(('timeout', params['timeout'])) # noqa: E501
header_params = {}
form_params = []
local_var_files = {}
body_params = None
if 'body' in params:
body_params = params['body']
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept(
['application/xml', 'application/json']) # noqa: E501
# Authentication setting
auth_settings = ['kapuaAccessToken'] # noqa: E501
return self.api_client.call_api(
'/{scopeId}/devices/{deviceId}/packages/_uninstall', 'POST',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type=None, # noqa: E501
auth_settings=auth_settings,
async_req=params.get('async_req'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
def device_query(self, scope_id, body, **kwargs): # noqa: E501
"""Queries the Devices # noqa: E501
Queries the Devices with the given Devices parameter returning all matching Devices # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.device_query(scope_id, body, async_req=True)
>>> result = thread.get()
:param async_req bool
:param str scope_id: The ScopeId in which to search results. (required)
:param DeviceQuery body: The DeviceQuery to use to filter results. (required)
:return: DeviceListResult
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async_req'):
return self.device_query_with_http_info(scope_id, body, **kwargs) # noqa: E501
else:
(data) = self.device_query_with_http_info(scope_id, body, **kwargs) # noqa: E501
return data
def device_query_with_http_info(self, scope_id, body, **kwargs): # noqa: E501
"""Queries the Devices # noqa: E501
Queries the Devices with the given Devices parameter returning all matching Devices # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.device_query_with_http_info(scope_id, body, async_req=True)
>>> result = thread.get()
:param async_req bool
:param str scope_id: The ScopeId in which to search results. (required)
:param DeviceQuery body: The DeviceQuery to use to filter results. (required)
:return: DeviceListResult
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['scope_id', 'body'] # noqa: E501
all_params.append('async_req')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in six.iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method device_query" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'scope_id' is set
if ('scope_id' not in params or
params['scope_id'] is None):
raise ValueError("Missing the required parameter `scope_id` when calling `device_query`") # noqa: E501
# verify the required parameter 'body' is set
if ('body' not in params or
params['body'] is None):
raise ValueError("Missing the required parameter `body` when calling `device_query`") # noqa: E501
collection_formats = {}
path_params = {}
if 'scope_id' in params:
path_params['scopeId'] = params['scope_id'] # noqa: E501
query_params = []
header_params = {}
form_params = []
local_var_files = {}
body_params = None
if 'body' in params:
body_params = params['body']
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept(
['application/xml', 'application/json']) # noqa: E501
# HTTP header `Content-Type`
header_params['Content-Type'] = self.api_client.select_header_content_type( # noqa: E501
['application/xml', 'application/json']) # noqa: E501
# Authentication setting
auth_settings = ['kapuaAccessToken'] # noqa: E501
return self.api_client.call_api(
'/{scopeId}/devices/_query', 'POST',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='DeviceListResult', # noqa: E501
auth_settings=auth_settings,
async_req=params.get('async_req'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
def device_request_send(self, scope_id, device_id, body, **kwargs): # noqa: E501
"""Sends a request # noqa: E501
Sends a request message to a device # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.device_request_send(scope_id, device_id, body, async_req=True)
>>> result = thread.get()
:param async_req bool
:param str scope_id: The ScopeId of the device (required)
:param str device_id: The id of the device (required)
:param JsonGenericRequestMessage body: The input request (required)
:param int timeout: The timeout of the request execution
:return: JsonGenericResponseMessage
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async_req'):
return self.device_request_send_with_http_info(scope_id, device_id, body, **kwargs) # noqa: E501
else:
(data) = self.device_request_send_with_http_info(scope_id, device_id, body, **kwargs) # noqa: E501
return data
def device_request_send_with_http_info(self, scope_id, device_id, body, **kwargs): # noqa: E501
"""Sends a request # noqa: E501
Sends a request message to a device # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.device_request_send_with_http_info(scope_id, device_id, body, async_req=True)
>>> result = thread.get()
:param async_req bool
:param str scope_id: The ScopeId of the device (required)
:param str device_id: The id of the device (required)
:param JsonGenericRequestMessage body: The input request (required)
:param int timeout: The timeout of the request execution
:return: JsonGenericResponseMessage
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['scope_id', 'device_id', 'body', 'timeout'] # noqa: E501
all_params.append('async_req')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in six.iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method device_request_send" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'scope_id' is set
if ('scope_id' not in params or
params['scope_id'] is None):
raise ValueError("Missing the required parameter `scope_id` when calling `device_request_send`") # noqa: E501
# verify the required parameter 'device_id' is set
if ('device_id' not in params or
params['device_id'] is None):
raise ValueError("Missing the required parameter `device_id` when calling `device_request_send`") # noqa: E501
# verify the required parameter 'body' is set
if ('body' not in params or
params['body'] is None):
raise ValueError("Missing the required parameter `body` when calling `device_request_send`") # noqa: E501
collection_formats = {}
path_params = {}
if 'scope_id' in params:
path_params['scopeId'] = params['scope_id'] # noqa: E501
if 'device_id' in params:
path_params['deviceId'] = params['device_id'] # noqa: E501
query_params = []
if 'timeout' in params:
query_params.append(('timeout', params['timeout'])) # noqa: E501
header_params = {}
form_params = []
local_var_files = {}
body_params = None
if 'body' in params:
body_params = params['body']
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept(
['application/json']) # noqa: E501
# HTTP header `Content-Type`
header_params['Content-Type'] = self.api_client.select_header_content_type( # noqa: E501
['application/json']) # noqa: E501
# Authentication setting
auth_settings = ['kapuaAccessToken'] # noqa: E501
return self.api_client.call_api(
'/{scopeId}/devices/{deviceId}/requests', 'POST',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='JsonGenericResponseMessage', # noqa: E501
auth_settings=auth_settings,
async_req=params.get('async_req'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
def device_simple_query(self, scope_id, **kwargs): # noqa: E501
"""Gets the Device list in the scope # noqa: E501
Returns the list of all the devices associated to the current selected scope. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.device_simple_query(scope_id, async_req=True)
>>> result = thread.get()
:param async_req bool
:param str scope_id: The ScopeId in which to search results. (required)
:param str tag_id: The tag id to filter results.
:param str client_id: The client id to filter results.
:param str status: The connection status to filter results.
:param list[str] fetch_attributes: Additional attributes to be returned. Allowed values: connection, lastEvent
:param int offset: The result set offset.
:param int limit: The result set limit.
:return: DeviceListResult
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async_req'):
return self.device_simple_query_with_http_info(scope_id, **kwargs) # noqa: E501
else:
(data) = self.device_simple_query_with_http_info(scope_id, **kwargs) # noqa: E501
return data
def device_simple_query_with_http_info(self, scope_id, **kwargs): # noqa: E501
"""Gets the Device list in the scope # noqa: E501
Returns the list of all the devices associated to the current selected scope. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.device_simple_query_with_http_info(scope_id, async_req=True)
>>> result = thread.get()
:param async_req bool
:param str scope_id: The ScopeId in which to search results. (required)
:param str tag_id: The tag id to filter results.
:param str client_id: The client id to filter results.
:param str status: The connection status to filter results.
:param list[str] fetch_attributes: Additional attributes to be returned. Allowed values: connection, lastEvent
:param int offset: The result set offset.
:param int limit: The result set limit.
:return: DeviceListResult
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['scope_id', 'tag_id', 'client_id', 'status', 'fetch_attributes', 'offset', 'limit'] # noqa: E501
all_params.append('async_req')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in six.iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method device_simple_query" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'scope_id' is set
if ('scope_id' not in params or
params['scope_id'] is None):
raise ValueError("Missing the required parameter `scope_id` when calling `device_simple_query`") # noqa: E501
collection_formats = {}
path_params = {}
if 'scope_id' in params:
path_params['scopeId'] = params['scope_id'] # noqa: E501
query_params = []
if 'tag_id' in params:
query_params.append(('tagId', params['tag_id'])) # noqa: E501
if 'client_id' in params:
query_params.append(('clientId', params['client_id'])) # noqa: E501
if 'status' in params:
query_params.append(('status', params['status'])) # noqa: E501
if 'fetch_attributes' in params:
query_params.append(('fetchAttributes', params['fetch_attributes'])) # noqa: E501
collection_formats['fetchAttributes'] = 'multi' # noqa: E501
if 'offset' in params:
query_params.append(('offset', params['offset'])) # noqa: E501
if 'limit' in params:
query_params.append(('limit', params['limit'])) # noqa: E501
header_params = {}
form_params = []
local_var_files = {}
body_params = None
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept(
['application/json', 'application/xml']) # noqa: E501
# Authentication setting
auth_settings = ['kapuaAccessToken'] # noqa: E501
return self.api_client.call_api(
'/{scopeId}/devices', 'GET',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='DeviceListResult', # noqa: E501
auth_settings=auth_settings,
async_req=params.get('async_req'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
def device_snapshot_get(self, scope_id, device_id, **kwargs): # noqa: E501
"""Gets a list of snapshots # noqa: E501
Returns the list of all the Snapshots available on the device. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.device_snapshot_get(scope_id, device_id, async_req=True)
>>> result = thread.get()
:param async_req bool
:param str scope_id: The ScopeId of the device (required)
:param str device_id: The id of the device (required)
:param int timeout: The timeout of the operation
:return: DeviceSnapshots
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async_req'):
return self.device_snapshot_get_with_http_info(scope_id, device_id, **kwargs) # noqa: E501
else:
(data) = self.device_snapshot_get_with_http_info(scope_id, device_id, **kwargs) # noqa: E501
return data
    def device_snapshot_get_with_http_info(self, scope_id, device_id, **kwargs):  # noqa: E501
        """Gets a list of snapshots  # noqa: E501
        Returns the list of all the Snapshots available on the device.  # noqa: E501
        This method makes a synchronous HTTP request by default. To make an
        asynchronous HTTP request, please pass async_req=True
        >>> thread = api.device_snapshot_get_with_http_info(scope_id, device_id, async_req=True)
        >>> result = thread.get()
        :param async_req bool
        :param str scope_id: The ScopeId of the device (required)
        :param str device_id: The id of the device (required)
        :param int timeout: The timeout of the operation
        :return: DeviceSnapshots
            If the method is called asynchronously,
            returns the request thread.
        """
        # Whitelist of keyword arguments this endpoint understands; anything
        # else is rejected below as a programming error.
        all_params = ['scope_id', 'device_id', 'timeout']  # noqa: E501
        all_params.append('async_req')
        all_params.append('_return_http_data_only')
        all_params.append('_preload_content')
        all_params.append('_request_timeout')
        # NOTE: locals() is captured here, before any further locals exist; it
        # snapshots self, scope_id, device_id and the kwargs dict, which is
        # then merged/validated and removed below.
        params = locals()
        for key, val in six.iteritems(params['kwargs']):
            if key not in all_params:
                raise TypeError(
                    "Got an unexpected keyword argument '%s'"
                    " to method device_snapshot_get" % key
                )
            params[key] = val
        del params['kwargs']
        # verify the required parameter 'scope_id' is set
        if ('scope_id' not in params or
                params['scope_id'] is None):
            raise ValueError("Missing the required parameter `scope_id` when calling `device_snapshot_get`")  # noqa: E501
        # verify the required parameter 'device_id' is set
        if ('device_id' not in params or
                params['device_id'] is None):
            raise ValueError("Missing the required parameter `device_id` when calling `device_snapshot_get`")  # noqa: E501
        collection_formats = {}
        # Path parameters are substituted into the endpoint template below.
        path_params = {}
        if 'scope_id' in params:
            path_params['scopeId'] = params['scope_id']  # noqa: E501
        if 'device_id' in params:
            path_params['deviceId'] = params['device_id']  # noqa: E501
        query_params = []
        if 'timeout' in params:
            query_params.append(('timeout', params['timeout']))  # noqa: E501
        header_params = {}
        form_params = []
        local_var_files = {}
        body_params = None
        # HTTP header `Accept`
        header_params['Accept'] = self.api_client.select_header_accept(
            ['application/json', 'application/xml'])  # noqa: E501
        # Authentication setting
        auth_settings = ['kapuaAccessToken']  # noqa: E501
        return self.api_client.call_api(
            '/{scopeId}/devices/{deviceId}/snapshots', 'GET',
            path_params,
            query_params,
            header_params,
            body=body_params,
            post_params=form_params,
            files=local_var_files,
            response_type='DeviceSnapshots',  # noqa: E501
            auth_settings=auth_settings,
            async_req=params.get('async_req'),
            _return_http_data_only=params.get('_return_http_data_only'),
            _preload_content=params.get('_preload_content', True),
            _request_timeout=params.get('_request_timeout'),
            collection_formats=collection_formats)
def device_snapshot_rollback(self, scope_id, device_id, snapshot_id, **kwargs): # noqa: E501
"""Gets a list of snapshots # noqa: E501
Updates the configuration of a device rolling back a given snapshot ID. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.device_snapshot_rollback(scope_id, device_id, snapshot_id, async_req=True)
>>> result = thread.get()
:param async_req bool
:param str scope_id: The ScopeId of the device (required)
:param str device_id: The id of the device (required)
:param str snapshot_id: the ID of the snapshot to rollback to (required)
:param int timeout: The timeout of the operation
:return: None
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async_req'):
return self.device_snapshot_rollback_with_http_info(scope_id, device_id, snapshot_id, **kwargs) # noqa: E501
else:
(data) = self.device_snapshot_rollback_with_http_info(scope_id, device_id, snapshot_id, **kwargs) # noqa: E501
return data
    def device_snapshot_rollback_with_http_info(self, scope_id, device_id, snapshot_id, **kwargs):  # noqa: E501
        """Rolls back the device configuration to a snapshot  # noqa: E501
        Updates the configuration of a device rolling back a given snapshot ID.  # noqa: E501
        This method makes a synchronous HTTP request by default. To make an
        asynchronous HTTP request, please pass async_req=True
        >>> thread = api.device_snapshot_rollback_with_http_info(scope_id, device_id, snapshot_id, async_req=True)
        >>> result = thread.get()
        :param async_req bool
        :param str scope_id: The ScopeId of the device (required)
        :param str device_id: The id of the device (required)
        :param str snapshot_id: the ID of the snapshot to rollback to (required)
        :param int timeout: The timeout of the operation
        :return: None
            If the method is called asynchronously,
            returns the request thread.
        """
        # Whitelist of keyword arguments this endpoint understands.
        all_params = ['scope_id', 'device_id', 'snapshot_id', 'timeout']  # noqa: E501
        all_params.append('async_req')
        all_params.append('_return_http_data_only')
        all_params.append('_preload_content')
        all_params.append('_request_timeout')
        # NOTE: locals() is captured here, before any further locals exist; it
        # snapshots self, the positional parameters and the kwargs dict.
        params = locals()
        for key, val in six.iteritems(params['kwargs']):
            if key not in all_params:
                raise TypeError(
                    "Got an unexpected keyword argument '%s'"
                    " to method device_snapshot_rollback" % key
                )
            params[key] = val
        del params['kwargs']
        # verify the required parameter 'scope_id' is set
        if ('scope_id' not in params or
                params['scope_id'] is None):
            raise ValueError("Missing the required parameter `scope_id` when calling `device_snapshot_rollback`")  # noqa: E501
        # verify the required parameter 'device_id' is set
        if ('device_id' not in params or
                params['device_id'] is None):
            raise ValueError("Missing the required parameter `device_id` when calling `device_snapshot_rollback`")  # noqa: E501
        # verify the required parameter 'snapshot_id' is set
        if ('snapshot_id' not in params or
                params['snapshot_id'] is None):
            raise ValueError("Missing the required parameter `snapshot_id` when calling `device_snapshot_rollback`")  # noqa: E501
        collection_formats = {}
        # Path parameters are substituted into the endpoint template below.
        path_params = {}
        if 'scope_id' in params:
            path_params['scopeId'] = params['scope_id']  # noqa: E501
        if 'device_id' in params:
            path_params['deviceId'] = params['device_id']  # noqa: E501
        if 'snapshot_id' in params:
            path_params['snapshotId'] = params['snapshot_id']  # noqa: E501
        query_params = []
        if 'timeout' in params:
            query_params.append(('timeout', params['timeout']))  # noqa: E501
        header_params = {}
        form_params = []
        local_var_files = {}
        body_params = None
        # HTTP header `Accept`
        header_params['Accept'] = self.api_client.select_header_accept(
            ['application/xml', 'application/json'])  # noqa: E501
        # Authentication setting
        auth_settings = ['kapuaAccessToken']  # noqa: E501
        return self.api_client.call_api(
            '/{scopeId}/devices/{deviceId}/snapshots/{snapshotId}/_rollback', 'POST',
            path_params,
            query_params,
            header_params,
            body=body_params,
            post_params=form_params,
            files=local_var_files,
            response_type=None,  # noqa: E501
            auth_settings=auth_settings,
            async_req=params.get('async_req'),
            _return_http_data_only=params.get('_return_http_data_only'),
            _preload_content=params.get('_preload_content', True),
            _request_timeout=params.get('_request_timeout'),
            collection_formats=collection_formats)
def device_update(self, scope_id, device_id, body, **kwargs): # noqa: E501
"""Update a Device # noqa: E501
Updates a new Device based on the information provided in the Device parameter. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.device_update(scope_id, device_id, body, async_req=True)
>>> result = thread.get()
:param async_req bool
:param str scope_id: The ScopeId of the requested Device. (required)
:param str device_id: The id of the requested Device (required)
:param Device body: The modified Device whose attributed need to be updated (required)
:return: Device
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async_req'):
return self.device_update_with_http_info(scope_id, device_id, body, **kwargs) # noqa: E501
else:
(data) = self.device_update_with_http_info(scope_id, device_id, body, **kwargs) # noqa: E501
return data
    def device_update_with_http_info(self, scope_id, device_id, body, **kwargs):  # noqa: E501
        """Update a Device  # noqa: E501
        Updates an existing Device based on the information provided in the Device parameter.  # noqa: E501
        This method makes a synchronous HTTP request by default. To make an
        asynchronous HTTP request, please pass async_req=True
        >>> thread = api.device_update_with_http_info(scope_id, device_id, body, async_req=True)
        >>> result = thread.get()
        :param async_req bool
        :param str scope_id: The ScopeId of the requested Device. (required)
        :param str device_id: The id of the requested Device (required)
        :param Device body: The modified Device whose attributes need to be updated (required)
        :return: Device
            If the method is called asynchronously,
            returns the request thread.
        """
        # Whitelist of keyword arguments this endpoint understands.
        all_params = ['scope_id', 'device_id', 'body']  # noqa: E501
        all_params.append('async_req')
        all_params.append('_return_http_data_only')
        all_params.append('_preload_content')
        all_params.append('_request_timeout')
        # NOTE: locals() is captured here, before any further locals exist; it
        # snapshots self, the positional parameters and the kwargs dict.
        params = locals()
        for key, val in six.iteritems(params['kwargs']):
            if key not in all_params:
                raise TypeError(
                    "Got an unexpected keyword argument '%s'"
                    " to method device_update" % key
                )
            params[key] = val
        del params['kwargs']
        # verify the required parameter 'scope_id' is set
        if ('scope_id' not in params or
                params['scope_id'] is None):
            raise ValueError("Missing the required parameter `scope_id` when calling `device_update`")  # noqa: E501
        # verify the required parameter 'device_id' is set
        if ('device_id' not in params or
                params['device_id'] is None):
            raise ValueError("Missing the required parameter `device_id` when calling `device_update`")  # noqa: E501
        # verify the required parameter 'body' is set
        if ('body' not in params or
                params['body'] is None):
            raise ValueError("Missing the required parameter `body` when calling `device_update`")  # noqa: E501
        collection_formats = {}
        # Path parameters are substituted into the endpoint template below.
        path_params = {}
        if 'scope_id' in params:
            path_params['scopeId'] = params['scope_id']  # noqa: E501
        if 'device_id' in params:
            path_params['deviceId'] = params['device_id']  # noqa: E501
        query_params = []
        header_params = {}
        form_params = []
        local_var_files = {}
        body_params = None
        # The modified Device travels as the request body (PUT).
        if 'body' in params:
            body_params = params['body']
        # HTTP header `Accept`
        header_params['Accept'] = self.api_client.select_header_accept(
            ['application/xml', 'application/json'])  # noqa: E501
        # HTTP header `Content-Type`
        header_params['Content-Type'] = self.api_client.select_header_content_type(  # noqa: E501
            ['application/xml', 'application/json'])  # noqa: E501
        # Authentication setting
        auth_settings = ['kapuaAccessToken']  # noqa: E501
        return self.api_client.call_api(
            '/{scopeId}/devices/{deviceId}', 'PUT',
            path_params,
            query_params,
            header_params,
            body=body_params,
            post_params=form_params,
            files=local_var_files,
            response_type='Device',  # noqa: E501
            auth_settings=auth_settings,
            async_req=params.get('async_req'),
            _return_http_data_only=params.get('_return_http_data_only'),
            _preload_content=params.get('_preload_content', True),
            _request_timeout=params.get('_request_timeout'),
            collection_formats=collection_formats)
| 44.859882 | 426 | 0.624324 | [
"MIT"
] | liang-faan/SmartIOT-Diec | kapua-client/python-client/swagger_client/api/devices_api.py | 152,075 | Python |
"""
Conversion of length units.
Available units: metre, kilometre, feet, inch, centimetre, yard, foot, mile, millimetre
USAGE:
-> Import this file into your project.
-> Use the function length_conversion() to convert between length units.
-> Parameters:
   -> value: the number of `from_type` units to convert
   -> from_type: the unit to convert from
   -> to_type: the unit to convert to
REFERENCES :
-> Wikipedia reference: https://en.wikipedia.org/wiki/Meter
-> Wikipedia reference: https://en.wikipedia.org/wiki/Kilometer
-> Wikipedia reference: https://en.wikipedia.org/wiki/Feet
-> Wikipedia reference: https://en.wikipedia.org/wiki/Inch
-> Wikipedia reference: https://en.wikipedia.org/wiki/Centimeter
-> Wikipedia reference: https://en.wikipedia.org/wiki/Yard
-> Wikipedia reference: https://en.wikipedia.org/wiki/Foot
-> Wikipedia reference: https://en.wikipedia.org/wiki/Mile
-> Wikipedia reference: https://en.wikipedia.org/wiki/Millimeter
"""
from collections import namedtuple
from_to = namedtuple("from_to", "from_ to")

# Spelled-out unit names mapped to their canonical abbreviation.  Look-ups
# happen after lower-casing and stripping one trailing "s", which is why the
# singular-minus-s spelling "inche" appears here.
TYPE_CONVERSION = {
    "millimeter": "mm",
    "centimeter": "cm",
    "meter": "m",
    "kilometer": "km",
    "inch": "in",
    "inche": "in",  # Trailing 's' has been stripped off
    "feet": "ft",
    "foot": "ft",
    "yard": "yd",
    "mile": "mi",
}

# For each abbreviation: (metres per unit, units per metre).
METRIC_CONVERSION = {
    "mm": from_to(0.001, 1000),
    "cm": from_to(0.01, 100),
    "m": from_to(1, 1),
    "km": from_to(1000, 0.001),
    "in": from_to(0.0254, 39.3701),
    "ft": from_to(0.3048, 3.28084),
    "yd": from_to(0.9144, 1.09361),
    "mi": from_to(1609.34, 0.000621371),
}


def length_conversion(value: float, from_type: str, to_type: str) -> float:
    """
    Conversion between length units.
    >>> length_conversion(4, "METER", "FEET")
    13.12336
    >>> length_conversion(4, "M", "FT")
    13.12336
    >>> length_conversion(1, "meter", "kilometer")
    0.001
    >>> length_conversion(1, "kilometer", "inch")
    39370.1
    >>> length_conversion(3, "kilometer", "mile")
    1.8641130000000001
    >>> length_conversion(2, "feet", "meter")
    0.6096
    >>> length_conversion(4, "feet", "yard")
    1.333329312
    >>> length_conversion(1, "inch", "meter")
    0.0254
    >>> length_conversion(2, "inch", "mile")
    3.15656468e-05
    >>> length_conversion(2, "centimeter", "millimeter")
    20.0
    >>> length_conversion(2, "centimeter", "yard")
    0.0218722
    >>> length_conversion(4, "yard", "meter")
    3.6576
    >>> length_conversion(4, "yard", "kilometer")
    0.0036576
    >>> length_conversion(3, "foot", "meter")
    0.9144000000000001
    >>> length_conversion(3, "foot", "inch")
    36.00001944
    >>> length_conversion(4, "mile", "kilometer")
    6.43736
    >>> length_conversion(2, "miles", "InChEs")
    126719.753468
    >>> length_conversion(3, "millimeter", "centimeter")
    0.3
    >>> length_conversion(3, "mm", "in")
    0.1181103
    >>> length_conversion(4, "wrongUnit", "inch")
    Traceback (most recent call last):
    ...
    ValueError: Invalid 'from_type' value: 'wrongUnit'.
    Conversion abbreviations are: mm, cm, m, km, in, ft, yd, mi
    """
    # Normalize both unit spellings to abbreviations, validating each one.
    resolved = []
    for label, raw_unit in (("from_type", from_type), ("to_type", to_type)):
        key = raw_unit.lower().rstrip("s")
        key = TYPE_CONVERSION.get(key, key)
        if key not in METRIC_CONVERSION:
            raise ValueError(
                f"Invalid {label!r} value: {raw_unit!r}.\n"
                f"Conversion abbreviations are: {', '.join(METRIC_CONVERSION)}"
            )
        resolved.append(key)
    source, target = resolved
    # Convert to metres first, then from metres into the target unit.
    return value * METRIC_CONVERSION[source].from_ * METRIC_CONVERSION[target].to
if __name__ == "__main__":
    # Running this module directly executes the doctest examples embedded in
    # length_conversion's docstring.
    import doctest
    doctest.testmod()
| 32.853659 | 84 | 0.621133 | [
"MIT"
] | 04n0/TheAlgorithms-Python | conversions/length_conversion.py | 4,041 | Python |
from ._stopping_criterion import StoppingCriterion
from ..accumulate_data import LDTransformData
from ..util import MaxSamplesWarning, ParameterError, ParameterWarning
from numpy import *
from time import time
import warnings
class CubQMCLDG(StoppingCriterion):
    """
    Abstract class for CubQMC{LD}G where LD is a low discrepancy discrete distribution.
    See subclasses for implementation differences for each LD sequence.
    """

    def __init__(self, integrand, abs_tol, rel_tol, n_init, n_max, fudge, check_cone,
            control_variates, control_variate_means, update_beta, ptransform,
            coefv, allowed_levels, allowed_distribs, cast_complex):
        """
        Args:
            integrand: the Integrand to approximate; supplies the true measure
                and discrete distribution used below.
            abs_tol (float): absolute error tolerance.
            rel_tol (float): relative error tolerance.
            n_init (int): initial number of samples; must be a power of 2, >= 2^8.
            n_max (int): maximum number of samples; must be a power of 2.
            fudge: function multiplying the sum of basis coefficients to bound
                the error (used by the accumulate-data object).
            check_cone (bool): whether to check the cone condition.
            control_variates (list): integrands used as control variates.
            control_variate_means (list): true means of the control variates.
            update_beta (bool): whether to update control-variate coefficients.
            ptransform (str): periodization transform name.
            coefv: function producing transform coefficients, passed through to
                LDTransformData.
            allowed_levels (list): levels this stopping criterion supports.
            allowed_distribs (list): supported discrete distributions.
            cast_complex (bool): whether samples must be cast to complex.
        """
        self.parameters = ['abs_tol','rel_tol','n_init','n_max']
        # Input Checks
        self.abs_tol = float(abs_tol)
        self.rel_tol = float(rel_tol)
        m_min = log2(n_init)
        m_max = log2(n_max)
        # n_init/n_max must be powers of 2 with n_init >= 2^8; otherwise fall
        # back to the defaults 2^10 and 2^35.
        if m_min%1 != 0. or m_min < 8. or m_max%1 != 0.:
            warning_s = '''
                n_init and n_max must be a powers of 2.
                n_init must be >= 2^8.
                Using n_init = 2^10 and n_max=2^35.'''
            warnings.warn(warning_s, ParameterWarning)
            m_min = 10.
            m_max = 35.
        self.n_init = 2.**m_min
        self.n_max = 2.**m_max
        self.m_min = m_min
        self.m_max = m_max
        self.fudge = fudge
        self.check_cone = check_cone
        self.coefv = coefv
        self.ptransform = ptransform
        self.cast_complex = cast_complex
        # QMCPy Objs
        self.integrand = integrand
        self.true_measure = self.integrand.true_measure
        self.discrete_distrib = self.integrand.discrete_distrib
        self.cv = control_variates
        self.cv_mu = control_variate_means
        self.ub = update_beta
        # Verify Compliant Construction
        super(CubQMCLDG,self).__init__(allowed_levels, allowed_distribs, allow_vectorized_integrals=False)

    def integrate(self):
        """ See abstract method.

        Returns:
            tuple: (solution, data) where solution is the approximation and
            data is the LDTransformData accumulator with diagnostics.
        """
        # Construct AccumulateData Object to House Integration data
        self.data = LDTransformData(self, self.integrand, self.true_measure, self.discrete_distrib,
            self.coefv, self.m_min, self.m_max, self.fudge, self.check_cone, ptransform=self.ptransform,
            cast_complex=self.cast_complex, control_variates=self.cv, control_variate_means=self.cv_mu, update_beta=self.ub)
        t_start = time()
        while True:
            self.data.update_data()
            # Check the end of the algorithm
            self.data.error_bound = self.data.fudge(self.data.m)*self.data.stilde
            # Compute optimal estimator: center the solution inside the
            # interval implied by the absolute/relative tolerances.
            ub = max(self.abs_tol, self.rel_tol*abs(self.data.solution + self.data.error_bound))
            lb = max(self.abs_tol, self.rel_tol*abs(self.data.solution - self.data.error_bound))
            self.data.solution = self.data.solution - self.data.error_bound*(ub-lb) / (ub+lb)
            if 4*self.data.error_bound**2./(ub+lb)**2. <= 1.:
                # stopping criterion met
                break
            elif self.data.m == self.data.m_max:
                # doubling samples would go over n_max
                warning_s = """
                Already generated %d samples.
                Trying to generate %d new samples would exceed n_max = %d.
                No more samples will be generated.
                Note that error tolerances may no longer be satisfied""" \
                % (int(2**self.data.m), int(2**self.data.m), int(2**self.data.m_max))
                warnings.warn(warning_s, MaxSamplesWarning)
                break
            else:
                # double sample size
                self.data.m += 1.
        self.data.time_integrate = time() - t_start
        return self.data.solution, self.data

    def set_tolerance(self, abs_tol=None, rel_tol=None):
        """
        See abstract method.

        Args:
            abs_tol (float): absolute tolerance. Reset if supplied, ignored if not.
            rel_tol (float): relative tolerance. Reset if supplied, ignored if not.
        """
        # `is not None` (not `!= None`) is the idiomatic sentinel check.
        if abs_tol is not None: self.abs_tol = abs_tol
        if rel_tol is not None: self.rel_tol = rel_tol
| 44.347368 | 124 | 0.618562 | [
"Apache-2.0"
] | QMCSoftware/QMCSoftware | qmcpy/stopping_criterion/_cub_qmc_ld_g.py | 4,213 | Python |
from pybindgen import Module, FileCodeSink, param, retval, cppclass, typehandlers
import pybindgen.settings
import warnings
class ErrorHandler(pybindgen.settings.ErrorHandler):
    """Lenient pybindgen error handler: reports wrapper-generation failures
    as warnings instead of letting them abort the whole binding scan."""
    def handle_error(self, wrapper, exception, traceback_):
        # Surface the failure to the user without stopping generation.
        warnings.warn("exception %r in wrapper %s" % (exception, wrapper))
        # Returning True signals the error was handled (presumably so pybindgen
        # skips the offending wrapper) — TODO confirm against pybindgen docs.
        return True
# Install the lenient handler globally before any wrappers are registered.
pybindgen.settings.error_handler = ErrorHandler()
import sys
def module_init():
    """Create and return the root pybindgen Module for ns-3's flow_monitor
    bindings, with all wrapped code placed in the ``::ns3`` C++ namespace."""
    return Module('ns.flow_monitor', cpp_namespace='::ns3')
def register_types(module):
root_module = module.get_root()
## address.h (module 'network'): ns3::Address [class]
module.add_class('Address', import_from_module='ns.network')
## address.h (module 'network'): ns3::Address::MaxSize_e [enumeration]
module.add_enum('MaxSize_e', ['MAX_SIZE'], outer_class=root_module['ns3::Address'], import_from_module='ns.network')
## attribute-construction-list.h (module 'core'): ns3::AttributeConstructionList [class]
module.add_class('AttributeConstructionList', import_from_module='ns.core')
## attribute-construction-list.h (module 'core'): ns3::AttributeConstructionList::Item [struct]
module.add_class('Item', import_from_module='ns.core', outer_class=root_module['ns3::AttributeConstructionList'])
typehandlers.add_type_alias(u'std::list< ns3::AttributeConstructionList::Item > const_iterator', u'ns3::AttributeConstructionList::CIterator')
typehandlers.add_type_alias(u'std::list< ns3::AttributeConstructionList::Item > const_iterator*', u'ns3::AttributeConstructionList::CIterator*')
typehandlers.add_type_alias(u'std::list< ns3::AttributeConstructionList::Item > const_iterator&', u'ns3::AttributeConstructionList::CIterator&')
## buffer.h (module 'network'): ns3::Buffer [class]
module.add_class('Buffer', import_from_module='ns.network')
## buffer.h (module 'network'): ns3::Buffer::Iterator [class]
module.add_class('Iterator', import_from_module='ns.network', outer_class=root_module['ns3::Buffer'])
## packet.h (module 'network'): ns3::ByteTagIterator [class]
module.add_class('ByteTagIterator', import_from_module='ns.network')
## packet.h (module 'network'): ns3::ByteTagIterator::Item [class]
module.add_class('Item', import_from_module='ns.network', outer_class=root_module['ns3::ByteTagIterator'])
## byte-tag-list.h (module 'network'): ns3::ByteTagList [class]
module.add_class('ByteTagList', import_from_module='ns.network')
## byte-tag-list.h (module 'network'): ns3::ByteTagList::Iterator [class]
module.add_class('Iterator', import_from_module='ns.network', outer_class=root_module['ns3::ByteTagList'])
## byte-tag-list.h (module 'network'): ns3::ByteTagList::Iterator::Item [struct]
module.add_class('Item', import_from_module='ns.network', outer_class=root_module['ns3::ByteTagList::Iterator'])
## callback.h (module 'core'): ns3::CallbackBase [class]
module.add_class('CallbackBase', import_from_module='ns.core')
## default-deleter.h (module 'core'): ns3::DefaultDeleter<ns3::AttributeAccessor> [struct]
module.add_class('DefaultDeleter', import_from_module='ns.core', template_parameters=['ns3::AttributeAccessor'])
## default-deleter.h (module 'core'): ns3::DefaultDeleter<ns3::AttributeChecker> [struct]
module.add_class('DefaultDeleter', import_from_module='ns.core', template_parameters=['ns3::AttributeChecker'])
## default-deleter.h (module 'core'): ns3::DefaultDeleter<ns3::AttributeValue> [struct]
module.add_class('DefaultDeleter', import_from_module='ns.core', template_parameters=['ns3::AttributeValue'])
## default-deleter.h (module 'core'): ns3::DefaultDeleter<ns3::CallbackImplBase> [struct]
module.add_class('DefaultDeleter', import_from_module='ns.core', template_parameters=['ns3::CallbackImplBase'])
## default-deleter.h (module 'core'): ns3::DefaultDeleter<ns3::EventImpl> [struct]
module.add_class('DefaultDeleter', import_from_module='ns.core', template_parameters=['ns3::EventImpl'])
## default-deleter.h (module 'core'): ns3::DefaultDeleter<ns3::FlowClassifier> [struct]
module.add_class('DefaultDeleter', template_parameters=['ns3::FlowClassifier'])
## default-deleter.h (module 'core'): ns3::DefaultDeleter<ns3::Hash::Implementation> [struct]
module.add_class('DefaultDeleter', import_from_module='ns.core', template_parameters=['ns3::Hash::Implementation'])
## default-deleter.h (module 'core'): ns3::DefaultDeleter<ns3::NixVector> [struct]
module.add_class('DefaultDeleter', import_from_module='ns.core', template_parameters=['ns3::NixVector'])
## default-deleter.h (module 'core'): ns3::DefaultDeleter<ns3::Packet> [struct]
module.add_class('DefaultDeleter', import_from_module='ns.core', template_parameters=['ns3::Packet'])
## default-deleter.h (module 'core'): ns3::DefaultDeleter<ns3::TraceSourceAccessor> [struct]
module.add_class('DefaultDeleter', import_from_module='ns.core', template_parameters=['ns3::TraceSourceAccessor'])
## event-id.h (module 'core'): ns3::EventId [class]
module.add_class('EventId', import_from_module='ns.core')
## flow-monitor-helper.h (module 'flow-monitor'): ns3::FlowMonitorHelper [class]
module.add_class('FlowMonitorHelper')
## hash.h (module 'core'): ns3::Hasher [class]
module.add_class('Hasher', import_from_module='ns.core')
## histogram.h (module 'flow-monitor'): ns3::Histogram [class]
module.add_class('Histogram')
## inet6-socket-address.h (module 'network'): ns3::Inet6SocketAddress [class]
module.add_class('Inet6SocketAddress', import_from_module='ns.network')
## inet6-socket-address.h (module 'network'): ns3::Inet6SocketAddress [class]
root_module['ns3::Inet6SocketAddress'].implicitly_converts_to(root_module['ns3::Address'])
## inet-socket-address.h (module 'network'): ns3::InetSocketAddress [class]
module.add_class('InetSocketAddress', import_from_module='ns.network')
## inet-socket-address.h (module 'network'): ns3::InetSocketAddress [class]
root_module['ns3::InetSocketAddress'].implicitly_converts_to(root_module['ns3::Address'])
## ipv4-address.h (module 'network'): ns3::Ipv4Address [class]
module.add_class('Ipv4Address', import_from_module='ns.network')
## ipv4-address.h (module 'network'): ns3::Ipv4Address [class]
root_module['ns3::Ipv4Address'].implicitly_converts_to(root_module['ns3::Address'])
## ipv4-interface-address.h (module 'internet'): ns3::Ipv4InterfaceAddress [class]
module.add_class('Ipv4InterfaceAddress', import_from_module='ns.internet')
## ipv4-interface-address.h (module 'internet'): ns3::Ipv4InterfaceAddress::InterfaceAddressScope_e [enumeration]
module.add_enum('InterfaceAddressScope_e', ['HOST', 'LINK', 'GLOBAL'], outer_class=root_module['ns3::Ipv4InterfaceAddress'], import_from_module='ns.internet')
## ipv4-address.h (module 'network'): ns3::Ipv4Mask [class]
module.add_class('Ipv4Mask', import_from_module='ns.network')
## ipv6-address.h (module 'network'): ns3::Ipv6Address [class]
module.add_class('Ipv6Address', import_from_module='ns.network')
## ipv6-address.h (module 'network'): ns3::Ipv6Address [class]
root_module['ns3::Ipv6Address'].implicitly_converts_to(root_module['ns3::Address'])
## ipv6-interface-address.h (module 'internet'): ns3::Ipv6InterfaceAddress [class]
module.add_class('Ipv6InterfaceAddress', import_from_module='ns.internet')
## ipv6-interface-address.h (module 'internet'): ns3::Ipv6InterfaceAddress::State_e [enumeration]
module.add_enum('State_e', ['TENTATIVE', 'DEPRECATED', 'PREFERRED', 'PERMANENT', 'HOMEADDRESS', 'TENTATIVE_OPTIMISTIC', 'INVALID'], outer_class=root_module['ns3::Ipv6InterfaceAddress'], import_from_module='ns.internet')
## ipv6-interface-address.h (module 'internet'): ns3::Ipv6InterfaceAddress::Scope_e [enumeration]
module.add_enum('Scope_e', ['HOST', 'LINKLOCAL', 'GLOBAL'], outer_class=root_module['ns3::Ipv6InterfaceAddress'], import_from_module='ns.internet')
## ipv6-address.h (module 'network'): ns3::Ipv6Prefix [class]
module.add_class('Ipv6Prefix', import_from_module='ns.network')
## mac48-address.h (module 'network'): ns3::Mac48Address [class]
module.add_class('Mac48Address', import_from_module='ns.network')
typehandlers.add_type_alias(u'void ( * ) ( ns3::Mac48Address )', u'ns3::Mac48Address::TracedCallback')
typehandlers.add_type_alias(u'void ( * ) ( ns3::Mac48Address )*', u'ns3::Mac48Address::TracedCallback*')
typehandlers.add_type_alias(u'void ( * ) ( ns3::Mac48Address )&', u'ns3::Mac48Address::TracedCallback&')
## mac48-address.h (module 'network'): ns3::Mac48Address [class]
root_module['ns3::Mac48Address'].implicitly_converts_to(root_module['ns3::Address'])
## mac8-address.h (module 'network'): ns3::Mac8Address [class]
module.add_class('Mac8Address', import_from_module='ns.network')
## mac8-address.h (module 'network'): ns3::Mac8Address [class]
root_module['ns3::Mac8Address'].implicitly_converts_to(root_module['ns3::Address'])
## node-container.h (module 'network'): ns3::NodeContainer [class]
module.add_class('NodeContainer', import_from_module='ns.network')
typehandlers.add_type_alias(u'std::vector< ns3::Ptr< ns3::Node > > const_iterator', u'ns3::NodeContainer::Iterator')
typehandlers.add_type_alias(u'std::vector< ns3::Ptr< ns3::Node > > const_iterator*', u'ns3::NodeContainer::Iterator*')
typehandlers.add_type_alias(u'std::vector< ns3::Ptr< ns3::Node > > const_iterator&', u'ns3::NodeContainer::Iterator&')
## object-base.h (module 'core'): ns3::ObjectBase [class]
module.add_class('ObjectBase', allow_subclassing=True, import_from_module='ns.core')
## object.h (module 'core'): ns3::ObjectDeleter [struct]
module.add_class('ObjectDeleter', import_from_module='ns.core')
## object-factory.h (module 'core'): ns3::ObjectFactory [class]
module.add_class('ObjectFactory', import_from_module='ns.core')
## packet-metadata.h (module 'network'): ns3::PacketMetadata [class]
module.add_class('PacketMetadata', import_from_module='ns.network')
## packet-metadata.h (module 'network'): ns3::PacketMetadata::Item [struct]
module.add_class('Item', import_from_module='ns.network', outer_class=root_module['ns3::PacketMetadata'])
## packet-metadata.h (module 'network'): ns3::PacketMetadata::Item::ItemType [enumeration]
module.add_enum('ItemType', ['PAYLOAD', 'HEADER', 'TRAILER'], outer_class=root_module['ns3::PacketMetadata::Item'], import_from_module='ns.network')
## packet-metadata.h (module 'network'): ns3::PacketMetadata::ItemIterator [class]
module.add_class('ItemIterator', import_from_module='ns.network', outer_class=root_module['ns3::PacketMetadata'])
## packet.h (module 'network'): ns3::PacketTagIterator [class]
module.add_class('PacketTagIterator', import_from_module='ns.network')
## packet.h (module 'network'): ns3::PacketTagIterator::Item [class]
module.add_class('Item', import_from_module='ns.network', outer_class=root_module['ns3::PacketTagIterator'])
## packet-tag-list.h (module 'network'): ns3::PacketTagList [class]
module.add_class('PacketTagList', import_from_module='ns.network')
## packet-tag-list.h (module 'network'): ns3::PacketTagList::TagData [struct]
module.add_class('TagData', import_from_module='ns.network', outer_class=root_module['ns3::PacketTagList'])
## simple-ref-count.h (module 'core'): ns3::SimpleRefCount<ns3::Object, ns3::ObjectBase, ns3::ObjectDeleter> [class]
module.add_class('SimpleRefCount', automatic_type_narrowing=True, import_from_module='ns.core', template_parameters=['ns3::Object', 'ns3::ObjectBase', 'ns3::ObjectDeleter'], parent=root_module['ns3::ObjectBase'], memory_policy=cppclass.ReferenceCountingMethodsPolicy(incref_method='Ref', decref_method='Unref', peekref_method='GetReferenceCount'))
## simulator.h (module 'core'): ns3::Simulator [class]
module.add_class('Simulator', destructor_visibility='private', import_from_module='ns.core')
## simulator.h (module 'core'): ns3::Simulator [enumeration]
module.add_enum('', ['NO_CONTEXT'], outer_class=root_module['ns3::Simulator'], import_from_module='ns.core')
## tag.h (module 'network'): ns3::Tag [class]
module.add_class('Tag', import_from_module='ns.network', parent=root_module['ns3::ObjectBase'])
## tag-buffer.h (module 'network'): ns3::TagBuffer [class]
module.add_class('TagBuffer', import_from_module='ns.network')
## nstime.h (module 'core'): ns3::TimeWithUnit [class]
module.add_class('TimeWithUnit', import_from_module='ns.core')
## type-id.h (module 'core'): ns3::TypeId [class]
module.add_class('TypeId', import_from_module='ns.core')
## type-id.h (module 'core'): ns3::TypeId::AttributeFlag [enumeration]
module.add_enum('AttributeFlag', ['ATTR_GET', 'ATTR_SET', 'ATTR_CONSTRUCT', 'ATTR_SGC'], outer_class=root_module['ns3::TypeId'], import_from_module='ns.core')
## type-id.h (module 'core'): ns3::TypeId::SupportLevel [enumeration]
module.add_enum('SupportLevel', ['SUPPORTED', 'DEPRECATED', 'OBSOLETE'], outer_class=root_module['ns3::TypeId'], import_from_module='ns.core')
## type-id.h (module 'core'): ns3::TypeId::AttributeInformation [struct]
module.add_class('AttributeInformation', import_from_module='ns.core', outer_class=root_module['ns3::TypeId'])
## type-id.h (module 'core'): ns3::TypeId::TraceSourceInformation [struct]
module.add_class('TraceSourceInformation', import_from_module='ns.core', outer_class=root_module['ns3::TypeId'])
typehandlers.add_type_alias(u'uint32_t', u'ns3::TypeId::hash_t')
typehandlers.add_type_alias(u'uint32_t*', u'ns3::TypeId::hash_t*')
typehandlers.add_type_alias(u'uint32_t&', u'ns3::TypeId::hash_t&')
## empty.h (module 'core'): ns3::empty [class]
module.add_class('empty', import_from_module='ns.core')
## int64x64-128.h (module 'core'): ns3::int64x64_t [class]
module.add_class('int64x64_t', import_from_module='ns.core')
## int64x64-128.h (module 'core'): ns3::int64x64_t::impl_type [enumeration]
module.add_enum('impl_type', ['int128_impl', 'cairo_impl', 'ld_impl'], outer_class=root_module['ns3::int64x64_t'], import_from_module='ns.core')
## chunk.h (module 'network'): ns3::Chunk [class]
module.add_class('Chunk', import_from_module='ns.network', parent=root_module['ns3::ObjectBase'])
## header.h (module 'network'): ns3::Header [class]
module.add_class('Header', import_from_module='ns.network', parent=root_module['ns3::Chunk'])
## ipv4-header.h (module 'internet'): ns3::Ipv4Header [class]
module.add_class('Ipv4Header', import_from_module='ns.internet', parent=root_module['ns3::Header'])
## ipv4-header.h (module 'internet'): ns3::Ipv4Header::DscpType [enumeration]
module.add_enum('DscpType', ['DscpDefault', 'DSCP_CS1', 'DSCP_AF11', 'DSCP_AF12', 'DSCP_AF13', 'DSCP_CS2', 'DSCP_AF21', 'DSCP_AF22', 'DSCP_AF23', 'DSCP_CS3', 'DSCP_AF31', 'DSCP_AF32', 'DSCP_AF33', 'DSCP_CS4', 'DSCP_AF41', 'DSCP_AF42', 'DSCP_AF43', 'DSCP_CS5', 'DSCP_EF', 'DSCP_CS6', 'DSCP_CS7'], outer_class=root_module['ns3::Ipv4Header'], import_from_module='ns.internet')
## ipv4-header.h (module 'internet'): ns3::Ipv4Header::EcnType [enumeration]
module.add_enum('EcnType', ['ECN_NotECT', 'ECN_ECT1', 'ECN_ECT0', 'ECN_CE'], outer_class=root_module['ns3::Ipv4Header'], import_from_module='ns.internet')
## ipv6-header.h (module 'internet'): ns3::Ipv6Header [class]
module.add_class('Ipv6Header', import_from_module='ns.internet', parent=root_module['ns3::Header'])
## ipv6-header.h (module 'internet'): ns3::Ipv6Header::DscpType [enumeration]
module.add_enum('DscpType', ['DscpDefault', 'DSCP_CS1', 'DSCP_AF11', 'DSCP_AF12', 'DSCP_AF13', 'DSCP_CS2', 'DSCP_AF21', 'DSCP_AF22', 'DSCP_AF23', 'DSCP_CS3', 'DSCP_AF31', 'DSCP_AF32', 'DSCP_AF33', 'DSCP_CS4', 'DSCP_AF41', 'DSCP_AF42', 'DSCP_AF43', 'DSCP_CS5', 'DSCP_EF', 'DSCP_CS6', 'DSCP_CS7'], outer_class=root_module['ns3::Ipv6Header'], import_from_module='ns.internet')
## ipv6-header.h (module 'internet'): ns3::Ipv6Header::NextHeader_e [enumeration]
module.add_enum('NextHeader_e', ['IPV6_EXT_HOP_BY_HOP', 'IPV6_IPV4', 'IPV6_TCP', 'IPV6_UDP', 'IPV6_IPV6', 'IPV6_EXT_ROUTING', 'IPV6_EXT_FRAGMENTATION', 'IPV6_EXT_CONFIDENTIALITY', 'IPV6_EXT_AUTHENTIFICATION', 'IPV6_ICMPV6', 'IPV6_EXT_END', 'IPV6_EXT_DESTINATION', 'IPV6_SCTP', 'IPV6_EXT_MOBILITY', 'IPV6_UDP_LITE'], outer_class=root_module['ns3::Ipv6Header'], import_from_module='ns.internet')
## ipv6-header.h (module 'internet'): ns3::Ipv6Header::EcnType [enumeration]
module.add_enum('EcnType', ['ECN_NotECT', 'ECN_ECT1', 'ECN_ECT0', 'ECN_CE'], outer_class=root_module['ns3::Ipv6Header'], import_from_module='ns.internet')
## object.h (module 'core'): ns3::Object [class]
module.add_class('Object', import_from_module='ns.core', parent=root_module['ns3::SimpleRefCount< ns3::Object, ns3::ObjectBase, ns3::ObjectDeleter >'])
## object.h (module 'core'): ns3::Object::AggregateIterator [class]
module.add_class('AggregateIterator', import_from_module='ns.core', outer_class=root_module['ns3::Object'])
## simple-ref-count.h (module 'core'): ns3::SimpleRefCount<ns3::AttributeAccessor, ns3::empty, ns3::DefaultDeleter<ns3::AttributeAccessor> > [class]
module.add_class('SimpleRefCount', automatic_type_narrowing=True, import_from_module='ns.core', template_parameters=['ns3::AttributeAccessor', 'ns3::empty', 'ns3::DefaultDeleter<ns3::AttributeAccessor>'], parent=root_module['ns3::empty'], memory_policy=cppclass.ReferenceCountingMethodsPolicy(incref_method='Ref', decref_method='Unref', peekref_method='GetReferenceCount'))
## simple-ref-count.h (module 'core'): ns3::SimpleRefCount<ns3::AttributeChecker, ns3::empty, ns3::DefaultDeleter<ns3::AttributeChecker> > [class]
module.add_class('SimpleRefCount', automatic_type_narrowing=True, import_from_module='ns.core', template_parameters=['ns3::AttributeChecker', 'ns3::empty', 'ns3::DefaultDeleter<ns3::AttributeChecker>'], parent=root_module['ns3::empty'], memory_policy=cppclass.ReferenceCountingMethodsPolicy(incref_method='Ref', decref_method='Unref', peekref_method='GetReferenceCount'))
## simple-ref-count.h (module 'core'): ns3::SimpleRefCount<ns3::AttributeValue, ns3::empty, ns3::DefaultDeleter<ns3::AttributeValue> > [class]
module.add_class('SimpleRefCount', automatic_type_narrowing=True, import_from_module='ns.core', template_parameters=['ns3::AttributeValue', 'ns3::empty', 'ns3::DefaultDeleter<ns3::AttributeValue>'], parent=root_module['ns3::empty'], memory_policy=cppclass.ReferenceCountingMethodsPolicy(incref_method='Ref', decref_method='Unref', peekref_method='GetReferenceCount'))
## simple-ref-count.h (module 'core'): ns3::SimpleRefCount<ns3::CallbackImplBase, ns3::empty, ns3::DefaultDeleter<ns3::CallbackImplBase> > [class]
module.add_class('SimpleRefCount', automatic_type_narrowing=True, import_from_module='ns.core', template_parameters=['ns3::CallbackImplBase', 'ns3::empty', 'ns3::DefaultDeleter<ns3::CallbackImplBase>'], parent=root_module['ns3::empty'], memory_policy=cppclass.ReferenceCountingMethodsPolicy(incref_method='Ref', decref_method='Unref', peekref_method='GetReferenceCount'))
## simple-ref-count.h (module 'core'): ns3::SimpleRefCount<ns3::EventImpl, ns3::empty, ns3::DefaultDeleter<ns3::EventImpl> > [class]
module.add_class('SimpleRefCount', automatic_type_narrowing=True, import_from_module='ns.core', template_parameters=['ns3::EventImpl', 'ns3::empty', 'ns3::DefaultDeleter<ns3::EventImpl>'], parent=root_module['ns3::empty'], memory_policy=cppclass.ReferenceCountingMethodsPolicy(incref_method='Ref', decref_method='Unref', peekref_method='GetReferenceCount'))
## simple-ref-count.h (module 'core'): ns3::SimpleRefCount<ns3::FlowClassifier, ns3::empty, ns3::DefaultDeleter<ns3::FlowClassifier> > [class]
module.add_class('SimpleRefCount', automatic_type_narrowing=True, template_parameters=['ns3::FlowClassifier', 'ns3::empty', 'ns3::DefaultDeleter<ns3::FlowClassifier>'], parent=root_module['ns3::empty'], memory_policy=cppclass.ReferenceCountingMethodsPolicy(incref_method='Ref', decref_method='Unref', peekref_method='GetReferenceCount'))
## simple-ref-count.h (module 'core'): ns3::SimpleRefCount<ns3::Hash::Implementation, ns3::empty, ns3::DefaultDeleter<ns3::Hash::Implementation> > [class]
module.add_class('SimpleRefCount', automatic_type_narrowing=True, import_from_module='ns.core', template_parameters=['ns3::Hash::Implementation', 'ns3::empty', 'ns3::DefaultDeleter<ns3::Hash::Implementation>'], parent=root_module['ns3::empty'], memory_policy=cppclass.ReferenceCountingMethodsPolicy(incref_method='Ref', decref_method='Unref', peekref_method='GetReferenceCount'))
## simple-ref-count.h (module 'core'): ns3::SimpleRefCount<ns3::Ipv4MulticastRoute, ns3::empty, ns3::DefaultDeleter<ns3::Ipv4MulticastRoute> > [class]
module.add_class('SimpleRefCount', automatic_type_narrowing=True, import_from_module='ns.core', template_parameters=['ns3::Ipv4MulticastRoute', 'ns3::empty', 'ns3::DefaultDeleter<ns3::Ipv4MulticastRoute>'], parent=root_module['ns3::empty'], memory_policy=cppclass.ReferenceCountingMethodsPolicy(incref_method='Ref', decref_method='Unref', peekref_method='GetReferenceCount'))
## simple-ref-count.h (module 'core'): ns3::SimpleRefCount<ns3::Ipv4Route, ns3::empty, ns3::DefaultDeleter<ns3::Ipv4Route> > [class]
module.add_class('SimpleRefCount', automatic_type_narrowing=True, import_from_module='ns.core', template_parameters=['ns3::Ipv4Route', 'ns3::empty', 'ns3::DefaultDeleter<ns3::Ipv4Route>'], parent=root_module['ns3::empty'], memory_policy=cppclass.ReferenceCountingMethodsPolicy(incref_method='Ref', decref_method='Unref', peekref_method='GetReferenceCount'))
## simple-ref-count.h (module 'core'): ns3::SimpleRefCount<ns3::NixVector, ns3::empty, ns3::DefaultDeleter<ns3::NixVector> > [class]
module.add_class('SimpleRefCount', automatic_type_narrowing=True, import_from_module='ns.core', template_parameters=['ns3::NixVector', 'ns3::empty', 'ns3::DefaultDeleter<ns3::NixVector>'], parent=root_module['ns3::empty'], memory_policy=cppclass.ReferenceCountingMethodsPolicy(incref_method='Ref', decref_method='Unref', peekref_method='GetReferenceCount'))
## simple-ref-count.h (module 'core'): ns3::SimpleRefCount<ns3::OutputStreamWrapper, ns3::empty, ns3::DefaultDeleter<ns3::OutputStreamWrapper> > [class]
module.add_class('SimpleRefCount', automatic_type_narrowing=True, import_from_module='ns.core', template_parameters=['ns3::OutputStreamWrapper', 'ns3::empty', 'ns3::DefaultDeleter<ns3::OutputStreamWrapper>'], parent=root_module['ns3::empty'], memory_policy=cppclass.ReferenceCountingMethodsPolicy(incref_method='Ref', decref_method='Unref', peekref_method='GetReferenceCount'))
## simple-ref-count.h (module 'core'): ns3::SimpleRefCount<ns3::Packet, ns3::empty, ns3::DefaultDeleter<ns3::Packet> > [class]
module.add_class('SimpleRefCount', automatic_type_narrowing=True, import_from_module='ns.core', template_parameters=['ns3::Packet', 'ns3::empty', 'ns3::DefaultDeleter<ns3::Packet>'], parent=root_module['ns3::empty'], memory_policy=cppclass.ReferenceCountingMethodsPolicy(incref_method='Ref', decref_method='Unref', peekref_method='GetReferenceCount'))
## simple-ref-count.h (module 'core'): ns3::SimpleRefCount<ns3::QueueItem, ns3::empty, ns3::DefaultDeleter<ns3::QueueItem> > [class]
module.add_class('SimpleRefCount', automatic_type_narrowing=True, import_from_module='ns.core', template_parameters=['ns3::QueueItem', 'ns3::empty', 'ns3::DefaultDeleter<ns3::QueueItem>'], parent=root_module['ns3::empty'], memory_policy=cppclass.ReferenceCountingMethodsPolicy(incref_method='Ref', decref_method='Unref', peekref_method='GetReferenceCount'))
## simple-ref-count.h (module 'core'): ns3::SimpleRefCount<ns3::TraceSourceAccessor, ns3::empty, ns3::DefaultDeleter<ns3::TraceSourceAccessor> > [class]
module.add_class('SimpleRefCount', automatic_type_narrowing=True, import_from_module='ns.core', template_parameters=['ns3::TraceSourceAccessor', 'ns3::empty', 'ns3::DefaultDeleter<ns3::TraceSourceAccessor>'], parent=root_module['ns3::empty'], memory_policy=cppclass.ReferenceCountingMethodsPolicy(incref_method='Ref', decref_method='Unref', peekref_method='GetReferenceCount'))
## socket.h (module 'network'): ns3::Socket [class]
module.add_class('Socket', import_from_module='ns.network', parent=root_module['ns3::Object'])
## socket.h (module 'network'): ns3::Socket::SocketErrno [enumeration]
module.add_enum('SocketErrno', ['ERROR_NOTERROR', 'ERROR_ISCONN', 'ERROR_NOTCONN', 'ERROR_MSGSIZE', 'ERROR_AGAIN', 'ERROR_SHUTDOWN', 'ERROR_OPNOTSUPP', 'ERROR_AFNOSUPPORT', 'ERROR_INVAL', 'ERROR_BADF', 'ERROR_NOROUTETOHOST', 'ERROR_NODEV', 'ERROR_ADDRNOTAVAIL', 'ERROR_ADDRINUSE', 'SOCKET_ERRNO_LAST'], outer_class=root_module['ns3::Socket'], import_from_module='ns.network')
## socket.h (module 'network'): ns3::Socket::SocketType [enumeration]
module.add_enum('SocketType', ['NS3_SOCK_STREAM', 'NS3_SOCK_SEQPACKET', 'NS3_SOCK_DGRAM', 'NS3_SOCK_RAW'], outer_class=root_module['ns3::Socket'], import_from_module='ns.network')
## socket.h (module 'network'): ns3::Socket::SocketPriority [enumeration]
module.add_enum('SocketPriority', ['NS3_PRIO_BESTEFFORT', 'NS3_PRIO_FILLER', 'NS3_PRIO_BULK', 'NS3_PRIO_INTERACTIVE_BULK', 'NS3_PRIO_INTERACTIVE', 'NS3_PRIO_CONTROL'], outer_class=root_module['ns3::Socket'], import_from_module='ns.network')
## socket.h (module 'network'): ns3::Socket::Ipv6MulticastFilterMode [enumeration]
module.add_enum('Ipv6MulticastFilterMode', ['INCLUDE', 'EXCLUDE'], outer_class=root_module['ns3::Socket'], import_from_module='ns.network')
## socket.h (module 'network'): ns3::SocketIpTosTag [class]
module.add_class('SocketIpTosTag', import_from_module='ns.network', parent=root_module['ns3::Tag'])
## socket.h (module 'network'): ns3::SocketIpTtlTag [class]
module.add_class('SocketIpTtlTag', import_from_module='ns.network', parent=root_module['ns3::Tag'])
## socket.h (module 'network'): ns3::SocketIpv6HopLimitTag [class]
module.add_class('SocketIpv6HopLimitTag', import_from_module='ns.network', parent=root_module['ns3::Tag'])
## socket.h (module 'network'): ns3::SocketIpv6TclassTag [class]
module.add_class('SocketIpv6TclassTag', import_from_module='ns.network', parent=root_module['ns3::Tag'])
## socket.h (module 'network'): ns3::SocketPriorityTag [class]
module.add_class('SocketPriorityTag', import_from_module='ns.network', parent=root_module['ns3::Tag'])
## socket.h (module 'network'): ns3::SocketSetDontFragmentTag [class]
module.add_class('SocketSetDontFragmentTag', import_from_module='ns.network', parent=root_module['ns3::Tag'])
## nstime.h (module 'core'): ns3::Time [class]
module.add_class('Time', import_from_module='ns.core')
## nstime.h (module 'core'): ns3::Time::Unit [enumeration]
module.add_enum('Unit', ['Y', 'D', 'H', 'MIN', 'S', 'MS', 'US', 'NS', 'PS', 'FS', 'LAST'], outer_class=root_module['ns3::Time'], import_from_module='ns.core')
typehandlers.add_type_alias(u'void ( * ) ( ns3::Time )', u'ns3::Time::TracedCallback')
typehandlers.add_type_alias(u'void ( * ) ( ns3::Time )*', u'ns3::Time::TracedCallback*')
typehandlers.add_type_alias(u'void ( * ) ( ns3::Time )&', u'ns3::Time::TracedCallback&')
## nstime.h (module 'core'): ns3::Time [class]
root_module['ns3::Time'].implicitly_converts_to(root_module['ns3::int64x64_t'])
## trace-source-accessor.h (module 'core'): ns3::TraceSourceAccessor [class]
module.add_class('TraceSourceAccessor', import_from_module='ns.core', parent=root_module['ns3::SimpleRefCount< ns3::TraceSourceAccessor, ns3::empty, ns3::DefaultDeleter<ns3::TraceSourceAccessor> >'])
## trailer.h (module 'network'): ns3::Trailer [class]
module.add_class('Trailer', import_from_module='ns.network', parent=root_module['ns3::Chunk'])
## attribute.h (module 'core'): ns3::AttributeAccessor [class]
module.add_class('AttributeAccessor', import_from_module='ns.core', parent=root_module['ns3::SimpleRefCount< ns3::AttributeAccessor, ns3::empty, ns3::DefaultDeleter<ns3::AttributeAccessor> >'])
## attribute.h (module 'core'): ns3::AttributeChecker [class]
module.add_class('AttributeChecker', allow_subclassing=False, automatic_type_narrowing=True, import_from_module='ns.core', parent=root_module['ns3::SimpleRefCount< ns3::AttributeChecker, ns3::empty, ns3::DefaultDeleter<ns3::AttributeChecker> >'])
## attribute.h (module 'core'): ns3::AttributeValue [class]
module.add_class('AttributeValue', allow_subclassing=False, automatic_type_narrowing=True, import_from_module='ns.core', parent=root_module['ns3::SimpleRefCount< ns3::AttributeValue, ns3::empty, ns3::DefaultDeleter<ns3::AttributeValue> >'])
## callback.h (module 'core'): ns3::CallbackChecker [class]
module.add_class('CallbackChecker', import_from_module='ns.core', parent=root_module['ns3::AttributeChecker'])
## callback.h (module 'core'): ns3::CallbackImplBase [class]
module.add_class('CallbackImplBase', import_from_module='ns.core', parent=root_module['ns3::SimpleRefCount< ns3::CallbackImplBase, ns3::empty, ns3::DefaultDeleter<ns3::CallbackImplBase> >'])
## callback.h (module 'core'): ns3::CallbackValue [class]
module.add_class('CallbackValue', import_from_module='ns.core', parent=root_module['ns3::AttributeValue'])
## attribute.h (module 'core'): ns3::EmptyAttributeAccessor [class]
module.add_class('EmptyAttributeAccessor', import_from_module='ns.core', parent=root_module['ns3::AttributeAccessor'])
## attribute.h (module 'core'): ns3::EmptyAttributeChecker [class]
module.add_class('EmptyAttributeChecker', import_from_module='ns.core', parent=root_module['ns3::AttributeChecker'])
## attribute.h (module 'core'): ns3::EmptyAttributeValue [class]
module.add_class('EmptyAttributeValue', import_from_module='ns.core', parent=root_module['ns3::AttributeValue'])
## event-impl.h (module 'core'): ns3::EventImpl [class]
module.add_class('EventImpl', import_from_module='ns.core', parent=root_module['ns3::SimpleRefCount< ns3::EventImpl, ns3::empty, ns3::DefaultDeleter<ns3::EventImpl> >'])
## flow-classifier.h (module 'flow-monitor'): ns3::FlowClassifier [class]
module.add_class('FlowClassifier', parent=root_module['ns3::SimpleRefCount< ns3::FlowClassifier, ns3::empty, ns3::DefaultDeleter<ns3::FlowClassifier> >'])
## flow-monitor.h (module 'flow-monitor'): ns3::FlowMonitor [class]
module.add_class('FlowMonitor', parent=root_module['ns3::Object'])
## flow-monitor.h (module 'flow-monitor'): ns3::FlowMonitor::FlowStats [struct]
module.add_class('FlowStats', outer_class=root_module['ns3::FlowMonitor'])
typehandlers.add_type_alias(u'std::map< unsigned int, ns3::FlowMonitor::FlowStats >', u'ns3::FlowMonitor::FlowStatsContainer')
typehandlers.add_type_alias(u'std::map< unsigned int, ns3::FlowMonitor::FlowStats >*', u'ns3::FlowMonitor::FlowStatsContainer*')
typehandlers.add_type_alias(u'std::map< unsigned int, ns3::FlowMonitor::FlowStats >&', u'ns3::FlowMonitor::FlowStatsContainer&')
typehandlers.add_type_alias(u'std::map< unsigned int, ns3::FlowMonitor::FlowStats > iterator', u'ns3::FlowMonitor::FlowStatsContainerI')
typehandlers.add_type_alias(u'std::map< unsigned int, ns3::FlowMonitor::FlowStats > iterator*', u'ns3::FlowMonitor::FlowStatsContainerI*')
typehandlers.add_type_alias(u'std::map< unsigned int, ns3::FlowMonitor::FlowStats > iterator&', u'ns3::FlowMonitor::FlowStatsContainerI&')
typehandlers.add_type_alias(u'std::map< unsigned int, ns3::FlowMonitor::FlowStats > const_iterator', u'ns3::FlowMonitor::FlowStatsContainerCI')
typehandlers.add_type_alias(u'std::map< unsigned int, ns3::FlowMonitor::FlowStats > const_iterator*', u'ns3::FlowMonitor::FlowStatsContainerCI*')
typehandlers.add_type_alias(u'std::map< unsigned int, ns3::FlowMonitor::FlowStats > const_iterator&', u'ns3::FlowMonitor::FlowStatsContainerCI&')
typehandlers.add_type_alias(u'std::vector< ns3::Ptr< ns3::FlowProbe > >', u'ns3::FlowMonitor::FlowProbeContainer')
typehandlers.add_type_alias(u'std::vector< ns3::Ptr< ns3::FlowProbe > >*', u'ns3::FlowMonitor::FlowProbeContainer*')
typehandlers.add_type_alias(u'std::vector< ns3::Ptr< ns3::FlowProbe > >&', u'ns3::FlowMonitor::FlowProbeContainer&')
typehandlers.add_type_alias(u'std::vector< ns3::Ptr< ns3::FlowProbe > > iterator', u'ns3::FlowMonitor::FlowProbeContainerI')
typehandlers.add_type_alias(u'std::vector< ns3::Ptr< ns3::FlowProbe > > iterator*', u'ns3::FlowMonitor::FlowProbeContainerI*')
typehandlers.add_type_alias(u'std::vector< ns3::Ptr< ns3::FlowProbe > > iterator&', u'ns3::FlowMonitor::FlowProbeContainerI&')
typehandlers.add_type_alias(u'std::vector< ns3::Ptr< ns3::FlowProbe > > const_iterator', u'ns3::FlowMonitor::FlowProbeContainerCI')
typehandlers.add_type_alias(u'std::vector< ns3::Ptr< ns3::FlowProbe > > const_iterator*', u'ns3::FlowMonitor::FlowProbeContainerCI*')
typehandlers.add_type_alias(u'std::vector< ns3::Ptr< ns3::FlowProbe > > const_iterator&', u'ns3::FlowMonitor::FlowProbeContainerCI&')
## flow-probe.h (module 'flow-monitor'): ns3::FlowProbe [class]
module.add_class('FlowProbe', parent=root_module['ns3::Object'])
## flow-probe.h (module 'flow-monitor'): ns3::FlowProbe::FlowStats [struct]
module.add_class('FlowStats', outer_class=root_module['ns3::FlowProbe'])
typehandlers.add_type_alias(u'std::map< unsigned int, ns3::FlowProbe::FlowStats >', u'ns3::FlowProbe::Stats')
typehandlers.add_type_alias(u'std::map< unsigned int, ns3::FlowProbe::FlowStats >*', u'ns3::FlowProbe::Stats*')
typehandlers.add_type_alias(u'std::map< unsigned int, ns3::FlowProbe::FlowStats >&', u'ns3::FlowProbe::Stats&')
## ipv4.h (module 'internet'): ns3::Ipv4 [class]
module.add_class('Ipv4', import_from_module='ns.internet', parent=root_module['ns3::Object'])
## ipv4-address.h (module 'network'): ns3::Ipv4AddressChecker [class]
module.add_class('Ipv4AddressChecker', import_from_module='ns.network', parent=root_module['ns3::AttributeChecker'])
## ipv4-address.h (module 'network'): ns3::Ipv4AddressValue [class]
module.add_class('Ipv4AddressValue', import_from_module='ns.network', parent=root_module['ns3::AttributeValue'])
## ipv4-flow-classifier.h (module 'flow-monitor'): ns3::Ipv4FlowClassifier [class]
module.add_class('Ipv4FlowClassifier', parent=root_module['ns3::FlowClassifier'])
## ipv4-flow-classifier.h (module 'flow-monitor'): ns3::Ipv4FlowClassifier::FiveTuple [struct]
module.add_class('FiveTuple', outer_class=root_module['ns3::Ipv4FlowClassifier'])
## ipv4-flow-classifier.h (module 'flow-monitor'): ns3::Ipv4FlowClassifier::SortByCount [class]
module.add_class('SortByCount', outer_class=root_module['ns3::Ipv4FlowClassifier'])
## ipv4-flow-probe.h (module 'flow-monitor'): ns3::Ipv4FlowProbe [class]
module.add_class('Ipv4FlowProbe', parent=root_module['ns3::FlowProbe'])
## ipv4-flow-probe.h (module 'flow-monitor'): ns3::Ipv4FlowProbe::DropReason [enumeration]
module.add_enum('DropReason', ['DROP_NO_ROUTE', 'DROP_TTL_EXPIRE', 'DROP_BAD_CHECKSUM', 'DROP_QUEUE', 'DROP_QUEUE_DISC', 'DROP_INTERFACE_DOWN', 'DROP_ROUTE_ERROR', 'DROP_FRAGMENT_TIMEOUT', 'DROP_INVALID_REASON'], outer_class=root_module['ns3::Ipv4FlowProbe'])
## ipv4-l3-protocol.h (module 'internet'): ns3::Ipv4L3Protocol [class]
module.add_class('Ipv4L3Protocol', import_from_module='ns.internet', parent=root_module['ns3::Ipv4'])
## ipv4-l3-protocol.h (module 'internet'): ns3::Ipv4L3Protocol::DropReason [enumeration]
module.add_enum('DropReason', ['DROP_TTL_EXPIRED', 'DROP_NO_ROUTE', 'DROP_BAD_CHECKSUM', 'DROP_INTERFACE_DOWN', 'DROP_ROUTE_ERROR', 'DROP_FRAGMENT_TIMEOUT'], outer_class=root_module['ns3::Ipv4L3Protocol'], import_from_module='ns.internet')
typehandlers.add_type_alias(u'void ( * ) ( ns3::Ipv4Header const &, ns3::Ptr< ns3::Packet const >, uint32_t )', u'ns3::Ipv4L3Protocol::SentTracedCallback')
typehandlers.add_type_alias(u'void ( * ) ( ns3::Ipv4Header const &, ns3::Ptr< ns3::Packet const >, uint32_t )*', u'ns3::Ipv4L3Protocol::SentTracedCallback*')
typehandlers.add_type_alias(u'void ( * ) ( ns3::Ipv4Header const &, ns3::Ptr< ns3::Packet const >, uint32_t )&', u'ns3::Ipv4L3Protocol::SentTracedCallback&')
typehandlers.add_type_alias(u'void ( * ) ( ns3::Ptr< ns3::Packet const >, ns3::Ptr< ns3::Ipv4 >, uint32_t )', u'ns3::Ipv4L3Protocol::TxRxTracedCallback')
typehandlers.add_type_alias(u'void ( * ) ( ns3::Ptr< ns3::Packet const >, ns3::Ptr< ns3::Ipv4 >, uint32_t )*', u'ns3::Ipv4L3Protocol::TxRxTracedCallback*')
typehandlers.add_type_alias(u'void ( * ) ( ns3::Ptr< ns3::Packet const >, ns3::Ptr< ns3::Ipv4 >, uint32_t )&', u'ns3::Ipv4L3Protocol::TxRxTracedCallback&')
typehandlers.add_type_alias(u'void ( * ) ( ns3::Ipv4Header const &, ns3::Ptr< ns3::Packet const >, ns3::Ipv4L3Protocol::DropReason, ns3::Ptr< ns3::Ipv4 >, uint32_t )', u'ns3::Ipv4L3Protocol::DropTracedCallback')
typehandlers.add_type_alias(u'void ( * ) ( ns3::Ipv4Header const &, ns3::Ptr< ns3::Packet const >, ns3::Ipv4L3Protocol::DropReason, ns3::Ptr< ns3::Ipv4 >, uint32_t )*', u'ns3::Ipv4L3Protocol::DropTracedCallback*')
typehandlers.add_type_alias(u'void ( * ) ( ns3::Ipv4Header const &, ns3::Ptr< ns3::Packet const >, ns3::Ipv4L3Protocol::DropReason, ns3::Ptr< ns3::Ipv4 >, uint32_t )&', u'ns3::Ipv4L3Protocol::DropTracedCallback&')
## ipv4-address.h (module 'network'): ns3::Ipv4MaskChecker [class]
module.add_class('Ipv4MaskChecker', import_from_module='ns.network', parent=root_module['ns3::AttributeChecker'])
## ipv4-address.h (module 'network'): ns3::Ipv4MaskValue [class]
module.add_class('Ipv4MaskValue', import_from_module='ns.network', parent=root_module['ns3::AttributeValue'])
## ipv4-route.h (module 'internet'): ns3::Ipv4MulticastRoute [class]
module.add_class('Ipv4MulticastRoute', import_from_module='ns.internet', parent=root_module['ns3::SimpleRefCount< ns3::Ipv4MulticastRoute, ns3::empty, ns3::DefaultDeleter<ns3::Ipv4MulticastRoute> >'])
## ipv4-route.h (module 'internet'): ns3::Ipv4Route [class]
module.add_class('Ipv4Route', import_from_module='ns.internet', parent=root_module['ns3::SimpleRefCount< ns3::Ipv4Route, ns3::empty, ns3::DefaultDeleter<ns3::Ipv4Route> >'])
## ipv4-routing-protocol.h (module 'internet'): ns3::Ipv4RoutingProtocol [class]
module.add_class('Ipv4RoutingProtocol', import_from_module='ns.internet', parent=root_module['ns3::Object'])
typehandlers.add_type_alias(u'ns3::Callback< void, ns3::Ptr< ns3::Ipv4Route >, ns3::Ptr< ns3::Packet const >, ns3::Ipv4Header const &, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty >', u'ns3::Ipv4RoutingProtocol::UnicastForwardCallback')
typehandlers.add_type_alias(u'ns3::Callback< void, ns3::Ptr< ns3::Ipv4Route >, ns3::Ptr< ns3::Packet const >, ns3::Ipv4Header const &, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty >*', u'ns3::Ipv4RoutingProtocol::UnicastForwardCallback*')
typehandlers.add_type_alias(u'ns3::Callback< void, ns3::Ptr< ns3::Ipv4Route >, ns3::Ptr< ns3::Packet const >, ns3::Ipv4Header const &, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty >&', u'ns3::Ipv4RoutingProtocol::UnicastForwardCallback&')
typehandlers.add_type_alias(u'ns3::Callback< void, ns3::Ptr< ns3::Ipv4MulticastRoute >, ns3::Ptr< ns3::Packet const >, ns3::Ipv4Header const &, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty >', u'ns3::Ipv4RoutingProtocol::MulticastForwardCallback')
typehandlers.add_type_alias(u'ns3::Callback< void, ns3::Ptr< ns3::Ipv4MulticastRoute >, ns3::Ptr< ns3::Packet const >, ns3::Ipv4Header const &, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty >*', u'ns3::Ipv4RoutingProtocol::MulticastForwardCallback*')
typehandlers.add_type_alias(u'ns3::Callback< void, ns3::Ptr< ns3::Ipv4MulticastRoute >, ns3::Ptr< ns3::Packet const >, ns3::Ipv4Header const &, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty >&', u'ns3::Ipv4RoutingProtocol::MulticastForwardCallback&')
typehandlers.add_type_alias(u'ns3::Callback< void, ns3::Ptr< ns3::Packet const >, ns3::Ipv4Header const &, unsigned int, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty >', u'ns3::Ipv4RoutingProtocol::LocalDeliverCallback')
typehandlers.add_type_alias(u'ns3::Callback< void, ns3::Ptr< ns3::Packet const >, ns3::Ipv4Header const &, unsigned int, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty >*', u'ns3::Ipv4RoutingProtocol::LocalDeliverCallback*')
typehandlers.add_type_alias(u'ns3::Callback< void, ns3::Ptr< ns3::Packet const >, ns3::Ipv4Header const &, unsigned int, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty >&', u'ns3::Ipv4RoutingProtocol::LocalDeliverCallback&')
typehandlers.add_type_alias(u'ns3::Callback< void, ns3::Ptr< ns3::Packet const >, ns3::Ipv4Header const &, ns3::Socket::SocketErrno, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty >', u'ns3::Ipv4RoutingProtocol::ErrorCallback')
typehandlers.add_type_alias(u'ns3::Callback< void, ns3::Ptr< ns3::Packet const >, ns3::Ipv4Header const &, ns3::Socket::SocketErrno, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty >*', u'ns3::Ipv4RoutingProtocol::ErrorCallback*')
typehandlers.add_type_alias(u'ns3::Callback< void, ns3::Ptr< ns3::Packet const >, ns3::Ipv4Header const &, ns3::Socket::SocketErrno, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty >&', u'ns3::Ipv4RoutingProtocol::ErrorCallback&')
## ipv6.h (module 'internet'): ns3::Ipv6 [class]
module.add_class('Ipv6', import_from_module='ns.internet', parent=root_module['ns3::Object'])
## ipv6-address.h (module 'network'): ns3::Ipv6AddressChecker [class]
module.add_class('Ipv6AddressChecker', import_from_module='ns.network', parent=root_module['ns3::AttributeChecker'])
## ipv6-address.h (module 'network'): ns3::Ipv6AddressValue [class]
module.add_class('Ipv6AddressValue', import_from_module='ns.network', parent=root_module['ns3::AttributeValue'])
## ipv6-flow-classifier.h (module 'flow-monitor'): ns3::Ipv6FlowClassifier [class]
module.add_class('Ipv6FlowClassifier', parent=root_module['ns3::FlowClassifier'])
## ipv6-flow-classifier.h (module 'flow-monitor'): ns3::Ipv6FlowClassifier::FiveTuple [struct]
module.add_class('FiveTuple', outer_class=root_module['ns3::Ipv6FlowClassifier'])
## ipv6-flow-classifier.h (module 'flow-monitor'): ns3::Ipv6FlowClassifier::SortByCount [class]
module.add_class('SortByCount', outer_class=root_module['ns3::Ipv6FlowClassifier'])
## ipv6-flow-probe.h (module 'flow-monitor'): ns3::Ipv6FlowProbe [class]
module.add_class('Ipv6FlowProbe', parent=root_module['ns3::FlowProbe'])
## ipv6-flow-probe.h (module 'flow-monitor'): ns3::Ipv6FlowProbe::DropReason [enumeration]
module.add_enum('DropReason', ['DROP_NO_ROUTE', 'DROP_TTL_EXPIRE', 'DROP_BAD_CHECKSUM', 'DROP_QUEUE', 'DROP_QUEUE_DISC', 'DROP_INTERFACE_DOWN', 'DROP_ROUTE_ERROR', 'DROP_UNKNOWN_PROTOCOL', 'DROP_UNKNOWN_OPTION', 'DROP_MALFORMED_HEADER', 'DROP_FRAGMENT_TIMEOUT', 'DROP_INVALID_REASON'], outer_class=root_module['ns3::Ipv6FlowProbe'])
## ipv6-l3-protocol.h (module 'internet'): ns3::Ipv6L3Protocol [class]
module.add_class('Ipv6L3Protocol', import_from_module='ns.internet', parent=root_module['ns3::Ipv6'])
## ipv6-l3-protocol.h (module 'internet'): ns3::Ipv6L3Protocol::DropReason [enumeration]
module.add_enum('DropReason', ['DROP_TTL_EXPIRED', 'DROP_NO_ROUTE', 'DROP_INTERFACE_DOWN', 'DROP_ROUTE_ERROR', 'DROP_UNKNOWN_PROTOCOL', 'DROP_UNKNOWN_OPTION', 'DROP_MALFORMED_HEADER', 'DROP_FRAGMENT_TIMEOUT'], outer_class=root_module['ns3::Ipv6L3Protocol'], import_from_module='ns.internet')
typehandlers.add_type_alias(u'void ( * ) ( ns3::Ipv6Header const &, ns3::Ptr< ns3::Packet const >, uint32_t )', u'ns3::Ipv6L3Protocol::SentTracedCallback')
typehandlers.add_type_alias(u'void ( * ) ( ns3::Ipv6Header const &, ns3::Ptr< ns3::Packet const >, uint32_t )*', u'ns3::Ipv6L3Protocol::SentTracedCallback*')
typehandlers.add_type_alias(u'void ( * ) ( ns3::Ipv6Header const &, ns3::Ptr< ns3::Packet const >, uint32_t )&', u'ns3::Ipv6L3Protocol::SentTracedCallback&')
typehandlers.add_type_alias(u'void ( * ) ( ns3::Ptr< ns3::Packet const >, ns3::Ptr< ns3::Ipv6 >, uint32_t )', u'ns3::Ipv6L3Protocol::TxRxTracedCallback')
typehandlers.add_type_alias(u'void ( * ) ( ns3::Ptr< ns3::Packet const >, ns3::Ptr< ns3::Ipv6 >, uint32_t )*', u'ns3::Ipv6L3Protocol::TxRxTracedCallback*')
typehandlers.add_type_alias(u'void ( * ) ( ns3::Ptr< ns3::Packet const >, ns3::Ptr< ns3::Ipv6 >, uint32_t )&', u'ns3::Ipv6L3Protocol::TxRxTracedCallback&')
typehandlers.add_type_alias(u'void ( * ) ( ns3::Ipv6Header const &, ns3::Ptr< ns3::Packet const >, ns3::Ipv6L3Protocol::DropReason, ns3::Ptr< ns3::Ipv6 >, uint32_t )', u'ns3::Ipv6L3Protocol::DropTracedCallback')
typehandlers.add_type_alias(u'void ( * ) ( ns3::Ipv6Header const &, ns3::Ptr< ns3::Packet const >, ns3::Ipv6L3Protocol::DropReason, ns3::Ptr< ns3::Ipv6 >, uint32_t )*', u'ns3::Ipv6L3Protocol::DropTracedCallback*')
typehandlers.add_type_alias(u'void ( * ) ( ns3::Ipv6Header const &, ns3::Ptr< ns3::Packet const >, ns3::Ipv6L3Protocol::DropReason, ns3::Ptr< ns3::Ipv6 >, uint32_t )&', u'ns3::Ipv6L3Protocol::DropTracedCallback&')
## ipv6-pmtu-cache.h (module 'internet'): ns3::Ipv6PmtuCache [class]
module.add_class('Ipv6PmtuCache', import_from_module='ns.internet', parent=root_module['ns3::Object'])
## ipv6-address.h (module 'network'): ns3::Ipv6PrefixChecker [class]
module.add_class('Ipv6PrefixChecker', import_from_module='ns.network', parent=root_module['ns3::AttributeChecker'])
## ipv6-address.h (module 'network'): ns3::Ipv6PrefixValue [class]
module.add_class('Ipv6PrefixValue', import_from_module='ns.network', parent=root_module['ns3::AttributeValue'])
## mac48-address.h (module 'network'): ns3::Mac48AddressChecker [class]
module.add_class('Mac48AddressChecker', import_from_module='ns.network', parent=root_module['ns3::AttributeChecker'])
## mac48-address.h (module 'network'): ns3::Mac48AddressValue [class]
module.add_class('Mac48AddressValue', import_from_module='ns.network', parent=root_module['ns3::AttributeValue'])
## net-device.h (module 'network'): ns3::NetDevice [class]
module.add_class('NetDevice', import_from_module='ns.network', parent=root_module['ns3::Object'])
## net-device.h (module 'network'): ns3::NetDevice::PacketType [enumeration]
module.add_enum('PacketType', ['PACKET_HOST', 'NS3_PACKET_HOST', 'PACKET_BROADCAST', 'NS3_PACKET_BROADCAST', 'PACKET_MULTICAST', 'NS3_PACKET_MULTICAST', 'PACKET_OTHERHOST', 'NS3_PACKET_OTHERHOST'], outer_class=root_module['ns3::NetDevice'], import_from_module='ns.network')
typehandlers.add_type_alias(u'void ( * ) ( )', u'ns3::NetDevice::LinkChangeTracedCallback')
typehandlers.add_type_alias(u'void ( * ) ( )*', u'ns3::NetDevice::LinkChangeTracedCallback*')
typehandlers.add_type_alias(u'void ( * ) ( )&', u'ns3::NetDevice::LinkChangeTracedCallback&')
typehandlers.add_type_alias(u'ns3::Callback< bool, ns3::Ptr< ns3::NetDevice >, ns3::Ptr< ns3::Packet const >, unsigned short, ns3::Address const &, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty >', u'ns3::NetDevice::ReceiveCallback')
typehandlers.add_type_alias(u'ns3::Callback< bool, ns3::Ptr< ns3::NetDevice >, ns3::Ptr< ns3::Packet const >, unsigned short, ns3::Address const &, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty >*', u'ns3::NetDevice::ReceiveCallback*')
typehandlers.add_type_alias(u'ns3::Callback< bool, ns3::Ptr< ns3::NetDevice >, ns3::Ptr< ns3::Packet const >, unsigned short, ns3::Address const &, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty >&', u'ns3::NetDevice::ReceiveCallback&')
typehandlers.add_type_alias(u'ns3::Callback< bool, ns3::Ptr< ns3::NetDevice >, ns3::Ptr< ns3::Packet const >, unsigned short, ns3::Address const &, ns3::Address const &, ns3::NetDevice::PacketType, ns3::empty, ns3::empty, ns3::empty >', u'ns3::NetDevice::PromiscReceiveCallback')
typehandlers.add_type_alias(u'ns3::Callback< bool, ns3::Ptr< ns3::NetDevice >, ns3::Ptr< ns3::Packet const >, unsigned short, ns3::Address const &, ns3::Address const &, ns3::NetDevice::PacketType, ns3::empty, ns3::empty, ns3::empty >*', u'ns3::NetDevice::PromiscReceiveCallback*')
typehandlers.add_type_alias(u'ns3::Callback< bool, ns3::Ptr< ns3::NetDevice >, ns3::Ptr< ns3::Packet const >, unsigned short, ns3::Address const &, ns3::Address const &, ns3::NetDevice::PacketType, ns3::empty, ns3::empty, ns3::empty >&', u'ns3::NetDevice::PromiscReceiveCallback&')
## nix-vector.h (module 'network'): ns3::NixVector [class]
module.add_class('NixVector', import_from_module='ns.network', parent=root_module['ns3::SimpleRefCount< ns3::NixVector, ns3::empty, ns3::DefaultDeleter<ns3::NixVector> >'])
## node.h (module 'network'): ns3::Node [class]
module.add_class('Node', import_from_module='ns.network', parent=root_module['ns3::Object'])
typehandlers.add_type_alias(u'ns3::Callback< void, ns3::Ptr< ns3::NetDevice >, ns3::Ptr< ns3::Packet const >, unsigned short, ns3::Address const &, ns3::Address const &, ns3::NetDevice::PacketType, ns3::empty, ns3::empty, ns3::empty >', u'ns3::Node::ProtocolHandler')
typehandlers.add_type_alias(u'ns3::Callback< void, ns3::Ptr< ns3::NetDevice >, ns3::Ptr< ns3::Packet const >, unsigned short, ns3::Address const &, ns3::Address const &, ns3::NetDevice::PacketType, ns3::empty, ns3::empty, ns3::empty >*', u'ns3::Node::ProtocolHandler*')
typehandlers.add_type_alias(u'ns3::Callback< void, ns3::Ptr< ns3::NetDevice >, ns3::Ptr< ns3::Packet const >, unsigned short, ns3::Address const &, ns3::Address const &, ns3::NetDevice::PacketType, ns3::empty, ns3::empty, ns3::empty >&', u'ns3::Node::ProtocolHandler&')
typehandlers.add_type_alias(u'ns3::Callback< void, ns3::Ptr< ns3::NetDevice >, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty >', u'ns3::Node::DeviceAdditionListener')
typehandlers.add_type_alias(u'ns3::Callback< void, ns3::Ptr< ns3::NetDevice >, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty >*', u'ns3::Node::DeviceAdditionListener*')
typehandlers.add_type_alias(u'ns3::Callback< void, ns3::Ptr< ns3::NetDevice >, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty >&', u'ns3::Node::DeviceAdditionListener&')
## object-factory.h (module 'core'): ns3::ObjectFactoryChecker [class]
module.add_class('ObjectFactoryChecker', import_from_module='ns.core', parent=root_module['ns3::AttributeChecker'])
## object-factory.h (module 'core'): ns3::ObjectFactoryValue [class]
module.add_class('ObjectFactoryValue', import_from_module='ns.core', parent=root_module['ns3::AttributeValue'])
## output-stream-wrapper.h (module 'network'): ns3::OutputStreamWrapper [class]
module.add_class('OutputStreamWrapper', import_from_module='ns.network', parent=root_module['ns3::SimpleRefCount< ns3::OutputStreamWrapper, ns3::empty, ns3::DefaultDeleter<ns3::OutputStreamWrapper> >'])
## packet.h (module 'network'): ns3::Packet [class]
module.add_class('Packet', import_from_module='ns.network', parent=root_module['ns3::SimpleRefCount< ns3::Packet, ns3::empty, ns3::DefaultDeleter<ns3::Packet> >'])
typehandlers.add_type_alias(u'void ( * ) ( ns3::Ptr< ns3::Packet const > )', u'ns3::Packet::TracedCallback')
typehandlers.add_type_alias(u'void ( * ) ( ns3::Ptr< ns3::Packet const > )*', u'ns3::Packet::TracedCallback*')
typehandlers.add_type_alias(u'void ( * ) ( ns3::Ptr< ns3::Packet const > )&', u'ns3::Packet::TracedCallback&')
typehandlers.add_type_alias(u'void ( * ) ( ns3::Ptr< ns3::Packet const >, ns3::Address const & )', u'ns3::Packet::AddressTracedCallback')
typehandlers.add_type_alias(u'void ( * ) ( ns3::Ptr< ns3::Packet const >, ns3::Address const & )*', u'ns3::Packet::AddressTracedCallback*')
typehandlers.add_type_alias(u'void ( * ) ( ns3::Ptr< ns3::Packet const >, ns3::Address const & )&', u'ns3::Packet::AddressTracedCallback&')
typehandlers.add_type_alias(u'void ( * ) ( ns3::Ptr< ns3::Packet const > const, ns3::Address const &, ns3::Address const & )', u'ns3::Packet::TwoAddressTracedCallback')
typehandlers.add_type_alias(u'void ( * ) ( ns3::Ptr< ns3::Packet const > const, ns3::Address const &, ns3::Address const & )*', u'ns3::Packet::TwoAddressTracedCallback*')
typehandlers.add_type_alias(u'void ( * ) ( ns3::Ptr< ns3::Packet const > const, ns3::Address const &, ns3::Address const & )&', u'ns3::Packet::TwoAddressTracedCallback&')
typehandlers.add_type_alias(u'void ( * ) ( ns3::Ptr< ns3::Packet const >, ns3::Mac48Address )', u'ns3::Packet::Mac48AddressTracedCallback')
typehandlers.add_type_alias(u'void ( * ) ( ns3::Ptr< ns3::Packet const >, ns3::Mac48Address )*', u'ns3::Packet::Mac48AddressTracedCallback*')
typehandlers.add_type_alias(u'void ( * ) ( ns3::Ptr< ns3::Packet const >, ns3::Mac48Address )&', u'ns3::Packet::Mac48AddressTracedCallback&')
typehandlers.add_type_alias(u'void ( * ) ( uint32_t, uint32_t )', u'ns3::Packet::SizeTracedCallback')
typehandlers.add_type_alias(u'void ( * ) ( uint32_t, uint32_t )*', u'ns3::Packet::SizeTracedCallback*')
typehandlers.add_type_alias(u'void ( * ) ( uint32_t, uint32_t )&', u'ns3::Packet::SizeTracedCallback&')
typehandlers.add_type_alias(u'void ( * ) ( ns3::Ptr< ns3::Packet const >, double )', u'ns3::Packet::SinrTracedCallback')
typehandlers.add_type_alias(u'void ( * ) ( ns3::Ptr< ns3::Packet const >, double )*', u'ns3::Packet::SinrTracedCallback*')
typehandlers.add_type_alias(u'void ( * ) ( ns3::Ptr< ns3::Packet const >, double )&', u'ns3::Packet::SinrTracedCallback&')
## queue-item.h (module 'network'): ns3::QueueItem [class]
module.add_class('QueueItem', import_from_module='ns.network', parent=root_module['ns3::SimpleRefCount< ns3::QueueItem, ns3::empty, ns3::DefaultDeleter<ns3::QueueItem> >'])
## queue-item.h (module 'network'): ns3::QueueItem::Uint8Values [enumeration]
module.add_enum('Uint8Values', ['IP_DSFIELD'], outer_class=root_module['ns3::QueueItem'], import_from_module='ns.network')
typehandlers.add_type_alias(u'void ( * ) ( ns3::Ptr< ns3::QueueItem const > )', u'ns3::QueueItem::TracedCallback')
typehandlers.add_type_alias(u'void ( * ) ( ns3::Ptr< ns3::QueueItem const > )*', u'ns3::QueueItem::TracedCallback*')
typehandlers.add_type_alias(u'void ( * ) ( ns3::Ptr< ns3::QueueItem const > )&', u'ns3::QueueItem::TracedCallback&')
## nstime.h (module 'core'): ns3::TimeValue [class]
module.add_class('TimeValue', import_from_module='ns.core', parent=root_module['ns3::AttributeValue'])
## type-id.h (module 'core'): ns3::TypeIdChecker [class]
module.add_class('TypeIdChecker', import_from_module='ns.core', parent=root_module['ns3::AttributeChecker'])
## type-id.h (module 'core'): ns3::TypeIdValue [class]
module.add_class('TypeIdValue', import_from_module='ns.core', parent=root_module['ns3::AttributeValue'])
## address.h (module 'network'): ns3::AddressChecker [class]
module.add_class('AddressChecker', import_from_module='ns.network', parent=root_module['ns3::AttributeChecker'])
## address.h (module 'network'): ns3::AddressValue [class]
module.add_class('AddressValue', import_from_module='ns.network', parent=root_module['ns3::AttributeValue'])
## callback.h (module 'core'): ns3::CallbackImpl<bool, ns3::Ptr<ns3::Socket>, const ns3::Address &, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty> [class]
module.add_class('CallbackImpl', import_from_module='ns.core', template_parameters=['bool', 'ns3::Ptr<ns3::Socket>', 'const ns3::Address &', 'ns3::empty', 'ns3::empty', 'ns3::empty', 'ns3::empty', 'ns3::empty', 'ns3::empty', 'ns3::empty'], parent=root_module['ns3::CallbackImplBase'])
## callback.h (module 'core'): ns3::CallbackImpl<ns3::ObjectBase *, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty> [class]
module.add_class('CallbackImpl', import_from_module='ns.core', template_parameters=['ns3::ObjectBase *', 'ns3::empty', 'ns3::empty', 'ns3::empty', 'ns3::empty', 'ns3::empty', 'ns3::empty', 'ns3::empty', 'ns3::empty', 'ns3::empty'], parent=root_module['ns3::CallbackImplBase'])
## callback.h (module 'core'): ns3::CallbackImpl<void, const ns3::Ipv4Header &, ns3::Ptr<const ns3::Packet>, ns3::Ipv4L3Protocol::DropReason, ns3::Ptr<ns3::Ipv4>, unsigned int, ns3::empty, ns3::empty, ns3::empty, ns3::empty> [class]
module.add_class('CallbackImpl', import_from_module='ns.core', template_parameters=['void', 'const ns3::Ipv4Header &', 'ns3::Ptr<const ns3::Packet>', 'ns3::Ipv4L3Protocol::DropReason', 'ns3::Ptr<ns3::Ipv4>', 'unsigned int', 'ns3::empty', 'ns3::empty', 'ns3::empty', 'ns3::empty'], parent=root_module['ns3::CallbackImplBase'])
## callback.h (module 'core'): ns3::CallbackImpl<void, const ns3::Ipv4Header &, ns3::Ptr<const ns3::Packet>, unsigned int, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty> [class]
module.add_class('CallbackImpl', import_from_module='ns.core', template_parameters=['void', 'const ns3::Ipv4Header &', 'ns3::Ptr<const ns3::Packet>', 'unsigned int', 'ns3::empty', 'ns3::empty', 'ns3::empty', 'ns3::empty', 'ns3::empty', 'ns3::empty'], parent=root_module['ns3::CallbackImplBase'])
## callback.h (module 'core'): ns3::CallbackImpl<void, const ns3::Ipv6Header &, ns3::Ptr<const ns3::Packet>, ns3::Ipv6L3Protocol::DropReason, ns3::Ptr<ns3::Ipv6>, unsigned int, ns3::empty, ns3::empty, ns3::empty, ns3::empty> [class]
module.add_class('CallbackImpl', import_from_module='ns.core', template_parameters=['void', 'const ns3::Ipv6Header &', 'ns3::Ptr<const ns3::Packet>', 'ns3::Ipv6L3Protocol::DropReason', 'ns3::Ptr<ns3::Ipv6>', 'unsigned int', 'ns3::empty', 'ns3::empty', 'ns3::empty', 'ns3::empty'], parent=root_module['ns3::CallbackImplBase'])
## callback.h (module 'core'): ns3::CallbackImpl<void, const ns3::Ipv6Header &, ns3::Ptr<const ns3::Packet>, unsigned int, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty> [class]
module.add_class('CallbackImpl', import_from_module='ns.core', template_parameters=['void', 'const ns3::Ipv6Header &', 'ns3::Ptr<const ns3::Packet>', 'unsigned int', 'ns3::empty', 'ns3::empty', 'ns3::empty', 'ns3::empty', 'ns3::empty', 'ns3::empty'], parent=root_module['ns3::CallbackImplBase'])
## callback.h (module 'core'): ns3::CallbackImpl<void, ns3::Ptr<const ns3::Packet>, ns3::Ptr<ns3::Ipv4>, unsigned int, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty> [class]
module.add_class('CallbackImpl', import_from_module='ns.core', template_parameters=['void', 'ns3::Ptr<const ns3::Packet>', 'ns3::Ptr<ns3::Ipv4>', 'unsigned int', 'ns3::empty', 'ns3::empty', 'ns3::empty', 'ns3::empty', 'ns3::empty', 'ns3::empty'], parent=root_module['ns3::CallbackImplBase'])
## callback.h (module 'core'): ns3::CallbackImpl<void, ns3::Ptr<const ns3::Packet>, ns3::Ptr<ns3::Ipv6>, unsigned int, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty> [class]
module.add_class('CallbackImpl', import_from_module='ns.core', template_parameters=['void', 'ns3::Ptr<const ns3::Packet>', 'ns3::Ptr<ns3::Ipv6>', 'unsigned int', 'ns3::empty', 'ns3::empty', 'ns3::empty', 'ns3::empty', 'ns3::empty', 'ns3::empty'], parent=root_module['ns3::CallbackImplBase'])
## callback.h (module 'core'): ns3::CallbackImpl<void, ns3::Ptr<ns3::NetDevice>, ns3::Ptr<const ns3::Packet>, unsigned short, const ns3::Address &, const ns3::Address &, ns3::NetDevice::PacketType, ns3::empty, ns3::empty, ns3::empty> [class]
module.add_class('CallbackImpl', import_from_module='ns.core', template_parameters=['void', 'ns3::Ptr<ns3::NetDevice>', 'ns3::Ptr<const ns3::Packet>', 'unsigned short', 'const ns3::Address &', 'const ns3::Address &', 'ns3::NetDevice::PacketType', 'ns3::empty', 'ns3::empty', 'ns3::empty'], parent=root_module['ns3::CallbackImplBase'])
## callback.h (module 'core'): ns3::CallbackImpl<void, ns3::Ptr<ns3::NetDevice>, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty> [class]
module.add_class('CallbackImpl', import_from_module='ns.core', template_parameters=['void', 'ns3::Ptr<ns3::NetDevice>', 'ns3::empty', 'ns3::empty', 'ns3::empty', 'ns3::empty', 'ns3::empty', 'ns3::empty', 'ns3::empty', 'ns3::empty'], parent=root_module['ns3::CallbackImplBase'])
## callback.h (module 'core'): ns3::CallbackImpl<void, ns3::Ptr<ns3::Socket>, const ns3::Address &, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty> [class]
module.add_class('CallbackImpl', import_from_module='ns.core', template_parameters=['void', 'ns3::Ptr<ns3::Socket>', 'const ns3::Address &', 'ns3::empty', 'ns3::empty', 'ns3::empty', 'ns3::empty', 'ns3::empty', 'ns3::empty', 'ns3::empty'], parent=root_module['ns3::CallbackImplBase'])
## callback.h (module 'core'): ns3::CallbackImpl<void, ns3::Ptr<ns3::Socket>, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty> [class]
module.add_class('CallbackImpl', import_from_module='ns.core', template_parameters=['void', 'ns3::Ptr<ns3::Socket>', 'ns3::empty', 'ns3::empty', 'ns3::empty', 'ns3::empty', 'ns3::empty', 'ns3::empty', 'ns3::empty', 'ns3::empty'], parent=root_module['ns3::CallbackImplBase'])
## callback.h (module 'core'): ns3::CallbackImpl<void, ns3::Ptr<ns3::Socket>, unsigned int, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty> [class]
module.add_class('CallbackImpl', import_from_module='ns.core', template_parameters=['void', 'ns3::Ptr<ns3::Socket>', 'unsigned int', 'ns3::empty', 'ns3::empty', 'ns3::empty', 'ns3::empty', 'ns3::empty', 'ns3::empty', 'ns3::empty'], parent=root_module['ns3::CallbackImplBase'])
## queue-item.h (module 'network'): ns3::QueueDiscItem [class]
module.add_class('QueueDiscItem', import_from_module='ns.network', parent=root_module['ns3::QueueItem'])
module.add_container('std::vector< ns3::Ipv6Address >', 'ns3::Ipv6Address', container_type=u'vector')
module.add_container('std::vector< unsigned int >', 'unsigned int', container_type=u'vector')
module.add_container('std::vector< unsigned long long >', 'long unsigned int', container_type=u'vector')
module.add_container('std::map< unsigned int, ns3::FlowMonitor::FlowStats >', ('unsigned int', 'ns3::FlowMonitor::FlowStats'), container_type=u'map')
module.add_container('std::vector< ns3::Ptr< ns3::FlowProbe > >', 'ns3::Ptr< ns3::FlowProbe >', container_type=u'vector')
module.add_container('std::map< unsigned int, ns3::FlowProbe::FlowStats >', ('unsigned int', 'ns3::FlowProbe::FlowStats'), container_type=u'map')
module.add_container('ns3::FlowProbe::Stats', ('unsigned int', 'ns3::FlowProbe::FlowStats'), container_type=u'map')
module.add_container('std::vector< std::pair< ns3::Ipv4Header::DscpType, unsigned int > >', 'std::pair< ns3::Ipv4Header::DscpType, unsigned int >', container_type=u'vector')
module.add_container('std::map< unsigned int, unsigned int >', ('unsigned int', 'unsigned int'), container_type=u'map')
module.add_container('std::vector< std::pair< ns3::Ipv6Header::DscpType, unsigned int > >', 'std::pair< ns3::Ipv6Header::DscpType, unsigned int >', container_type=u'vector')
typehandlers.add_type_alias(u'uint32_t', u'ns3::FlowId')
typehandlers.add_type_alias(u'uint32_t*', u'ns3::FlowId*')
typehandlers.add_type_alias(u'uint32_t&', u'ns3::FlowId&')
typehandlers.add_type_alias(u'uint32_t', u'ns3::FlowPacketId')
typehandlers.add_type_alias(u'uint32_t*', u'ns3::FlowPacketId*')
typehandlers.add_type_alias(u'uint32_t&', u'ns3::FlowPacketId&')
## Register a nested module for the namespace FatalImpl
nested_module = module.add_cpp_namespace('FatalImpl')
register_types_ns3_FatalImpl(nested_module)
## Register a nested module for the namespace Hash
nested_module = module.add_cpp_namespace('Hash')
register_types_ns3_Hash(nested_module)
## Register a nested module for the namespace TracedValueCallback
nested_module = module.add_cpp_namespace('TracedValueCallback')
register_types_ns3_TracedValueCallback(nested_module)
def register_types_ns3_FatalImpl(module):
    """Register the types of the ns3::FatalImpl namespace on *module*.

    This namespace contributes no classes, enums or type aliases of its
    own; only the root-module lookup from the generated template runs
    (its result is unused but kept for parity with the generator output).
    """
    root_module = module.get_root()
def register_types_ns3_Hash(module):
    """Register the types living in the ns3::Hash namespace on *module*.

    Performs, in order: registration of the Implementation base class,
    the Hash32/Hash64 function-pointer typedef aliases (each in value,
    pointer and reference form), and the nested ``Function`` namespace.
    The call sequence matches the fully unrolled generated code.
    """
    root_module = module.get_root()
    ## hash-function.h (module 'core'): ns3::Hash::Implementation [class]
    module.add_class('Implementation', import_from_module='ns.core', parent=root_module['ns3::SimpleRefCount< ns3::Hash::Implementation, ns3::empty, ns3::DefaultDeleter<ns3::Hash::Implementation> >'])
    # Each typedef is aliased three times: as-is, as a pointer, and as a
    # reference — exactly as the unrolled per-statement form does.
    for signature, alias in (
        (u'uint32_t ( * ) ( char const *, std::size_t const )', u'ns3::Hash::Hash32Function_ptr'),
        (u'uint64_t ( * ) ( char const *, std::size_t const )', u'ns3::Hash::Hash64Function_ptr'),
    ):
        for suffix in (u'', u'*', u'&'):
            typehandlers.add_type_alias(signature + suffix, alias + suffix)
    ## Register a nested module for the namespace Function
    nested_module = module.add_cpp_namespace('Function')
    register_types_ns3_Hash_Function(nested_module)
def register_types_ns3_Hash_Function(module):
    """Register the concrete hash implementations of ns3::Hash::Function.

    Fnv1a (hash-fnv.h), Hash32 and Hash64 (hash-function.h) and Murmur3
    (hash-murmur3.h) are all registered as subclasses of
    ns3::Hash::Implementation, in the same order as the generated code.
    """
    root_module = module.get_root()
    for class_name in ('Fnv1a', 'Hash32', 'Hash64', 'Murmur3'):
        module.add_class(class_name, import_from_module='ns.core', parent=root_module['ns3::Hash::Implementation'])
def register_types_ns3_TracedValueCallback(module):
    """Register the ns3::TracedValueCallback typedef aliases on *module*.

    Only ``TracedValueCallback::Time`` is aliased here, in value, pointer
    and reference form, matching the unrolled generated statements.
    """
    root_module = module.get_root()
    time_signature = u'void ( * ) ( ns3::Time, ns3::Time )'
    time_alias = u'ns3::TracedValueCallback::Time'
    for suffix in (u'', u'*', u'&'):
        typehandlers.add_type_alias(time_signature + suffix, time_alias + suffix)
def register_methods(root_module):
register_Ns3Address_methods(root_module, root_module['ns3::Address'])
register_Ns3AttributeConstructionList_methods(root_module, root_module['ns3::AttributeConstructionList'])
register_Ns3AttributeConstructionListItem_methods(root_module, root_module['ns3::AttributeConstructionList::Item'])
register_Ns3Buffer_methods(root_module, root_module['ns3::Buffer'])
register_Ns3BufferIterator_methods(root_module, root_module['ns3::Buffer::Iterator'])
register_Ns3ByteTagIterator_methods(root_module, root_module['ns3::ByteTagIterator'])
register_Ns3ByteTagIteratorItem_methods(root_module, root_module['ns3::ByteTagIterator::Item'])
register_Ns3ByteTagList_methods(root_module, root_module['ns3::ByteTagList'])
register_Ns3ByteTagListIterator_methods(root_module, root_module['ns3::ByteTagList::Iterator'])
register_Ns3ByteTagListIteratorItem_methods(root_module, root_module['ns3::ByteTagList::Iterator::Item'])
register_Ns3CallbackBase_methods(root_module, root_module['ns3::CallbackBase'])
register_Ns3DefaultDeleter__Ns3AttributeAccessor_methods(root_module, root_module['ns3::DefaultDeleter< ns3::AttributeAccessor >'])
register_Ns3DefaultDeleter__Ns3AttributeChecker_methods(root_module, root_module['ns3::DefaultDeleter< ns3::AttributeChecker >'])
register_Ns3DefaultDeleter__Ns3AttributeValue_methods(root_module, root_module['ns3::DefaultDeleter< ns3::AttributeValue >'])
register_Ns3DefaultDeleter__Ns3CallbackImplBase_methods(root_module, root_module['ns3::DefaultDeleter< ns3::CallbackImplBase >'])
register_Ns3DefaultDeleter__Ns3EventImpl_methods(root_module, root_module['ns3::DefaultDeleter< ns3::EventImpl >'])
register_Ns3DefaultDeleter__Ns3FlowClassifier_methods(root_module, root_module['ns3::DefaultDeleter< ns3::FlowClassifier >'])
register_Ns3DefaultDeleter__Ns3HashImplementation_methods(root_module, root_module['ns3::DefaultDeleter< ns3::Hash::Implementation >'])
register_Ns3DefaultDeleter__Ns3NixVector_methods(root_module, root_module['ns3::DefaultDeleter< ns3::NixVector >'])
register_Ns3DefaultDeleter__Ns3Packet_methods(root_module, root_module['ns3::DefaultDeleter< ns3::Packet >'])
register_Ns3DefaultDeleter__Ns3TraceSourceAccessor_methods(root_module, root_module['ns3::DefaultDeleter< ns3::TraceSourceAccessor >'])
register_Ns3EventId_methods(root_module, root_module['ns3::EventId'])
register_Ns3FlowMonitorHelper_methods(root_module, root_module['ns3::FlowMonitorHelper'])
register_Ns3Hasher_methods(root_module, root_module['ns3::Hasher'])
register_Ns3Histogram_methods(root_module, root_module['ns3::Histogram'])
register_Ns3Inet6SocketAddress_methods(root_module, root_module['ns3::Inet6SocketAddress'])
register_Ns3InetSocketAddress_methods(root_module, root_module['ns3::InetSocketAddress'])
register_Ns3Ipv4Address_methods(root_module, root_module['ns3::Ipv4Address'])
register_Ns3Ipv4InterfaceAddress_methods(root_module, root_module['ns3::Ipv4InterfaceAddress'])
register_Ns3Ipv4Mask_methods(root_module, root_module['ns3::Ipv4Mask'])
register_Ns3Ipv6Address_methods(root_module, root_module['ns3::Ipv6Address'])
register_Ns3Ipv6InterfaceAddress_methods(root_module, root_module['ns3::Ipv6InterfaceAddress'])
register_Ns3Ipv6Prefix_methods(root_module, root_module['ns3::Ipv6Prefix'])
register_Ns3Mac48Address_methods(root_module, root_module['ns3::Mac48Address'])
register_Ns3Mac8Address_methods(root_module, root_module['ns3::Mac8Address'])
register_Ns3NodeContainer_methods(root_module, root_module['ns3::NodeContainer'])
register_Ns3ObjectBase_methods(root_module, root_module['ns3::ObjectBase'])
register_Ns3ObjectDeleter_methods(root_module, root_module['ns3::ObjectDeleter'])
register_Ns3ObjectFactory_methods(root_module, root_module['ns3::ObjectFactory'])
register_Ns3PacketMetadata_methods(root_module, root_module['ns3::PacketMetadata'])
register_Ns3PacketMetadataItem_methods(root_module, root_module['ns3::PacketMetadata::Item'])
register_Ns3PacketMetadataItemIterator_methods(root_module, root_module['ns3::PacketMetadata::ItemIterator'])
register_Ns3PacketTagIterator_methods(root_module, root_module['ns3::PacketTagIterator'])
register_Ns3PacketTagIteratorItem_methods(root_module, root_module['ns3::PacketTagIterator::Item'])
register_Ns3PacketTagList_methods(root_module, root_module['ns3::PacketTagList'])
register_Ns3PacketTagListTagData_methods(root_module, root_module['ns3::PacketTagList::TagData'])
register_Ns3SimpleRefCount__Ns3Object_Ns3ObjectBase_Ns3ObjectDeleter_methods(root_module, root_module['ns3::SimpleRefCount< ns3::Object, ns3::ObjectBase, ns3::ObjectDeleter >'])
register_Ns3Simulator_methods(root_module, root_module['ns3::Simulator'])
register_Ns3Tag_methods(root_module, root_module['ns3::Tag'])
register_Ns3TagBuffer_methods(root_module, root_module['ns3::TagBuffer'])
register_Ns3TimeWithUnit_methods(root_module, root_module['ns3::TimeWithUnit'])
register_Ns3TypeId_methods(root_module, root_module['ns3::TypeId'])
register_Ns3TypeIdAttributeInformation_methods(root_module, root_module['ns3::TypeId::AttributeInformation'])
register_Ns3TypeIdTraceSourceInformation_methods(root_module, root_module['ns3::TypeId::TraceSourceInformation'])
register_Ns3Empty_methods(root_module, root_module['ns3::empty'])
register_Ns3Int64x64_t_methods(root_module, root_module['ns3::int64x64_t'])
register_Ns3Chunk_methods(root_module, root_module['ns3::Chunk'])
register_Ns3Header_methods(root_module, root_module['ns3::Header'])
register_Ns3Ipv4Header_methods(root_module, root_module['ns3::Ipv4Header'])
register_Ns3Ipv6Header_methods(root_module, root_module['ns3::Ipv6Header'])
register_Ns3Object_methods(root_module, root_module['ns3::Object'])
register_Ns3ObjectAggregateIterator_methods(root_module, root_module['ns3::Object::AggregateIterator'])
register_Ns3SimpleRefCount__Ns3AttributeAccessor_Ns3Empty_Ns3DefaultDeleter__lt__ns3AttributeAccessor__gt___methods(root_module, root_module['ns3::SimpleRefCount< ns3::AttributeAccessor, ns3::empty, ns3::DefaultDeleter<ns3::AttributeAccessor> >'])
register_Ns3SimpleRefCount__Ns3AttributeChecker_Ns3Empty_Ns3DefaultDeleter__lt__ns3AttributeChecker__gt___methods(root_module, root_module['ns3::SimpleRefCount< ns3::AttributeChecker, ns3::empty, ns3::DefaultDeleter<ns3::AttributeChecker> >'])
register_Ns3SimpleRefCount__Ns3AttributeValue_Ns3Empty_Ns3DefaultDeleter__lt__ns3AttributeValue__gt___methods(root_module, root_module['ns3::SimpleRefCount< ns3::AttributeValue, ns3::empty, ns3::DefaultDeleter<ns3::AttributeValue> >'])
register_Ns3SimpleRefCount__Ns3CallbackImplBase_Ns3Empty_Ns3DefaultDeleter__lt__ns3CallbackImplBase__gt___methods(root_module, root_module['ns3::SimpleRefCount< ns3::CallbackImplBase, ns3::empty, ns3::DefaultDeleter<ns3::CallbackImplBase> >'])
register_Ns3SimpleRefCount__Ns3EventImpl_Ns3Empty_Ns3DefaultDeleter__lt__ns3EventImpl__gt___methods(root_module, root_module['ns3::SimpleRefCount< ns3::EventImpl, ns3::empty, ns3::DefaultDeleter<ns3::EventImpl> >'])
register_Ns3SimpleRefCount__Ns3FlowClassifier_Ns3Empty_Ns3DefaultDeleter__lt__ns3FlowClassifier__gt___methods(root_module, root_module['ns3::SimpleRefCount< ns3::FlowClassifier, ns3::empty, ns3::DefaultDeleter<ns3::FlowClassifier> >'])
register_Ns3SimpleRefCount__Ns3HashImplementation_Ns3Empty_Ns3DefaultDeleter__lt__ns3HashImplementation__gt___methods(root_module, root_module['ns3::SimpleRefCount< ns3::Hash::Implementation, ns3::empty, ns3::DefaultDeleter<ns3::Hash::Implementation> >'])
register_Ns3SimpleRefCount__Ns3Ipv4MulticastRoute_Ns3Empty_Ns3DefaultDeleter__lt__ns3Ipv4MulticastRoute__gt___methods(root_module, root_module['ns3::SimpleRefCount< ns3::Ipv4MulticastRoute, ns3::empty, ns3::DefaultDeleter<ns3::Ipv4MulticastRoute> >'])
register_Ns3SimpleRefCount__Ns3Ipv4Route_Ns3Empty_Ns3DefaultDeleter__lt__ns3Ipv4Route__gt___methods(root_module, root_module['ns3::SimpleRefCount< ns3::Ipv4Route, ns3::empty, ns3::DefaultDeleter<ns3::Ipv4Route> >'])
register_Ns3SimpleRefCount__Ns3NixVector_Ns3Empty_Ns3DefaultDeleter__lt__ns3NixVector__gt___methods(root_module, root_module['ns3::SimpleRefCount< ns3::NixVector, ns3::empty, ns3::DefaultDeleter<ns3::NixVector> >'])
register_Ns3SimpleRefCount__Ns3OutputStreamWrapper_Ns3Empty_Ns3DefaultDeleter__lt__ns3OutputStreamWrapper__gt___methods(root_module, root_module['ns3::SimpleRefCount< ns3::OutputStreamWrapper, ns3::empty, ns3::DefaultDeleter<ns3::OutputStreamWrapper> >'])
register_Ns3SimpleRefCount__Ns3Packet_Ns3Empty_Ns3DefaultDeleter__lt__ns3Packet__gt___methods(root_module, root_module['ns3::SimpleRefCount< ns3::Packet, ns3::empty, ns3::DefaultDeleter<ns3::Packet> >'])
register_Ns3SimpleRefCount__Ns3QueueItem_Ns3Empty_Ns3DefaultDeleter__lt__ns3QueueItem__gt___methods(root_module, root_module['ns3::SimpleRefCount< ns3::QueueItem, ns3::empty, ns3::DefaultDeleter<ns3::QueueItem> >'])
register_Ns3SimpleRefCount__Ns3TraceSourceAccessor_Ns3Empty_Ns3DefaultDeleter__lt__ns3TraceSourceAccessor__gt___methods(root_module, root_module['ns3::SimpleRefCount< ns3::TraceSourceAccessor, ns3::empty, ns3::DefaultDeleter<ns3::TraceSourceAccessor> >'])
register_Ns3Socket_methods(root_module, root_module['ns3::Socket'])
register_Ns3SocketIpTosTag_methods(root_module, root_module['ns3::SocketIpTosTag'])
register_Ns3SocketIpTtlTag_methods(root_module, root_module['ns3::SocketIpTtlTag'])
register_Ns3SocketIpv6HopLimitTag_methods(root_module, root_module['ns3::SocketIpv6HopLimitTag'])
register_Ns3SocketIpv6TclassTag_methods(root_module, root_module['ns3::SocketIpv6TclassTag'])
register_Ns3SocketPriorityTag_methods(root_module, root_module['ns3::SocketPriorityTag'])
register_Ns3SocketSetDontFragmentTag_methods(root_module, root_module['ns3::SocketSetDontFragmentTag'])
register_Ns3Time_methods(root_module, root_module['ns3::Time'])
register_Ns3TraceSourceAccessor_methods(root_module, root_module['ns3::TraceSourceAccessor'])
register_Ns3Trailer_methods(root_module, root_module['ns3::Trailer'])
register_Ns3AttributeAccessor_methods(root_module, root_module['ns3::AttributeAccessor'])
register_Ns3AttributeChecker_methods(root_module, root_module['ns3::AttributeChecker'])
register_Ns3AttributeValue_methods(root_module, root_module['ns3::AttributeValue'])
register_Ns3CallbackChecker_methods(root_module, root_module['ns3::CallbackChecker'])
register_Ns3CallbackImplBase_methods(root_module, root_module['ns3::CallbackImplBase'])
register_Ns3CallbackValue_methods(root_module, root_module['ns3::CallbackValue'])
register_Ns3EmptyAttributeAccessor_methods(root_module, root_module['ns3::EmptyAttributeAccessor'])
register_Ns3EmptyAttributeChecker_methods(root_module, root_module['ns3::EmptyAttributeChecker'])
register_Ns3EmptyAttributeValue_methods(root_module, root_module['ns3::EmptyAttributeValue'])
register_Ns3EventImpl_methods(root_module, root_module['ns3::EventImpl'])
register_Ns3FlowClassifier_methods(root_module, root_module['ns3::FlowClassifier'])
register_Ns3FlowMonitor_methods(root_module, root_module['ns3::FlowMonitor'])
register_Ns3FlowMonitorFlowStats_methods(root_module, root_module['ns3::FlowMonitor::FlowStats'])
register_Ns3FlowProbe_methods(root_module, root_module['ns3::FlowProbe'])
register_Ns3FlowProbeFlowStats_methods(root_module, root_module['ns3::FlowProbe::FlowStats'])
register_Ns3Ipv4_methods(root_module, root_module['ns3::Ipv4'])
register_Ns3Ipv4AddressChecker_methods(root_module, root_module['ns3::Ipv4AddressChecker'])
register_Ns3Ipv4AddressValue_methods(root_module, root_module['ns3::Ipv4AddressValue'])
register_Ns3Ipv4FlowClassifier_methods(root_module, root_module['ns3::Ipv4FlowClassifier'])
register_Ns3Ipv4FlowClassifierFiveTuple_methods(root_module, root_module['ns3::Ipv4FlowClassifier::FiveTuple'])
register_Ns3Ipv4FlowClassifierSortByCount_methods(root_module, root_module['ns3::Ipv4FlowClassifier::SortByCount'])
register_Ns3Ipv4FlowProbe_methods(root_module, root_module['ns3::Ipv4FlowProbe'])
register_Ns3Ipv4L3Protocol_methods(root_module, root_module['ns3::Ipv4L3Protocol'])
register_Ns3Ipv4MaskChecker_methods(root_module, root_module['ns3::Ipv4MaskChecker'])
register_Ns3Ipv4MaskValue_methods(root_module, root_module['ns3::Ipv4MaskValue'])
register_Ns3Ipv4MulticastRoute_methods(root_module, root_module['ns3::Ipv4MulticastRoute'])
register_Ns3Ipv4Route_methods(root_module, root_module['ns3::Ipv4Route'])
register_Ns3Ipv4RoutingProtocol_methods(root_module, root_module['ns3::Ipv4RoutingProtocol'])
register_Ns3Ipv6_methods(root_module, root_module['ns3::Ipv6'])
register_Ns3Ipv6AddressChecker_methods(root_module, root_module['ns3::Ipv6AddressChecker'])
register_Ns3Ipv6AddressValue_methods(root_module, root_module['ns3::Ipv6AddressValue'])
register_Ns3Ipv6FlowClassifier_methods(root_module, root_module['ns3::Ipv6FlowClassifier'])
register_Ns3Ipv6FlowClassifierFiveTuple_methods(root_module, root_module['ns3::Ipv6FlowClassifier::FiveTuple'])
register_Ns3Ipv6FlowClassifierSortByCount_methods(root_module, root_module['ns3::Ipv6FlowClassifier::SortByCount'])
register_Ns3Ipv6FlowProbe_methods(root_module, root_module['ns3::Ipv6FlowProbe'])
register_Ns3Ipv6L3Protocol_methods(root_module, root_module['ns3::Ipv6L3Protocol'])
register_Ns3Ipv6PmtuCache_methods(root_module, root_module['ns3::Ipv6PmtuCache'])
register_Ns3Ipv6PrefixChecker_methods(root_module, root_module['ns3::Ipv6PrefixChecker'])
register_Ns3Ipv6PrefixValue_methods(root_module, root_module['ns3::Ipv6PrefixValue'])
register_Ns3Mac48AddressChecker_methods(root_module, root_module['ns3::Mac48AddressChecker'])
register_Ns3Mac48AddressValue_methods(root_module, root_module['ns3::Mac48AddressValue'])
register_Ns3NetDevice_methods(root_module, root_module['ns3::NetDevice'])
register_Ns3NixVector_methods(root_module, root_module['ns3::NixVector'])
register_Ns3Node_methods(root_module, root_module['ns3::Node'])
register_Ns3ObjectFactoryChecker_methods(root_module, root_module['ns3::ObjectFactoryChecker'])
register_Ns3ObjectFactoryValue_methods(root_module, root_module['ns3::ObjectFactoryValue'])
register_Ns3OutputStreamWrapper_methods(root_module, root_module['ns3::OutputStreamWrapper'])
register_Ns3Packet_methods(root_module, root_module['ns3::Packet'])
register_Ns3QueueItem_methods(root_module, root_module['ns3::QueueItem'])
register_Ns3TimeValue_methods(root_module, root_module['ns3::TimeValue'])
register_Ns3TypeIdChecker_methods(root_module, root_module['ns3::TypeIdChecker'])
register_Ns3TypeIdValue_methods(root_module, root_module['ns3::TypeIdValue'])
register_Ns3AddressChecker_methods(root_module, root_module['ns3::AddressChecker'])
register_Ns3AddressValue_methods(root_module, root_module['ns3::AddressValue'])
register_Ns3CallbackImpl__Bool_Ns3Ptr__lt__ns3Socket__gt___Const_ns3Address___amp___Ns3Empty_Ns3Empty_Ns3Empty_Ns3Empty_Ns3Empty_Ns3Empty_Ns3Empty_methods(root_module, root_module['ns3::CallbackImpl< bool, ns3::Ptr<ns3::Socket>, const ns3::Address &, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty >'])
register_Ns3CallbackImpl__Ns3ObjectBase___star___Ns3Empty_Ns3Empty_Ns3Empty_Ns3Empty_Ns3Empty_Ns3Empty_Ns3Empty_Ns3Empty_Ns3Empty_methods(root_module, root_module['ns3::CallbackImpl< ns3::ObjectBase *, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty >'])
register_Ns3CallbackImpl__Void_Const_ns3Ipv4Header___amp___Ns3Ptr__lt__const_ns3Packet__gt___Ns3Ipv4L3ProtocolDropReason_Ns3Ptr__lt__ns3Ipv4__gt___Unsigned_int_Ns3Empty_Ns3Empty_Ns3Empty_Ns3Empty_methods(root_module, root_module['ns3::CallbackImpl< void, const ns3::Ipv4Header &, ns3::Ptr<const ns3::Packet>, ns3::Ipv4L3Protocol::DropReason, ns3::Ptr<ns3::Ipv4>, unsigned int, ns3::empty, ns3::empty, ns3::empty, ns3::empty >'])
register_Ns3CallbackImpl__Void_Const_ns3Ipv4Header___amp___Ns3Ptr__lt__const_ns3Packet__gt___Unsigned_int_Ns3Empty_Ns3Empty_Ns3Empty_Ns3Empty_Ns3Empty_Ns3Empty_methods(root_module, root_module['ns3::CallbackImpl< void, const ns3::Ipv4Header &, ns3::Ptr<const ns3::Packet>, unsigned int, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty >'])
register_Ns3CallbackImpl__Void_Const_ns3Ipv6Header___amp___Ns3Ptr__lt__const_ns3Packet__gt___Ns3Ipv6L3ProtocolDropReason_Ns3Ptr__lt__ns3Ipv6__gt___Unsigned_int_Ns3Empty_Ns3Empty_Ns3Empty_Ns3Empty_methods(root_module, root_module['ns3::CallbackImpl< void, const ns3::Ipv6Header &, ns3::Ptr<const ns3::Packet>, ns3::Ipv6L3Protocol::DropReason, ns3::Ptr<ns3::Ipv6>, unsigned int, ns3::empty, ns3::empty, ns3::empty, ns3::empty >'])
register_Ns3CallbackImpl__Void_Const_ns3Ipv6Header___amp___Ns3Ptr__lt__const_ns3Packet__gt___Unsigned_int_Ns3Empty_Ns3Empty_Ns3Empty_Ns3Empty_Ns3Empty_Ns3Empty_methods(root_module, root_module['ns3::CallbackImpl< void, const ns3::Ipv6Header &, ns3::Ptr<const ns3::Packet>, unsigned int, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty >'])
register_Ns3CallbackImpl__Void_Ns3Ptr__lt__const_ns3Packet__gt___Ns3Ptr__lt__ns3Ipv4__gt___Unsigned_int_Ns3Empty_Ns3Empty_Ns3Empty_Ns3Empty_Ns3Empty_Ns3Empty_methods(root_module, root_module['ns3::CallbackImpl< void, ns3::Ptr<const ns3::Packet>, ns3::Ptr<ns3::Ipv4>, unsigned int, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty >'])
register_Ns3CallbackImpl__Void_Ns3Ptr__lt__const_ns3Packet__gt___Ns3Ptr__lt__ns3Ipv6__gt___Unsigned_int_Ns3Empty_Ns3Empty_Ns3Empty_Ns3Empty_Ns3Empty_Ns3Empty_methods(root_module, root_module['ns3::CallbackImpl< void, ns3::Ptr<const ns3::Packet>, ns3::Ptr<ns3::Ipv6>, unsigned int, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty >'])
register_Ns3CallbackImpl__Void_Ns3Ptr__lt__ns3NetDevice__gt___Ns3Ptr__lt__const_ns3Packet__gt___Unsigned_short_Const_ns3Address___amp___Const_ns3Address___amp___Ns3NetDevicePacketType_Ns3Empty_Ns3Empty_Ns3Empty_methods(root_module, root_module['ns3::CallbackImpl< void, ns3::Ptr<ns3::NetDevice>, ns3::Ptr<const ns3::Packet>, unsigned short, const ns3::Address &, const ns3::Address &, ns3::NetDevice::PacketType, ns3::empty, ns3::empty, ns3::empty >'])
register_Ns3CallbackImpl__Void_Ns3Ptr__lt__ns3NetDevice__gt___Ns3Empty_Ns3Empty_Ns3Empty_Ns3Empty_Ns3Empty_Ns3Empty_Ns3Empty_Ns3Empty_methods(root_module, root_module['ns3::CallbackImpl< void, ns3::Ptr<ns3::NetDevice>, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty >'])
register_Ns3CallbackImpl__Void_Ns3Ptr__lt__ns3Socket__gt___Const_ns3Address___amp___Ns3Empty_Ns3Empty_Ns3Empty_Ns3Empty_Ns3Empty_Ns3Empty_Ns3Empty_methods(root_module, root_module['ns3::CallbackImpl< void, ns3::Ptr<ns3::Socket>, const ns3::Address &, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty >'])
register_Ns3CallbackImpl__Void_Ns3Ptr__lt__ns3Socket__gt___Ns3Empty_Ns3Empty_Ns3Empty_Ns3Empty_Ns3Empty_Ns3Empty_Ns3Empty_Ns3Empty_methods(root_module, root_module['ns3::CallbackImpl< void, ns3::Ptr<ns3::Socket>, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty >'])
register_Ns3CallbackImpl__Void_Ns3Ptr__lt__ns3Socket__gt___Unsigned_int_Ns3Empty_Ns3Empty_Ns3Empty_Ns3Empty_Ns3Empty_Ns3Empty_Ns3Empty_methods(root_module, root_module['ns3::CallbackImpl< void, ns3::Ptr<ns3::Socket>, unsigned int, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty >'])
register_Ns3QueueDiscItem_methods(root_module, root_module['ns3::QueueDiscItem'])
register_Ns3HashImplementation_methods(root_module, root_module['ns3::Hash::Implementation'])
register_Ns3HashFunctionFnv1a_methods(root_module, root_module['ns3::Hash::Function::Fnv1a'])
register_Ns3HashFunctionHash32_methods(root_module, root_module['ns3::Hash::Function::Hash32'])
register_Ns3HashFunctionHash64_methods(root_module, root_module['ns3::Hash::Function::Hash64'])
register_Ns3HashFunctionMurmur3_methods(root_module, root_module['ns3::Hash::Function::Murmur3'])
return
def register_Ns3Address_methods(root_module, cls):
    """Register Python bindings for the ns3::Address class (address.h, module 'network').

    Attaches comparison/stream operators, constructors and member functions
    to *cls*, a PyBindGen CppClass wrapper; *root_module* gives access to the
    other registered wrapper types.  Auto-generated binding code.
    """
    cls.add_binary_comparison_operator('==')
    cls.add_binary_comparison_operator('!=')
    cls.add_binary_comparison_operator('<')
    cls.add_output_stream_operator()
    ## address.h (module 'network'): ns3::Address::Address() [constructor]
    cls.add_constructor([])
    ## address.h (module 'network'): ns3::Address::Address(uint8_t type, uint8_t const * buffer, uint8_t len) [constructor]
    cls.add_constructor([param('uint8_t', 'type'), param('uint8_t const *', 'buffer'), param('uint8_t', 'len')])
    ## address.h (module 'network'): ns3::Address::Address(ns3::Address const & address) [constructor]
    cls.add_constructor([param('ns3::Address const &', 'address')])
    ## address.h (module 'network'): bool ns3::Address::CheckCompatible(uint8_t type, uint8_t len) const [member function]
    cls.add_method('CheckCompatible',
                   'bool',
                   [param('uint8_t', 'type'), param('uint8_t', 'len')],
                   is_const=True)
    ## address.h (module 'network'): uint32_t ns3::Address::CopyAllFrom(uint8_t const * buffer, uint8_t len) [member function]
    cls.add_method('CopyAllFrom',
                   'uint32_t',
                   [param('uint8_t const *', 'buffer'), param('uint8_t', 'len')])
    ## address.h (module 'network'): uint32_t ns3::Address::CopyAllTo(uint8_t * buffer, uint8_t len) const [member function]
    cls.add_method('CopyAllTo',
                   'uint32_t',
                   [param('uint8_t *', 'buffer'), param('uint8_t', 'len')],
                   is_const=True)
    ## address.h (module 'network'): uint32_t ns3::Address::CopyFrom(uint8_t const * buffer, uint8_t len) [member function]
    cls.add_method('CopyFrom',
                   'uint32_t',
                   [param('uint8_t const *', 'buffer'), param('uint8_t', 'len')])
    ## address.h (module 'network'): uint32_t ns3::Address::CopyTo(uint8_t * buffer) const [member function]
    cls.add_method('CopyTo',
                   'uint32_t',
                   [param('uint8_t *', 'buffer')],
                   is_const=True)
    ## address.h (module 'network'): void ns3::Address::Deserialize(ns3::TagBuffer buffer) [member function]
    cls.add_method('Deserialize',
                   'void',
                   [param('ns3::TagBuffer', 'buffer')])
    ## address.h (module 'network'): uint8_t ns3::Address::GetLength() const [member function]
    cls.add_method('GetLength',
                   'uint8_t',
                   [],
                   is_const=True)
    ## address.h (module 'network'): uint32_t ns3::Address::GetSerializedSize() const [member function]
    cls.add_method('GetSerializedSize',
                   'uint32_t',
                   [],
                   is_const=True)
    ## address.h (module 'network'): bool ns3::Address::IsInvalid() const [member function]
    cls.add_method('IsInvalid',
                   'bool',
                   [],
                   is_const=True)
    ## address.h (module 'network'): bool ns3::Address::IsMatchingType(uint8_t type) const [member function]
    cls.add_method('IsMatchingType',
                   'bool',
                   [param('uint8_t', 'type')],
                   is_const=True)
    ## address.h (module 'network'): static uint8_t ns3::Address::Register() [member function]
    cls.add_method('Register',
                   'uint8_t',
                   [],
                   is_static=True)
    ## address.h (module 'network'): void ns3::Address::Serialize(ns3::TagBuffer buffer) const [member function]
    cls.add_method('Serialize',
                   'void',
                   [param('ns3::TagBuffer', 'buffer')],
                   is_const=True)
    return
def register_Ns3AttributeConstructionList_methods(root_module, cls):
    """Bind ns3::AttributeConstructionList (attribute-construction-list.h, module 'core')."""
    # Copy and default constructors.
    cls.add_constructor([param('ns3::AttributeConstructionList const &', 'arg0')])
    cls.add_constructor([])
    # void Add(std::string name, Ptr<const AttributeChecker> checker, Ptr<AttributeValue> value)
    cls.add_method('Add', 'void',
                   [param('std::string', 'name'), param('ns3::Ptr< ns3::AttributeChecker const >', 'checker'), param('ns3::Ptr< ns3::AttributeValue >', 'value')])
    # Const iteration over the stored items.
    cls.add_method('Begin', 'ns3::AttributeConstructionList::CIterator', [], is_const=True)
    cls.add_method('End', 'ns3::AttributeConstructionList::CIterator', [], is_const=True)
    # Ptr<AttributeValue> Find(Ptr<const AttributeChecker> checker) const
    cls.add_method('Find', 'ns3::Ptr< ns3::AttributeValue >',
                   [param('ns3::Ptr< ns3::AttributeChecker const >', 'checker')],
                   is_const=True)
def register_Ns3AttributeConstructionListItem_methods(root_module, cls):
    """Bind the nested ns3::AttributeConstructionList::Item struct."""
    # Default and copy constructors.
    cls.add_constructor([])
    cls.add_constructor([param('ns3::AttributeConstructionList::Item const &', 'arg0')])
    # Public data members exposed as mutable instance attributes.
    cls.add_instance_attribute('checker', 'ns3::Ptr< ns3::AttributeChecker const >', is_const=False)
    cls.add_instance_attribute('name', 'std::string', is_const=False)
    cls.add_instance_attribute('value', 'ns3::Ptr< ns3::AttributeValue >', is_const=False)
def register_Ns3Buffer_methods(root_module, cls):
    """Register Python bindings for the ns3::Buffer class (buffer.h, module 'network').

    Adds constructors and member-function wrappers to *cls*, a PyBindGen
    CppClass wrapper.  Auto-generated binding code.
    """
    ## buffer.h (module 'network'): ns3::Buffer::Buffer(ns3::Buffer const & o) [constructor]
    cls.add_constructor([param('ns3::Buffer const &', 'o')])
    ## buffer.h (module 'network'): ns3::Buffer::Buffer() [constructor]
    cls.add_constructor([])
    ## buffer.h (module 'network'): ns3::Buffer::Buffer(uint32_t dataSize) [constructor]
    cls.add_constructor([param('uint32_t', 'dataSize')])
    ## buffer.h (module 'network'): ns3::Buffer::Buffer(uint32_t dataSize, bool initialize) [constructor]
    cls.add_constructor([param('uint32_t', 'dataSize'), param('bool', 'initialize')])
    ## buffer.h (module 'network'): void ns3::Buffer::AddAtEnd(uint32_t end) [member function]
    cls.add_method('AddAtEnd',
                   'void',
                   [param('uint32_t', 'end')])
    ## buffer.h (module 'network'): void ns3::Buffer::AddAtEnd(ns3::Buffer const & o) [member function]
    cls.add_method('AddAtEnd',
                   'void',
                   [param('ns3::Buffer const &', 'o')])
    ## buffer.h (module 'network'): void ns3::Buffer::AddAtStart(uint32_t start) [member function]
    cls.add_method('AddAtStart',
                   'void',
                   [param('uint32_t', 'start')])
    ## buffer.h (module 'network'): ns3::Buffer::Iterator ns3::Buffer::Begin() const [member function]
    cls.add_method('Begin',
                   'ns3::Buffer::Iterator',
                   [],
                   is_const=True)
    ## buffer.h (module 'network'): void ns3::Buffer::CopyData(std::ostream * os, uint32_t size) const [member function]
    cls.add_method('CopyData',
                   'void',
                   [param('std::ostream *', 'os'), param('uint32_t', 'size')],
                   is_const=True)
    ## buffer.h (module 'network'): uint32_t ns3::Buffer::CopyData(uint8_t * buffer, uint32_t size) const [member function]
    cls.add_method('CopyData',
                   'uint32_t',
                   [param('uint8_t *', 'buffer'), param('uint32_t', 'size')],
                   is_const=True)
    ## buffer.h (module 'network'): ns3::Buffer ns3::Buffer::CreateFragment(uint32_t start, uint32_t length) const [member function]
    cls.add_method('CreateFragment',
                   'ns3::Buffer',
                   [param('uint32_t', 'start'), param('uint32_t', 'length')],
                   is_const=True)
    ## buffer.h (module 'network'): uint32_t ns3::Buffer::Deserialize(uint8_t const * buffer, uint32_t size) [member function]
    cls.add_method('Deserialize',
                   'uint32_t',
                   [param('uint8_t const *', 'buffer'), param('uint32_t', 'size')])
    ## buffer.h (module 'network'): ns3::Buffer::Iterator ns3::Buffer::End() const [member function]
    cls.add_method('End',
                   'ns3::Buffer::Iterator',
                   [],
                   is_const=True)
    ## buffer.h (module 'network'): uint32_t ns3::Buffer::GetSerializedSize() const [member function]
    cls.add_method('GetSerializedSize',
                   'uint32_t',
                   [],
                   is_const=True)
    ## buffer.h (module 'network'): uint32_t ns3::Buffer::GetSize() const [member function]
    cls.add_method('GetSize',
                   'uint32_t',
                   [],
                   is_const=True)
    ## buffer.h (module 'network'): uint8_t const * ns3::Buffer::PeekData() const [member function]
    cls.add_method('PeekData',
                   'uint8_t const *',
                   [],
                   is_const=True)
    ## buffer.h (module 'network'): void ns3::Buffer::RemoveAtEnd(uint32_t end) [member function]
    cls.add_method('RemoveAtEnd',
                   'void',
                   [param('uint32_t', 'end')])
    ## buffer.h (module 'network'): void ns3::Buffer::RemoveAtStart(uint32_t start) [member function]
    cls.add_method('RemoveAtStart',
                   'void',
                   [param('uint32_t', 'start')])
    ## buffer.h (module 'network'): uint32_t ns3::Buffer::Serialize(uint8_t * buffer, uint32_t maxSize) const [member function]
    cls.add_method('Serialize',
                   'uint32_t',
                   [param('uint8_t *', 'buffer'), param('uint32_t', 'maxSize')],
                   is_const=True)
    return
def register_Ns3BufferIterator_methods(root_module, cls):
    """Register Python bindings for the nested ns3::Buffer::Iterator class (buffer.h, module 'network').

    Adds constructors plus the read/write/navigation member functions to
    *cls*, a PyBindGen CppClass wrapper.  Auto-generated binding code.
    """
    ## buffer.h (module 'network'): ns3::Buffer::Iterator::Iterator(ns3::Buffer::Iterator const & arg0) [constructor]
    cls.add_constructor([param('ns3::Buffer::Iterator const &', 'arg0')])
    ## buffer.h (module 'network'): ns3::Buffer::Iterator::Iterator() [constructor]
    cls.add_constructor([])
    ## buffer.h (module 'network'): uint16_t ns3::Buffer::Iterator::CalculateIpChecksum(uint16_t size) [member function]
    cls.add_method('CalculateIpChecksum',
                   'uint16_t',
                   [param('uint16_t', 'size')])
    ## buffer.h (module 'network'): uint16_t ns3::Buffer::Iterator::CalculateIpChecksum(uint16_t size, uint32_t initialChecksum) [member function]
    cls.add_method('CalculateIpChecksum',
                   'uint16_t',
                   [param('uint16_t', 'size'), param('uint32_t', 'initialChecksum')])
    ## buffer.h (module 'network'): uint32_t ns3::Buffer::Iterator::GetDistanceFrom(ns3::Buffer::Iterator const & o) const [member function]
    cls.add_method('GetDistanceFrom',
                   'uint32_t',
                   [param('ns3::Buffer::Iterator const &', 'o')],
                   is_const=True)
    ## buffer.h (module 'network'): uint32_t ns3::Buffer::Iterator::GetRemainingSize() const [member function]
    cls.add_method('GetRemainingSize',
                   'uint32_t',
                   [],
                   is_const=True)
    ## buffer.h (module 'network'): uint32_t ns3::Buffer::Iterator::GetSize() const [member function]
    cls.add_method('GetSize',
                   'uint32_t',
                   [],
                   is_const=True)
    ## buffer.h (module 'network'): bool ns3::Buffer::Iterator::IsEnd() const [member function]
    cls.add_method('IsEnd',
                   'bool',
                   [],
                   is_const=True)
    ## buffer.h (module 'network'): bool ns3::Buffer::Iterator::IsStart() const [member function]
    cls.add_method('IsStart',
                   'bool',
                   [],
                   is_const=True)
    ## buffer.h (module 'network'): void ns3::Buffer::Iterator::Next() [member function]
    cls.add_method('Next',
                   'void',
                   [])
    ## buffer.h (module 'network'): void ns3::Buffer::Iterator::Next(uint32_t delta) [member function]
    cls.add_method('Next',
                   'void',
                   [param('uint32_t', 'delta')])
    ## buffer.h (module 'network'): uint8_t ns3::Buffer::Iterator::PeekU8() [member function]
    cls.add_method('PeekU8',
                   'uint8_t',
                   [])
    ## buffer.h (module 'network'): void ns3::Buffer::Iterator::Prev() [member function]
    cls.add_method('Prev',
                   'void',
                   [])
    ## buffer.h (module 'network'): void ns3::Buffer::Iterator::Prev(uint32_t delta) [member function]
    cls.add_method('Prev',
                   'void',
                   [param('uint32_t', 'delta')])
    ## buffer.h (module 'network'): void ns3::Buffer::Iterator::Read(uint8_t * buffer, uint32_t size) [member function]
    cls.add_method('Read',
                   'void',
                   [param('uint8_t *', 'buffer'), param('uint32_t', 'size')])
    ## buffer.h (module 'network'): void ns3::Buffer::Iterator::Read(ns3::Buffer::Iterator start, uint32_t size) [member function]
    cls.add_method('Read',
                   'void',
                   [param('ns3::Buffer::Iterator', 'start'), param('uint32_t', 'size')])
    ## buffer.h (module 'network'): uint16_t ns3::Buffer::Iterator::ReadLsbtohU16() [member function]
    cls.add_method('ReadLsbtohU16',
                   'uint16_t',
                   [])
    ## buffer.h (module 'network'): uint32_t ns3::Buffer::Iterator::ReadLsbtohU32() [member function]
    cls.add_method('ReadLsbtohU32',
                   'uint32_t',
                   [])
    ## buffer.h (module 'network'): uint64_t ns3::Buffer::Iterator::ReadLsbtohU64() [member function]
    cls.add_method('ReadLsbtohU64',
                   'uint64_t',
                   [])
    ## buffer.h (module 'network'): uint16_t ns3::Buffer::Iterator::ReadNtohU16() [member function]
    cls.add_method('ReadNtohU16',
                   'uint16_t',
                   [])
    ## buffer.h (module 'network'): uint32_t ns3::Buffer::Iterator::ReadNtohU32() [member function]
    cls.add_method('ReadNtohU32',
                   'uint32_t',
                   [])
    ## buffer.h (module 'network'): uint64_t ns3::Buffer::Iterator::ReadNtohU64() [member function]
    cls.add_method('ReadNtohU64',
                   'uint64_t',
                   [])
    ## buffer.h (module 'network'): uint16_t ns3::Buffer::Iterator::ReadU16() [member function]
    cls.add_method('ReadU16',
                   'uint16_t',
                   [])
    ## buffer.h (module 'network'): uint32_t ns3::Buffer::Iterator::ReadU32() [member function]
    cls.add_method('ReadU32',
                   'uint32_t',
                   [])
    ## buffer.h (module 'network'): uint64_t ns3::Buffer::Iterator::ReadU64() [member function]
    cls.add_method('ReadU64',
                   'uint64_t',
                   [])
    ## buffer.h (module 'network'): uint8_t ns3::Buffer::Iterator::ReadU8() [member function]
    cls.add_method('ReadU8',
                   'uint8_t',
                   [])
    ## buffer.h (module 'network'): void ns3::Buffer::Iterator::Write(uint8_t const * buffer, uint32_t size) [member function]
    cls.add_method('Write',
                   'void',
                   [param('uint8_t const *', 'buffer'), param('uint32_t', 'size')])
    ## buffer.h (module 'network'): void ns3::Buffer::Iterator::Write(ns3::Buffer::Iterator start, ns3::Buffer::Iterator end) [member function]
    cls.add_method('Write',
                   'void',
                   [param('ns3::Buffer::Iterator', 'start'), param('ns3::Buffer::Iterator', 'end')])
    ## buffer.h (module 'network'): void ns3::Buffer::Iterator::WriteHtolsbU16(uint16_t data) [member function]
    cls.add_method('WriteHtolsbU16',
                   'void',
                   [param('uint16_t', 'data')])
    ## buffer.h (module 'network'): void ns3::Buffer::Iterator::WriteHtolsbU32(uint32_t data) [member function]
    cls.add_method('WriteHtolsbU32',
                   'void',
                   [param('uint32_t', 'data')])
    ## buffer.h (module 'network'): void ns3::Buffer::Iterator::WriteHtolsbU64(uint64_t data) [member function]
    cls.add_method('WriteHtolsbU64',
                   'void',
                   [param('uint64_t', 'data')])
    ## buffer.h (module 'network'): void ns3::Buffer::Iterator::WriteHtonU16(uint16_t data) [member function]
    cls.add_method('WriteHtonU16',
                   'void',
                   [param('uint16_t', 'data')])
    ## buffer.h (module 'network'): void ns3::Buffer::Iterator::WriteHtonU32(uint32_t data) [member function]
    cls.add_method('WriteHtonU32',
                   'void',
                   [param('uint32_t', 'data')])
    ## buffer.h (module 'network'): void ns3::Buffer::Iterator::WriteHtonU64(uint64_t data) [member function]
    cls.add_method('WriteHtonU64',
                   'void',
                   [param('uint64_t', 'data')])
    ## buffer.h (module 'network'): void ns3::Buffer::Iterator::WriteU16(uint16_t data) [member function]
    cls.add_method('WriteU16',
                   'void',
                   [param('uint16_t', 'data')])
    ## buffer.h (module 'network'): void ns3::Buffer::Iterator::WriteU32(uint32_t data) [member function]
    cls.add_method('WriteU32',
                   'void',
                   [param('uint32_t', 'data')])
    ## buffer.h (module 'network'): void ns3::Buffer::Iterator::WriteU64(uint64_t data) [member function]
    cls.add_method('WriteU64',
                   'void',
                   [param('uint64_t', 'data')])
    ## buffer.h (module 'network'): void ns3::Buffer::Iterator::WriteU8(uint8_t data) [member function]
    cls.add_method('WriteU8',
                   'void',
                   [param('uint8_t', 'data')])
    ## buffer.h (module 'network'): void ns3::Buffer::Iterator::WriteU8(uint8_t data, uint32_t len) [member function]
    cls.add_method('WriteU8',
                   'void',
                   [param('uint8_t', 'data'), param('uint32_t', 'len')])
    return
def register_Ns3ByteTagIterator_methods(root_module, cls):
    """Bind ns3::ByteTagIterator (packet.h, module 'network')."""
    # Copy constructor.
    cls.add_constructor([param('ns3::ByteTagIterator const &', 'arg0')])
    # bool HasNext() const
    cls.add_method('HasNext', 'bool', [], is_const=True)
    # Item Next()
    cls.add_method('Next', 'ns3::ByteTagIterator::Item', [])
def register_Ns3ByteTagIteratorItem_methods(root_module, cls):
    """Bind the nested ns3::ByteTagIterator::Item class (packet.h, module 'network')."""
    # Copy constructor.
    cls.add_constructor([param('ns3::ByteTagIterator::Item const &', 'arg0')])
    # Byte offsets delimiting the tag's coverage.
    cls.add_method('GetEnd', 'uint32_t', [], is_const=True)
    cls.add_method('GetStart', 'uint32_t', [], is_const=True)
    # void GetTag(ns3::Tag & tag) const — output written through the reference.
    cls.add_method('GetTag', 'void', [param('ns3::Tag &', 'tag')], is_const=True)
    # TypeId of the stored tag.
    cls.add_method('GetTypeId', 'ns3::TypeId', [], is_const=True)
def register_Ns3ByteTagList_methods(root_module, cls):
    """Bind ns3::ByteTagList (byte-tag-list.h, module 'network')."""
    # Default and copy constructors.
    cls.add_constructor([])
    cls.add_constructor([param('ns3::ByteTagList const &', 'o')])
    # TagBuffer Add(TypeId tid, uint32_t bufferSize, int32_t start, int32_t end)
    cls.add_method('Add', 'ns3::TagBuffer',
                   [param('ns3::TypeId', 'tid'), param('uint32_t', 'bufferSize'), param('int32_t', 'start'), param('int32_t', 'end')])
    # void Add(ByteTagList const & o)
    cls.add_method('Add', 'void', [param('ns3::ByteTagList const &', 'o')])
    # Offset bookkeeping helpers.
    cls.add_method('AddAtEnd', 'void', [param('int32_t', 'appendOffset')])
    cls.add_method('AddAtStart', 'void', [param('int32_t', 'prependOffset')])
    cls.add_method('Adjust', 'void', [param('int32_t', 'adjustment')])
    # Iterator Begin(int32_t offsetStart, int32_t offsetEnd) const
    cls.add_method('Begin', 'ns3::ByteTagList::Iterator',
                   [param('int32_t', 'offsetStart'), param('int32_t', 'offsetEnd')],
                   is_const=True)
    # void RemoveAll()
    cls.add_method('RemoveAll', 'void', [])
def register_Ns3ByteTagListIterator_methods(root_module, cls):
    """Bind the nested ns3::ByteTagList::Iterator class (byte-tag-list.h, module 'network')."""
    # Copy constructor.
    cls.add_constructor([param('ns3::ByteTagList::Iterator const &', 'arg0')])
    # uint32_t GetOffsetStart() const
    cls.add_method('GetOffsetStart', 'uint32_t', [], is_const=True)
    # bool HasNext() const
    cls.add_method('HasNext', 'bool', [], is_const=True)
    # Item Next()
    cls.add_method('Next', 'ns3::ByteTagList::Iterator::Item', [])
def register_Ns3ByteTagListIteratorItem_methods(root_module, cls):
    """Bind the nested ns3::ByteTagList::Iterator::Item struct (byte-tag-list.h, module 'network')."""
    # Copy constructor, then the TagBuffer-taking constructor.
    cls.add_constructor([param('ns3::ByteTagList::Iterator::Item const &', 'arg0')])
    cls.add_constructor([param('ns3::TagBuffer', 'buf')])
    # Public data members exposed as mutable instance attributes.
    cls.add_instance_attribute('buf', 'ns3::TagBuffer', is_const=False)
    cls.add_instance_attribute('end', 'int32_t', is_const=False)
    cls.add_instance_attribute('size', 'uint32_t', is_const=False)
    cls.add_instance_attribute('start', 'int32_t', is_const=False)
    cls.add_instance_attribute('tid', 'ns3::TypeId', is_const=False)
def register_Ns3CallbackBase_methods(root_module, cls):
    """Bind ns3::CallbackBase (callback.h, module 'core')."""
    # Copy and default constructors.
    cls.add_constructor([param('ns3::CallbackBase const &', 'arg0')])
    cls.add_constructor([])
    # Ptr<CallbackImplBase> GetImpl() const
    cls.add_method('GetImpl', 'ns3::Ptr< ns3::CallbackImplBase >', [], is_const=True)
    # Protected constructor that wraps an existing implementation pointer.
    cls.add_constructor([param('ns3::Ptr< ns3::CallbackImplBase >', 'impl')],
                        visibility='protected')
def register_Ns3DefaultDeleter__Ns3AttributeAccessor_methods(root_module, cls):
    """Bind ns3::DefaultDeleter<ns3::AttributeAccessor> (default-deleter.h, module 'core')."""
    # Default and copy constructors.
    cls.add_constructor([])
    cls.add_constructor([param('ns3::DefaultDeleter< ns3::AttributeAccessor > const &', 'arg0')])
    # static void Delete(ns3::AttributeAccessor * object)
    cls.add_method('Delete', 'void', [param('ns3::AttributeAccessor *', 'object')], is_static=True)
def register_Ns3DefaultDeleter__Ns3AttributeChecker_methods(root_module, cls):
    """Bind ns3::DefaultDeleter<ns3::AttributeChecker> (default-deleter.h, module 'core')."""
    # Default and copy constructors.
    cls.add_constructor([])
    cls.add_constructor([param('ns3::DefaultDeleter< ns3::AttributeChecker > const &', 'arg0')])
    # static void Delete(ns3::AttributeChecker * object)
    cls.add_method('Delete', 'void', [param('ns3::AttributeChecker *', 'object')], is_static=True)
def register_Ns3DefaultDeleter__Ns3AttributeValue_methods(root_module, cls):
    """Bind ns3::DefaultDeleter<ns3::AttributeValue> (default-deleter.h, module 'core')."""
    # Default and copy constructors.
    cls.add_constructor([])
    cls.add_constructor([param('ns3::DefaultDeleter< ns3::AttributeValue > const &', 'arg0')])
    # static void Delete(ns3::AttributeValue * object)
    cls.add_method('Delete', 'void', [param('ns3::AttributeValue *', 'object')], is_static=True)
def register_Ns3DefaultDeleter__Ns3CallbackImplBase_methods(root_module, cls):
    """Bind ns3::DefaultDeleter<ns3::CallbackImplBase> (default-deleter.h, module 'core')."""
    # Default and copy constructors.
    cls.add_constructor([])
    cls.add_constructor([param('ns3::DefaultDeleter< ns3::CallbackImplBase > const &', 'arg0')])
    # static void Delete(ns3::CallbackImplBase * object)
    cls.add_method('Delete', 'void', [param('ns3::CallbackImplBase *', 'object')], is_static=True)
def register_Ns3DefaultDeleter__Ns3EventImpl_methods(root_module, cls):
    """Register bindings for ns3::DefaultDeleter<ns3::EventImpl> (default-deleter.h, module 'core')."""
    # Default and copy constructors.
    cls.add_constructor([])
    cls.add_constructor(
        [param('ns3::DefaultDeleter< ns3::EventImpl > const &', 'arg0')])
    # static void Delete(ns3::EventImpl * object)
    cls.add_method(
        'Delete', 'void',
        [param('ns3::EventImpl *', 'object')],
        is_static=True)
    return
def register_Ns3DefaultDeleter__Ns3FlowClassifier_methods(root_module, cls):
    """Register bindings for ns3::DefaultDeleter<ns3::FlowClassifier> (default-deleter.h, module 'core')."""
    # Default and copy constructors.
    cls.add_constructor([])
    cls.add_constructor(
        [param('ns3::DefaultDeleter< ns3::FlowClassifier > const &', 'arg0')])
    # static void Delete(ns3::FlowClassifier * object)
    cls.add_method(
        'Delete', 'void',
        [param('ns3::FlowClassifier *', 'object')],
        is_static=True)
    return
def register_Ns3DefaultDeleter__Ns3HashImplementation_methods(root_module, cls):
    """Register bindings for ns3::DefaultDeleter<ns3::Hash::Implementation> (default-deleter.h, module 'core')."""
    # Default and copy constructors.
    cls.add_constructor([])
    cls.add_constructor(
        [param('ns3::DefaultDeleter< ns3::Hash::Implementation > const &', 'arg0')])
    # static void Delete(ns3::Hash::Implementation * object)
    cls.add_method(
        'Delete', 'void',
        [param('ns3::Hash::Implementation *', 'object')],
        is_static=True)
    return
def register_Ns3DefaultDeleter__Ns3NixVector_methods(root_module, cls):
    """Register bindings for ns3::DefaultDeleter<ns3::NixVector> (default-deleter.h, module 'core')."""
    # Default and copy constructors.
    cls.add_constructor([])
    cls.add_constructor(
        [param('ns3::DefaultDeleter< ns3::NixVector > const &', 'arg0')])
    # static void Delete(ns3::NixVector * object)
    cls.add_method(
        'Delete', 'void',
        [param('ns3::NixVector *', 'object')],
        is_static=True)
    return
def register_Ns3DefaultDeleter__Ns3Packet_methods(root_module, cls):
    """Register bindings for ns3::DefaultDeleter<ns3::Packet> (default-deleter.h, module 'core')."""
    # Default and copy constructors.
    cls.add_constructor([])
    cls.add_constructor(
        [param('ns3::DefaultDeleter< ns3::Packet > const &', 'arg0')])
    # static void Delete(ns3::Packet * object)
    cls.add_method(
        'Delete', 'void',
        [param('ns3::Packet *', 'object')],
        is_static=True)
    return
def register_Ns3DefaultDeleter__Ns3TraceSourceAccessor_methods(root_module, cls):
    """Register bindings for ns3::DefaultDeleter<ns3::TraceSourceAccessor> (default-deleter.h, module 'core')."""
    # Default and copy constructors.
    cls.add_constructor([])
    cls.add_constructor(
        [param('ns3::DefaultDeleter< ns3::TraceSourceAccessor > const &', 'arg0')])
    # static void Delete(ns3::TraceSourceAccessor * object)
    cls.add_method(
        'Delete', 'void',
        [param('ns3::TraceSourceAccessor *', 'object')],
        is_static=True)
    return
def register_Ns3EventId_methods(root_module, cls):
    """Register bindings for ns3::EventId (event-id.h, module 'core')."""
    # Comparison operators, in the same order the generator emitted them.
    for op in ('==', '!=', '<'):
        cls.add_binary_comparison_operator(op)
    # Constructors: copy, default, and full (impl, ts, context, uid).
    cls.add_constructor([param('ns3::EventId const &', 'arg0')])
    cls.add_constructor([])
    cls.add_constructor(
        [param('ns3::Ptr< ns3::EventImpl > const &', 'impl'),
         param('uint64_t', 'ts'),
         param('uint32_t', 'context'),
         param('uint32_t', 'uid')])
    # void Cancel()
    cls.add_method('Cancel', 'void', [])
    # Const, no-argument accessors and predicates (declaration order preserved).
    for name, retval in (('GetContext', 'uint32_t'),
                         ('GetTs', 'uint64_t'),
                         ('GetUid', 'uint32_t'),
                         ('IsExpired', 'bool'),
                         ('IsRunning', 'bool'),
                         ('PeekEventImpl', 'ns3::EventImpl *')):
        cls.add_method(name, retval, [], is_const=True)
    return
def register_Ns3FlowMonitorHelper_methods(root_module, cls):
    """Register bindings for ns3::FlowMonitorHelper (flow-monitor-helper.h, module 'flow-monitor')."""
    monitor_ptr = 'ns3::Ptr< ns3::FlowMonitor >'
    classifier_ptr = 'ns3::Ptr< ns3::FlowClassifier >'
    # Default constructor only.
    cls.add_constructor([])
    # void SetMonitorAttribute(std::string n1, AttributeValue const & v1)
    cls.add_method('SetMonitorAttribute', 'void',
                   [param('std::string', 'n1'),
                    param('ns3::AttributeValue const &', 'v1')])
    # Install overloads, InstallAll and GetMonitor all return Ptr<FlowMonitor>.
    cls.add_method('Install', monitor_ptr, [param('ns3::NodeContainer', 'nodes')])
    cls.add_method('Install', monitor_ptr, [param('ns3::Ptr< ns3::Node >', 'node')])
    cls.add_method('InstallAll', monitor_ptr, [])
    cls.add_method('GetMonitor', monitor_ptr, [])
    # IPv4 and IPv6 flow classifiers.
    cls.add_method('GetClassifier', classifier_ptr, [])
    cls.add_method('GetClassifier6', classifier_ptr, [])
    # XML serialization: to a stream, to a string, and to a file.
    cls.add_method('SerializeToXmlStream', 'void',
                   [param('std::ostream &', 'os'),
                    param('uint16_t', 'indent'),
                    param('bool', 'enableHistograms'),
                    param('bool', 'enableProbes')])
    cls.add_method('SerializeToXmlString', 'std::string',
                   [param('uint16_t', 'indent'),
                    param('bool', 'enableHistograms'),
                    param('bool', 'enableProbes')])
    cls.add_method('SerializeToXmlFile', 'void',
                   [param('std::string', 'fileName'),
                    param('bool', 'enableHistograms'),
                    param('bool', 'enableProbes')])
    return
def register_Ns3Hasher_methods(root_module, cls):
    """Register bindings for ns3::Hasher (hash.h, module 'core')."""
    # Constructors: copy, default, and from a hash implementation pointer.
    cls.add_constructor([param('ns3::Hasher const &', 'arg0')])
    cls.add_constructor([])
    cls.add_constructor([param('ns3::Ptr< ns3::Hash::Implementation >', 'hp')])
    # GetHash32 / GetHash64, each overloaded on (buffer, size) and (string);
    # the loop preserves the generator's registration order.
    for name, retval in (('GetHash32', 'uint32_t'), ('GetHash64', 'uint64_t')):
        cls.add_method(name, retval,
                       [param('char const *', 'buffer'),
                        param('std::size_t const', 'size')])
        cls.add_method(name, retval, [param('std::string const', 's')])
    # Hasher & clear() -- returns a reference, allowing call chaining.
    cls.add_method('clear', 'ns3::Hasher &', [])
    return
def register_Ns3Histogram_methods(root_module, cls):
    """Register bindings for ns3::Histogram (histogram.h, module 'flow-monitor')."""
    # Constructors: copy, explicit bin width, and default.
    cls.add_constructor([param('ns3::Histogram const &', 'arg0')])
    cls.add_constructor([param('double', 'binWidth')])
    cls.add_constructor([])
    # void AddValue(double value)
    cls.add_method('AddValue', 'void', [param('double', 'value')])
    # Per-bin accessors taking a bin index.
    cls.add_method('GetBinCount', 'uint32_t', [param('uint32_t', 'index')])
    cls.add_method('GetBinEnd', 'double', [param('uint32_t', 'index')])
    cls.add_method('GetBinStart', 'double', [param('uint32_t', 'index')])
    cls.add_method('GetBinWidth', 'double', [param('uint32_t', 'index')],
                   is_const=True)
    # uint32_t GetNBins() const
    cls.add_method('GetNBins', 'uint32_t', [], is_const=True)
    # XML serialization of the histogram contents.
    cls.add_method('SerializeToXmlStream', 'void',
                   [param('std::ostream &', 'os'),
                    param('uint16_t', 'indent'),
                    param('std::string', 'elementName')],
                   is_const=True)
    # void SetDefaultBinWidth(double binWidth)
    cls.add_method('SetDefaultBinWidth', 'void', [param('double', 'binWidth')])
    return
def register_Ns3Inet6SocketAddress_methods(root_module, cls):
    """Register bindings for ns3::Inet6SocketAddress (inet6-socket-address.h, module 'network')."""
    # Constructors: copy, then each (address, port) combination, where the
    # address may be an Ipv6Address or a C string, and either part may be
    # omitted.
    cls.add_constructor([param('ns3::Inet6SocketAddress const &', 'arg0')])
    cls.add_constructor([param('ns3::Ipv6Address', 'ipv6'), param('uint16_t', 'port')])
    cls.add_constructor([param('ns3::Ipv6Address', 'ipv6')])
    cls.add_constructor([param('uint16_t', 'port')])
    cls.add_constructor([param('char const *', 'ipv6'), param('uint16_t', 'port')])
    cls.add_constructor([param('char const *', 'ipv6')])
    # static Inet6SocketAddress ConvertFrom(Address const &)
    cls.add_method('ConvertFrom', 'ns3::Inet6SocketAddress',
                   [param('ns3::Address const &', 'addr')],
                   is_static=True)
    # Const getters.
    cls.add_method('GetIpv6', 'ns3::Ipv6Address', [], is_const=True)
    cls.add_method('GetPort', 'uint16_t', [], is_const=True)
    # static bool IsMatchingType(Address const &)
    cls.add_method('IsMatchingType', 'bool',
                   [param('ns3::Address const &', 'addr')],
                   is_static=True)
    # Setters.
    cls.add_method('SetIpv6', 'void', [param('ns3::Ipv6Address', 'ipv6')])
    cls.add_method('SetPort', 'void', [param('uint16_t', 'port')])
    return
def register_Ns3InetSocketAddress_methods(root_module, cls):
    """Register bindings for ns3::InetSocketAddress (inet-socket-address.h, module 'network')."""
    # Constructors: copy, then each (address, port) combination, where the
    # address may be an Ipv4Address or a C string, and either part may be
    # omitted.
    cls.add_constructor([param('ns3::InetSocketAddress const &', 'arg0')])
    cls.add_constructor([param('ns3::Ipv4Address', 'ipv4'), param('uint16_t', 'port')])
    cls.add_constructor([param('ns3::Ipv4Address', 'ipv4')])
    cls.add_constructor([param('uint16_t', 'port')])
    cls.add_constructor([param('char const *', 'ipv4'), param('uint16_t', 'port')])
    cls.add_constructor([param('char const *', 'ipv4')])
    # static InetSocketAddress ConvertFrom(Address const &)
    cls.add_method('ConvertFrom', 'ns3::InetSocketAddress',
                   [param('ns3::Address const &', 'address')],
                   is_static=True)
    # Const getters (address, port, type-of-service byte).
    cls.add_method('GetIpv4', 'ns3::Ipv4Address', [], is_const=True)
    cls.add_method('GetPort', 'uint16_t', [], is_const=True)
    cls.add_method('GetTos', 'uint8_t', [], is_const=True)
    # static bool IsMatchingType(Address const &)
    cls.add_method('IsMatchingType', 'bool',
                   [param('ns3::Address const &', 'address')],
                   is_static=True)
    # Setters.
    cls.add_method('SetIpv4', 'void', [param('ns3::Ipv4Address', 'address')])
    cls.add_method('SetPort', 'void', [param('uint16_t', 'port')])
    cls.add_method('SetTos', 'void', [param('uint8_t', 'tos')])
    return
def register_Ns3Ipv4Address_methods(root_module, cls):
    """Register bindings for ns3::Ipv4Address (ipv4-address.h, module 'network')."""
    addr_t = 'ns3::Ipv4Address'
    mask_ref = 'ns3::Ipv4Mask const &'
    # Stream output and comparison operators.
    cls.add_output_stream_operator()
    for op in ('==', '!=', '<'):
        cls.add_binary_comparison_operator(op)
    # Constructors: copy, default, from raw 32-bit value, from dotted string.
    cls.add_constructor([param('ns3::Ipv4Address const &', 'arg0')])
    cls.add_constructor([])
    cls.add_constructor([param('uint32_t', 'address')])
    cls.add_constructor([param('char const *', 'address')])
    # Ipv4Address CombineMask(Ipv4Mask const &) const
    cls.add_method('CombineMask', addr_t, [param(mask_ref, 'mask')],
                   is_const=True)
    # Static conversion / deserialization helpers.
    cls.add_method('ConvertFrom', addr_t,
                   [param('ns3::Address const &', 'address')], is_static=True)
    cls.add_method('Deserialize', addr_t,
                   [param('uint8_t const *', 'buf')], is_static=True)
    # uint32_t Get() const
    cls.add_method('Get', 'uint32_t', [], is_const=True)
    # Static well-known-address factories (registration order preserved).
    for factory in ('GetAny', 'GetBroadcast', 'GetLoopback'):
        cls.add_method(factory, addr_t, [], is_static=True)
    cls.add_method('GetSubnetDirectedBroadcast', addr_t,
                   [param(mask_ref, 'mask')], is_const=True)
    cls.add_method('GetZero', addr_t, [], is_static=True)
    # Predicates.
    cls.add_method('IsAny', 'bool', [], is_const=True)
    cls.add_method('IsBroadcast', 'bool', [], is_const=True)
    cls.add_method('IsEqual', 'bool',
                   [param('ns3::Ipv4Address const &', 'other')], is_const=True)
    cls.add_method('IsLocalMulticast', 'bool', [], is_const=True)
    cls.add_method('IsLocalhost', 'bool', [], is_const=True)
    cls.add_method('IsMatchingType', 'bool',
                   [param('ns3::Address const &', 'address')], is_static=True)
    cls.add_method('IsMulticast', 'bool', [], is_const=True)
    cls.add_method('IsSubnetDirectedBroadcast', 'bool',
                   [param(mask_ref, 'mask')], is_const=True)
    # Output and raw serialization.
    cls.add_method('Print', 'void', [param('std::ostream &', 'os')],
                   is_const=True)
    cls.add_method('Serialize', 'void', [param('uint8_t *', 'buf')],
                   is_const=True)
    # Setters: from raw 32-bit value or dotted string.
    cls.add_method('Set', 'void', [param('uint32_t', 'address')])
    cls.add_method('Set', 'void', [param('char const *', 'address')])
    return
def register_Ns3Ipv4InterfaceAddress_methods(root_module, cls):
    """Register bindings for ns3::Ipv4InterfaceAddress (ipv4-interface-address.h, module 'internet')."""
    scope_t = 'ns3::Ipv4InterfaceAddress::InterfaceAddressScope_e'
    # Stream output and equality operators.
    cls.add_output_stream_operator()
    for op in ('==', '!='):
        cls.add_binary_comparison_operator(op)
    # Constructors: default, (local, mask), and copy.
    cls.add_constructor([])
    cls.add_constructor([param('ns3::Ipv4Address', 'local'),
                         param('ns3::Ipv4Mask', 'mask')])
    cls.add_constructor([param('ns3::Ipv4InterfaceAddress const &', 'o')])
    # Const getters.
    cls.add_method('GetBroadcast', 'ns3::Ipv4Address', [], is_const=True)
    cls.add_method('GetLocal', 'ns3::Ipv4Address', [], is_const=True)
    cls.add_method('GetMask', 'ns3::Ipv4Mask', [], is_const=True)
    cls.add_method('GetScope', scope_t, [], is_const=True)
    cls.add_method('IsSecondary', 'bool', [], is_const=True)
    # Setters.
    cls.add_method('SetBroadcast', 'void', [param('ns3::Ipv4Address', 'broadcast')])
    cls.add_method('SetLocal', 'void', [param('ns3::Ipv4Address', 'local')])
    cls.add_method('SetMask', 'void', [param('ns3::Ipv4Mask', 'mask')])
    cls.add_method('SetPrimary', 'void', [])
    cls.add_method('SetScope', 'void', [param(scope_t, 'scope')])
    cls.add_method('SetSecondary', 'void', [])
    return
def register_Ns3Ipv4Mask_methods(root_module, cls):
    """Register bindings for ns3::Ipv4Mask (ipv4-address.h, module 'network')."""
    # Stream output and equality operators.
    cls.add_output_stream_operator()
    for op in ('==', '!='):
        cls.add_binary_comparison_operator(op)
    # Constructors: copy, default, from raw 32-bit mask, from dotted string.
    cls.add_constructor([param('ns3::Ipv4Mask const &', 'arg0')])
    cls.add_constructor([])
    cls.add_constructor([param('uint32_t', 'mask')])
    cls.add_constructor([param('char const *', 'mask')])
    # Numeric accessors.
    cls.add_method('Get', 'uint32_t', [], is_const=True)
    cls.add_method('GetInverse', 'uint32_t', [], is_const=True)
    # Static well-known-mask factories, interleaved with GetPrefixLength in
    # the generator's original order.
    cls.add_method('GetLoopback', 'ns3::Ipv4Mask', [], is_static=True)
    cls.add_method('GetOnes', 'ns3::Ipv4Mask', [], is_static=True)
    cls.add_method('GetPrefixLength', 'uint16_t', [], is_const=True)
    cls.add_method('GetZero', 'ns3::Ipv4Mask', [], is_static=True)
    # Predicates.
    cls.add_method('IsEqual', 'bool', [param('ns3::Ipv4Mask', 'other')],
                   is_const=True)
    cls.add_method('IsMatch', 'bool',
                   [param('ns3::Ipv4Address', 'a'),
                    param('ns3::Ipv4Address', 'b')],
                   is_const=True)
    # Output and mutation.
    cls.add_method('Print', 'void', [param('std::ostream &', 'os')],
                   is_const=True)
    cls.add_method('Set', 'void', [param('uint32_t', 'mask')])
    return
def register_Ns3Ipv6Address_methods(root_module, cls):
    """Bind ns3::Ipv6Address (ipv6-address.h, module 'network')."""
    # Operators: stream output plus ==, != and < comparisons.
    cls.add_output_stream_operator()
    for op in ('==', '!=', '<'):
        cls.add_binary_comparison_operator(op)
    # Constructors: default, from string, from raw bytes, copy (by ref and by ptr).
    cls.add_constructor([])
    cls.add_constructor([param('char const *', 'address')])
    cls.add_constructor([param('uint8_t *', 'address')])
    cls.add_constructor([param('ns3::Ipv6Address const &', 'addr')])
    cls.add_constructor([param('ns3::Ipv6Address const *', 'addr')])
    # ns3::Ipv6Address CombinePrefix(ns3::Ipv6Prefix const & prefix)
    cls.add_method('CombinePrefix', 'ns3::Ipv6Address', [param('ns3::Ipv6Prefix const &', 'prefix')])
    # Static conversion / deserialization helpers.
    cls.add_method('ConvertFrom', 'ns3::Ipv6Address', [param('ns3::Address const &', 'address')], is_static=True)
    cls.add_method('Deserialize', 'ns3::Ipv6Address', [param('uint8_t const *', 'buf')], is_static=True)
    # Static well-known address getters (first group).
    for name in ('GetAllHostsMulticast', 'GetAllNodesMulticast',
                 'GetAllRoutersMulticast', 'GetAny'):
        cls.add_method(name, 'ns3::Ipv6Address', [], is_static=True)
    # void GetBytes(uint8_t * buf) const
    cls.add_method('GetBytes', 'void', [param('uint8_t *', 'buf')], is_const=True)
    # ns3::Ipv4Address GetIpv4MappedAddress() const
    cls.add_method('GetIpv4MappedAddress', 'ns3::Ipv4Address', [], is_const=True)
    # Static well-known address getters (second group).
    for name in ('GetLoopback', 'GetOnes', 'GetZero'):
        cls.add_method(name, 'ns3::Ipv6Address', [], is_static=True)
    # bool IsAllHostsMulticast() const -- marked deprecated upstream.
    cls.add_method('IsAllHostsMulticast', 'bool', [], deprecated=True, is_const=True)
    for name in ('IsAllNodesMulticast', 'IsAllRoutersMulticast', 'IsAny',
                 'IsDocumentation'):
        cls.add_method(name, 'bool', [], is_const=True)
    # bool IsEqual(ns3::Ipv6Address const & other) const
    cls.add_method('IsEqual', 'bool', [param('ns3::Ipv6Address const &', 'other')], is_const=True)
    for name in ('IsIpv4MappedAddress', 'IsLinkLocal', 'IsLinkLocalMulticast',
                 'IsLocalhost'):
        cls.add_method(name, 'bool', [], is_const=True)
    # static bool IsMatchingType(ns3::Address const & address)
    cls.add_method('IsMatchingType', 'bool', [param('ns3::Address const &', 'address')], is_static=True)
    for name in ('IsMulticast', 'IsSolicitedMulticast'):
        cls.add_method(name, 'bool', [], is_const=True)
    # Static autoconfiguration helpers: one overload per MAC address width.
    for mac_type in ('ns3::Mac16Address', 'ns3::Mac48Address',
                     'ns3::Mac64Address', 'ns3::Mac8Address'):
        cls.add_method('MakeAutoconfiguredAddress', 'ns3::Ipv6Address',
                       [param(mac_type, 'addr'), param('ns3::Ipv6Address', 'prefix')],
                       is_static=True)
    for mac_type in ('ns3::Mac16Address', 'ns3::Mac48Address',
                     'ns3::Mac64Address', 'ns3::Mac8Address'):
        cls.add_method('MakeAutoconfiguredLinkLocalAddress', 'ns3::Ipv6Address',
                       [param(mac_type, 'mac')], is_static=True)
    # static ns3::Ipv6Address MakeIpv4MappedAddress(ns3::Ipv4Address addr)
    cls.add_method('MakeIpv4MappedAddress', 'ns3::Ipv6Address', [param('ns3::Ipv4Address', 'addr')], is_static=True)
    # static ns3::Ipv6Address MakeSolicitedAddress(ns3::Ipv6Address addr)
    cls.add_method('MakeSolicitedAddress', 'ns3::Ipv6Address', [param('ns3::Ipv6Address', 'addr')], is_static=True)
    # void Print(std::ostream & os) const
    cls.add_method('Print', 'void', [param('std::ostream &', 'os')], is_const=True)
    # void Serialize(uint8_t * buf) const
    cls.add_method('Serialize', 'void', [param('uint8_t *', 'buf')], is_const=True)
    # Set overloads: from string or from raw bytes.
    cls.add_method('Set', 'void', [param('char const *', 'address')])
    cls.add_method('Set', 'void', [param('uint8_t *', 'address')])
    return
def register_Ns3Ipv6InterfaceAddress_methods(root_module, cls):
    """Bind ns3::Ipv6InterfaceAddress (ipv6-interface-address.h, module 'internet')."""
    cls.add_output_stream_operator()
    for op in ('==', '!='):
        cls.add_binary_comparison_operator(op)
    # Constructors: default, address-only, address + prefix, copy.
    cls.add_constructor([])
    cls.add_constructor([param('ns3::Ipv6Address', 'address')])
    cls.add_constructor([param('ns3::Ipv6Address', 'address'), param('ns3::Ipv6Prefix', 'prefix')])
    cls.add_constructor([param('ns3::Ipv6InterfaceAddress const &', 'o')])
    # Const accessors (method name -> return type).
    for name, retval in (('GetAddress', 'ns3::Ipv6Address'),
                         ('GetNsDadUid', 'uint32_t'),
                         ('GetPrefix', 'ns3::Ipv6Prefix'),
                         ('GetScope', 'ns3::Ipv6InterfaceAddress::Scope_e'),
                         ('GetState', 'ns3::Ipv6InterfaceAddress::State_e')):
        cls.add_method(name, retval, [], is_const=True)
    # bool IsInSameSubnet(ns3::Ipv6Address b) const
    cls.add_method('IsInSameSubnet', 'bool', [param('ns3::Ipv6Address', 'b')], is_const=True)
    # Mutators (method name -> single parameter).
    for name, arg in (('SetAddress', param('ns3::Ipv6Address', 'address')),
                      ('SetNsDadUid', param('uint32_t', 'uid')),
                      ('SetScope', param('ns3::Ipv6InterfaceAddress::Scope_e', 'scope')),
                      ('SetState', param('ns3::Ipv6InterfaceAddress::State_e', 'state'))):
        cls.add_method(name, 'void', [arg])
    return
def register_Ns3Ipv6Prefix_methods(root_module, cls):
    """Bind ns3::Ipv6Prefix (ipv6-address.h, module 'network')."""
    cls.add_output_stream_operator()
    for op in ('==', '!='):
        cls.add_binary_comparison_operator(op)
    # Constructors: default, raw bytes, string, prefix length, copy (ref/ptr).
    cls.add_constructor([])
    cls.add_constructor([param('uint8_t *', 'prefix')])
    cls.add_constructor([param('char const *', 'prefix')])
    cls.add_constructor([param('uint8_t', 'prefix')])
    cls.add_constructor([param('ns3::Ipv6Prefix const &', 'prefix')])
    cls.add_constructor([param('ns3::Ipv6Prefix const *', 'prefix')])
    # void GetBytes(uint8_t * buf) const
    cls.add_method('GetBytes', 'void', [param('uint8_t *', 'buf')], is_const=True)
    # Static well-known prefix getters.
    for name in ('GetLoopback', 'GetOnes'):
        cls.add_method(name, 'ns3::Ipv6Prefix', [], is_static=True)
    # uint8_t GetPrefixLength() const
    cls.add_method('GetPrefixLength', 'uint8_t', [], is_const=True)
    # static ns3::Ipv6Prefix GetZero()
    cls.add_method('GetZero', 'ns3::Ipv6Prefix', [], is_static=True)
    # bool IsEqual(ns3::Ipv6Prefix const & other) const
    cls.add_method('IsEqual', 'bool', [param('ns3::Ipv6Prefix const &', 'other')], is_const=True)
    # bool IsMatch(ns3::Ipv6Address a, ns3::Ipv6Address b) const
    cls.add_method('IsMatch', 'bool',
                   [param('ns3::Ipv6Address', 'a'), param('ns3::Ipv6Address', 'b')],
                   is_const=True)
    # void Print(std::ostream & os) const
    cls.add_method('Print', 'void', [param('std::ostream &', 'os')], is_const=True)
    return
def register_Ns3Mac48Address_methods(root_module, cls):
    """Bind ns3::Mac48Address (mac48-address.h, module 'network')."""
    for op in ('==', '!=', '<'):
        cls.add_binary_comparison_operator(op)
    cls.add_output_stream_operator()
    # Constructors: copy, default, from "xx:xx:xx:xx:xx:xx" style string.
    cls.add_constructor([param('ns3::Mac48Address const &', 'arg0')])
    cls.add_constructor([])
    cls.add_constructor([param('char const *', 'str')])
    # static ns3::Mac48Address Allocate()
    cls.add_method('Allocate', 'ns3::Mac48Address', [], is_static=True)
    # static ns3::Mac48Address ConvertFrom(ns3::Address const & address)
    cls.add_method('ConvertFrom', 'ns3::Mac48Address', [param('ns3::Address const &', 'address')], is_static=True)
    # Raw byte copy helpers.
    cls.add_method('CopyFrom', 'void', [param('uint8_t const *', 'buffer')])
    cls.add_method('CopyTo', 'void', [param('uint8_t *', 'buffer')], is_const=True)
    # static ns3::Mac48Address GetBroadcast()
    cls.add_method('GetBroadcast', 'ns3::Mac48Address', [], is_static=True)
    # static GetMulticast overloads: from IPv4 and from IPv6 group addresses.
    cls.add_method('GetMulticast', 'ns3::Mac48Address', [param('ns3::Ipv4Address', 'address')], is_static=True)
    cls.add_method('GetMulticast', 'ns3::Mac48Address', [param('ns3::Ipv6Address', 'address')], is_static=True)
    # Static multicast prefix getters.
    for name in ('GetMulticast6Prefix', 'GetMulticastPrefix'):
        cls.add_method(name, 'ns3::Mac48Address', [], is_static=True)
    # Predicates.
    for name in ('IsBroadcast', 'IsGroup'):
        cls.add_method(name, 'bool', [], is_const=True)
    # static bool IsMatchingType(ns3::Address const & address)
    cls.add_method('IsMatchingType', 'bool', [param('ns3::Address const &', 'address')], is_static=True)
    return
def register_Ns3Mac8Address_methods(root_module, cls):
    """Bind ns3::Mac8Address (mac8-address.h, module 'network')."""
    for op in ('<', '==', '!='):
        cls.add_binary_comparison_operator(op)
    cls.add_output_stream_operator()
    # Constructors: copy, default, from a single byte.
    cls.add_constructor([param('ns3::Mac8Address const &', 'arg0')])
    cls.add_constructor([])
    cls.add_constructor([param('uint8_t', 'addr')])
    # static ns3::Mac8Address Allocate()
    cls.add_method('Allocate', 'ns3::Mac8Address', [], is_static=True)
    # static ns3::Mac8Address ConvertFrom(ns3::Address const & address)
    cls.add_method('ConvertFrom', 'ns3::Mac8Address', [param('ns3::Address const &', 'address')], is_static=True)
    # Raw byte copy helpers.
    cls.add_method('CopyFrom', 'void', [param('uint8_t const *', 'pBuffer')])
    cls.add_method('CopyTo', 'void', [param('uint8_t *', 'pBuffer')], is_const=True)
    # static ns3::Mac8Address GetBroadcast()
    cls.add_method('GetBroadcast', 'ns3::Mac8Address', [], is_static=True)
    # static bool IsMatchingType(ns3::Address const & address)
    cls.add_method('IsMatchingType', 'bool', [param('ns3::Address const &', 'address')], is_static=True)
    return
def register_Ns3NodeContainer_methods(root_module, cls):
    """Bind ns3::NodeContainer (node-container.h, module 'network')."""
    # Constructors: copy, default, single node (by Ptr or by registered name),
    # then concatenations of two to five existing containers.
    cls.add_constructor([param('ns3::NodeContainer const &', 'arg0')])
    cls.add_constructor([])
    cls.add_constructor([param('ns3::Ptr< ns3::Node >', 'node')])
    cls.add_constructor([param('std::string', 'nodeName')])
    arg_names = ('a', 'b', 'c', 'd', 'e')
    for count in range(2, 6):
        cls.add_constructor([param('ns3::NodeContainer const &', arg_name)
                             for arg_name in arg_names[:count]])
    # Add overloads: whole container, single Ptr<Node>, node by name.
    cls.add_method('Add', 'void', [param('ns3::NodeContainer', 'other')])
    cls.add_method('Add', 'void', [param('ns3::Ptr< ns3::Node >', 'node')])
    cls.add_method('Add', 'void', [param('std::string', 'nodeName')])
    # Iteration and lookup.
    cls.add_method('Begin', 'ns3::NodeContainer::Iterator', [], is_const=True)
    cls.add_method('Contains', 'bool', [param('uint32_t', 'id')], is_const=True)
    # Create overloads: n nodes, optionally placed on a given system id.
    cls.add_method('Create', 'void', [param('uint32_t', 'n')])
    cls.add_method('Create', 'void', [param('uint32_t', 'n'), param('uint32_t', 'systemId')])
    cls.add_method('End', 'ns3::NodeContainer::Iterator', [], is_const=True)
    cls.add_method('Get', 'ns3::Ptr< ns3::Node >', [param('uint32_t', 'i')], is_const=True)
    # static ns3::NodeContainer GetGlobal()
    cls.add_method('GetGlobal', 'ns3::NodeContainer', [], is_static=True)
    cls.add_method('GetN', 'uint32_t', [], is_const=True)
    return
def register_Ns3ObjectBase_methods(root_module, cls):
    """Bind ns3::ObjectBase (object-base.h, module 'core')."""
    # Constructors: default and copy.
    cls.add_constructor([])
    cls.add_constructor([param('ns3::ObjectBase const &', 'arg0')])
    # Attribute access; the FailSafe variants report success via bool.
    cls.add_method('GetAttribute', 'void',
                   [param('std::string', 'name'), param('ns3::AttributeValue &', 'value')],
                   is_const=True)
    cls.add_method('GetAttributeFailSafe', 'bool',
                   [param('std::string', 'name'), param('ns3::AttributeValue &', 'value')],
                   is_const=True)
    # ns3::TypeId GetInstanceTypeId() const [pure virtual]
    cls.add_method('GetInstanceTypeId', 'ns3::TypeId', [],
                   is_pure_virtual=True, is_const=True, is_virtual=True)
    # static ns3::TypeId GetTypeId()
    cls.add_method('GetTypeId', 'ns3::TypeId', [], is_static=True)
    cls.add_method('SetAttribute', 'void',
                   [param('std::string', 'name'), param('ns3::AttributeValue const &', 'value')])
    cls.add_method('SetAttributeFailSafe', 'bool',
                   [param('std::string', 'name'), param('ns3::AttributeValue const &', 'value')])
    # Trace source (dis)connection, with and without a context string.
    cls.add_method('TraceConnect', 'bool',
                   [param('std::string', 'name'), param('std::string', 'context'), param('ns3::CallbackBase const &', 'cb')])
    cls.add_method('TraceConnectWithoutContext', 'bool',
                   [param('std::string', 'name'), param('ns3::CallbackBase const &', 'cb')])
    cls.add_method('TraceDisconnect', 'bool',
                   [param('std::string', 'name'), param('std::string', 'context'), param('ns3::CallbackBase const &', 'cb')])
    cls.add_method('TraceDisconnectWithoutContext', 'bool',
                   [param('std::string', 'name'), param('ns3::CallbackBase const &', 'cb')])
    # Protected construction hooks.
    cls.add_method('ConstructSelf', 'void',
                   [param('ns3::AttributeConstructionList const &', 'attributes')],
                   visibility='protected')
    cls.add_method('NotifyConstructionCompleted', 'void', [],
                   visibility='protected', is_virtual=True)
    return
def register_Ns3ObjectDeleter_methods(root_module, cls):
    """Bind ns3::ObjectDeleter (object.h, module 'core')."""
    # Constructors: default and copy.
    cls.add_constructor([])
    cls.add_constructor([param('ns3::ObjectDeleter const &', 'arg0')])
    # static void Delete(ns3::Object * object)
    cls.add_method('Delete', 'void', [param('ns3::Object *', 'object')], is_static=True)
    return
def register_Ns3ObjectFactory_methods(root_module, cls):
    """Bind ns3::ObjectFactory (object-factory.h, module 'core')."""
    cls.add_output_stream_operator()
    # Constructors: copy, default, from a TypeId name string.
    cls.add_constructor([param('ns3::ObjectFactory const &', 'arg0')])
    cls.add_constructor([])
    cls.add_constructor([param('std::string', 'typeId')])
    # ns3::Ptr<ns3::Object> Create() const
    cls.add_method('Create', 'ns3::Ptr< ns3::Object >', [], is_const=True)
    # ns3::TypeId GetTypeId() const
    cls.add_method('GetTypeId', 'ns3::TypeId', [], is_const=True)
    # void Set(std::string name, ns3::AttributeValue const & value)
    cls.add_method('Set', 'void',
                   [param('std::string', 'name'), param('ns3::AttributeValue const &', 'value')])
    # SetTypeId overloads: TypeId object, C string, std::string.
    cls.add_method('SetTypeId', 'void', [param('ns3::TypeId', 'tid')])
    cls.add_method('SetTypeId', 'void', [param('char const *', 'tid')])
    cls.add_method('SetTypeId', 'void', [param('std::string', 'tid')])
    return
def register_Ns3PacketMetadata_methods(root_module, cls):
    """Register bindings for ns3::PacketMetadata (packet-metadata.h, network module)."""
    # Constructors: (uid, size) and copy.
    cls.add_constructor([param('uint64_t', 'uid'), param('uint32_t', 'size')])
    cls.add_constructor([param('ns3::PacketMetadata const &', 'o')])
    # Mutators that grow the metadata record.
    cls.add_method('AddAtEnd', 'void', [param('ns3::PacketMetadata const &', 'o')])
    cls.add_method('AddHeader', 'void', [param('ns3::Header const &', 'header'), param('uint32_t', 'size')])
    cls.add_method('AddPaddingAtEnd', 'void', [param('uint32_t', 'end')])
    cls.add_method('AddTrailer', 'void', [param('ns3::Trailer const &', 'trailer'), param('uint32_t', 'size')])
    cls.add_method('BeginItem', 'ns3::PacketMetadata::ItemIterator', [param('ns3::Buffer', 'buffer')], is_const=True)
    cls.add_method('CreateFragment', 'ns3::PacketMetadata', [param('uint32_t', 'start'), param('uint32_t', 'end')], is_const=True)
    cls.add_method('Deserialize', 'uint32_t', [param('uint8_t const *', 'buffer'), param('uint32_t', 'size')])
    # Static switches enabling metadata tracking globally.
    cls.add_method('Enable', 'void', [], is_static=True)
    cls.add_method('EnableChecking', 'void', [], is_static=True)
    cls.add_method('GetSerializedSize', 'uint32_t', [], is_const=True)
    cls.add_method('GetUid', 'uint64_t', [], is_const=True)
    # Mutators that shrink the metadata record.
    cls.add_method('RemoveAtEnd', 'void', [param('uint32_t', 'end')])
    cls.add_method('RemoveAtStart', 'void', [param('uint32_t', 'start')])
    cls.add_method('RemoveHeader', 'void', [param('ns3::Header const &', 'header'), param('uint32_t', 'size')])
    cls.add_method('RemoveTrailer', 'void', [param('ns3::Trailer const &', 'trailer'), param('uint32_t', 'size')])
    cls.add_method('Serialize', 'uint32_t', [param('uint8_t *', 'buffer'), param('uint32_t', 'maxSize')], is_const=True)
    return
def register_Ns3PacketMetadataItem_methods(root_module, cls):
    """Register bindings for ns3::PacketMetadata::Item (packet-metadata.h, network module)."""
    cls.add_constructor([])
    cls.add_constructor([param('ns3::PacketMetadata::Item const &', 'arg0')])
    # Public data members, all writable from Python.
    for member, cpp_type in (
        ('current', 'ns3::Buffer::Iterator'),
        ('currentSize', 'uint32_t'),
        ('currentTrimedFromEnd', 'uint32_t'),
        ('currentTrimedFromStart', 'uint32_t'),
        ('isFragment', 'bool'),
        ('tid', 'ns3::TypeId'),
        ('type', 'ns3::PacketMetadata::Item::ItemType'),
    ):
        cls.add_instance_attribute(member, cpp_type, is_const=False)
    return
def register_Ns3PacketMetadataItemIterator_methods(root_module, cls):
    """Register bindings for ns3::PacketMetadata::ItemIterator (packet-metadata.h, network module)."""
    # Constructors: copy, and (metadata, buffer).
    cls.add_constructor([param('ns3::PacketMetadata::ItemIterator const &', 'arg0')])
    cls.add_constructor([param('ns3::PacketMetadata const *', 'metadata'), param('ns3::Buffer', 'buffer')])
    cls.add_method('HasNext', 'bool', [], is_const=True)
    cls.add_method('Next', 'ns3::PacketMetadata::Item', [])
    return
def register_Ns3PacketTagIterator_methods(root_module, cls):
    """Register bindings for ns3::PacketTagIterator (packet.h, network module)."""
    cls.add_constructor([param('ns3::PacketTagIterator const &', 'arg0')])
    cls.add_method('HasNext', 'bool', [], is_const=True)
    cls.add_method('Next', 'ns3::PacketTagIterator::Item', [])
    return
def register_Ns3PacketTagIteratorItem_methods(root_module, cls):
    """Register bindings for ns3::PacketTagIterator::Item (packet.h, network module)."""
    cls.add_constructor([param('ns3::PacketTagIterator::Item const &', 'arg0')])
    cls.add_method('GetTag', 'void', [param('ns3::Tag &', 'tag')], is_const=True)
    cls.add_method('GetTypeId', 'ns3::TypeId', [], is_const=True)
    return
def register_Ns3PacketTagList_methods(root_module, cls):
    """Register bindings for ns3::PacketTagList (packet-tag-list.h, network module)."""
    cls.add_constructor([])
    cls.add_constructor([param('ns3::PacketTagList const &', 'o')])
    # NOTE: Add is declared const in the C++ header (copy-on-write list).
    cls.add_method('Add', 'void', [param('ns3::Tag const &', 'tag')], is_const=True)
    cls.add_method('Head', 'ns3::PacketTagList::TagData const *', [], is_const=True)
    cls.add_method('Peek', 'bool', [param('ns3::Tag &', 'tag')], is_const=True)
    cls.add_method('Remove', 'bool', [param('ns3::Tag &', 'tag')])
    cls.add_method('RemoveAll', 'void', [])
    cls.add_method('Replace', 'bool', [param('ns3::Tag &', 'tag')])
    return
def register_Ns3PacketTagListTagData_methods(root_module, cls):
    """Register bindings for ns3::PacketTagList::TagData (packet-tag-list.h, network module)."""
    cls.add_constructor([])
    cls.add_constructor([param('ns3::PacketTagList::TagData const &', 'arg0')])
    # Public data members of the intrusive linked-list node.
    for member, cpp_type in (
        ('count', 'uint32_t'),
        ('data', 'uint8_t [ 1 ]'),
        ('next', 'ns3::PacketTagList::TagData *'),
        ('size', 'uint32_t'),
        ('tid', 'ns3::TypeId'),
    ):
        cls.add_instance_attribute(member, cpp_type, is_const=False)
    return
def register_Ns3SimpleRefCount__Ns3Object_Ns3ObjectBase_Ns3ObjectDeleter_methods(root_module, cls):
    """Register bindings for the SimpleRefCount<Object, ObjectBase, ObjectDeleter> instantiation (simple-ref-count.h, core module)."""
    # Default and copy constructors only.
    cls.add_constructor([])
    cls.add_constructor([param('ns3::SimpleRefCount< ns3::Object, ns3::ObjectBase, ns3::ObjectDeleter > const &', 'o')])
    return
def register_Ns3Simulator_methods(root_module, cls):
    """Register bindings for ns3::Simulator (simulator.h, core module).

    Every method is static; the class is a facade over the simulator
    implementation singleton.
    """
    cls.add_constructor([param('ns3::Simulator const &', 'arg0')])
    cls.add_method('Cancel', 'void', [param('ns3::EventId const &', 'id')], is_static=True)
    cls.add_method('Destroy', 'void', [], is_static=True)
    cls.add_method('GetContext', 'uint32_t', [], is_static=True)
    cls.add_method('GetDelayLeft', 'ns3::Time', [param('ns3::EventId const &', 'id')], is_static=True)
    cls.add_method('GetEventCount', 'uint64_t', [], is_static=True)
    cls.add_method('GetImplementation', 'ns3::Ptr< ns3::SimulatorImpl >', [], is_static=True)
    cls.add_method('GetMaximumSimulationTime', 'ns3::Time', [], is_static=True)
    cls.add_method('GetSystemId', 'uint32_t', [], is_static=True)
    cls.add_method('IsExpired', 'bool', [param('ns3::EventId const &', 'id')], is_static=True)
    cls.add_method('IsFinished', 'bool', [], is_static=True)
    cls.add_method('Now', 'ns3::Time', [], is_static=True)
    cls.add_method('Remove', 'void', [param('ns3::EventId const &', 'id')], is_static=True)
    cls.add_method('SetImplementation', 'void', [param('ns3::Ptr< ns3::SimulatorImpl >', 'impl')], is_static=True)
    cls.add_method('SetScheduler', 'void', [param('ns3::ObjectFactory', 'schedulerFactory')], is_static=True)
    # Stop overloads: immediate stop, and stop after a delay.
    cls.add_method('Stop', 'void', [], is_static=True)
    cls.add_method('Stop', 'void', [param('ns3::Time const &', 'delay')], is_static=True)
    return
def register_Ns3Tag_methods(root_module, cls):
    """Register bindings for ns3::Tag (tag.h, network module), the abstract tag base class."""
    cls.add_constructor([])
    cls.add_constructor([param('ns3::Tag const &', 'arg0')])
    # Pure-virtual serialization contract implemented by concrete tags.
    cls.add_method('Deserialize', 'void', [param('ns3::TagBuffer', 'i')], is_pure_virtual=True, is_virtual=True)
    cls.add_method('GetSerializedSize', 'uint32_t', [], is_pure_virtual=True, is_const=True, is_virtual=True)
    cls.add_method('GetTypeId', 'ns3::TypeId', [], is_static=True)
    cls.add_method('Print', 'void', [param('std::ostream &', 'os')], is_pure_virtual=True, is_const=True, is_virtual=True)
    cls.add_method('Serialize', 'void', [param('ns3::TagBuffer', 'i')], is_pure_virtual=True, is_const=True, is_virtual=True)
    return
def register_Ns3TagBuffer_methods(root_module, cls):
    """Register bindings for ns3::TagBuffer (tag-buffer.h, network module)."""
    # Constructors: copy, and raw [start, end) byte range.
    cls.add_constructor([param('ns3::TagBuffer const &', 'arg0')])
    cls.add_constructor([param('uint8_t *', 'start'), param('uint8_t *', 'end')])
    cls.add_method('CopyFrom', 'void', [param('ns3::TagBuffer', 'o')])
    cls.add_method('Read', 'void', [param('uint8_t *', 'buffer'), param('uint32_t', 'size')])
    # Fixed-width readers.
    cls.add_method('ReadDouble', 'double', [])
    cls.add_method('ReadU16', 'uint16_t', [])
    cls.add_method('ReadU32', 'uint32_t', [])
    cls.add_method('ReadU64', 'uint64_t', [])
    cls.add_method('ReadU8', 'uint8_t', [])
    cls.add_method('TrimAtEnd', 'void', [param('uint32_t', 'trim')])
    cls.add_method('Write', 'void', [param('uint8_t const *', 'buffer'), param('uint32_t', 'size')])
    # Fixed-width writers.
    cls.add_method('WriteDouble', 'void', [param('double', 'v')])
    cls.add_method('WriteU16', 'void', [param('uint16_t', 'v')])
    cls.add_method('WriteU32', 'void', [param('uint32_t', 'v')])
    cls.add_method('WriteU64', 'void', [param('uint64_t', 'v')])
    cls.add_method('WriteU8', 'void', [param('uint8_t', 'v')])
    return
def register_Ns3TimeWithUnit_methods(root_module, cls):
    """Register bindings for ns3::TimeWithUnit (nstime.h, core module)."""
    cls.add_output_stream_operator()
    # Constructors: copy, then (time, unit).
    for ctor_args in (
        [param('ns3::TimeWithUnit const &', 'arg0')],
        [param('ns3::Time const', 'time'), param('ns3::Time::Unit const', 'unit')],
    ):
        cls.add_constructor(ctor_args)
    return
def register_Ns3TypeId_methods(root_module, cls):
    """Register bindings for ns3::TypeId (type-id.h, core module)."""
    # Comparison and stream operators.
    cls.add_binary_comparison_operator('==')
    cls.add_binary_comparison_operator('!=')
    cls.add_output_stream_operator()
    cls.add_binary_comparison_operator('<')
    # Constructors: by name, default, copy.
    cls.add_constructor([param('char const *', 'name')])
    cls.add_constructor([])
    cls.add_constructor([param('ns3::TypeId const &', 'o')])
    # AddAttribute overloads: without and with an explicit flags word.
    cls.add_method('AddAttribute', 'ns3::TypeId', [param('std::string', 'name'), param('std::string', 'help'), param('ns3::AttributeValue const &', 'initialValue'), param('ns3::Ptr< ns3::AttributeAccessor const >', 'accessor'), param('ns3::Ptr< ns3::AttributeChecker const >', 'checker'), param('ns3::TypeId::SupportLevel', 'supportLevel', default_value='::ns3::TypeId::SupportLevel::SUPPORTED'), param('std::string const &', 'supportMsg', default_value='""')])
    cls.add_method('AddAttribute', 'ns3::TypeId', [param('std::string', 'name'), param('std::string', 'help'), param('uint32_t', 'flags'), param('ns3::AttributeValue const &', 'initialValue'), param('ns3::Ptr< ns3::AttributeAccessor const >', 'accessor'), param('ns3::Ptr< ns3::AttributeChecker const >', 'checker'), param('ns3::TypeId::SupportLevel', 'supportLevel', default_value='::ns3::TypeId::SupportLevel::SUPPORTED'), param('std::string const &', 'supportMsg', default_value='""')])
    # AddTraceSource overloads: callback-less form is deprecated upstream.
    cls.add_method('AddTraceSource', 'ns3::TypeId', [param('std::string', 'name'), param('std::string', 'help'), param('ns3::Ptr< ns3::TraceSourceAccessor const >', 'accessor')], deprecated=True)
    cls.add_method('AddTraceSource', 'ns3::TypeId', [param('std::string', 'name'), param('std::string', 'help'), param('ns3::Ptr< ns3::TraceSourceAccessor const >', 'accessor'), param('std::string', 'callback'), param('ns3::TypeId::SupportLevel', 'supportLevel', default_value='::ns3::TypeId::SupportLevel::SUPPORTED'), param('std::string const &', 'supportMsg', default_value='""')])
    # Attribute and trace-source introspection.
    cls.add_method('GetAttribute', 'ns3::TypeId::AttributeInformation', [param('std::size_t', 'i')], is_const=True)
    cls.add_method('GetAttributeFullName', 'std::string', [param('std::size_t', 'i')], is_const=True)
    cls.add_method('GetAttributeN', 'std::size_t', [], is_const=True)
    cls.add_method('GetConstructor', 'ns3::Callback< ns3::ObjectBase *, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty >', [], is_const=True)
    cls.add_method('GetGroupName', 'std::string', [], is_const=True)
    cls.add_method('GetHash', 'ns3::TypeId::hash_t', [], is_const=True)
    cls.add_method('GetName', 'std::string', [], is_const=True)
    cls.add_method('GetParent', 'ns3::TypeId', [], is_const=True)
    cls.add_method('GetRegistered', 'ns3::TypeId', [param('uint16_t', 'i')], is_static=True)
    cls.add_method('GetRegisteredN', 'uint16_t', [], is_static=True)
    cls.add_method('GetSize', 'std::size_t', [], is_const=True)
    cls.add_method('GetTraceSource', 'ns3::TypeId::TraceSourceInformation', [param('std::size_t', 'i')], is_const=True)
    cls.add_method('GetTraceSourceN', 'std::size_t', [], is_const=True)
    cls.add_method('GetUid', 'uint16_t', [], is_const=True)
    cls.add_method('HasConstructor', 'bool', [], is_const=True)
    cls.add_method('HasParent', 'bool', [], is_const=True)
    cls.add_method('HideFromDocumentation', 'ns3::TypeId', [])
    cls.add_method('IsChildOf', 'bool', [param('ns3::TypeId', 'other')], is_const=True)
    # Lookups by name and by hash.
    cls.add_method('LookupAttributeByName', 'bool', [param('std::string', 'name'), param('ns3::TypeId::AttributeInformation *', 'info', transfer_ownership=False)], is_const=True)
    cls.add_method('LookupByHash', 'ns3::TypeId', [param('uint32_t', 'hash')], is_static=True)
    cls.add_method('LookupByHashFailSafe', 'bool', [param('uint32_t', 'hash'), param('ns3::TypeId *', 'tid')], is_static=True)
    cls.add_method('LookupByName', 'ns3::TypeId', [param('std::string', 'name')], is_static=True)
    cls.add_method('LookupTraceSourceByName', 'ns3::Ptr< ns3::TraceSourceAccessor const >', [param('std::string', 'name')], is_const=True)
    cls.add_method('LookupTraceSourceByName', 'ns3::Ptr< ns3::TraceSourceAccessor const >', [param('std::string', 'name'), param('ns3::TypeId::TraceSourceInformation *', 'info')], is_const=True)
    cls.add_method('MustHideFromDocumentation', 'bool', [], is_const=True)
    # Builder-style setters.
    cls.add_method('SetAttributeInitialValue', 'bool', [param('std::size_t', 'i'), param('ns3::Ptr< ns3::AttributeValue const >', 'initialValue')])
    cls.add_method('SetGroupName', 'ns3::TypeId', [param('std::string', 'groupName')])
    cls.add_method('SetParent', 'ns3::TypeId', [param('ns3::TypeId', 'tid')])
    cls.add_method('SetSize', 'ns3::TypeId', [param('std::size_t', 'size')])
    cls.add_method('SetUid', 'void', [param('uint16_t', 'uid')])
    return
def register_Ns3TypeIdAttributeInformation_methods(root_module, cls):
    """Register bindings for ns3::TypeId::AttributeInformation (type-id.h, core module)."""
    cls.add_constructor([])
    cls.add_constructor([param('ns3::TypeId::AttributeInformation const &', 'arg0')])
    # Public data members, all writable from Python.
    for member, cpp_type in (
        ('accessor', 'ns3::Ptr< ns3::AttributeAccessor const >'),
        ('checker', 'ns3::Ptr< ns3::AttributeChecker const >'),
        ('flags', 'uint32_t'),
        ('help', 'std::string'),
        ('initialValue', 'ns3::Ptr< ns3::AttributeValue const >'),
        ('name', 'std::string'),
        ('originalInitialValue', 'ns3::Ptr< ns3::AttributeValue const >'),
        ('supportLevel', 'ns3::TypeId::SupportLevel'),
        ('supportMsg', 'std::string'),
    ):
        cls.add_instance_attribute(member, cpp_type, is_const=False)
    return
def register_Ns3TypeIdTraceSourceInformation_methods(root_module, cls):
    """Register bindings for ns3::TypeId::TraceSourceInformation (type-id.h, core module)."""
    cls.add_constructor([])
    cls.add_constructor([param('ns3::TypeId::TraceSourceInformation const &', 'arg0')])
    # Public data members, all writable from Python.
    for member, cpp_type in (
        ('accessor', 'ns3::Ptr< ns3::TraceSourceAccessor const >'),
        ('callback', 'std::string'),
        ('help', 'std::string'),
        ('name', 'std::string'),
        ('supportLevel', 'ns3::TypeId::SupportLevel'),
        ('supportMsg', 'std::string'),
    ):
        cls.add_instance_attribute(member, cpp_type, is_const=False)
    return
def register_Ns3Empty_methods(root_module, cls):
    """Register bindings for ns3::empty (empty.h, core module): default and copy constructors only."""
    for ctor_args in ([], [param('ns3::empty const &', 'arg0')]):
        cls.add_constructor(ctor_args)
    return
def register_Ns3Int64x64_t_methods(root_module, cls):
    """Register Python bindings for ns3::int64x64_t (module 'core').

    Auto-generated pybindgen registrations covering, in order:
    arithmetic operators (including mixed int64x64_t * Time), the
    comparison and output-stream operators, in-place arithmetic,
    unary negation, the full set of numeric constructors, the
    GetDouble/GetHigh/GetLow accessors, the static Invert() helper,
    MulByInvert(), and the static 'implementation' attribute.
    Do not edit by hand; regenerate with the ns-3 API scanner.
    """
    cls.add_binary_numeric_operator('*', root_module['ns3::Time'], root_module['ns3::int64x64_t'], param('ns3::Time const &', u'right'))
    cls.add_binary_numeric_operator('+', root_module['ns3::int64x64_t'], root_module['ns3::int64x64_t'], param('ns3::int64x64_t const &', u'right'))
    cls.add_binary_numeric_operator('-', root_module['ns3::int64x64_t'], root_module['ns3::int64x64_t'], param('ns3::int64x64_t const &', u'right'))
    cls.add_binary_numeric_operator('*', root_module['ns3::int64x64_t'], root_module['ns3::int64x64_t'], param('ns3::int64x64_t const &', u'right'))
    cls.add_binary_numeric_operator('/', root_module['ns3::int64x64_t'], root_module['ns3::int64x64_t'], param('ns3::int64x64_t const &', u'right'))
    cls.add_binary_comparison_operator('!=')
    cls.add_binary_comparison_operator('<=')
    cls.add_binary_comparison_operator('>=')
    cls.add_output_stream_operator()
    cls.add_binary_comparison_operator('==')
    cls.add_binary_comparison_operator('<')
    cls.add_binary_comparison_operator('>')
    cls.add_inplace_numeric_operator('+=', param('ns3::int64x64_t const &', u'right'))
    cls.add_inplace_numeric_operator('-=', param('ns3::int64x64_t const &', u'right'))
    cls.add_inplace_numeric_operator('*=', param('ns3::int64x64_t const &', u'right'))
    cls.add_inplace_numeric_operator('/=', param('ns3::int64x64_t const &', u'right'))
    cls.add_unary_numeric_operator('-')
    ## int64x64-128.h (module 'core'): ns3::int64x64_t::int64x64_t() [constructor]
    cls.add_constructor([])
    ## int64x64-128.h (module 'core'): ns3::int64x64_t::int64x64_t(double const value) [constructor]
    cls.add_constructor([param('double const', 'value')])
    ## int64x64-128.h (module 'core'): ns3::int64x64_t::int64x64_t(long double const value) [constructor]
    cls.add_constructor([param('long double const', 'value')])
    ## int64x64-128.h (module 'core'): ns3::int64x64_t::int64x64_t(int const v) [constructor]
    cls.add_constructor([param('int const', 'v')])
    ## int64x64-128.h (module 'core'): ns3::int64x64_t::int64x64_t(long int const v) [constructor]
    cls.add_constructor([param('long int const', 'v')])
    ## int64x64-128.h (module 'core'): ns3::int64x64_t::int64x64_t(long long int const v) [constructor]
    cls.add_constructor([param('long long int const', 'v')])
    ## int64x64-128.h (module 'core'): ns3::int64x64_t::int64x64_t(unsigned int const v) [constructor]
    cls.add_constructor([param('unsigned int const', 'v')])
    ## int64x64-128.h (module 'core'): ns3::int64x64_t::int64x64_t(long unsigned int const v) [constructor]
    cls.add_constructor([param('long unsigned int const', 'v')])
    ## int64x64-128.h (module 'core'): ns3::int64x64_t::int64x64_t(long long unsigned int const v) [constructor]
    cls.add_constructor([param('long long unsigned int const', 'v')])
    ## int64x64-128.h (module 'core'): ns3::int64x64_t::int64x64_t(int64_t const hi, uint64_t const lo) [constructor]
    cls.add_constructor([param('int64_t const', 'hi'), param('uint64_t const', 'lo')])
    ## int64x64-128.h (module 'core'): ns3::int64x64_t::int64x64_t(ns3::int64x64_t const & o) [constructor]
    cls.add_constructor([param('ns3::int64x64_t const &', 'o')])
    ## int64x64-128.h (module 'core'): double ns3::int64x64_t::GetDouble() const [member function]
    cls.add_method('GetDouble',
                   'double',
                   [],
                   is_const=True)
    ## int64x64-128.h (module 'core'): int64_t ns3::int64x64_t::GetHigh() const [member function]
    cls.add_method('GetHigh',
                   'int64_t',
                   [],
                   is_const=True)
    ## int64x64-128.h (module 'core'): uint64_t ns3::int64x64_t::GetLow() const [member function]
    cls.add_method('GetLow',
                   'uint64_t',
                   [],
                   is_const=True)
    ## int64x64-128.h (module 'core'): static ns3::int64x64_t ns3::int64x64_t::Invert(uint64_t const v) [member function]
    cls.add_method('Invert',
                   'ns3::int64x64_t',
                   [param('uint64_t const', 'v')],
                   is_static=True)
    ## int64x64-128.h (module 'core'): void ns3::int64x64_t::MulByInvert(ns3::int64x64_t const & o) [member function]
    cls.add_method('MulByInvert',
                   'void',
                   [param('ns3::int64x64_t const &', 'o')])
    ## int64x64-128.h (module 'core'): ns3::int64x64_t::implementation [variable]
    cls.add_static_attribute('implementation', 'ns3::int64x64_t::impl_type const', is_const=True)
    return
def register_Ns3Chunk_methods(root_module, cls):
    """Register Python bindings for ns3::Chunk (module 'network').

    Auto-generated pybindgen registrations: constructors, both
    Deserialize overloads (the single-iterator form is pure virtual),
    the static GetTypeId(), and the pure-virtual Print().
    Do not edit by hand; regenerate with the ns-3 API scanner.
    """
    ## chunk.h (module 'network'): ns3::Chunk::Chunk() [constructor]
    cls.add_constructor([])
    ## chunk.h (module 'network'): ns3::Chunk::Chunk(ns3::Chunk const & arg0) [constructor]
    cls.add_constructor([param('ns3::Chunk const &', 'arg0')])
    ## chunk.h (module 'network'): uint32_t ns3::Chunk::Deserialize(ns3::Buffer::Iterator start) [member function]
    cls.add_method('Deserialize',
                   'uint32_t',
                   [param('ns3::Buffer::Iterator', 'start')],
                   is_pure_virtual=True, is_virtual=True)
    ## chunk.h (module 'network'): uint32_t ns3::Chunk::Deserialize(ns3::Buffer::Iterator start, ns3::Buffer::Iterator end) [member function]
    cls.add_method('Deserialize',
                   'uint32_t',
                   [param('ns3::Buffer::Iterator', 'start'), param('ns3::Buffer::Iterator', 'end')],
                   is_virtual=True)
    ## chunk.h (module 'network'): static ns3::TypeId ns3::Chunk::GetTypeId() [member function]
    cls.add_method('GetTypeId',
                   'ns3::TypeId',
                   [],
                   is_static=True)
    ## chunk.h (module 'network'): void ns3::Chunk::Print(std::ostream & os) const [member function]
    cls.add_method('Print',
                   'void',
                   [param('std::ostream &', 'os')],
                   is_pure_virtual=True, is_const=True, is_virtual=True)
    return
def register_Ns3Header_methods(root_module, cls):
    """Register Python bindings for ns3::Header (module 'network').

    Auto-generated pybindgen registrations: the output-stream
    operator, constructors, and the abstract Header interface
    (Deserialize, GetSerializedSize, Print, Serialize — all pure
    virtual) plus the static GetTypeId().
    Do not edit by hand; regenerate with the ns-3 API scanner.
    """
    cls.add_output_stream_operator()
    ## header.h (module 'network'): ns3::Header::Header() [constructor]
    cls.add_constructor([])
    ## header.h (module 'network'): ns3::Header::Header(ns3::Header const & arg0) [constructor]
    cls.add_constructor([param('ns3::Header const &', 'arg0')])
    ## header.h (module 'network'): uint32_t ns3::Header::Deserialize(ns3::Buffer::Iterator start) [member function]
    cls.add_method('Deserialize',
                   'uint32_t',
                   [param('ns3::Buffer::Iterator', 'start')],
                   is_pure_virtual=True, is_virtual=True)
    ## header.h (module 'network'): uint32_t ns3::Header::GetSerializedSize() const [member function]
    cls.add_method('GetSerializedSize',
                   'uint32_t',
                   [],
                   is_pure_virtual=True, is_const=True, is_virtual=True)
    ## header.h (module 'network'): static ns3::TypeId ns3::Header::GetTypeId() [member function]
    cls.add_method('GetTypeId',
                   'ns3::TypeId',
                   [],
                   is_static=True)
    ## header.h (module 'network'): void ns3::Header::Print(std::ostream & os) const [member function]
    cls.add_method('Print',
                   'void',
                   [param('std::ostream &', 'os')],
                   is_pure_virtual=True, is_const=True, is_virtual=True)
    ## header.h (module 'network'): void ns3::Header::Serialize(ns3::Buffer::Iterator start) const [member function]
    cls.add_method('Serialize',
                   'void',
                   [param('ns3::Buffer::Iterator', 'start')],
                   is_pure_virtual=True, is_const=True, is_virtual=True)
    return
def register_Ns3Ipv4Header_methods(root_module, cls):
    """Register Python bindings for ns3::Ipv4Header (module 'internet').

    Auto-generated pybindgen registrations: constructors, the Header
    virtual interface (Deserialize/Serialize/Print/GetSerializedSize/
    GetInstanceTypeId), the DSCP/ECN string helpers, checksum control,
    and the full set of IPv4-header field getters/setters (addresses,
    TOS, TTL, protocol, identification, fragmentation flags, payload
    size).  Do not edit by hand; regenerate with the ns-3 API scanner.
    """
    ## ipv4-header.h (module 'internet'): ns3::Ipv4Header::Ipv4Header(ns3::Ipv4Header const & arg0) [constructor]
    cls.add_constructor([param('ns3::Ipv4Header const &', 'arg0')])
    ## ipv4-header.h (module 'internet'): ns3::Ipv4Header::Ipv4Header() [constructor]
    cls.add_constructor([])
    ## ipv4-header.h (module 'internet'): uint32_t ns3::Ipv4Header::Deserialize(ns3::Buffer::Iterator start) [member function]
    cls.add_method('Deserialize',
                   'uint32_t',
                   [param('ns3::Buffer::Iterator', 'start')],
                   is_virtual=True)
    ## ipv4-header.h (module 'internet'): std::string ns3::Ipv4Header::DscpTypeToString(ns3::Ipv4Header::DscpType dscp) const [member function]
    cls.add_method('DscpTypeToString',
                   'std::string',
                   [param('ns3::Ipv4Header::DscpType', 'dscp')],
                   is_const=True)
    ## ipv4-header.h (module 'internet'): std::string ns3::Ipv4Header::EcnTypeToString(ns3::Ipv4Header::EcnType ecn) const [member function]
    cls.add_method('EcnTypeToString',
                   'std::string',
                   [param('ns3::Ipv4Header::EcnType', 'ecn')],
                   is_const=True)
    ## ipv4-header.h (module 'internet'): void ns3::Ipv4Header::EnableChecksum() [member function]
    cls.add_method('EnableChecksum',
                   'void',
                   [])
    ## ipv4-header.h (module 'internet'): ns3::Ipv4Address ns3::Ipv4Header::GetDestination() const [member function]
    cls.add_method('GetDestination',
                   'ns3::Ipv4Address',
                   [],
                   is_const=True)
    ## ipv4-header.h (module 'internet'): ns3::Ipv4Header::DscpType ns3::Ipv4Header::GetDscp() const [member function]
    cls.add_method('GetDscp',
                   'ns3::Ipv4Header::DscpType',
                   [],
                   is_const=True)
    ## ipv4-header.h (module 'internet'): ns3::Ipv4Header::EcnType ns3::Ipv4Header::GetEcn() const [member function]
    cls.add_method('GetEcn',
                   'ns3::Ipv4Header::EcnType',
                   [],
                   is_const=True)
    ## ipv4-header.h (module 'internet'): uint16_t ns3::Ipv4Header::GetFragmentOffset() const [member function]
    cls.add_method('GetFragmentOffset',
                   'uint16_t',
                   [],
                   is_const=True)
    ## ipv4-header.h (module 'internet'): uint16_t ns3::Ipv4Header::GetIdentification() const [member function]
    cls.add_method('GetIdentification',
                   'uint16_t',
                   [],
                   is_const=True)
    ## ipv4-header.h (module 'internet'): ns3::TypeId ns3::Ipv4Header::GetInstanceTypeId() const [member function]
    cls.add_method('GetInstanceTypeId',
                   'ns3::TypeId',
                   [],
                   is_const=True, is_virtual=True)
    ## ipv4-header.h (module 'internet'): uint16_t ns3::Ipv4Header::GetPayloadSize() const [member function]
    cls.add_method('GetPayloadSize',
                   'uint16_t',
                   [],
                   is_const=True)
    ## ipv4-header.h (module 'internet'): uint8_t ns3::Ipv4Header::GetProtocol() const [member function]
    cls.add_method('GetProtocol',
                   'uint8_t',
                   [],
                   is_const=True)
    ## ipv4-header.h (module 'internet'): uint32_t ns3::Ipv4Header::GetSerializedSize() const [member function]
    cls.add_method('GetSerializedSize',
                   'uint32_t',
                   [],
                   is_const=True, is_virtual=True)
    ## ipv4-header.h (module 'internet'): ns3::Ipv4Address ns3::Ipv4Header::GetSource() const [member function]
    cls.add_method('GetSource',
                   'ns3::Ipv4Address',
                   [],
                   is_const=True)
    ## ipv4-header.h (module 'internet'): uint8_t ns3::Ipv4Header::GetTos() const [member function]
    cls.add_method('GetTos',
                   'uint8_t',
                   [],
                   is_const=True)
    ## ipv4-header.h (module 'internet'): uint8_t ns3::Ipv4Header::GetTtl() const [member function]
    cls.add_method('GetTtl',
                   'uint8_t',
                   [],
                   is_const=True)
    ## ipv4-header.h (module 'internet'): static ns3::TypeId ns3::Ipv4Header::GetTypeId() [member function]
    cls.add_method('GetTypeId',
                   'ns3::TypeId',
                   [],
                   is_static=True)
    ## ipv4-header.h (module 'internet'): bool ns3::Ipv4Header::IsChecksumOk() const [member function]
    cls.add_method('IsChecksumOk',
                   'bool',
                   [],
                   is_const=True)
    ## ipv4-header.h (module 'internet'): bool ns3::Ipv4Header::IsDontFragment() const [member function]
    cls.add_method('IsDontFragment',
                   'bool',
                   [],
                   is_const=True)
    ## ipv4-header.h (module 'internet'): bool ns3::Ipv4Header::IsLastFragment() const [member function]
    cls.add_method('IsLastFragment',
                   'bool',
                   [],
                   is_const=True)
    ## ipv4-header.h (module 'internet'): void ns3::Ipv4Header::Print(std::ostream & os) const [member function]
    cls.add_method('Print',
                   'void',
                   [param('std::ostream &', 'os')],
                   is_const=True, is_virtual=True)
    ## ipv4-header.h (module 'internet'): void ns3::Ipv4Header::Serialize(ns3::Buffer::Iterator start) const [member function]
    cls.add_method('Serialize',
                   'void',
                   [param('ns3::Buffer::Iterator', 'start')],
                   is_const=True, is_virtual=True)
    ## ipv4-header.h (module 'internet'): void ns3::Ipv4Header::SetDestination(ns3::Ipv4Address destination) [member function]
    cls.add_method('SetDestination',
                   'void',
                   [param('ns3::Ipv4Address', 'destination')])
    ## ipv4-header.h (module 'internet'): void ns3::Ipv4Header::SetDontFragment() [member function]
    cls.add_method('SetDontFragment',
                   'void',
                   [])
    ## ipv4-header.h (module 'internet'): void ns3::Ipv4Header::SetDscp(ns3::Ipv4Header::DscpType dscp) [member function]
    cls.add_method('SetDscp',
                   'void',
                   [param('ns3::Ipv4Header::DscpType', 'dscp')])
    ## ipv4-header.h (module 'internet'): void ns3::Ipv4Header::SetEcn(ns3::Ipv4Header::EcnType ecn) [member function]
    cls.add_method('SetEcn',
                   'void',
                   [param('ns3::Ipv4Header::EcnType', 'ecn')])
    ## ipv4-header.h (module 'internet'): void ns3::Ipv4Header::SetFragmentOffset(uint16_t offsetBytes) [member function]
    cls.add_method('SetFragmentOffset',
                   'void',
                   [param('uint16_t', 'offsetBytes')])
    ## ipv4-header.h (module 'internet'): void ns3::Ipv4Header::SetIdentification(uint16_t identification) [member function]
    cls.add_method('SetIdentification',
                   'void',
                   [param('uint16_t', 'identification')])
    ## ipv4-header.h (module 'internet'): void ns3::Ipv4Header::SetLastFragment() [member function]
    cls.add_method('SetLastFragment',
                   'void',
                   [])
    ## ipv4-header.h (module 'internet'): void ns3::Ipv4Header::SetMayFragment() [member function]
    cls.add_method('SetMayFragment',
                   'void',
                   [])
    ## ipv4-header.h (module 'internet'): void ns3::Ipv4Header::SetMoreFragments() [member function]
    cls.add_method('SetMoreFragments',
                   'void',
                   [])
    ## ipv4-header.h (module 'internet'): void ns3::Ipv4Header::SetPayloadSize(uint16_t size) [member function]
    cls.add_method('SetPayloadSize',
                   'void',
                   [param('uint16_t', 'size')])
    ## ipv4-header.h (module 'internet'): void ns3::Ipv4Header::SetProtocol(uint8_t num) [member function]
    cls.add_method('SetProtocol',
                   'void',
                   [param('uint8_t', 'num')])
    ## ipv4-header.h (module 'internet'): void ns3::Ipv4Header::SetSource(ns3::Ipv4Address source) [member function]
    cls.add_method('SetSource',
                   'void',
                   [param('ns3::Ipv4Address', 'source')])
    ## ipv4-header.h (module 'internet'): void ns3::Ipv4Header::SetTos(uint8_t tos) [member function]
    cls.add_method('SetTos',
                   'void',
                   [param('uint8_t', 'tos')])
    ## ipv4-header.h (module 'internet'): void ns3::Ipv4Header::SetTtl(uint8_t ttl) [member function]
    cls.add_method('SetTtl',
                   'void',
                   [param('uint8_t', 'ttl')])
    return
def register_Ns3Ipv6Header_methods(root_module, cls):
    """Register Python bindings for ns3::Ipv6Header (module 'internet').

    Auto-generated pybindgen registrations: constructors, the Header
    virtual interface (Deserialize/Serialize/Print/GetSerializedSize/
    GetInstanceTypeId), the DSCP/ECN string helpers, and getters/
    setters for every IPv6-header field (addresses, flow label, hop
    limit, next header, payload length, traffic class).
    Do not edit by hand; regenerate with the ns-3 API scanner.
    """
    ## ipv6-header.h (module 'internet'): ns3::Ipv6Header::Ipv6Header(ns3::Ipv6Header const & arg0) [constructor]
    cls.add_constructor([param('ns3::Ipv6Header const &', 'arg0')])
    ## ipv6-header.h (module 'internet'): ns3::Ipv6Header::Ipv6Header() [constructor]
    cls.add_constructor([])
    ## ipv6-header.h (module 'internet'): uint32_t ns3::Ipv6Header::Deserialize(ns3::Buffer::Iterator start) [member function]
    cls.add_method('Deserialize',
                   'uint32_t',
                   [param('ns3::Buffer::Iterator', 'start')],
                   is_virtual=True)
    ## ipv6-header.h (module 'internet'): std::string ns3::Ipv6Header::DscpTypeToString(ns3::Ipv6Header::DscpType dscp) const [member function]
    cls.add_method('DscpTypeToString',
                   'std::string',
                   [param('ns3::Ipv6Header::DscpType', 'dscp')],
                   is_const=True)
    ## ipv6-header.h (module 'internet'): std::string ns3::Ipv6Header::EcnTypeToString(ns3::Ipv6Header::EcnType ecn) const [member function]
    cls.add_method('EcnTypeToString',
                   'std::string',
                   [param('ns3::Ipv6Header::EcnType', 'ecn')],
                   is_const=True)
    ## ipv6-header.h (module 'internet'): ns3::Ipv6Address ns3::Ipv6Header::GetDestinationAddress() const [member function]
    cls.add_method('GetDestinationAddress',
                   'ns3::Ipv6Address',
                   [],
                   is_const=True)
    ## ipv6-header.h (module 'internet'): ns3::Ipv6Header::DscpType ns3::Ipv6Header::GetDscp() const [member function]
    cls.add_method('GetDscp',
                   'ns3::Ipv6Header::DscpType',
                   [],
                   is_const=True)
    ## ipv6-header.h (module 'internet'): ns3::Ipv6Header::EcnType ns3::Ipv6Header::GetEcn() const [member function]
    cls.add_method('GetEcn',
                   'ns3::Ipv6Header::EcnType',
                   [],
                   is_const=True)
    ## ipv6-header.h (module 'internet'): uint32_t ns3::Ipv6Header::GetFlowLabel() const [member function]
    cls.add_method('GetFlowLabel',
                   'uint32_t',
                   [],
                   is_const=True)
    ## ipv6-header.h (module 'internet'): uint8_t ns3::Ipv6Header::GetHopLimit() const [member function]
    cls.add_method('GetHopLimit',
                   'uint8_t',
                   [],
                   is_const=True)
    ## ipv6-header.h (module 'internet'): ns3::TypeId ns3::Ipv6Header::GetInstanceTypeId() const [member function]
    cls.add_method('GetInstanceTypeId',
                   'ns3::TypeId',
                   [],
                   is_const=True, is_virtual=True)
    ## ipv6-header.h (module 'internet'): uint8_t ns3::Ipv6Header::GetNextHeader() const [member function]
    cls.add_method('GetNextHeader',
                   'uint8_t',
                   [],
                   is_const=True)
    ## ipv6-header.h (module 'internet'): uint16_t ns3::Ipv6Header::GetPayloadLength() const [member function]
    cls.add_method('GetPayloadLength',
                   'uint16_t',
                   [],
                   is_const=True)
    ## ipv6-header.h (module 'internet'): uint32_t ns3::Ipv6Header::GetSerializedSize() const [member function]
    cls.add_method('GetSerializedSize',
                   'uint32_t',
                   [],
                   is_const=True, is_virtual=True)
    ## ipv6-header.h (module 'internet'): ns3::Ipv6Address ns3::Ipv6Header::GetSourceAddress() const [member function]
    cls.add_method('GetSourceAddress',
                   'ns3::Ipv6Address',
                   [],
                   is_const=True)
    ## ipv6-header.h (module 'internet'): uint8_t ns3::Ipv6Header::GetTrafficClass() const [member function]
    cls.add_method('GetTrafficClass',
                   'uint8_t',
                   [],
                   is_const=True)
    ## ipv6-header.h (module 'internet'): static ns3::TypeId ns3::Ipv6Header::GetTypeId() [member function]
    cls.add_method('GetTypeId',
                   'ns3::TypeId',
                   [],
                   is_static=True)
    ## ipv6-header.h (module 'internet'): void ns3::Ipv6Header::Print(std::ostream & os) const [member function]
    cls.add_method('Print',
                   'void',
                   [param('std::ostream &', 'os')],
                   is_const=True, is_virtual=True)
    ## ipv6-header.h (module 'internet'): void ns3::Ipv6Header::Serialize(ns3::Buffer::Iterator start) const [member function]
    cls.add_method('Serialize',
                   'void',
                   [param('ns3::Buffer::Iterator', 'start')],
                   is_const=True, is_virtual=True)
    ## ipv6-header.h (module 'internet'): void ns3::Ipv6Header::SetDestinationAddress(ns3::Ipv6Address dst) [member function]
    cls.add_method('SetDestinationAddress',
                   'void',
                   [param('ns3::Ipv6Address', 'dst')])
    ## ipv6-header.h (module 'internet'): void ns3::Ipv6Header::SetDscp(ns3::Ipv6Header::DscpType dscp) [member function]
    cls.add_method('SetDscp',
                   'void',
                   [param('ns3::Ipv6Header::DscpType', 'dscp')])
    ## ipv6-header.h (module 'internet'): void ns3::Ipv6Header::SetEcn(ns3::Ipv6Header::EcnType ecn) [member function]
    cls.add_method('SetEcn',
                   'void',
                   [param('ns3::Ipv6Header::EcnType', 'ecn')])
    ## ipv6-header.h (module 'internet'): void ns3::Ipv6Header::SetFlowLabel(uint32_t flow) [member function]
    cls.add_method('SetFlowLabel',
                   'void',
                   [param('uint32_t', 'flow')])
    ## ipv6-header.h (module 'internet'): void ns3::Ipv6Header::SetHopLimit(uint8_t limit) [member function]
    cls.add_method('SetHopLimit',
                   'void',
                   [param('uint8_t', 'limit')])
    ## ipv6-header.h (module 'internet'): void ns3::Ipv6Header::SetNextHeader(uint8_t next) [member function]
    cls.add_method('SetNextHeader',
                   'void',
                   [param('uint8_t', 'next')])
    ## ipv6-header.h (module 'internet'): void ns3::Ipv6Header::SetPayloadLength(uint16_t len) [member function]
    cls.add_method('SetPayloadLength',
                   'void',
                   [param('uint16_t', 'len')])
    ## ipv6-header.h (module 'internet'): void ns3::Ipv6Header::SetSourceAddress(ns3::Ipv6Address src) [member function]
    cls.add_method('SetSourceAddress',
                   'void',
                   [param('ns3::Ipv6Address', 'src')])
    ## ipv6-header.h (module 'internet'): void ns3::Ipv6Header::SetTrafficClass(uint8_t traffic) [member function]
    cls.add_method('SetTrafficClass',
                   'void',
                   [param('uint8_t', 'traffic')])
    return
def register_Ns3Object_methods(root_module, cls):
    """Register Python bindings for ns3::Object (module 'core').

    Auto-generated pybindgen registrations: the public lifecycle API
    (AggregateObject, Dispose, Initialize, IsInitialized), aggregate
    iteration, TypeId accessors, plus the protected copy constructor
    and protected virtual hooks (DoDispose, DoInitialize,
    NotifyNewAggregate) exposed for subclassing from Python.
    Do not edit by hand; regenerate with the ns-3 API scanner.
    """
    ## object.h (module 'core'): ns3::Object::Object() [constructor]
    cls.add_constructor([])
    ## object.h (module 'core'): void ns3::Object::AggregateObject(ns3::Ptr<ns3::Object> other) [member function]
    cls.add_method('AggregateObject',
                   'void',
                   [param('ns3::Ptr< ns3::Object >', 'other')])
    ## object.h (module 'core'): void ns3::Object::Dispose() [member function]
    cls.add_method('Dispose',
                   'void',
                   [])
    ## object.h (module 'core'): ns3::Object::AggregateIterator ns3::Object::GetAggregateIterator() const [member function]
    cls.add_method('GetAggregateIterator',
                   'ns3::Object::AggregateIterator',
                   [],
                   is_const=True)
    ## object.h (module 'core'): ns3::TypeId ns3::Object::GetInstanceTypeId() const [member function]
    cls.add_method('GetInstanceTypeId',
                   'ns3::TypeId',
                   [],
                   is_const=True, is_virtual=True)
    ## object.h (module 'core'): static ns3::TypeId ns3::Object::GetTypeId() [member function]
    cls.add_method('GetTypeId',
                   'ns3::TypeId',
                   [],
                   is_static=True)
    ## object.h (module 'core'): void ns3::Object::Initialize() [member function]
    cls.add_method('Initialize',
                   'void',
                   [])
    ## object.h (module 'core'): bool ns3::Object::IsInitialized() const [member function]
    cls.add_method('IsInitialized',
                   'bool',
                   [],
                   is_const=True)
    ## object.h (module 'core'): ns3::Object::Object(ns3::Object const & o) [constructor]
    cls.add_constructor([param('ns3::Object const &', 'o')],
                        visibility='protected')
    ## object.h (module 'core'): void ns3::Object::DoDispose() [member function]
    cls.add_method('DoDispose',
                   'void',
                   [],
                   visibility='protected', is_virtual=True)
    ## object.h (module 'core'): void ns3::Object::DoInitialize() [member function]
    cls.add_method('DoInitialize',
                   'void',
                   [],
                   visibility='protected', is_virtual=True)
    ## object.h (module 'core'): void ns3::Object::NotifyNewAggregate() [member function]
    cls.add_method('NotifyNewAggregate',
                   'void',
                   [],
                   visibility='protected', is_virtual=True)
    return
def register_Ns3ObjectAggregateIterator_methods(root_module, cls):
    """Register Python bindings for ns3::Object::AggregateIterator
    (module 'core'): copy and default constructors plus the
    HasNext()/Next() iteration protocol.
    """
    # Constructors, registered in the same order as the generated file:
    # copy constructor first, then the default constructor.
    cls.add_constructor([param('ns3::Object::AggregateIterator const &', 'arg0')])
    cls.add_constructor([])
    # bool HasNext() const -- whether another aggregated object remains.
    cls.add_method('HasNext', 'bool', [], is_const=True)
    # Ptr<const Object> Next() -- advance and return the next aggregate.
    cls.add_method('Next', 'ns3::Ptr< ns3::Object const >', [])
    return
def register_Ns3SimpleRefCount__Ns3AttributeAccessor_Ns3Empty_Ns3DefaultDeleter__lt__ns3AttributeAccessor__gt___methods(root_module, cls):
    """Register the default and copy constructors of the
    SimpleRefCount<ns3::AttributeAccessor, ...> base instantiation."""
    ## simple-ref-count.h (module 'core'): ns3::SimpleRefCount<ns3::AttributeAccessor, ns3::empty, ns3::DefaultDeleter<ns3::AttributeAccessor> >::SimpleRefCount() [constructor]
    cls.add_constructor([])
    ## simple-ref-count.h (module 'core'): ns3::SimpleRefCount<ns3::AttributeAccessor, ns3::empty, ns3::DefaultDeleter<ns3::AttributeAccessor> >::SimpleRefCount(ns3::SimpleRefCount<ns3::AttributeAccessor, ns3::empty, ns3::DefaultDeleter<ns3::AttributeAccessor> > const & o) [constructor]
    cls.add_constructor([param('ns3::SimpleRefCount< ns3::AttributeAccessor, ns3::empty, ns3::DefaultDeleter< ns3::AttributeAccessor > > const &', 'o')])
    return
def register_Ns3SimpleRefCount__Ns3AttributeChecker_Ns3Empty_Ns3DefaultDeleter__lt__ns3AttributeChecker__gt___methods(root_module, cls):
    """Register the default and copy constructors of the
    SimpleRefCount<ns3::AttributeChecker, ...> base instantiation."""
    ## simple-ref-count.h (module 'core'): ns3::SimpleRefCount<ns3::AttributeChecker, ns3::empty, ns3::DefaultDeleter<ns3::AttributeChecker> >::SimpleRefCount() [constructor]
    cls.add_constructor([])
    ## simple-ref-count.h (module 'core'): ns3::SimpleRefCount<ns3::AttributeChecker, ns3::empty, ns3::DefaultDeleter<ns3::AttributeChecker> >::SimpleRefCount(ns3::SimpleRefCount<ns3::AttributeChecker, ns3::empty, ns3::DefaultDeleter<ns3::AttributeChecker> > const & o) [constructor]
    cls.add_constructor([param('ns3::SimpleRefCount< ns3::AttributeChecker, ns3::empty, ns3::DefaultDeleter< ns3::AttributeChecker > > const &', 'o')])
    return
def register_Ns3SimpleRefCount__Ns3AttributeValue_Ns3Empty_Ns3DefaultDeleter__lt__ns3AttributeValue__gt___methods(root_module, cls):
    """Register the default and copy constructors of the
    SimpleRefCount<ns3::AttributeValue, ...> base instantiation."""
    ## simple-ref-count.h (module 'core'): ns3::SimpleRefCount<ns3::AttributeValue, ns3::empty, ns3::DefaultDeleter<ns3::AttributeValue> >::SimpleRefCount() [constructor]
    cls.add_constructor([])
    ## simple-ref-count.h (module 'core'): ns3::SimpleRefCount<ns3::AttributeValue, ns3::empty, ns3::DefaultDeleter<ns3::AttributeValue> >::SimpleRefCount(ns3::SimpleRefCount<ns3::AttributeValue, ns3::empty, ns3::DefaultDeleter<ns3::AttributeValue> > const & o) [constructor]
    cls.add_constructor([param('ns3::SimpleRefCount< ns3::AttributeValue, ns3::empty, ns3::DefaultDeleter< ns3::AttributeValue > > const &', 'o')])
    return
def register_Ns3SimpleRefCount__Ns3CallbackImplBase_Ns3Empty_Ns3DefaultDeleter__lt__ns3CallbackImplBase__gt___methods(root_module, cls):
    """Register the default and copy constructors of the
    SimpleRefCount<ns3::CallbackImplBase, ...> base instantiation."""
    ## simple-ref-count.h (module 'core'): ns3::SimpleRefCount<ns3::CallbackImplBase, ns3::empty, ns3::DefaultDeleter<ns3::CallbackImplBase> >::SimpleRefCount() [constructor]
    cls.add_constructor([])
    ## simple-ref-count.h (module 'core'): ns3::SimpleRefCount<ns3::CallbackImplBase, ns3::empty, ns3::DefaultDeleter<ns3::CallbackImplBase> >::SimpleRefCount(ns3::SimpleRefCount<ns3::CallbackImplBase, ns3::empty, ns3::DefaultDeleter<ns3::CallbackImplBase> > const & o) [constructor]
    cls.add_constructor([param('ns3::SimpleRefCount< ns3::CallbackImplBase, ns3::empty, ns3::DefaultDeleter< ns3::CallbackImplBase > > const &', 'o')])
    return
def register_Ns3SimpleRefCount__Ns3EventImpl_Ns3Empty_Ns3DefaultDeleter__lt__ns3EventImpl__gt___methods(root_module, cls):
    """Register the default and copy constructors of the
    SimpleRefCount<ns3::EventImpl, ...> base instantiation."""
    ## simple-ref-count.h (module 'core'): ns3::SimpleRefCount<ns3::EventImpl, ns3::empty, ns3::DefaultDeleter<ns3::EventImpl> >::SimpleRefCount() [constructor]
    cls.add_constructor([])
    ## simple-ref-count.h (module 'core'): ns3::SimpleRefCount<ns3::EventImpl, ns3::empty, ns3::DefaultDeleter<ns3::EventImpl> >::SimpleRefCount(ns3::SimpleRefCount<ns3::EventImpl, ns3::empty, ns3::DefaultDeleter<ns3::EventImpl> > const & o) [constructor]
    cls.add_constructor([param('ns3::SimpleRefCount< ns3::EventImpl, ns3::empty, ns3::DefaultDeleter< ns3::EventImpl > > const &', 'o')])
    return
def register_Ns3SimpleRefCount__Ns3FlowClassifier_Ns3Empty_Ns3DefaultDeleter__lt__ns3FlowClassifier__gt___methods(root_module, cls):
    """Register the default and copy constructors of the
    SimpleRefCount<ns3::FlowClassifier, ...> base instantiation."""
    ## simple-ref-count.h (module 'core'): ns3::SimpleRefCount<ns3::FlowClassifier, ns3::empty, ns3::DefaultDeleter<ns3::FlowClassifier> >::SimpleRefCount() [constructor]
    cls.add_constructor([])
    ## simple-ref-count.h (module 'core'): ns3::SimpleRefCount<ns3::FlowClassifier, ns3::empty, ns3::DefaultDeleter<ns3::FlowClassifier> >::SimpleRefCount(ns3::SimpleRefCount<ns3::FlowClassifier, ns3::empty, ns3::DefaultDeleter<ns3::FlowClassifier> > const & o) [constructor]
    cls.add_constructor([param('ns3::SimpleRefCount< ns3::FlowClassifier, ns3::empty, ns3::DefaultDeleter< ns3::FlowClassifier > > const &', 'o')])
    return
def register_Ns3SimpleRefCount__Ns3HashImplementation_Ns3Empty_Ns3DefaultDeleter__lt__ns3HashImplementation__gt___methods(root_module, cls):
    """Register the default and copy constructors of the
    SimpleRefCount<ns3::Hash::Implementation, ...> base instantiation."""
    ## simple-ref-count.h (module 'core'): ns3::SimpleRefCount<ns3::Hash::Implementation, ns3::empty, ns3::DefaultDeleter<ns3::Hash::Implementation> >::SimpleRefCount() [constructor]
    cls.add_constructor([])
    ## simple-ref-count.h (module 'core'): ns3::SimpleRefCount<ns3::Hash::Implementation, ns3::empty, ns3::DefaultDeleter<ns3::Hash::Implementation> >::SimpleRefCount(ns3::SimpleRefCount<ns3::Hash::Implementation, ns3::empty, ns3::DefaultDeleter<ns3::Hash::Implementation> > const & o) [constructor]
    cls.add_constructor([param('ns3::SimpleRefCount< ns3::Hash::Implementation, ns3::empty, ns3::DefaultDeleter< ns3::Hash::Implementation > > const &', 'o')])
    return
def register_Ns3SimpleRefCount__Ns3Ipv4MulticastRoute_Ns3Empty_Ns3DefaultDeleter__lt__ns3Ipv4MulticastRoute__gt___methods(root_module, cls):
    """Register the default and copy constructors of the
    SimpleRefCount<ns3::Ipv4MulticastRoute, ...> base instantiation."""
    ## simple-ref-count.h (module 'core'): ns3::SimpleRefCount<ns3::Ipv4MulticastRoute, ns3::empty, ns3::DefaultDeleter<ns3::Ipv4MulticastRoute> >::SimpleRefCount() [constructor]
    cls.add_constructor([])
    ## simple-ref-count.h (module 'core'): ns3::SimpleRefCount<ns3::Ipv4MulticastRoute, ns3::empty, ns3::DefaultDeleter<ns3::Ipv4MulticastRoute> >::SimpleRefCount(ns3::SimpleRefCount<ns3::Ipv4MulticastRoute, ns3::empty, ns3::DefaultDeleter<ns3::Ipv4MulticastRoute> > const & o) [constructor]
    cls.add_constructor([param('ns3::SimpleRefCount< ns3::Ipv4MulticastRoute, ns3::empty, ns3::DefaultDeleter< ns3::Ipv4MulticastRoute > > const &', 'o')])
    return
def register_Ns3SimpleRefCount__Ns3Ipv4Route_Ns3Empty_Ns3DefaultDeleter__lt__ns3Ipv4Route__gt___methods(root_module, cls):
    """Register the default and copy constructors of the
    SimpleRefCount<ns3::Ipv4Route, ...> base instantiation."""
    ## simple-ref-count.h (module 'core'): ns3::SimpleRefCount<ns3::Ipv4Route, ns3::empty, ns3::DefaultDeleter<ns3::Ipv4Route> >::SimpleRefCount() [constructor]
    cls.add_constructor([])
    ## simple-ref-count.h (module 'core'): ns3::SimpleRefCount<ns3::Ipv4Route, ns3::empty, ns3::DefaultDeleter<ns3::Ipv4Route> >::SimpleRefCount(ns3::SimpleRefCount<ns3::Ipv4Route, ns3::empty, ns3::DefaultDeleter<ns3::Ipv4Route> > const & o) [constructor]
    cls.add_constructor([param('ns3::SimpleRefCount< ns3::Ipv4Route, ns3::empty, ns3::DefaultDeleter< ns3::Ipv4Route > > const &', 'o')])
    return
def register_Ns3SimpleRefCount__Ns3NixVector_Ns3Empty_Ns3DefaultDeleter__lt__ns3NixVector__gt___methods(root_module, cls):
    """Register the default and copy constructors of the
    SimpleRefCount<ns3::NixVector, ...> base instantiation."""
    ## simple-ref-count.h (module 'core'): ns3::SimpleRefCount<ns3::NixVector, ns3::empty, ns3::DefaultDeleter<ns3::NixVector> >::SimpleRefCount() [constructor]
    cls.add_constructor([])
    ## simple-ref-count.h (module 'core'): ns3::SimpleRefCount<ns3::NixVector, ns3::empty, ns3::DefaultDeleter<ns3::NixVector> >::SimpleRefCount(ns3::SimpleRefCount<ns3::NixVector, ns3::empty, ns3::DefaultDeleter<ns3::NixVector> > const & o) [constructor]
    cls.add_constructor([param('ns3::SimpleRefCount< ns3::NixVector, ns3::empty, ns3::DefaultDeleter< ns3::NixVector > > const &', 'o')])
    return
def register_Ns3SimpleRefCount__Ns3OutputStreamWrapper_Ns3Empty_Ns3DefaultDeleter__lt__ns3OutputStreamWrapper__gt___methods(root_module, cls):
    """Register the default and copy constructors of the
    SimpleRefCount<ns3::OutputStreamWrapper, ...> base instantiation."""
    ## simple-ref-count.h (module 'core'): ns3::SimpleRefCount<ns3::OutputStreamWrapper, ns3::empty, ns3::DefaultDeleter<ns3::OutputStreamWrapper> >::SimpleRefCount() [constructor]
    cls.add_constructor([])
    ## simple-ref-count.h (module 'core'): ns3::SimpleRefCount<ns3::OutputStreamWrapper, ns3::empty, ns3::DefaultDeleter<ns3::OutputStreamWrapper> >::SimpleRefCount(ns3::SimpleRefCount<ns3::OutputStreamWrapper, ns3::empty, ns3::DefaultDeleter<ns3::OutputStreamWrapper> > const & o) [constructor]
    cls.add_constructor([param('ns3::SimpleRefCount< ns3::OutputStreamWrapper, ns3::empty, ns3::DefaultDeleter< ns3::OutputStreamWrapper > > const &', 'o')])
    return
def register_Ns3SimpleRefCount__Ns3Packet_Ns3Empty_Ns3DefaultDeleter__lt__ns3Packet__gt___methods(root_module, cls):
    """Register the default and copy constructors of the
    SimpleRefCount<ns3::Packet, ...> base instantiation."""
    ## simple-ref-count.h (module 'core'): ns3::SimpleRefCount<ns3::Packet, ns3::empty, ns3::DefaultDeleter<ns3::Packet> >::SimpleRefCount() [constructor]
    cls.add_constructor([])
    ## simple-ref-count.h (module 'core'): ns3::SimpleRefCount<ns3::Packet, ns3::empty, ns3::DefaultDeleter<ns3::Packet> >::SimpleRefCount(ns3::SimpleRefCount<ns3::Packet, ns3::empty, ns3::DefaultDeleter<ns3::Packet> > const & o) [constructor]
    cls.add_constructor([param('ns3::SimpleRefCount< ns3::Packet, ns3::empty, ns3::DefaultDeleter< ns3::Packet > > const &', 'o')])
    return
def register_Ns3SimpleRefCount__Ns3QueueItem_Ns3Empty_Ns3DefaultDeleter__lt__ns3QueueItem__gt___methods(root_module, cls):
    """Register the default and copy constructors of the
    SimpleRefCount<ns3::QueueItem, ...> base instantiation."""
    ## simple-ref-count.h (module 'core'): ns3::SimpleRefCount<ns3::QueueItem, ns3::empty, ns3::DefaultDeleter<ns3::QueueItem> >::SimpleRefCount() [constructor]
    cls.add_constructor([])
    ## simple-ref-count.h (module 'core'): ns3::SimpleRefCount<ns3::QueueItem, ns3::empty, ns3::DefaultDeleter<ns3::QueueItem> >::SimpleRefCount(ns3::SimpleRefCount<ns3::QueueItem, ns3::empty, ns3::DefaultDeleter<ns3::QueueItem> > const & o) [constructor]
    cls.add_constructor([param('ns3::SimpleRefCount< ns3::QueueItem, ns3::empty, ns3::DefaultDeleter< ns3::QueueItem > > const &', 'o')])
    return
def register_Ns3SimpleRefCount__Ns3TraceSourceAccessor_Ns3Empty_Ns3DefaultDeleter__lt__ns3TraceSourceAccessor__gt___methods(root_module, cls):
    """Register the default and copy constructors of the
    SimpleRefCount<ns3::TraceSourceAccessor, ...> base instantiation."""
    ## simple-ref-count.h (module 'core'): ns3::SimpleRefCount<ns3::TraceSourceAccessor, ns3::empty, ns3::DefaultDeleter<ns3::TraceSourceAccessor> >::SimpleRefCount() [constructor]
    cls.add_constructor([])
    ## simple-ref-count.h (module 'core'): ns3::SimpleRefCount<ns3::TraceSourceAccessor, ns3::empty, ns3::DefaultDeleter<ns3::TraceSourceAccessor> >::SimpleRefCount(ns3::SimpleRefCount<ns3::TraceSourceAccessor, ns3::empty, ns3::DefaultDeleter<ns3::TraceSourceAccessor> > const & o) [constructor]
    cls.add_constructor([param('ns3::SimpleRefCount< ns3::TraceSourceAccessor, ns3::empty, ns3::DefaultDeleter< ns3::TraceSourceAccessor > > const &', 'o')])
    return
def register_Ns3Socket_methods(root_module, cls):
    """Register the constructors and member functions of ns3::Socket (socket.h, module 'network').

    Registrations are driven from a table; the registration order and every
    type/parameter string match the original generated code exactly.
    """
    # Common ns3::Callback type strings shared by the Set*Callback registrations.
    cb_void_socket = 'ns3::Callback< void, ns3::Ptr< ns3::Socket >, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty >'
    cb_void_socket_addr = 'ns3::Callback< void, ns3::Ptr< ns3::Socket >, ns3::Address const &, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty >'
    cb_bool_socket_addr = 'ns3::Callback< bool, ns3::Ptr< ns3::Socket >, ns3::Address const &, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty >'
    cb_void_socket_uint = 'ns3::Callback< void, ns3::Ptr< ns3::Socket >, unsigned int, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty >'
    # Keyword-flag shorthands for the add_method calls below.
    pure = dict(is_pure_virtual=True, is_virtual=True)
    pure_const = dict(is_pure_virtual=True, is_const=True, is_virtual=True)
    virt = dict(is_virtual=True)
    const = dict(is_const=True)
    const_virt = dict(is_const=True, is_virtual=True)
    static = dict(is_static=True)
    prot = dict(visibility='protected')
    const_prot = dict(is_const=True, visibility='protected')
    # Constructors: copy constructor, then the default constructor.
    cls.add_constructor([param('ns3::Socket const &', 'arg0')])
    cls.add_constructor([])
    # Each row: (method name, return type, parameter list, keyword flags).
    rows = [
        ('Bind', 'int', [param('ns3::Address const &', 'address')], pure),
        ('Bind', 'int', [], pure),
        ('Bind6', 'int', [], pure),
        ('BindToNetDevice', 'void', [param('ns3::Ptr< ns3::NetDevice >', 'netdevice')], virt),
        ('Close', 'int', [], pure),
        ('Connect', 'int', [param('ns3::Address const &', 'address')], pure),
        ('CreateSocket', 'ns3::Ptr< ns3::Socket >', [param('ns3::Ptr< ns3::Node >', 'node'), param('ns3::TypeId', 'tid')], static),
        ('GetAllowBroadcast', 'bool', [], pure_const),
        ('GetBoundNetDevice', 'ns3::Ptr< ns3::NetDevice >', [], {}),
        ('GetErrno', 'ns3::Socket::SocketErrno', [], pure_const),
        ('GetIpTos', 'uint8_t', [], const),
        ('GetIpTtl', 'uint8_t', [], const_virt),
        ('GetIpv6HopLimit', 'uint8_t', [], const_virt),
        ('GetIpv6Tclass', 'uint8_t', [], const),
        ('GetNode', 'ns3::Ptr< ns3::Node >', [], pure_const),
        ('GetPeerName', 'int', [param('ns3::Address &', 'address')], pure_const),
        ('GetPriority', 'uint8_t', [], const),
        ('GetRxAvailable', 'uint32_t', [], pure_const),
        ('GetSockName', 'int', [param('ns3::Address &', 'address')], pure_const),
        ('GetSocketType', 'ns3::Socket::SocketType', [], pure_const),
        ('GetTxAvailable', 'uint32_t', [], pure_const),
        ('GetTypeId', 'ns3::TypeId', [], static),
        ('IpTos2Priority', 'uint8_t', [param('uint8_t', 'ipTos')], static),
        ('Ipv6JoinGroup', 'void', [param('ns3::Ipv6Address', 'address'), param('ns3::Socket::Ipv6MulticastFilterMode', 'filterMode'), param('std::vector< ns3::Ipv6Address >', 'sourceAddresses')], virt),
        ('Ipv6JoinGroup', 'void', [param('ns3::Ipv6Address', 'address')], virt),
        ('Ipv6LeaveGroup', 'void', [], virt),
        ('IsIpRecvTos', 'bool', [], const),
        ('IsIpRecvTtl', 'bool', [], const),
        ('IsIpv6RecvHopLimit', 'bool', [], const),
        ('IsIpv6RecvTclass', 'bool', [], const),
        ('IsRecvPktInfo', 'bool', [], const),
        ('Listen', 'int', [], pure),
        ('Recv', 'ns3::Ptr< ns3::Packet >', [param('uint32_t', 'maxSize'), param('uint32_t', 'flags')], pure),
        ('Recv', 'ns3::Ptr< ns3::Packet >', [], {}),
        ('Recv', 'int', [param('uint8_t *', 'buf'), param('uint32_t', 'size'), param('uint32_t', 'flags')], {}),
        ('RecvFrom', 'ns3::Ptr< ns3::Packet >', [param('uint32_t', 'maxSize'), param('uint32_t', 'flags'), param('ns3::Address &', 'fromAddress')], pure),
        ('RecvFrom', 'ns3::Ptr< ns3::Packet >', [param('ns3::Address &', 'fromAddress')], {}),
        ('RecvFrom', 'int', [param('uint8_t *', 'buf'), param('uint32_t', 'size'), param('uint32_t', 'flags'), param('ns3::Address &', 'fromAddress')], {}),
        ('Send', 'int', [param('ns3::Ptr< ns3::Packet >', 'p'), param('uint32_t', 'flags')], pure),
        ('Send', 'int', [param('ns3::Ptr< ns3::Packet >', 'p')], {}),
        ('Send', 'int', [param('uint8_t const *', 'buf'), param('uint32_t', 'size'), param('uint32_t', 'flags')], {}),
        ('SendTo', 'int', [param('ns3::Ptr< ns3::Packet >', 'p'), param('uint32_t', 'flags'), param('ns3::Address const &', 'toAddress')], pure),
        ('SendTo', 'int', [param('uint8_t const *', 'buf'), param('uint32_t', 'size'), param('uint32_t', 'flags'), param('ns3::Address const &', 'address')], {}),
        ('SetAcceptCallback', 'void', [param(cb_bool_socket_addr, 'connectionRequest'), param(cb_void_socket_addr, 'newConnectionCreated')], {}),
        ('SetAllowBroadcast', 'bool', [param('bool', 'allowBroadcast')], pure),
        ('SetCloseCallbacks', 'void', [param(cb_void_socket, 'normalClose'), param(cb_void_socket, 'errorClose')], {}),
        ('SetConnectCallback', 'void', [param(cb_void_socket, 'connectionSucceeded'), param(cb_void_socket, 'connectionFailed')], {}),
        ('SetDataSentCallback', 'void', [param(cb_void_socket_uint, 'dataSent')], {}),
        ('SetIpRecvTos', 'void', [param('bool', 'ipv4RecvTos')], {}),
        ('SetIpRecvTtl', 'void', [param('bool', 'ipv4RecvTtl')], {}),
        ('SetIpTos', 'void', [param('uint8_t', 'ipTos')], {}),
        ('SetIpTtl', 'void', [param('uint8_t', 'ipTtl')], virt),
        ('SetIpv6HopLimit', 'void', [param('uint8_t', 'ipHopLimit')], virt),
        ('SetIpv6RecvHopLimit', 'void', [param('bool', 'ipv6RecvHopLimit')], {}),
        ('SetIpv6RecvTclass', 'void', [param('bool', 'ipv6RecvTclass')], {}),
        ('SetIpv6Tclass', 'void', [param('int', 'ipTclass')], {}),
        ('SetPriority', 'void', [param('uint8_t', 'priority')], {}),
        ('SetRecvCallback', 'void', [param(cb_void_socket, 'arg0')], {}),
        ('SetRecvPktInfo', 'void', [param('bool', 'flag')], {}),
        ('SetSendCallback', 'void', [param(cb_void_socket_uint, 'sendCb')], {}),
        ('ShutdownRecv', 'int', [], pure),
        ('ShutdownSend', 'int', [], pure),
        ('DoDispose', 'void', [], dict(visibility='protected', is_virtual=True)),
        ('IsManualIpTtl', 'bool', [], const_prot),
        ('IsManualIpv6HopLimit', 'bool', [], const_prot),
        ('IsManualIpv6Tclass', 'bool', [], const_prot),
        ('NotifyConnectionFailed', 'void', [], prot),
        ('NotifyConnectionRequest', 'bool', [param('ns3::Address const &', 'from')], prot),
        ('NotifyConnectionSucceeded', 'void', [], prot),
        ('NotifyDataRecv', 'void', [], prot),
        ('NotifyDataSent', 'void', [param('uint32_t', 'size')], prot),
        ('NotifyErrorClose', 'void', [], prot),
        ('NotifyNewConnectionCreated', 'void', [param('ns3::Ptr< ns3::Socket >', 'socket'), param('ns3::Address const &', 'from')], prot),
        ('NotifyNormalClose', 'void', [], prot),
        ('NotifySend', 'void', [param('uint32_t', 'spaceAvailable')], prot),
    ]
    for method_name, return_type, parameters, flags in rows:
        cls.add_method(method_name, return_type, parameters, **flags)
    return
def register_Ns3SocketIpTosTag_methods(root_module, cls):
    """Register the constructors and member functions of ns3::SocketIpTosTag (socket.h, module 'network')."""
    # Copy constructor, then default constructor.
    cls.add_constructor([param('ns3::SocketIpTosTag const &', 'arg0')])
    cls.add_constructor([])
    # Each row: (method name, return type, parameter list, keyword flags).
    method_rows = [
        ('Deserialize', 'void', [param('ns3::TagBuffer', 'i')], dict(is_virtual=True)),
        ('GetInstanceTypeId', 'ns3::TypeId', [], dict(is_const=True, is_virtual=True)),
        ('GetSerializedSize', 'uint32_t', [], dict(is_const=True, is_virtual=True)),
        ('GetTos', 'uint8_t', [], dict(is_const=True)),
        ('GetTypeId', 'ns3::TypeId', [], dict(is_static=True)),
        ('Print', 'void', [param('std::ostream &', 'os')], dict(is_const=True, is_virtual=True)),
        ('Serialize', 'void', [param('ns3::TagBuffer', 'i')], dict(is_const=True, is_virtual=True)),
        ('SetTos', 'void', [param('uint8_t', 'tos')], dict()),
    ]
    for method_name, return_type, parameters, flags in method_rows:
        cls.add_method(method_name, return_type, parameters, **flags)
    return
def register_Ns3SocketIpTtlTag_methods(root_module, cls):
    """Register the constructors and member functions of ns3::SocketIpTtlTag (socket.h, module 'network')."""
    # Copy constructor, then default constructor.
    cls.add_constructor([param('ns3::SocketIpTtlTag const &', 'arg0')])
    cls.add_constructor([])
    # Each row: (method name, return type, parameter list, keyword flags).
    method_rows = [
        ('Deserialize', 'void', [param('ns3::TagBuffer', 'i')], dict(is_virtual=True)),
        ('GetInstanceTypeId', 'ns3::TypeId', [], dict(is_const=True, is_virtual=True)),
        ('GetSerializedSize', 'uint32_t', [], dict(is_const=True, is_virtual=True)),
        ('GetTtl', 'uint8_t', [], dict(is_const=True)),
        ('GetTypeId', 'ns3::TypeId', [], dict(is_static=True)),
        ('Print', 'void', [param('std::ostream &', 'os')], dict(is_const=True, is_virtual=True)),
        ('Serialize', 'void', [param('ns3::TagBuffer', 'i')], dict(is_const=True, is_virtual=True)),
        ('SetTtl', 'void', [param('uint8_t', 'ttl')], dict()),
    ]
    for method_name, return_type, parameters, flags in method_rows:
        cls.add_method(method_name, return_type, parameters, **flags)
    return
def register_Ns3SocketIpv6HopLimitTag_methods(root_module, cls):
    """Register the constructors and member functions of ns3::SocketIpv6HopLimitTag (socket.h, module 'network')."""
    # Copy constructor, then default constructor.
    cls.add_constructor([param('ns3::SocketIpv6HopLimitTag const &', 'arg0')])
    cls.add_constructor([])
    # Each row: (method name, return type, parameter list, keyword flags).
    method_rows = [
        ('Deserialize', 'void', [param('ns3::TagBuffer', 'i')], dict(is_virtual=True)),
        ('GetHopLimit', 'uint8_t', [], dict(is_const=True)),
        ('GetInstanceTypeId', 'ns3::TypeId', [], dict(is_const=True, is_virtual=True)),
        ('GetSerializedSize', 'uint32_t', [], dict(is_const=True, is_virtual=True)),
        ('GetTypeId', 'ns3::TypeId', [], dict(is_static=True)),
        ('Print', 'void', [param('std::ostream &', 'os')], dict(is_const=True, is_virtual=True)),
        ('Serialize', 'void', [param('ns3::TagBuffer', 'i')], dict(is_const=True, is_virtual=True)),
        ('SetHopLimit', 'void', [param('uint8_t', 'hopLimit')], dict()),
    ]
    for method_name, return_type, parameters, flags in method_rows:
        cls.add_method(method_name, return_type, parameters, **flags)
    return
def register_Ns3SocketIpv6TclassTag_methods(root_module, cls):
    """Register the constructors and member functions of ns3::SocketIpv6TclassTag (socket.h, module 'network')."""
    # Copy constructor, then default constructor.
    cls.add_constructor([param('ns3::SocketIpv6TclassTag const &', 'arg0')])
    cls.add_constructor([])
    # Each row: (method name, return type, parameter list, keyword flags).
    method_rows = [
        ('Deserialize', 'void', [param('ns3::TagBuffer', 'i')], dict(is_virtual=True)),
        ('GetInstanceTypeId', 'ns3::TypeId', [], dict(is_const=True, is_virtual=True)),
        ('GetSerializedSize', 'uint32_t', [], dict(is_const=True, is_virtual=True)),
        ('GetTclass', 'uint8_t', [], dict(is_const=True)),
        ('GetTypeId', 'ns3::TypeId', [], dict(is_static=True)),
        ('Print', 'void', [param('std::ostream &', 'os')], dict(is_const=True, is_virtual=True)),
        ('Serialize', 'void', [param('ns3::TagBuffer', 'i')], dict(is_const=True, is_virtual=True)),
        ('SetTclass', 'void', [param('uint8_t', 'tclass')], dict()),
    ]
    for method_name, return_type, parameters, flags in method_rows:
        cls.add_method(method_name, return_type, parameters, **flags)
    return
def register_Ns3SocketPriorityTag_methods(root_module, cls):
    """Register the constructors and member functions of ns3::SocketPriorityTag (socket.h, module 'network')."""
    # Copy constructor, then default constructor.
    cls.add_constructor([param('ns3::SocketPriorityTag const &', 'arg0')])
    cls.add_constructor([])
    # Each row: (method name, return type, parameter list, keyword flags).
    method_rows = [
        ('Deserialize', 'void', [param('ns3::TagBuffer', 'i')], dict(is_virtual=True)),
        ('GetInstanceTypeId', 'ns3::TypeId', [], dict(is_const=True, is_virtual=True)),
        ('GetPriority', 'uint8_t', [], dict(is_const=True)),
        ('GetSerializedSize', 'uint32_t', [], dict(is_const=True, is_virtual=True)),
        ('GetTypeId', 'ns3::TypeId', [], dict(is_static=True)),
        ('Print', 'void', [param('std::ostream &', 'os')], dict(is_const=True, is_virtual=True)),
        ('Serialize', 'void', [param('ns3::TagBuffer', 'i')], dict(is_const=True, is_virtual=True)),
        ('SetPriority', 'void', [param('uint8_t', 'priority')], dict()),
    ]
    for method_name, return_type, parameters, flags in method_rows:
        cls.add_method(method_name, return_type, parameters, **flags)
    return
def register_Ns3SocketSetDontFragmentTag_methods(root_module, cls):
    """Register the constructors and member functions of ns3::SocketSetDontFragmentTag (socket.h, module 'network')."""
    # Copy constructor, then default constructor.
    cls.add_constructor([param('ns3::SocketSetDontFragmentTag const &', 'arg0')])
    cls.add_constructor([])
    # Each row: (method name, return type, parameter list, keyword flags).
    method_rows = [
        ('Deserialize', 'void', [param('ns3::TagBuffer', 'i')], dict(is_virtual=True)),
        ('Disable', 'void', [], dict()),
        ('Enable', 'void', [], dict()),
        ('GetInstanceTypeId', 'ns3::TypeId', [], dict(is_const=True, is_virtual=True)),
        ('GetSerializedSize', 'uint32_t', [], dict(is_const=True, is_virtual=True)),
        ('GetTypeId', 'ns3::TypeId', [], dict(is_static=True)),
        ('IsEnabled', 'bool', [], dict(is_const=True)),
        ('Print', 'void', [param('std::ostream &', 'os')], dict(is_const=True, is_virtual=True)),
        ('Serialize', 'void', [param('ns3::TagBuffer', 'i')], dict(is_const=True, is_virtual=True)),
    ]
    for method_name, return_type, parameters, flags in method_rows:
        cls.add_method(method_name, return_type, parameters, **flags)
    return
def register_Ns3Time_methods(root_module, cls):
    """Register Python bindings for ns3::Time (nstime.h, module 'core').

    Generated by PyBindGen.  Wires comparison/arithmetic operators,
    constructors and member functions onto the wrapper class ``cls``;
    ``root_module`` supplies previously registered wrapper types used in
    operator argument/return positions.  NOTE: the registration order of
    the '*' and '/' operator overloads and of the two ``From`` overloads
    matters for overload resolution — do not reorder.
    """
    # Rich-comparison operators.
    cls.add_binary_comparison_operator('==')
    cls.add_binary_comparison_operator('!=')
    cls.add_binary_comparison_operator('<=')
    cls.add_binary_comparison_operator('>=')
    cls.add_binary_comparison_operator('<')
    cls.add_binary_comparison_operator('>')
    # Binary arithmetic operators (result type, left type, right param).
    cls.add_binary_numeric_operator('+', root_module['ns3::Time'], root_module['ns3::Time'], param('ns3::Time const &', u'right'))
    cls.add_binary_numeric_operator('-', root_module['ns3::Time'], root_module['ns3::Time'], param('ns3::Time const &', u'right'))
    cls.add_binary_numeric_operator('*', root_module['ns3::Time'], root_module['ns3::Time'], param('int64_t const &', u'right'))
    cls.add_binary_numeric_operator('*', root_module['ns3::Time'], root_module['ns3::Time'], param('ns3::int64x64_t const &', u'right'))
    cls.add_binary_numeric_operator('/', root_module['ns3::int64x64_t'], root_module['ns3::Time'], param('ns3::Time const &', u'right'))
    cls.add_binary_numeric_operator('/', root_module['ns3::Time'], root_module['ns3::Time'], param('int64_t const &', u'right'))
    cls.add_binary_numeric_operator('/', root_module['ns3::Time'], root_module['ns3::Time'], param('ns3::int64x64_t const &', u'right'))
    # In-place arithmetic operators.
    cls.add_inplace_numeric_operator('+=', param('ns3::Time const &', u'right'))
    cls.add_inplace_numeric_operator('-=', param('ns3::Time const &', u'right'))
    cls.add_output_stream_operator()
    ## nstime.h (module 'core'): ns3::Time::Time() [constructor]
    cls.add_constructor([])
    ## nstime.h (module 'core'): ns3::Time::Time(ns3::Time const & o) [constructor]
    cls.add_constructor([param('ns3::Time const &', 'o')])
    ## nstime.h (module 'core'): ns3::Time::Time(double v) [constructor]
    cls.add_constructor([param('double', 'v')])
    ## nstime.h (module 'core'): ns3::Time::Time(int v) [constructor]
    cls.add_constructor([param('int', 'v')])
    ## nstime.h (module 'core'): ns3::Time::Time(long int v) [constructor]
    cls.add_constructor([param('long int', 'v')])
    ## nstime.h (module 'core'): ns3::Time::Time(long long int v) [constructor]
    cls.add_constructor([param('long long int', 'v')])
    ## nstime.h (module 'core'): ns3::Time::Time(unsigned int v) [constructor]
    cls.add_constructor([param('unsigned int', 'v')])
    ## nstime.h (module 'core'): ns3::Time::Time(long unsigned int v) [constructor]
    cls.add_constructor([param('long unsigned int', 'v')])
    ## nstime.h (module 'core'): ns3::Time::Time(long long unsigned int v) [constructor]
    cls.add_constructor([param('long long unsigned int', 'v')])
    ## nstime.h (module 'core'): ns3::Time::Time(ns3::int64x64_t const & v) [constructor]
    cls.add_constructor([param('ns3::int64x64_t const &', 'v')])
    ## nstime.h (module 'core'): ns3::Time::Time(std::string const & s) [constructor]
    cls.add_constructor([param('std::string const &', 's')])
    ## nstime.h (module 'core'): ns3::TimeWithUnit ns3::Time::As(ns3::Time::Unit const unit) const [member function]
    cls.add_method('As',
                   'ns3::TimeWithUnit',
                   [param('ns3::Time::Unit const', 'unit')],
                   is_const=True)
    ## nstime.h (module 'core'): int ns3::Time::Compare(ns3::Time const & o) const [member function]
    cls.add_method('Compare',
                   'int',
                   [param('ns3::Time const &', 'o')],
                   is_const=True)
    ## nstime.h (module 'core'): static ns3::Time ns3::Time::From(ns3::int64x64_t const & value) [member function]
    cls.add_method('From',
                   'ns3::Time',
                   [param('ns3::int64x64_t const &', 'value')],
                   is_static=True)
    ## nstime.h (module 'core'): static ns3::Time ns3::Time::From(ns3::int64x64_t const & value, ns3::Time::Unit unit) [member function]
    cls.add_method('From',
                   'ns3::Time',
                   [param('ns3::int64x64_t const &', 'value'), param('ns3::Time::Unit', 'unit')],
                   is_static=True)
    ## nstime.h (module 'core'): static ns3::Time ns3::Time::FromDouble(double value, ns3::Time::Unit unit) [member function]
    cls.add_method('FromDouble',
                   'ns3::Time',
                   [param('double', 'value'), param('ns3::Time::Unit', 'unit')],
                   is_static=True)
    ## nstime.h (module 'core'): static ns3::Time ns3::Time::FromInteger(uint64_t value, ns3::Time::Unit unit) [member function]
    cls.add_method('FromInteger',
                   'ns3::Time',
                   [param('uint64_t', 'value'), param('ns3::Time::Unit', 'unit')],
                   is_static=True)
    ## nstime.h (module 'core'): double ns3::Time::GetDays() const [member function]
    cls.add_method('GetDays',
                   'double',
                   [],
                   is_const=True)
    ## nstime.h (module 'core'): double ns3::Time::GetDouble() const [member function]
    cls.add_method('GetDouble',
                   'double',
                   [],
                   is_const=True)
    ## nstime.h (module 'core'): int64_t ns3::Time::GetFemtoSeconds() const [member function]
    cls.add_method('GetFemtoSeconds',
                   'int64_t',
                   [],
                   is_const=True)
    ## nstime.h (module 'core'): double ns3::Time::GetHours() const [member function]
    cls.add_method('GetHours',
                   'double',
                   [],
                   is_const=True)
    ## nstime.h (module 'core'): int64_t ns3::Time::GetInteger() const [member function]
    cls.add_method('GetInteger',
                   'int64_t',
                   [],
                   is_const=True)
    ## nstime.h (module 'core'): int64_t ns3::Time::GetMicroSeconds() const [member function]
    cls.add_method('GetMicroSeconds',
                   'int64_t',
                   [],
                   is_const=True)
    ## nstime.h (module 'core'): int64_t ns3::Time::GetMilliSeconds() const [member function]
    cls.add_method('GetMilliSeconds',
                   'int64_t',
                   [],
                   is_const=True)
    ## nstime.h (module 'core'): double ns3::Time::GetMinutes() const [member function]
    cls.add_method('GetMinutes',
                   'double',
                   [],
                   is_const=True)
    ## nstime.h (module 'core'): int64_t ns3::Time::GetNanoSeconds() const [member function]
    cls.add_method('GetNanoSeconds',
                   'int64_t',
                   [],
                   is_const=True)
    ## nstime.h (module 'core'): int64_t ns3::Time::GetPicoSeconds() const [member function]
    cls.add_method('GetPicoSeconds',
                   'int64_t',
                   [],
                   is_const=True)
    ## nstime.h (module 'core'): static ns3::Time::Unit ns3::Time::GetResolution() [member function]
    cls.add_method('GetResolution',
                   'ns3::Time::Unit',
                   [],
                   is_static=True)
    ## nstime.h (module 'core'): double ns3::Time::GetSeconds() const [member function]
    cls.add_method('GetSeconds',
                   'double',
                   [],
                   is_const=True)
    ## nstime.h (module 'core'): int64_t ns3::Time::GetTimeStep() const [member function]
    cls.add_method('GetTimeStep',
                   'int64_t',
                   [],
                   is_const=True)
    ## nstime.h (module 'core'): double ns3::Time::GetYears() const [member function]
    cls.add_method('GetYears',
                   'double',
                   [],
                   is_const=True)
    ## nstime.h (module 'core'): bool ns3::Time::IsNegative() const [member function]
    cls.add_method('IsNegative',
                   'bool',
                   [],
                   is_const=True)
    ## nstime.h (module 'core'): bool ns3::Time::IsPositive() const [member function]
    cls.add_method('IsPositive',
                   'bool',
                   [],
                   is_const=True)
    ## nstime.h (module 'core'): bool ns3::Time::IsStrictlyNegative() const [member function]
    cls.add_method('IsStrictlyNegative',
                   'bool',
                   [],
                   is_const=True)
    ## nstime.h (module 'core'): bool ns3::Time::IsStrictlyPositive() const [member function]
    cls.add_method('IsStrictlyPositive',
                   'bool',
                   [],
                   is_const=True)
    ## nstime.h (module 'core'): bool ns3::Time::IsZero() const [member function]
    cls.add_method('IsZero',
                   'bool',
                   [],
                   is_const=True)
    ## nstime.h (module 'core'): static ns3::Time ns3::Time::Max() [member function]
    cls.add_method('Max',
                   'ns3::Time',
                   [],
                   is_static=True)
    ## nstime.h (module 'core'): static ns3::Time ns3::Time::Min() [member function]
    cls.add_method('Min',
                   'ns3::Time',
                   [],
                   is_static=True)
    ## nstime.h (module 'core'): static void ns3::Time::SetResolution(ns3::Time::Unit resolution) [member function]
    cls.add_method('SetResolution',
                   'void',
                   [param('ns3::Time::Unit', 'resolution')],
                   is_static=True)
    ## nstime.h (module 'core'): static bool ns3::Time::StaticInit() [member function]
    cls.add_method('StaticInit',
                   'bool',
                   [],
                   is_static=True)
    ## nstime.h (module 'core'): ns3::int64x64_t ns3::Time::To(ns3::Time::Unit unit) const [member function]
    cls.add_method('To',
                   'ns3::int64x64_t',
                   [param('ns3::Time::Unit', 'unit')],
                   is_const=True)
    ## nstime.h (module 'core'): double ns3::Time::ToDouble(ns3::Time::Unit unit) const [member function]
    cls.add_method('ToDouble',
                   'double',
                   [param('ns3::Time::Unit', 'unit')],
                   is_const=True)
    ## nstime.h (module 'core'): int64_t ns3::Time::ToInteger(ns3::Time::Unit unit) const [member function]
    cls.add_method('ToInteger',
                   'int64_t',
                   [param('ns3::Time::Unit', 'unit')],
                   is_const=True)
    return
def register_Ns3TraceSourceAccessor_methods(root_module, cls):
    """Register Python bindings for ns3::TraceSourceAccessor
    (trace-source-accessor.h, module 'core').

    Generated by PyBindGen.  Exposes the copy/default constructors and the
    four pure-virtual connect/disconnect entry points of this abstract base.
    ``transfer_ownership=False`` keeps the ObjectBase* argument owned by the
    caller.
    """
    ## trace-source-accessor.h (module 'core'): ns3::TraceSourceAccessor::TraceSourceAccessor(ns3::TraceSourceAccessor const & arg0) [constructor]
    cls.add_constructor([param('ns3::TraceSourceAccessor const &', 'arg0')])
    ## trace-source-accessor.h (module 'core'): ns3::TraceSourceAccessor::TraceSourceAccessor() [constructor]
    cls.add_constructor([])
    ## trace-source-accessor.h (module 'core'): bool ns3::TraceSourceAccessor::Connect(ns3::ObjectBase * obj, std::string context, ns3::CallbackBase const & cb) const [member function]
    cls.add_method('Connect',
                   'bool',
                   [param('ns3::ObjectBase *', 'obj', transfer_ownership=False), param('std::string', 'context'), param('ns3::CallbackBase const &', 'cb')],
                   is_pure_virtual=True, is_const=True, is_virtual=True)
    ## trace-source-accessor.h (module 'core'): bool ns3::TraceSourceAccessor::ConnectWithoutContext(ns3::ObjectBase * obj, ns3::CallbackBase const & cb) const [member function]
    cls.add_method('ConnectWithoutContext',
                   'bool',
                   [param('ns3::ObjectBase *', 'obj', transfer_ownership=False), param('ns3::CallbackBase const &', 'cb')],
                   is_pure_virtual=True, is_const=True, is_virtual=True)
    ## trace-source-accessor.h (module 'core'): bool ns3::TraceSourceAccessor::Disconnect(ns3::ObjectBase * obj, std::string context, ns3::CallbackBase const & cb) const [member function]
    cls.add_method('Disconnect',
                   'bool',
                   [param('ns3::ObjectBase *', 'obj', transfer_ownership=False), param('std::string', 'context'), param('ns3::CallbackBase const &', 'cb')],
                   is_pure_virtual=True, is_const=True, is_virtual=True)
    ## trace-source-accessor.h (module 'core'): bool ns3::TraceSourceAccessor::DisconnectWithoutContext(ns3::ObjectBase * obj, ns3::CallbackBase const & cb) const [member function]
    cls.add_method('DisconnectWithoutContext',
                   'bool',
                   [param('ns3::ObjectBase *', 'obj', transfer_ownership=False), param('ns3::CallbackBase const &', 'cb')],
                   is_pure_virtual=True, is_const=True, is_virtual=True)
    return
def register_Ns3Trailer_methods(root_module, cls):
    """Register Python bindings for ns3::Trailer (trailer.h, module 'network').

    Generated by PyBindGen.  NOTE: the two ``Deserialize`` registrations are
    C++ overloads — their order matters for overload resolution, do not
    reorder.
    """
    cls.add_output_stream_operator()
    ## trailer.h (module 'network'): ns3::Trailer::Trailer() [constructor]
    cls.add_constructor([])
    ## trailer.h (module 'network'): ns3::Trailer::Trailer(ns3::Trailer const & arg0) [constructor]
    cls.add_constructor([param('ns3::Trailer const &', 'arg0')])
    ## trailer.h (module 'network'): uint32_t ns3::Trailer::Deserialize(ns3::Buffer::Iterator end) [member function]
    cls.add_method('Deserialize',
                   'uint32_t',
                   [param('ns3::Buffer::Iterator', 'end')],
                   is_pure_virtual=True, is_virtual=True)
    ## trailer.h (module 'network'): uint32_t ns3::Trailer::Deserialize(ns3::Buffer::Iterator start, ns3::Buffer::Iterator end) [member function]
    cls.add_method('Deserialize',
                   'uint32_t',
                   [param('ns3::Buffer::Iterator', 'start'), param('ns3::Buffer::Iterator', 'end')],
                   is_virtual=True)
    ## trailer.h (module 'network'): uint32_t ns3::Trailer::GetSerializedSize() const [member function]
    cls.add_method('GetSerializedSize',
                   'uint32_t',
                   [],
                   is_pure_virtual=True, is_const=True, is_virtual=True)
    ## trailer.h (module 'network'): static ns3::TypeId ns3::Trailer::GetTypeId() [member function]
    cls.add_method('GetTypeId',
                   'ns3::TypeId',
                   [],
                   is_static=True)
    ## trailer.h (module 'network'): void ns3::Trailer::Print(std::ostream & os) const [member function]
    cls.add_method('Print',
                   'void',
                   [param('std::ostream &', 'os')],
                   is_pure_virtual=True, is_const=True, is_virtual=True)
    ## trailer.h (module 'network'): void ns3::Trailer::Serialize(ns3::Buffer::Iterator start) const [member function]
    cls.add_method('Serialize',
                   'void',
                   [param('ns3::Buffer::Iterator', 'start')],
                   is_pure_virtual=True, is_const=True, is_virtual=True)
    return
def register_Ns3AttributeAccessor_methods(root_module, cls):
    """Register Python bindings for ns3::AttributeAccessor
    (attribute.h, module 'core').

    Generated by PyBindGen.  All members of this abstract base are pure
    virtual; ``transfer_ownership=False`` keeps the ObjectBase* argument
    owned by the caller.
    """
    ## attribute.h (module 'core'): ns3::AttributeAccessor::AttributeAccessor(ns3::AttributeAccessor const & arg0) [constructor]
    cls.add_constructor([param('ns3::AttributeAccessor const &', 'arg0')])
    ## attribute.h (module 'core'): ns3::AttributeAccessor::AttributeAccessor() [constructor]
    cls.add_constructor([])
    ## attribute.h (module 'core'): bool ns3::AttributeAccessor::Get(ns3::ObjectBase const * object, ns3::AttributeValue & attribute) const [member function]
    cls.add_method('Get',
                   'bool',
                   [param('ns3::ObjectBase const *', 'object'), param('ns3::AttributeValue &', 'attribute')],
                   is_pure_virtual=True, is_const=True, is_virtual=True)
    ## attribute.h (module 'core'): bool ns3::AttributeAccessor::HasGetter() const [member function]
    cls.add_method('HasGetter',
                   'bool',
                   [],
                   is_pure_virtual=True, is_const=True, is_virtual=True)
    ## attribute.h (module 'core'): bool ns3::AttributeAccessor::HasSetter() const [member function]
    cls.add_method('HasSetter',
                   'bool',
                   [],
                   is_pure_virtual=True, is_const=True, is_virtual=True)
    ## attribute.h (module 'core'): bool ns3::AttributeAccessor::Set(ns3::ObjectBase * object, ns3::AttributeValue const & value) const [member function]
    cls.add_method('Set',
                   'bool',
                   [param('ns3::ObjectBase *', 'object', transfer_ownership=False), param('ns3::AttributeValue const &', 'value')],
                   is_pure_virtual=True, is_const=True, is_virtual=True)
    return
def register_Ns3AttributeChecker_methods(root_module, cls):
    """Register Python bindings for ns3::AttributeChecker
    (attribute.h, module 'core').

    Generated by PyBindGen.  Every member except ``CreateValidValue`` is a
    pure-virtual hook of this abstract base.
    """
    ## attribute.h (module 'core'): ns3::AttributeChecker::AttributeChecker(ns3::AttributeChecker const & arg0) [constructor]
    cls.add_constructor([param('ns3::AttributeChecker const &', 'arg0')])
    ## attribute.h (module 'core'): ns3::AttributeChecker::AttributeChecker() [constructor]
    cls.add_constructor([])
    ## attribute.h (module 'core'): bool ns3::AttributeChecker::Check(ns3::AttributeValue const & value) const [member function]
    cls.add_method('Check',
                   'bool',
                   [param('ns3::AttributeValue const &', 'value')],
                   is_pure_virtual=True, is_const=True, is_virtual=True)
    ## attribute.h (module 'core'): bool ns3::AttributeChecker::Copy(ns3::AttributeValue const & source, ns3::AttributeValue & destination) const [member function]
    cls.add_method('Copy',
                   'bool',
                   [param('ns3::AttributeValue const &', 'source'), param('ns3::AttributeValue &', 'destination')],
                   is_pure_virtual=True, is_const=True, is_virtual=True)
    ## attribute.h (module 'core'): ns3::Ptr<ns3::AttributeValue> ns3::AttributeChecker::Create() const [member function]
    cls.add_method('Create',
                   'ns3::Ptr< ns3::AttributeValue >',
                   [],
                   is_pure_virtual=True, is_const=True, is_virtual=True)
    ## attribute.h (module 'core'): ns3::Ptr<ns3::AttributeValue> ns3::AttributeChecker::CreateValidValue(ns3::AttributeValue const & value) const [member function]
    cls.add_method('CreateValidValue',
                   'ns3::Ptr< ns3::AttributeValue >',
                   [param('ns3::AttributeValue const &', 'value')],
                   is_const=True)
    ## attribute.h (module 'core'): std::string ns3::AttributeChecker::GetUnderlyingTypeInformation() const [member function]
    cls.add_method('GetUnderlyingTypeInformation',
                   'std::string',
                   [],
                   is_pure_virtual=True, is_const=True, is_virtual=True)
    ## attribute.h (module 'core'): std::string ns3::AttributeChecker::GetValueTypeName() const [member function]
    cls.add_method('GetValueTypeName',
                   'std::string',
                   [],
                   is_pure_virtual=True, is_const=True, is_virtual=True)
    ## attribute.h (module 'core'): bool ns3::AttributeChecker::HasUnderlyingTypeInformation() const [member function]
    cls.add_method('HasUnderlyingTypeInformation',
                   'bool',
                   [],
                   is_pure_virtual=True, is_const=True, is_virtual=True)
    return
def register_Ns3AttributeValue_methods(root_module, cls):
    """Register Python bindings for the abstract ns3::AttributeValue base
    class (attribute.h, module 'core')."""
    # The checker argument type is shared by the two (de)serialization hooks.
    checker_ptr = 'ns3::Ptr< ns3::AttributeChecker const >'
    # Constructors: copy, then default.
    cls.add_constructor([param('ns3::AttributeValue const &', 'arg0')])
    cls.add_constructor([])
    # ns3::Ptr<ns3::AttributeValue> Copy() const  [pure virtual]
    cls.add_method('Copy',
                   'ns3::Ptr< ns3::AttributeValue >',
                   [],
                   is_pure_virtual=True, is_const=True, is_virtual=True)
    # bool DeserializeFromString(std::string value, Ptr<const AttributeChecker> checker)  [pure virtual]
    cls.add_method('DeserializeFromString',
                   'bool',
                   [param('std::string', 'value'), param(checker_ptr, 'checker')],
                   is_pure_virtual=True, is_virtual=True)
    # std::string SerializeToString(Ptr<const AttributeChecker> checker) const  [pure virtual]
    cls.add_method('SerializeToString',
                   'std::string',
                   [param(checker_ptr, 'checker')],
                   is_pure_virtual=True, is_const=True, is_virtual=True)
def register_Ns3CallbackChecker_methods(root_module, cls):
    """Register Python bindings for ns3::CallbackChecker (callback.h,
    module 'core'): only the default and copy constructors are exposed."""
    cls.add_constructor([])
    cls.add_constructor([param('ns3::CallbackChecker const &', 'arg0')])
def register_Ns3CallbackImplBase_methods(root_module, cls):
    """Register Python bindings for ns3::CallbackImplBase (callback.h,
    module 'core').

    The protected static template GetCppTypeid<T>() is registered once per
    template instantiation used by this module; the loop below preserves the
    generated registration order and the exact template-parameter spellings.
    """
    # Constructors: default, then copy.
    cls.add_constructor([])
    cls.add_constructor([param('ns3::CallbackImplBase const &', 'arg0')])
    # std::string GetTypeid() const  [pure virtual]
    cls.add_method('GetTypeid',
                   'std::string',
                   [],
                   is_pure_virtual=True, is_const=True, is_virtual=True)
    # bool IsEqual(Ptr<const CallbackImplBase> other) const  [pure virtual]
    cls.add_method('IsEqual',
                   'bool',
                   [param('ns3::Ptr< ns3::CallbackImplBase const >', 'other')],
                   is_pure_virtual=True, is_const=True, is_virtual=True)
    # static std::string Demangle(std::string const & mangled)  [protected]
    cls.add_method('Demangle',
                   'std::string',
                   [param('std::string const &', 'mangled')],
                   is_static=True, visibility='protected')
    # static std::string GetCppTypeid<T>()  [protected] — one registration
    # per instantiation.  NOTE: the trailing space in the Ptr<...> entries is
    # part of the generated type spelling and must be preserved.
    for tparam in (u'ns3::ObjectBase*',
                   u'void',
                   u'ns3::Ptr<ns3::NetDevice> ',
                   u'ns3::Ptr<ns3::Packet const> ',
                   u'unsigned short',
                   u'ns3::Address const&',
                   u'ns3::NetDevice::PacketType',
                   u'ns3::Ptr<ns3::Socket> ',
                   u'bool',
                   u'unsigned int',
                   u'ns3::Ipv4Header const&',
                   u'ns3::Ptr<ns3::Ipv4> ',
                   u'ns3::Ipv4L3Protocol::DropReason',
                   u'ns3::Ptr<ns3::Ipv6> ',
                   u'ns3::Ipv6Header const&',
                   u'ns3::Ipv6L3Protocol::DropReason'):
        cls.add_method('GetCppTypeid',
                       'std::string',
                       [],
                       is_static=True, visibility='protected',
                       template_parameters=[tparam])
def register_Ns3CallbackValue_methods(root_module, cls):
    """Register Python bindings for ns3::CallbackValue (callback.h,
    module 'core'), the AttributeValue wrapper around a CallbackBase."""
    checker_ptr = 'ns3::Ptr< ns3::AttributeChecker const >'
    # Constructors: copy, default, and conversion from a CallbackBase.
    cls.add_constructor([param('ns3::CallbackValue const &', 'arg0')])
    cls.add_constructor([])
    cls.add_constructor([param('ns3::CallbackBase const &', 'base')])
    # ns3::Ptr<ns3::AttributeValue> Copy() const  [virtual]
    cls.add_method('Copy',
                   'ns3::Ptr< ns3::AttributeValue >',
                   [],
                   is_const=True, is_virtual=True)
    # bool DeserializeFromString(std::string value, Ptr<const AttributeChecker> checker)  [virtual]
    cls.add_method('DeserializeFromString',
                   'bool',
                   [param('std::string', 'value'), param(checker_ptr, 'checker')],
                   is_virtual=True)
    # std::string SerializeToString(Ptr<const AttributeChecker> checker) const  [virtual]
    cls.add_method('SerializeToString',
                   'std::string',
                   [param(checker_ptr, 'checker')],
                   is_const=True, is_virtual=True)
    # void Set(ns3::CallbackBase base)
    cls.add_method('Set',
                   'void',
                   [param('ns3::CallbackBase', 'base')])
def register_Ns3EmptyAttributeAccessor_methods(root_module, cls):
    """Register Python bindings for ns3::EmptyAttributeAccessor
    (attribute.h, module 'core'), the no-op AttributeAccessor implementation.

    Generated by PyBindGen.  Unlike the abstract base registration, these
    overrides are plain virtuals (not pure) and Set's ObjectBase* parameter
    carries no transfer_ownership flag, matching the generated signature.
    """
    ## attribute.h (module 'core'): ns3::EmptyAttributeAccessor::EmptyAttributeAccessor(ns3::EmptyAttributeAccessor const & arg0) [constructor]
    cls.add_constructor([param('ns3::EmptyAttributeAccessor const &', 'arg0')])
    ## attribute.h (module 'core'): ns3::EmptyAttributeAccessor::EmptyAttributeAccessor() [constructor]
    cls.add_constructor([])
    ## attribute.h (module 'core'): bool ns3::EmptyAttributeAccessor::Get(ns3::ObjectBase const * object, ns3::AttributeValue & attribute) const [member function]
    cls.add_method('Get',
                   'bool',
                   [param('ns3::ObjectBase const *', 'object'), param('ns3::AttributeValue &', 'attribute')],
                   is_const=True, is_virtual=True)
    ## attribute.h (module 'core'): bool ns3::EmptyAttributeAccessor::HasGetter() const [member function]
    cls.add_method('HasGetter',
                   'bool',
                   [],
                   is_const=True, is_virtual=True)
    ## attribute.h (module 'core'): bool ns3::EmptyAttributeAccessor::HasSetter() const [member function]
    cls.add_method('HasSetter',
                   'bool',
                   [],
                   is_const=True, is_virtual=True)
    ## attribute.h (module 'core'): bool ns3::EmptyAttributeAccessor::Set(ns3::ObjectBase * object, ns3::AttributeValue const & value) const [member function]
    cls.add_method('Set',
                   'bool',
                   [param('ns3::ObjectBase *', 'object'), param('ns3::AttributeValue const &', 'value')],
                   is_const=True, is_virtual=True)
    return
def register_Ns3EmptyAttributeChecker_methods(root_module, cls):
    """Register Python bindings for ns3::EmptyAttributeChecker
    (attribute.h, module 'core'), the no-op AttributeChecker implementation.

    Generated by PyBindGen.  All members are concrete virtual overrides of
    the AttributeChecker pure-virtual interface.
    """
    ## attribute.h (module 'core'): ns3::EmptyAttributeChecker::EmptyAttributeChecker(ns3::EmptyAttributeChecker const & arg0) [constructor]
    cls.add_constructor([param('ns3::EmptyAttributeChecker const &', 'arg0')])
    ## attribute.h (module 'core'): ns3::EmptyAttributeChecker::EmptyAttributeChecker() [constructor]
    cls.add_constructor([])
    ## attribute.h (module 'core'): bool ns3::EmptyAttributeChecker::Check(ns3::AttributeValue const & value) const [member function]
    cls.add_method('Check',
                   'bool',
                   [param('ns3::AttributeValue const &', 'value')],
                   is_const=True, is_virtual=True)
    ## attribute.h (module 'core'): bool ns3::EmptyAttributeChecker::Copy(ns3::AttributeValue const & source, ns3::AttributeValue & destination) const [member function]
    cls.add_method('Copy',
                   'bool',
                   [param('ns3::AttributeValue const &', 'source'), param('ns3::AttributeValue &', 'destination')],
                   is_const=True, is_virtual=True)
    ## attribute.h (module 'core'): ns3::Ptr<ns3::AttributeValue> ns3::EmptyAttributeChecker::Create() const [member function]
    cls.add_method('Create',
                   'ns3::Ptr< ns3::AttributeValue >',
                   [],
                   is_const=True, is_virtual=True)
    ## attribute.h (module 'core'): std::string ns3::EmptyAttributeChecker::GetUnderlyingTypeInformation() const [member function]
    cls.add_method('GetUnderlyingTypeInformation',
                   'std::string',
                   [],
                   is_const=True, is_virtual=True)
    ## attribute.h (module 'core'): std::string ns3::EmptyAttributeChecker::GetValueTypeName() const [member function]
    cls.add_method('GetValueTypeName',
                   'std::string',
                   [],
                   is_const=True, is_virtual=True)
    ## attribute.h (module 'core'): bool ns3::EmptyAttributeChecker::HasUnderlyingTypeInformation() const [member function]
    cls.add_method('HasUnderlyingTypeInformation',
                   'bool',
                   [],
                   is_const=True, is_virtual=True)
    return
def register_Ns3EmptyAttributeValue_methods(root_module, cls):
    """Register Python bindings for ns3::EmptyAttributeValue
    (attribute.h, module 'core').

    Generated by PyBindGen.  All three overrides are registered with
    visibility='private', mirroring the C++ access level — keep these flags
    byte-exact.
    """
    ## attribute.h (module 'core'): ns3::EmptyAttributeValue::EmptyAttributeValue(ns3::EmptyAttributeValue const & arg0) [constructor]
    cls.add_constructor([param('ns3::EmptyAttributeValue const &', 'arg0')])
    ## attribute.h (module 'core'): ns3::EmptyAttributeValue::EmptyAttributeValue() [constructor]
    cls.add_constructor([])
    ## attribute.h (module 'core'): ns3::Ptr<ns3::AttributeValue> ns3::EmptyAttributeValue::Copy() const [member function]
    cls.add_method('Copy',
                   'ns3::Ptr< ns3::AttributeValue >',
                   [],
                   is_const=True, visibility='private', is_virtual=True)
    ## attribute.h (module 'core'): bool ns3::EmptyAttributeValue::DeserializeFromString(std::string value, ns3::Ptr<const ns3::AttributeChecker> checker) [member function]
    cls.add_method('DeserializeFromString',
                   'bool',
                   [param('std::string', 'value'), param('ns3::Ptr< ns3::AttributeChecker const >', 'checker')],
                   visibility='private', is_virtual=True)
    ## attribute.h (module 'core'): std::string ns3::EmptyAttributeValue::SerializeToString(ns3::Ptr<const ns3::AttributeChecker> checker) const [member function]
    cls.add_method('SerializeToString',
                   'std::string',
                   [param('ns3::Ptr< ns3::AttributeChecker const >', 'checker')],
                   is_const=True, visibility='private', is_virtual=True)
    return
def register_Ns3EventImpl_methods(root_module, cls):
    """Register Python bindings for ns3::EventImpl (event-impl.h,
    module 'core')."""
    # Constructors: copy, then default.
    cls.add_constructor([param('ns3::EventImpl const &', 'arg0')])
    cls.add_constructor([])
    # Public no-argument members, in generated order:
    # void Cancel(), void Invoke(), bool IsCancelled().
    for method_name, return_type in (('Cancel', 'void'),
                                     ('Invoke', 'void'),
                                     ('IsCancelled', 'bool')):
        cls.add_method(method_name,
                       return_type,
                       [])
    # void Notify()  [pure virtual, protected]
    cls.add_method('Notify',
                   'void',
                   [],
                   is_pure_virtual=True, visibility='protected', is_virtual=True)
def register_Ns3FlowClassifier_methods(root_module, cls):
    """Register Python bindings for ns3::FlowClassifier
    (flow-classifier.h, module 'flow-monitor')."""
    ostream_ref = 'std::ostream &'
    # Default constructor only.
    cls.add_constructor([])
    # void SerializeToXmlStream(std::ostream & os, uint16_t indent) const  [pure virtual]
    cls.add_method('SerializeToXmlStream',
                   'void',
                   [param(ostream_ref, 'os'), param('uint16_t', 'indent')],
                   is_pure_virtual=True, is_const=True, is_virtual=True)
    # ns3::FlowId GetNewFlowId()  [protected]
    cls.add_method('GetNewFlowId',
                   'ns3::FlowId',
                   [],
                   visibility='protected')
    # void Indent(std::ostream & os, uint16_t level) const  [protected]
    cls.add_method('Indent',
                   'void',
                   [param(ostream_ref, 'os'), param('uint16_t', 'level')],
                   is_const=True, visibility='protected')
def register_Ns3FlowMonitor_methods(root_module, cls):
    """Register Python bindings for the ns3::FlowMonitor class.

    Wraps the constructors, probe/classifier registration, per-flow
    statistics accessors, the Report* callbacks invoked by probes, XML
    serialization, and the Start/Stop scheduling helpers.
    NOTE(review): the C++ signatures use the ns3::FlowId/ns3::FlowPacketId
    typedefs; the generator emitted the underlying uint32_t here.
    """
    ## flow-monitor.h (module 'flow-monitor'): ns3::FlowMonitor::FlowMonitor(ns3::FlowMonitor const & arg0) [constructor]
    cls.add_constructor([param('ns3::FlowMonitor const &', 'arg0')])
    ## flow-monitor.h (module 'flow-monitor'): ns3::FlowMonitor::FlowMonitor() [constructor]
    cls.add_constructor([])
    ## flow-monitor.h (module 'flow-monitor'): void ns3::FlowMonitor::AddFlowClassifier(ns3::Ptr<ns3::FlowClassifier> classifier) [member function]
    cls.add_method('AddFlowClassifier',
                   'void',
                   [param('ns3::Ptr< ns3::FlowClassifier >', 'classifier')])
    ## flow-monitor.h (module 'flow-monitor'): void ns3::FlowMonitor::AddProbe(ns3::Ptr<ns3::FlowProbe> probe) [member function]
    cls.add_method('AddProbe',
                   'void',
                   [param('ns3::Ptr< ns3::FlowProbe >', 'probe')])
    ## flow-monitor.h (module 'flow-monitor'): void ns3::FlowMonitor::CheckForLostPackets() [member function]
    cls.add_method('CheckForLostPackets',
                   'void',
                   [])
    ## flow-monitor.h (module 'flow-monitor'): void ns3::FlowMonitor::CheckForLostPackets(ns3::Time maxDelay) [member function]
    cls.add_method('CheckForLostPackets',
                   'void',
                   [param('ns3::Time', 'maxDelay')])
    ## flow-monitor.h (module 'flow-monitor'): ns3::FlowMonitor::FlowProbeContainer const & ns3::FlowMonitor::GetAllProbes() const [member function]
    cls.add_method('GetAllProbes',
                   'ns3::FlowMonitor::FlowProbeContainer const &',
                   [],
                   is_const=True)
    ## flow-monitor.h (module 'flow-monitor'): ns3::FlowMonitor::FlowStatsContainer const & ns3::FlowMonitor::GetFlowStats() const [member function]
    cls.add_method('GetFlowStats',
                   'ns3::FlowMonitor::FlowStatsContainer const &',
                   [],
                   is_const=True)
    ## flow-monitor.h (module 'flow-monitor'): ns3::TypeId ns3::FlowMonitor::GetInstanceTypeId() const [member function]
    cls.add_method('GetInstanceTypeId',
                   'ns3::TypeId',
                   [],
                   is_const=True, is_virtual=True)
    ## flow-monitor.h (module 'flow-monitor'): static ns3::TypeId ns3::FlowMonitor::GetTypeId() [member function]
    cls.add_method('GetTypeId',
                   'ns3::TypeId',
                   [],
                   is_static=True)
    ## flow-monitor.h (module 'flow-monitor'): void ns3::FlowMonitor::ReportDrop(ns3::Ptr<ns3::FlowProbe> probe, ns3::FlowId flowId, ns3::FlowPacketId packetId, uint32_t packetSize, uint32_t reasonCode) [member function]
    cls.add_method('ReportDrop',
                   'void',
                   [param('ns3::Ptr< ns3::FlowProbe >', 'probe'), param('uint32_t', 'flowId'), param('uint32_t', 'packetId'), param('uint32_t', 'packetSize'), param('uint32_t', 'reasonCode')])
    ## flow-monitor.h (module 'flow-monitor'): void ns3::FlowMonitor::ReportFirstTx(ns3::Ptr<ns3::FlowProbe> probe, ns3::FlowId flowId, ns3::FlowPacketId packetId, uint32_t packetSize) [member function]
    cls.add_method('ReportFirstTx',
                   'void',
                   [param('ns3::Ptr< ns3::FlowProbe >', 'probe'), param('uint32_t', 'flowId'), param('uint32_t', 'packetId'), param('uint32_t', 'packetSize')])
    ## flow-monitor.h (module 'flow-monitor'): void ns3::FlowMonitor::ReportForwarding(ns3::Ptr<ns3::FlowProbe> probe, ns3::FlowId flowId, ns3::FlowPacketId packetId, uint32_t packetSize) [member function]
    cls.add_method('ReportForwarding',
                   'void',
                   [param('ns3::Ptr< ns3::FlowProbe >', 'probe'), param('uint32_t', 'flowId'), param('uint32_t', 'packetId'), param('uint32_t', 'packetSize')])
    ## flow-monitor.h (module 'flow-monitor'): void ns3::FlowMonitor::ReportLastRx(ns3::Ptr<ns3::FlowProbe> probe, ns3::FlowId flowId, ns3::FlowPacketId packetId, uint32_t packetSize) [member function]
    cls.add_method('ReportLastRx',
                   'void',
                   [param('ns3::Ptr< ns3::FlowProbe >', 'probe'), param('uint32_t', 'flowId'), param('uint32_t', 'packetId'), param('uint32_t', 'packetSize')])
    ## flow-monitor.h (module 'flow-monitor'): void ns3::FlowMonitor::SerializeToXmlFile(std::string fileName, bool enableHistograms, bool enableProbes) [member function]
    cls.add_method('SerializeToXmlFile',
                   'void',
                   [param('std::string', 'fileName'), param('bool', 'enableHistograms'), param('bool', 'enableProbes')])
    ## flow-monitor.h (module 'flow-monitor'): void ns3::FlowMonitor::SerializeToXmlStream(std::ostream & os, uint16_t indent, bool enableHistograms, bool enableProbes) [member function]
    cls.add_method('SerializeToXmlStream',
                   'void',
                   [param('std::ostream &', 'os'), param('uint16_t', 'indent'), param('bool', 'enableHistograms'), param('bool', 'enableProbes')])
    ## flow-monitor.h (module 'flow-monitor'): std::string ns3::FlowMonitor::SerializeToXmlString(uint16_t indent, bool enableHistograms, bool enableProbes) [member function]
    cls.add_method('SerializeToXmlString',
                   'std::string',
                   [param('uint16_t', 'indent'), param('bool', 'enableHistograms'), param('bool', 'enableProbes')])
    ## flow-monitor.h (module 'flow-monitor'): void ns3::FlowMonitor::Start(ns3::Time const & time) [member function]
    cls.add_method('Start',
                   'void',
                   [param('ns3::Time const &', 'time')])
    ## flow-monitor.h (module 'flow-monitor'): void ns3::FlowMonitor::StartRightNow() [member function]
    cls.add_method('StartRightNow',
                   'void',
                   [])
    ## flow-monitor.h (module 'flow-monitor'): void ns3::FlowMonitor::Stop(ns3::Time const & time) [member function]
    cls.add_method('Stop',
                   'void',
                   [param('ns3::Time const &', 'time')])
    ## flow-monitor.h (module 'flow-monitor'): void ns3::FlowMonitor::StopRightNow() [member function]
    cls.add_method('StopRightNow',
                   'void',
                   [])
    ## flow-monitor.h (module 'flow-monitor'): void ns3::FlowMonitor::DoDispose() [member function]
    cls.add_method('DoDispose',
                   'void',
                   [],
                   visibility='protected', is_virtual=True)
    ## flow-monitor.h (module 'flow-monitor'): void ns3::FlowMonitor::NotifyConstructionCompleted() [member function]
    cls.add_method('NotifyConstructionCompleted',
                   'void',
                   [],
                   visibility='protected', is_virtual=True)
    return
def register_Ns3FlowMonitorFlowStats_methods(root_module, cls):
    """Register Python bindings for the ns3::FlowMonitor::FlowStats struct.

    FlowStats is a plain data record: two constructors plus public
    mutable fields exposed as instance attributes.
    """
    # Constructors: default first, then copy (registration order preserved).
    cls.add_constructor([])
    cls.add_constructor([param('ns3::FlowMonitor::FlowStats const &', 'arg0')])
    # Public mutable data members, in the order the generator emitted them.
    for member_name, cpp_type in (
            ('bytesDropped', 'std::vector< unsigned long long >'),
            ('delayHistogram', 'ns3::Histogram'),
            ('delaySum', 'ns3::Time'),
            ('flowInterruptionsHistogram', 'ns3::Histogram'),
            ('jitterHistogram', 'ns3::Histogram'),
            ('jitterSum', 'ns3::Time'),
            ('lastDelay', 'ns3::Time'),
            ('lostPackets', 'uint32_t'),
            ('packetSizeHistogram', 'ns3::Histogram'),
            ('packetsDropped', 'std::vector< unsigned int >'),
            ('rxBytes', 'uint64_t'),
            ('rxPackets', 'uint32_t'),
            ('timeFirstRxPacket', 'ns3::Time'),
            ('timeFirstTxPacket', 'ns3::Time'),
            ('timeLastRxPacket', 'ns3::Time'),
            ('timeLastTxPacket', 'ns3::Time'),
            ('timesForwarded', 'uint32_t'),
            ('txBytes', 'uint64_t'),
            ('txPackets', 'uint32_t'),
    ):
        cls.add_instance_attribute(member_name, cpp_type, is_const=False)
    return
def register_Ns3FlowProbe_methods(root_module, cls):
    """Register Python bindings for the ns3::FlowProbe class
    (module 'flow-monitor').

    NOTE(review): the C++ signatures use the ns3::FlowId typedef; the
    generator emitted the underlying uint32_t here.
    """
    # Per-flow statistics accumulation entry points.
    cls.add_method('AddPacketDropStats', 'void',
                   [param('uint32_t', 'flowId'),
                    param('uint32_t', 'packetSize'),
                    param('uint32_t', 'reasonCode')])
    cls.add_method('AddPacketStats', 'void',
                   [param('uint32_t', 'flowId'),
                    param('uint32_t', 'packetSize'),
                    param('ns3::Time', 'delayFromFirstProbe')])
    # Read-only access to the accumulated statistics.
    cls.add_method('GetStats', 'ns3::FlowProbe::Stats', [],
                   is_const=True)
    cls.add_method('GetTypeId', 'ns3::TypeId', [],
                   is_static=True)
    cls.add_method('SerializeToXmlStream', 'void',
                   [param('std::ostream &', 'os'),
                    param('uint16_t', 'indent'),
                    param('uint32_t', 'index')],
                   is_const=True)
    # Construction and teardown are restricted to subclasses.
    cls.add_constructor([param('ns3::Ptr< ns3::FlowMonitor >', 'flowMonitor')],
                        visibility='protected')
    cls.add_method('DoDispose', 'void', [],
                   visibility='protected', is_virtual=True)
    return
def register_Ns3FlowProbeFlowStats_methods(root_module, cls):
    """Register Python bindings for the ns3::FlowProbe::FlowStats struct."""
    # Constructors: copy first, then default (registration order preserved).
    cls.add_constructor([param('ns3::FlowProbe::FlowStats const &', 'arg0')])
    cls.add_constructor([])
    # Public mutable data members.
    for member_name, cpp_type in (
            ('bytes', 'uint64_t'),
            ('bytesDropped', 'std::vector< unsigned long long >'),
            ('delayFromFirstProbeSum', 'ns3::Time'),
            ('packets', 'uint32_t'),
            ('packetsDropped', 'std::vector< unsigned int >'),
    ):
        cls.add_instance_attribute(member_name, cpp_type, is_const=False)
    return
def register_Ns3Ipv4_methods(root_module, cls):
    """Register Python bindings for the abstract ns3::Ipv4 base class.

    Registers the copy/default constructors and the (almost entirely
    pure-virtual) interface: interface/address management, L4-protocol
    insertion/removal, routing-protocol wiring, the send paths, the
    IF_ANY constant, and the private pure-virtual Get/Set hooks for
    IpForward and WeakEsModel.
    """
    ## ipv4.h (module 'internet'): ns3::Ipv4::Ipv4(ns3::Ipv4 const & arg0) [constructor]
    cls.add_constructor([param('ns3::Ipv4 const &', 'arg0')])
    ## ipv4.h (module 'internet'): ns3::Ipv4::Ipv4() [constructor]
    cls.add_constructor([])
    ## ipv4.h (module 'internet'): bool ns3::Ipv4::AddAddress(uint32_t interface, ns3::Ipv4InterfaceAddress address) [member function]
    cls.add_method('AddAddress',
                   'bool',
                   [param('uint32_t', 'interface'), param('ns3::Ipv4InterfaceAddress', 'address')],
                   is_pure_virtual=True, is_virtual=True)
    ## ipv4.h (module 'internet'): uint32_t ns3::Ipv4::AddInterface(ns3::Ptr<ns3::NetDevice> device) [member function]
    cls.add_method('AddInterface',
                   'uint32_t',
                   [param('ns3::Ptr< ns3::NetDevice >', 'device')],
                   is_pure_virtual=True, is_virtual=True)
    ## ipv4.h (module 'internet'): ns3::Ptr<ns3::Socket> ns3::Ipv4::CreateRawSocket() [member function]
    cls.add_method('CreateRawSocket',
                   'ns3::Ptr< ns3::Socket >',
                   [],
                   is_pure_virtual=True, is_virtual=True)
    ## ipv4.h (module 'internet'): void ns3::Ipv4::DeleteRawSocket(ns3::Ptr<ns3::Socket> socket) [member function]
    cls.add_method('DeleteRawSocket',
                   'void',
                   [param('ns3::Ptr< ns3::Socket >', 'socket')],
                   is_pure_virtual=True, is_virtual=True)
    ## ipv4.h (module 'internet'): ns3::Ipv4InterfaceAddress ns3::Ipv4::GetAddress(uint32_t interface, uint32_t addressIndex) const [member function]
    cls.add_method('GetAddress',
                   'ns3::Ipv4InterfaceAddress',
                   [param('uint32_t', 'interface'), param('uint32_t', 'addressIndex')],
                   is_pure_virtual=True, is_const=True, is_virtual=True)
    ## ipv4.h (module 'internet'): int32_t ns3::Ipv4::GetInterfaceForAddress(ns3::Ipv4Address address) const [member function]
    cls.add_method('GetInterfaceForAddress',
                   'int32_t',
                   [param('ns3::Ipv4Address', 'address')],
                   is_pure_virtual=True, is_const=True, is_virtual=True)
    ## ipv4.h (module 'internet'): int32_t ns3::Ipv4::GetInterfaceForDevice(ns3::Ptr<const ns3::NetDevice> device) const [member function]
    cls.add_method('GetInterfaceForDevice',
                   'int32_t',
                   [param('ns3::Ptr< ns3::NetDevice const >', 'device')],
                   is_pure_virtual=True, is_const=True, is_virtual=True)
    ## ipv4.h (module 'internet'): int32_t ns3::Ipv4::GetInterfaceForPrefix(ns3::Ipv4Address address, ns3::Ipv4Mask mask) const [member function]
    cls.add_method('GetInterfaceForPrefix',
                   'int32_t',
                   [param('ns3::Ipv4Address', 'address'), param('ns3::Ipv4Mask', 'mask')],
                   is_pure_virtual=True, is_const=True, is_virtual=True)
    ## ipv4.h (module 'internet'): uint16_t ns3::Ipv4::GetMetric(uint32_t interface) const [member function]
    cls.add_method('GetMetric',
                   'uint16_t',
                   [param('uint32_t', 'interface')],
                   is_pure_virtual=True, is_const=True, is_virtual=True)
    ## ipv4.h (module 'internet'): uint16_t ns3::Ipv4::GetMtu(uint32_t interface) const [member function]
    cls.add_method('GetMtu',
                   'uint16_t',
                   [param('uint32_t', 'interface')],
                   is_pure_virtual=True, is_const=True, is_virtual=True)
    ## ipv4.h (module 'internet'): uint32_t ns3::Ipv4::GetNAddresses(uint32_t interface) const [member function]
    cls.add_method('GetNAddresses',
                   'uint32_t',
                   [param('uint32_t', 'interface')],
                   is_pure_virtual=True, is_const=True, is_virtual=True)
    ## ipv4.h (module 'internet'): uint32_t ns3::Ipv4::GetNInterfaces() const [member function]
    cls.add_method('GetNInterfaces',
                   'uint32_t',
                   [],
                   is_pure_virtual=True, is_const=True, is_virtual=True)
    ## ipv4.h (module 'internet'): ns3::Ptr<ns3::NetDevice> ns3::Ipv4::GetNetDevice(uint32_t interface) [member function]
    cls.add_method('GetNetDevice',
                   'ns3::Ptr< ns3::NetDevice >',
                   [param('uint32_t', 'interface')],
                   is_pure_virtual=True, is_virtual=True)
    ## ipv4.h (module 'internet'): ns3::Ptr<ns3::IpL4Protocol> ns3::Ipv4::GetProtocol(int protocolNumber) const [member function]
    cls.add_method('GetProtocol',
                   'ns3::Ptr< ns3::IpL4Protocol >',
                   [param('int', 'protocolNumber')],
                   is_pure_virtual=True, is_const=True, is_virtual=True)
    ## ipv4.h (module 'internet'): ns3::Ptr<ns3::IpL4Protocol> ns3::Ipv4::GetProtocol(int protocolNumber, int32_t interfaceIndex) const [member function]
    cls.add_method('GetProtocol',
                   'ns3::Ptr< ns3::IpL4Protocol >',
                   [param('int', 'protocolNumber'), param('int32_t', 'interfaceIndex')],
                   is_pure_virtual=True, is_const=True, is_virtual=True)
    ## ipv4.h (module 'internet'): ns3::Ptr<ns3::Ipv4RoutingProtocol> ns3::Ipv4::GetRoutingProtocol() const [member function]
    cls.add_method('GetRoutingProtocol',
                   'ns3::Ptr< ns3::Ipv4RoutingProtocol >',
                   [],
                   is_pure_virtual=True, is_const=True, is_virtual=True)
    ## ipv4.h (module 'internet'): static ns3::TypeId ns3::Ipv4::GetTypeId() [member function]
    cls.add_method('GetTypeId',
                   'ns3::TypeId',
                   [],
                   is_static=True)
    ## ipv4.h (module 'internet'): void ns3::Ipv4::Insert(ns3::Ptr<ns3::IpL4Protocol> protocol) [member function]
    cls.add_method('Insert',
                   'void',
                   [param('ns3::Ptr< ns3::IpL4Protocol >', 'protocol')],
                   is_pure_virtual=True, is_virtual=True)
    ## ipv4.h (module 'internet'): void ns3::Ipv4::Insert(ns3::Ptr<ns3::IpL4Protocol> protocol, uint32_t interfaceIndex) [member function]
    cls.add_method('Insert',
                   'void',
                   [param('ns3::Ptr< ns3::IpL4Protocol >', 'protocol'), param('uint32_t', 'interfaceIndex')],
                   is_pure_virtual=True, is_virtual=True)
    ## ipv4.h (module 'internet'): bool ns3::Ipv4::IsDestinationAddress(ns3::Ipv4Address address, uint32_t iif) const [member function]
    cls.add_method('IsDestinationAddress',
                   'bool',
                   [param('ns3::Ipv4Address', 'address'), param('uint32_t', 'iif')],
                   is_pure_virtual=True, is_const=True, is_virtual=True)
    ## ipv4.h (module 'internet'): bool ns3::Ipv4::IsForwarding(uint32_t interface) const [member function]
    cls.add_method('IsForwarding',
                   'bool',
                   [param('uint32_t', 'interface')],
                   is_pure_virtual=True, is_const=True, is_virtual=True)
    ## ipv4.h (module 'internet'): bool ns3::Ipv4::IsUp(uint32_t interface) const [member function]
    cls.add_method('IsUp',
                   'bool',
                   [param('uint32_t', 'interface')],
                   is_pure_virtual=True, is_const=True, is_virtual=True)
    ## ipv4.h (module 'internet'): void ns3::Ipv4::Remove(ns3::Ptr<ns3::IpL4Protocol> protocol) [member function]
    cls.add_method('Remove',
                   'void',
                   [param('ns3::Ptr< ns3::IpL4Protocol >', 'protocol')],
                   is_pure_virtual=True, is_virtual=True)
    ## ipv4.h (module 'internet'): void ns3::Ipv4::Remove(ns3::Ptr<ns3::IpL4Protocol> protocol, uint32_t interfaceIndex) [member function]
    cls.add_method('Remove',
                   'void',
                   [param('ns3::Ptr< ns3::IpL4Protocol >', 'protocol'), param('uint32_t', 'interfaceIndex')],
                   is_pure_virtual=True, is_virtual=True)
    ## ipv4.h (module 'internet'): bool ns3::Ipv4::RemoveAddress(uint32_t interface, uint32_t addressIndex) [member function]
    cls.add_method('RemoveAddress',
                   'bool',
                   [param('uint32_t', 'interface'), param('uint32_t', 'addressIndex')],
                   is_pure_virtual=True, is_virtual=True)
    ## ipv4.h (module 'internet'): bool ns3::Ipv4::RemoveAddress(uint32_t interface, ns3::Ipv4Address address) [member function]
    cls.add_method('RemoveAddress',
                   'bool',
                   [param('uint32_t', 'interface'), param('ns3::Ipv4Address', 'address')],
                   is_pure_virtual=True, is_virtual=True)
    ## ipv4.h (module 'internet'): ns3::Ipv4Address ns3::Ipv4::SelectSourceAddress(ns3::Ptr<const ns3::NetDevice> device, ns3::Ipv4Address dst, ns3::Ipv4InterfaceAddress::InterfaceAddressScope_e scope) [member function]
    cls.add_method('SelectSourceAddress',
                   'ns3::Ipv4Address',
                   [param('ns3::Ptr< ns3::NetDevice const >', 'device'), param('ns3::Ipv4Address', 'dst'), param('ns3::Ipv4InterfaceAddress::InterfaceAddressScope_e', 'scope')],
                   is_pure_virtual=True, is_virtual=True)
    ## ipv4.h (module 'internet'): void ns3::Ipv4::Send(ns3::Ptr<ns3::Packet> packet, ns3::Ipv4Address source, ns3::Ipv4Address destination, uint8_t protocol, ns3::Ptr<ns3::Ipv4Route> route) [member function]
    cls.add_method('Send',
                   'void',
                   [param('ns3::Ptr< ns3::Packet >', 'packet'), param('ns3::Ipv4Address', 'source'), param('ns3::Ipv4Address', 'destination'), param('uint8_t', 'protocol'), param('ns3::Ptr< ns3::Ipv4Route >', 'route')],
                   is_pure_virtual=True, is_virtual=True)
    ## ipv4.h (module 'internet'): void ns3::Ipv4::SendWithHeader(ns3::Ptr<ns3::Packet> packet, ns3::Ipv4Header ipHeader, ns3::Ptr<ns3::Ipv4Route> route) [member function]
    cls.add_method('SendWithHeader',
                   'void',
                   [param('ns3::Ptr< ns3::Packet >', 'packet'), param('ns3::Ipv4Header', 'ipHeader'), param('ns3::Ptr< ns3::Ipv4Route >', 'route')],
                   is_pure_virtual=True, is_virtual=True)
    ## ipv4.h (module 'internet'): void ns3::Ipv4::SetDown(uint32_t interface) [member function]
    cls.add_method('SetDown',
                   'void',
                   [param('uint32_t', 'interface')],
                   is_pure_virtual=True, is_virtual=True)
    ## ipv4.h (module 'internet'): void ns3::Ipv4::SetForwarding(uint32_t interface, bool val) [member function]
    cls.add_method('SetForwarding',
                   'void',
                   [param('uint32_t', 'interface'), param('bool', 'val')],
                   is_pure_virtual=True, is_virtual=True)
    ## ipv4.h (module 'internet'): void ns3::Ipv4::SetMetric(uint32_t interface, uint16_t metric) [member function]
    cls.add_method('SetMetric',
                   'void',
                   [param('uint32_t', 'interface'), param('uint16_t', 'metric')],
                   is_pure_virtual=True, is_virtual=True)
    ## ipv4.h (module 'internet'): void ns3::Ipv4::SetRoutingProtocol(ns3::Ptr<ns3::Ipv4RoutingProtocol> routingProtocol) [member function]
    cls.add_method('SetRoutingProtocol',
                   'void',
                   [param('ns3::Ptr< ns3::Ipv4RoutingProtocol >', 'routingProtocol')],
                   is_pure_virtual=True, is_virtual=True)
    ## ipv4.h (module 'internet'): void ns3::Ipv4::SetUp(uint32_t interface) [member function]
    cls.add_method('SetUp',
                   'void',
                   [param('uint32_t', 'interface')],
                   is_pure_virtual=True, is_virtual=True)
    ## ipv4.h (module 'internet'): ns3::Ipv4Address ns3::Ipv4::SourceAddressSelection(uint32_t interface, ns3::Ipv4Address dest) [member function]
    cls.add_method('SourceAddressSelection',
                   'ns3::Ipv4Address',
                   [param('uint32_t', 'interface'), param('ns3::Ipv4Address', 'dest')],
                   is_pure_virtual=True, is_virtual=True)
    ## ipv4.h (module 'internet'): ns3::Ipv4::IF_ANY [variable]
    cls.add_static_attribute('IF_ANY', 'uint32_t const', is_const=True)
    ## ipv4.h (module 'internet'): bool ns3::Ipv4::GetIpForward() const [member function]
    cls.add_method('GetIpForward',
                   'bool',
                   [],
                   is_pure_virtual=True, is_const=True, visibility='private', is_virtual=True)
    ## ipv4.h (module 'internet'): bool ns3::Ipv4::GetWeakEsModel() const [member function]
    cls.add_method('GetWeakEsModel',
                   'bool',
                   [],
                   is_pure_virtual=True, is_const=True, visibility='private', is_virtual=True)
    ## ipv4.h (module 'internet'): void ns3::Ipv4::SetIpForward(bool forward) [member function]
    cls.add_method('SetIpForward',
                   'void',
                   [param('bool', 'forward')],
                   is_pure_virtual=True, visibility='private', is_virtual=True)
    ## ipv4.h (module 'internet'): void ns3::Ipv4::SetWeakEsModel(bool model) [member function]
    cls.add_method('SetWeakEsModel',
                   'void',
                   [param('bool', 'model')],
                   is_pure_virtual=True, visibility='private', is_virtual=True)
    return
def register_Ns3Ipv4AddressChecker_methods(root_module, cls):
    """Register Python bindings for ns3::Ipv4AddressChecker.

    Only the default and copy constructors are exposed.
    """
    cls.add_constructor([])
    cls.add_constructor([param('ns3::Ipv4AddressChecker const &', 'arg0')])
    return
def register_Ns3Ipv4AddressValue_methods(root_module, cls):
    """Register Python bindings for the ns3::Ipv4AddressValue attribute
    wrapper (module 'network')."""
    # Hoist the repeated checker type spelling used by (de)serialization.
    checker_t = 'ns3::Ptr< ns3::AttributeChecker const >'
    # Constructors: default, from-address, copy (registration order preserved).
    cls.add_constructor([])
    cls.add_constructor([param('ns3::Ipv4Address const &', 'value')])
    cls.add_constructor([param('ns3::Ipv4AddressValue const &', 'arg0')])
    # Virtual AttributeValue interface.
    cls.add_method('Copy', 'ns3::Ptr< ns3::AttributeValue >', [],
                   is_const=True, is_virtual=True)
    cls.add_method('DeserializeFromString', 'bool',
                   [param('std::string', 'value'), param(checker_t, 'checker')],
                   is_virtual=True)
    # Plain accessor for the wrapped address value.
    cls.add_method('Get', 'ns3::Ipv4Address', [],
                   is_const=True)
    cls.add_method('SerializeToString', 'std::string',
                   [param(checker_t, 'checker')],
                   is_const=True, is_virtual=True)
    # Plain mutator for the wrapped address value.
    cls.add_method('Set', 'void',
                   [param('ns3::Ipv4Address const &', 'value')])
    return
def register_Ns3Ipv4FlowClassifier_methods(root_module, cls):
    """Register Python bindings for the ns3::Ipv4FlowClassifier class.

    NOTE(review): the C++ signatures use the ns3::FlowId typedef; the
    generator emitted the underlying uint32_t here.
    """
    cls.add_constructor([])
    # Classifies an IPv4 header + payload; results come back via out-params.
    cls.add_method('Classify', 'bool',
                   [param('ns3::Ipv4Header const &', 'ipHeader'),
                    param('ns3::Ptr< ns3::Packet const >', 'ipPayload'),
                    param('uint32_t *', 'out_flowId'),
                    param('uint32_t *', 'out_packetId')])
    cls.add_method('FindFlow', 'ns3::Ipv4FlowClassifier::FiveTuple',
                   [param('uint32_t', 'flowId')],
                   is_const=True)
    cls.add_method('GetDscpCounts',
                   'std::vector< std::pair< ns3::Ipv4Header::DscpType, unsigned int > >',
                   [param('uint32_t', 'flowId')],
                   is_const=True)
    cls.add_method('SerializeToXmlStream', 'void',
                   [param('std::ostream &', 'os'), param('uint16_t', 'indent')],
                   is_const=True, is_virtual=True)
    return
def register_Ns3Ipv4FlowClassifierFiveTuple_methods(root_module, cls):
    """Register Python bindings for ns3::Ipv4FlowClassifier::FiveTuple.

    A plain value struct: comparison operators, two constructors, and the
    five public fields of the classic (src, dst, proto, sport, dport) key.
    """
    # Comparison operators: '<' first, then '==' (registration order preserved).
    for operator_symbol in ('<', '=='):
        cls.add_binary_comparison_operator(operator_symbol)
    # Constructors: default first, then copy.
    cls.add_constructor([])
    cls.add_constructor([param('ns3::Ipv4FlowClassifier::FiveTuple const &', 'arg0')])
    # Public mutable data members.
    for member_name, cpp_type in (
            ('destinationAddress', 'ns3::Ipv4Address'),
            ('destinationPort', 'uint16_t'),
            ('protocol', 'uint8_t'),
            ('sourceAddress', 'ns3::Ipv4Address'),
            ('sourcePort', 'uint16_t'),
    ):
        cls.add_instance_attribute(member_name, cpp_type, is_const=False)
    return
def register_Ns3Ipv4FlowClassifierSortByCount_methods(root_module, cls):
    """Register Python bindings for ns3::Ipv4FlowClassifier::SortByCount,
    a comparison functor exposed to Python as a callable."""
    cls.add_constructor([])
    cls.add_constructor([param('ns3::Ipv4FlowClassifier::SortByCount const &', 'arg0')])
    # operator() maps to Python's __call__; both operands share one pair type.
    pair_t = 'std::pair< ns3::Ipv4Header::DscpType, unsigned int >'
    cls.add_method('operator()', 'bool',
                   [param(pair_t, 'left'), param(pair_t, 'right')],
                   custom_name=u'__call__')
    return
def register_Ns3Ipv4FlowProbe_methods(root_module, cls):
    """Register Python bindings for the ns3::Ipv4FlowProbe class."""
    # Sole public constructor wires the probe to a monitor, classifier, node.
    cls.add_constructor([param('ns3::Ptr< ns3::FlowMonitor >', 'monitor'),
                         param('ns3::Ptr< ns3::Ipv4FlowClassifier >', 'classifier'),
                         param('ns3::Ptr< ns3::Node >', 'node')])
    cls.add_method('GetTypeId', 'ns3::TypeId', [],
                   is_static=True)
    cls.add_method('DoDispose', 'void', [],
                   visibility='protected', is_virtual=True)
    return
def register_Ns3Ipv4L3Protocol_methods(root_module, cls):
    """Register bindings for ns3::Ipv4L3Protocol (ipv4-l3-protocol.h, module 'internet').

    Each add_constructor/add_method/add_static_attribute call below mirrors one
    C++ member; the ``##`` comment above each call records the original C++
    declaration. The registration lines look auto-generated (pybindgen API-scan
    style) and are left byte-identical apart from this docstring.
    """
    ## ipv4-l3-protocol.h (module 'internet'): ns3::Ipv4L3Protocol::Ipv4L3Protocol() [constructor]
    cls.add_constructor([])
    ## ipv4-l3-protocol.h (module 'internet'): bool ns3::Ipv4L3Protocol::AddAddress(uint32_t i, ns3::Ipv4InterfaceAddress address) [member function]
    cls.add_method('AddAddress',
                   'bool',
                   [param('uint32_t', 'i'), param('ns3::Ipv4InterfaceAddress', 'address')],
                   is_virtual=True)
    ## ipv4-l3-protocol.h (module 'internet'): uint32_t ns3::Ipv4L3Protocol::AddInterface(ns3::Ptr<ns3::NetDevice> device) [member function]
    cls.add_method('AddInterface',
                   'uint32_t',
                   [param('ns3::Ptr< ns3::NetDevice >', 'device')],
                   is_virtual=True)
    ## ipv4-l3-protocol.h (module 'internet'): ns3::Ptr<ns3::Socket> ns3::Ipv4L3Protocol::CreateRawSocket() [member function]
    cls.add_method('CreateRawSocket',
                   'ns3::Ptr< ns3::Socket >',
                   [],
                   is_virtual=True)
    ## ipv4-l3-protocol.h (module 'internet'): void ns3::Ipv4L3Protocol::DeleteRawSocket(ns3::Ptr<ns3::Socket> socket) [member function]
    cls.add_method('DeleteRawSocket',
                   'void',
                   [param('ns3::Ptr< ns3::Socket >', 'socket')],
                   is_virtual=True)
    ## ipv4-l3-protocol.h (module 'internet'): ns3::Ipv4InterfaceAddress ns3::Ipv4L3Protocol::GetAddress(uint32_t interfaceIndex, uint32_t addressIndex) const [member function]
    cls.add_method('GetAddress',
                   'ns3::Ipv4InterfaceAddress',
                   [param('uint32_t', 'interfaceIndex'), param('uint32_t', 'addressIndex')],
                   is_const=True, is_virtual=True)
    ## ipv4-l3-protocol.h (module 'internet'): ns3::Ptr<ns3::Ipv4Interface> ns3::Ipv4L3Protocol::GetInterface(uint32_t i) const [member function]
    cls.add_method('GetInterface',
                   'ns3::Ptr< ns3::Ipv4Interface >',
                   [param('uint32_t', 'i')],
                   is_const=True)
    ## ipv4-l3-protocol.h (module 'internet'): int32_t ns3::Ipv4L3Protocol::GetInterfaceForAddress(ns3::Ipv4Address addr) const [member function]
    cls.add_method('GetInterfaceForAddress',
                   'int32_t',
                   [param('ns3::Ipv4Address', 'addr')],
                   is_const=True, is_virtual=True)
    ## ipv4-l3-protocol.h (module 'internet'): int32_t ns3::Ipv4L3Protocol::GetInterfaceForDevice(ns3::Ptr<const ns3::NetDevice> device) const [member function]
    cls.add_method('GetInterfaceForDevice',
                   'int32_t',
                   [param('ns3::Ptr< ns3::NetDevice const >', 'device')],
                   is_const=True, is_virtual=True)
    ## ipv4-l3-protocol.h (module 'internet'): int32_t ns3::Ipv4L3Protocol::GetInterfaceForPrefix(ns3::Ipv4Address addr, ns3::Ipv4Mask mask) const [member function]
    cls.add_method('GetInterfaceForPrefix',
                   'int32_t',
                   [param('ns3::Ipv4Address', 'addr'), param('ns3::Ipv4Mask', 'mask')],
                   is_const=True, is_virtual=True)
    ## ipv4-l3-protocol.h (module 'internet'): uint16_t ns3::Ipv4L3Protocol::GetMetric(uint32_t i) const [member function]
    cls.add_method('GetMetric',
                   'uint16_t',
                   [param('uint32_t', 'i')],
                   is_const=True, is_virtual=True)
    ## ipv4-l3-protocol.h (module 'internet'): uint16_t ns3::Ipv4L3Protocol::GetMtu(uint32_t i) const [member function]
    cls.add_method('GetMtu',
                   'uint16_t',
                   [param('uint32_t', 'i')],
                   is_const=True, is_virtual=True)
    ## ipv4-l3-protocol.h (module 'internet'): uint32_t ns3::Ipv4L3Protocol::GetNAddresses(uint32_t interface) const [member function]
    cls.add_method('GetNAddresses',
                   'uint32_t',
                   [param('uint32_t', 'interface')],
                   is_const=True, is_virtual=True)
    ## ipv4-l3-protocol.h (module 'internet'): uint32_t ns3::Ipv4L3Protocol::GetNInterfaces() const [member function]
    cls.add_method('GetNInterfaces',
                   'uint32_t',
                   [],
                   is_const=True, is_virtual=True)
    ## ipv4-l3-protocol.h (module 'internet'): ns3::Ptr<ns3::NetDevice> ns3::Ipv4L3Protocol::GetNetDevice(uint32_t i) [member function]
    cls.add_method('GetNetDevice',
                   'ns3::Ptr< ns3::NetDevice >',
                   [param('uint32_t', 'i')],
                   is_virtual=True)
    ## ipv4-l3-protocol.h (module 'internet'): ns3::Ptr<ns3::IpL4Protocol> ns3::Ipv4L3Protocol::GetProtocol(int protocolNumber) const [member function]
    cls.add_method('GetProtocol',
                   'ns3::Ptr< ns3::IpL4Protocol >',
                   [param('int', 'protocolNumber')],
                   is_const=True, is_virtual=True)
    ## ipv4-l3-protocol.h (module 'internet'): ns3::Ptr<ns3::IpL4Protocol> ns3::Ipv4L3Protocol::GetProtocol(int protocolNumber, int32_t interfaceIndex) const [member function]
    cls.add_method('GetProtocol',
                   'ns3::Ptr< ns3::IpL4Protocol >',
                   [param('int', 'protocolNumber'), param('int32_t', 'interfaceIndex')],
                   is_const=True, is_virtual=True)
    ## ipv4-l3-protocol.h (module 'internet'): ns3::Ptr<ns3::Ipv4RoutingProtocol> ns3::Ipv4L3Protocol::GetRoutingProtocol() const [member function]
    cls.add_method('GetRoutingProtocol',
                   'ns3::Ptr< ns3::Ipv4RoutingProtocol >',
                   [],
                   is_const=True, is_virtual=True)
    ## ipv4-l3-protocol.h (module 'internet'): static ns3::TypeId ns3::Ipv4L3Protocol::GetTypeId() [member function]
    cls.add_method('GetTypeId',
                   'ns3::TypeId',
                   [],
                   is_static=True)
    ## ipv4-l3-protocol.h (module 'internet'): void ns3::Ipv4L3Protocol::Insert(ns3::Ptr<ns3::IpL4Protocol> protocol) [member function]
    cls.add_method('Insert',
                   'void',
                   [param('ns3::Ptr< ns3::IpL4Protocol >', 'protocol')],
                   is_virtual=True)
    ## ipv4-l3-protocol.h (module 'internet'): void ns3::Ipv4L3Protocol::Insert(ns3::Ptr<ns3::IpL4Protocol> protocol, uint32_t interfaceIndex) [member function]
    cls.add_method('Insert',
                   'void',
                   [param('ns3::Ptr< ns3::IpL4Protocol >', 'protocol'), param('uint32_t', 'interfaceIndex')],
                   is_virtual=True)
    ## ipv4-l3-protocol.h (module 'internet'): bool ns3::Ipv4L3Protocol::IsDestinationAddress(ns3::Ipv4Address address, uint32_t iif) const [member function]
    cls.add_method('IsDestinationAddress',
                   'bool',
                   [param('ns3::Ipv4Address', 'address'), param('uint32_t', 'iif')],
                   is_const=True, is_virtual=True)
    ## ipv4-l3-protocol.h (module 'internet'): bool ns3::Ipv4L3Protocol::IsForwarding(uint32_t i) const [member function]
    cls.add_method('IsForwarding',
                   'bool',
                   [param('uint32_t', 'i')],
                   is_const=True, is_virtual=True)
    ## ipv4-l3-protocol.h (module 'internet'): bool ns3::Ipv4L3Protocol::IsUnicast(ns3::Ipv4Address ad) const [member function]
    cls.add_method('IsUnicast',
                   'bool',
                   [param('ns3::Ipv4Address', 'ad')],
                   is_const=True)
    ## ipv4-l3-protocol.h (module 'internet'): bool ns3::Ipv4L3Protocol::IsUp(uint32_t i) const [member function]
    cls.add_method('IsUp',
                   'bool',
                   [param('uint32_t', 'i')],
                   is_const=True, is_virtual=True)
    ## ipv4-l3-protocol.h (module 'internet'): void ns3::Ipv4L3Protocol::Receive(ns3::Ptr<ns3::NetDevice> device, ns3::Ptr<const ns3::Packet> p, uint16_t protocol, ns3::Address const & from, ns3::Address const & to, ns3::NetDevice::PacketType packetType) [member function]
    cls.add_method('Receive',
                   'void',
                   [param('ns3::Ptr< ns3::NetDevice >', 'device'), param('ns3::Ptr< ns3::Packet const >', 'p'), param('uint16_t', 'protocol'), param('ns3::Address const &', 'from'), param('ns3::Address const &', 'to'), param('ns3::NetDevice::PacketType', 'packetType')])
    ## ipv4-l3-protocol.h (module 'internet'): void ns3::Ipv4L3Protocol::Remove(ns3::Ptr<ns3::IpL4Protocol> protocol) [member function]
    cls.add_method('Remove',
                   'void',
                   [param('ns3::Ptr< ns3::IpL4Protocol >', 'protocol')],
                   is_virtual=True)
    ## ipv4-l3-protocol.h (module 'internet'): void ns3::Ipv4L3Protocol::Remove(ns3::Ptr<ns3::IpL4Protocol> protocol, uint32_t interfaceIndex) [member function]
    cls.add_method('Remove',
                   'void',
                   [param('ns3::Ptr< ns3::IpL4Protocol >', 'protocol'), param('uint32_t', 'interfaceIndex')],
                   is_virtual=True)
    ## ipv4-l3-protocol.h (module 'internet'): bool ns3::Ipv4L3Protocol::RemoveAddress(uint32_t interfaceIndex, uint32_t addressIndex) [member function]
    cls.add_method('RemoveAddress',
                   'bool',
                   [param('uint32_t', 'interfaceIndex'), param('uint32_t', 'addressIndex')],
                   is_virtual=True)
    ## ipv4-l3-protocol.h (module 'internet'): bool ns3::Ipv4L3Protocol::RemoveAddress(uint32_t interface, ns3::Ipv4Address address) [member function]
    cls.add_method('RemoveAddress',
                   'bool',
                   [param('uint32_t', 'interface'), param('ns3::Ipv4Address', 'address')],
                   is_virtual=True)
    ## ipv4-l3-protocol.h (module 'internet'): ns3::Ipv4Address ns3::Ipv4L3Protocol::SelectSourceAddress(ns3::Ptr<const ns3::NetDevice> device, ns3::Ipv4Address dst, ns3::Ipv4InterfaceAddress::InterfaceAddressScope_e scope) [member function]
    cls.add_method('SelectSourceAddress',
                   'ns3::Ipv4Address',
                   [param('ns3::Ptr< ns3::NetDevice const >', 'device'), param('ns3::Ipv4Address', 'dst'), param('ns3::Ipv4InterfaceAddress::InterfaceAddressScope_e', 'scope')],
                   is_virtual=True)
    ## ipv4-l3-protocol.h (module 'internet'): void ns3::Ipv4L3Protocol::Send(ns3::Ptr<ns3::Packet> packet, ns3::Ipv4Address source, ns3::Ipv4Address destination, uint8_t protocol, ns3::Ptr<ns3::Ipv4Route> route) [member function]
    cls.add_method('Send',
                   'void',
                   [param('ns3::Ptr< ns3::Packet >', 'packet'), param('ns3::Ipv4Address', 'source'), param('ns3::Ipv4Address', 'destination'), param('uint8_t', 'protocol'), param('ns3::Ptr< ns3::Ipv4Route >', 'route')],
                   is_virtual=True)
    ## ipv4-l3-protocol.h (module 'internet'): void ns3::Ipv4L3Protocol::SendWithHeader(ns3::Ptr<ns3::Packet> packet, ns3::Ipv4Header ipHeader, ns3::Ptr<ns3::Ipv4Route> route) [member function]
    cls.add_method('SendWithHeader',
                   'void',
                   [param('ns3::Ptr< ns3::Packet >', 'packet'), param('ns3::Ipv4Header', 'ipHeader'), param('ns3::Ptr< ns3::Ipv4Route >', 'route')],
                   is_virtual=True)
    ## ipv4-l3-protocol.h (module 'internet'): void ns3::Ipv4L3Protocol::SetDefaultTtl(uint8_t ttl) [member function]
    cls.add_method('SetDefaultTtl',
                   'void',
                   [param('uint8_t', 'ttl')])
    ## ipv4-l3-protocol.h (module 'internet'): void ns3::Ipv4L3Protocol::SetDown(uint32_t i) [member function]
    cls.add_method('SetDown',
                   'void',
                   [param('uint32_t', 'i')],
                   is_virtual=True)
    ## ipv4-l3-protocol.h (module 'internet'): void ns3::Ipv4L3Protocol::SetForwarding(uint32_t i, bool val) [member function]
    cls.add_method('SetForwarding',
                   'void',
                   [param('uint32_t', 'i'), param('bool', 'val')],
                   is_virtual=True)
    ## ipv4-l3-protocol.h (module 'internet'): void ns3::Ipv4L3Protocol::SetMetric(uint32_t i, uint16_t metric) [member function]
    cls.add_method('SetMetric',
                   'void',
                   [param('uint32_t', 'i'), param('uint16_t', 'metric')],
                   is_virtual=True)
    ## ipv4-l3-protocol.h (module 'internet'): void ns3::Ipv4L3Protocol::SetNode(ns3::Ptr<ns3::Node> node) [member function]
    cls.add_method('SetNode',
                   'void',
                   [param('ns3::Ptr< ns3::Node >', 'node')])
    ## ipv4-l3-protocol.h (module 'internet'): void ns3::Ipv4L3Protocol::SetRoutingProtocol(ns3::Ptr<ns3::Ipv4RoutingProtocol> routingProtocol) [member function]
    cls.add_method('SetRoutingProtocol',
                   'void',
                   [param('ns3::Ptr< ns3::Ipv4RoutingProtocol >', 'routingProtocol')],
                   is_virtual=True)
    ## ipv4-l3-protocol.h (module 'internet'): void ns3::Ipv4L3Protocol::SetUp(uint32_t i) [member function]
    cls.add_method('SetUp',
                   'void',
                   [param('uint32_t', 'i')],
                   is_virtual=True)
    ## ipv4-l3-protocol.h (module 'internet'): ns3::Ipv4Address ns3::Ipv4L3Protocol::SourceAddressSelection(uint32_t interface, ns3::Ipv4Address dest) [member function]
    cls.add_method('SourceAddressSelection',
                   'ns3::Ipv4Address',
                   [param('uint32_t', 'interface'), param('ns3::Ipv4Address', 'dest')],
                   is_virtual=True)
    ## ipv4-l3-protocol.h (module 'internet'): ns3::Ipv4L3Protocol::PROT_NUMBER [variable]
    cls.add_static_attribute('PROT_NUMBER', 'uint16_t const', is_const=True)
    ## ipv4-l3-protocol.h (module 'internet'): void ns3::Ipv4L3Protocol::DoDispose() [member function]
    cls.add_method('DoDispose',
                   'void',
                   [],
                   visibility='protected', is_virtual=True)
    ## ipv4-l3-protocol.h (module 'internet'): void ns3::Ipv4L3Protocol::NotifyNewAggregate() [member function]
    cls.add_method('NotifyNewAggregate',
                   'void',
                   [],
                   visibility='protected', is_virtual=True)
    ## ipv4-l3-protocol.h (module 'internet'): bool ns3::Ipv4L3Protocol::GetIpForward() const [member function]
    cls.add_method('GetIpForward',
                   'bool',
                   [],
                   is_const=True, visibility='private', is_virtual=True)
    ## ipv4-l3-protocol.h (module 'internet'): bool ns3::Ipv4L3Protocol::GetWeakEsModel() const [member function]
    cls.add_method('GetWeakEsModel',
                   'bool',
                   [],
                   is_const=True, visibility='private', is_virtual=True)
    ## ipv4-l3-protocol.h (module 'internet'): void ns3::Ipv4L3Protocol::SetIpForward(bool forward) [member function]
    cls.add_method('SetIpForward',
                   'void',
                   [param('bool', 'forward')],
                   visibility='private', is_virtual=True)
    ## ipv4-l3-protocol.h (module 'internet'): void ns3::Ipv4L3Protocol::SetWeakEsModel(bool model) [member function]
    cls.add_method('SetWeakEsModel',
                   'void',
                   [param('bool', 'model')],
                   visibility='private', is_virtual=True)
    return
def register_Ns3Ipv4MaskChecker_methods(root_module, cls):
    """Bind ns3::Ipv4MaskChecker (ipv4-address.h, module 'network'): default and copy constructors only."""
    for ctor_args in ([], [param('ns3::Ipv4MaskChecker const &', 'arg0')]):
        cls.add_constructor(ctor_args)
    return
def register_Ns3Ipv4MaskValue_methods(root_module, cls):
    """Bind ns3::Ipv4MaskValue (ipv4-address.h, module 'network').

    Registers the AttributeValue-style interface: constructors, Copy,
    string (de)serialization, and Get/Set for the wrapped ns3::Ipv4Mask.
    """
    # Default, value, and copy constructors (registration order preserved).
    for ctor_args in ([],
                      [param('ns3::Ipv4Mask const &', 'value')],
                      [param('ns3::Ipv4MaskValue const &', 'arg0')]):
        cls.add_constructor(ctor_args)
    # Ptr<AttributeValue> Copy() const [virtual]
    cls.add_method('Copy', 'ns3::Ptr< ns3::AttributeValue >', [],
                   is_const=True, is_virtual=True)
    # bool DeserializeFromString(std::string, Ptr<const AttributeChecker>) [virtual]
    cls.add_method('DeserializeFromString', 'bool',
                   [param('std::string', 'value'), param('ns3::Ptr< ns3::AttributeChecker const >', 'checker')],
                   is_virtual=True)
    # Ipv4Mask Get() const
    cls.add_method('Get', 'ns3::Ipv4Mask', [], is_const=True)
    # std::string SerializeToString(Ptr<const AttributeChecker>) const [virtual]
    cls.add_method('SerializeToString', 'std::string',
                   [param('ns3::Ptr< ns3::AttributeChecker const >', 'checker')],
                   is_const=True, is_virtual=True)
    # void Set(Ipv4Mask const &)
    cls.add_method('Set', 'void', [param('ns3::Ipv4Mask const &', 'value')])
    return
def register_Ns3Ipv4MulticastRoute_methods(root_module, cls):
    """Bind ns3::Ipv4MulticastRoute (ipv4-route.h, module 'internet')."""
    # Copy constructor first, then default (registration order preserved).
    cls.add_constructor([param('ns3::Ipv4MulticastRoute const &', 'arg0')])
    cls.add_constructor([])
    # Const, no-argument getters.
    for getter_name, return_type in (('GetGroup', 'ns3::Ipv4Address'),
                                     ('GetOrigin', 'ns3::Ipv4Address'),
                                     ('GetOutputTtlMap', 'std::map< unsigned int, unsigned int >'),
                                     ('GetParent', 'uint32_t')):
        cls.add_method(getter_name, return_type, [], is_const=True)
    # Setters.
    cls.add_method('SetGroup', 'void', [param('ns3::Ipv4Address const', 'group')])
    cls.add_method('SetOrigin', 'void', [param('ns3::Ipv4Address const', 'origin')])
    cls.add_method('SetOutputTtl', 'void', [param('uint32_t', 'oif'), param('uint32_t', 'ttl')])
    cls.add_method('SetParent', 'void', [param('uint32_t', 'iif')])
    # Static class constants.
    cls.add_static_attribute('MAX_INTERFACES', 'uint32_t const', is_const=True)
    cls.add_static_attribute('MAX_TTL', 'uint32_t const', is_const=True)
    return
def register_Ns3Ipv4Route_methods(root_module, cls):
    """Bind ns3::Ipv4Route (ipv4-route.h, module 'internet')."""
    # operator<< support, so routes print via the C++ stream operator.
    cls.add_output_stream_operator()
    # Copy constructor first, then default (registration order preserved).
    cls.add_constructor([param('ns3::Ipv4Route const &', 'arg0')])
    cls.add_constructor([])
    # Const, no-argument getters.
    for getter_name, return_type in (('GetDestination', 'ns3::Ipv4Address'),
                                     ('GetGateway', 'ns3::Ipv4Address'),
                                     ('GetOutputDevice', 'ns3::Ptr< ns3::NetDevice >'),
                                     ('GetSource', 'ns3::Ipv4Address')):
        cls.add_method(getter_name, return_type, [], is_const=True)
    # Setters.
    cls.add_method('SetDestination', 'void', [param('ns3::Ipv4Address', 'dest')])
    cls.add_method('SetGateway', 'void', [param('ns3::Ipv4Address', 'gw')])
    cls.add_method('SetOutputDevice', 'void', [param('ns3::Ptr< ns3::NetDevice >', 'outputDevice')])
    cls.add_method('SetSource', 'void', [param('ns3::Ipv4Address', 'src')])
    return
def register_Ns3Ipv4RoutingProtocol_methods(root_module, cls):
    """Bind the abstract base ns3::Ipv4RoutingProtocol (ipv4-routing-protocol.h, module 'internet').

    All interface methods are pure virtual; the callback parameter types for
    RouteInput are spelled out with their full expanded ns3::Callback signatures.
    """
    # Default and copy constructors.
    cls.add_constructor([])
    cls.add_constructor([param('ns3::Ipv4RoutingProtocol const &', 'arg0')])
    # static ns3::TypeId GetTypeId()
    cls.add_method('GetTypeId', 'ns3::TypeId', [], is_static=True)
    # Pure-virtual notification hooks.
    cls.add_method('NotifyAddAddress', 'void',
                   [param('uint32_t', 'interface'), param('ns3::Ipv4InterfaceAddress', 'address')],
                   is_pure_virtual=True, is_virtual=True)
    cls.add_method('NotifyInterfaceDown', 'void',
                   [param('uint32_t', 'interface')],
                   is_pure_virtual=True, is_virtual=True)
    cls.add_method('NotifyInterfaceUp', 'void',
                   [param('uint32_t', 'interface')],
                   is_pure_virtual=True, is_virtual=True)
    cls.add_method('NotifyRemoveAddress', 'void',
                   [param('uint32_t', 'interface'), param('ns3::Ipv4InterfaceAddress', 'address')],
                   is_pure_virtual=True, is_virtual=True)
    # void PrintRoutingTable(Ptr<OutputStreamWrapper>, Time::Unit = Time::Unit::S) const
    cls.add_method('PrintRoutingTable', 'void',
                   [param('ns3::Ptr< ns3::OutputStreamWrapper >', 'stream'),
                    param('ns3::Time::Unit', 'unit', default_value='::ns3::Time::Unit::S')],
                   is_pure_virtual=True, is_const=True, is_virtual=True)
    # bool RouteInput(...) — the four callbacks (unicast-forward, multicast-forward,
    # local-deliver, error) use fully expanded ns3::Callback template spellings.
    ucb_type = 'ns3::Callback< void, ns3::Ptr< ns3::Ipv4Route >, ns3::Ptr< ns3::Packet const >, ns3::Ipv4Header const &, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty >'
    mcb_type = 'ns3::Callback< void, ns3::Ptr< ns3::Ipv4MulticastRoute >, ns3::Ptr< ns3::Packet const >, ns3::Ipv4Header const &, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty >'
    lcb_type = 'ns3::Callback< void, ns3::Ptr< ns3::Packet const >, ns3::Ipv4Header const &, unsigned int, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty >'
    ecb_type = 'ns3::Callback< void, ns3::Ptr< ns3::Packet const >, ns3::Ipv4Header const &, ns3::Socket::SocketErrno, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty >'
    cls.add_method('RouteInput', 'bool',
                   [param('ns3::Ptr< ns3::Packet const >', 'p'),
                    param('ns3::Ipv4Header const &', 'header'),
                    param('ns3::Ptr< ns3::NetDevice const >', 'idev'),
                    param(ucb_type, 'ucb'),
                    param(mcb_type, 'mcb'),
                    param(lcb_type, 'lcb'),
                    param(ecb_type, 'ecb')],
                   is_pure_virtual=True, is_virtual=True)
    # Ptr<Ipv4Route> RouteOutput(Ptr<Packet>, Ipv4Header const &, Ptr<NetDevice>, SocketErrno &)
    cls.add_method('RouteOutput', 'ns3::Ptr< ns3::Ipv4Route >',
                   [param('ns3::Ptr< ns3::Packet >', 'p'),
                    param('ns3::Ipv4Header const &', 'header'),
                    param('ns3::Ptr< ns3::NetDevice >', 'oif'),
                    param('ns3::Socket::SocketErrno &', 'sockerr')],
                   is_pure_virtual=True, is_virtual=True)
    # void SetIpv4(Ptr<Ipv4>)
    cls.add_method('SetIpv4', 'void',
                   [param('ns3::Ptr< ns3::Ipv4 >', 'ipv4')],
                   is_pure_virtual=True, is_virtual=True)
    return
def register_Ns3Ipv6_methods(root_module, cls):
    """Register bindings for the abstract base ns3::Ipv6 (ipv6.h, module 'internet').

    Each add_constructor/add_method/add_static_attribute call below mirrors one
    C++ member; the ``##`` comment above each call records the original C++
    declaration. The registration lines look auto-generated (pybindgen API-scan
    style) and are left byte-identical apart from this docstring.
    """
    ## ipv6.h (module 'internet'): ns3::Ipv6::Ipv6(ns3::Ipv6 const & arg0) [constructor]
    cls.add_constructor([param('ns3::Ipv6 const &', 'arg0')])
    ## ipv6.h (module 'internet'): ns3::Ipv6::Ipv6() [constructor]
    cls.add_constructor([])
    ## ipv6.h (module 'internet'): bool ns3::Ipv6::AddAddress(uint32_t interface, ns3::Ipv6InterfaceAddress address) [member function]
    cls.add_method('AddAddress',
                   'bool',
                   [param('uint32_t', 'interface'), param('ns3::Ipv6InterfaceAddress', 'address')],
                   is_pure_virtual=True, is_virtual=True)
    ## ipv6.h (module 'internet'): uint32_t ns3::Ipv6::AddInterface(ns3::Ptr<ns3::NetDevice> device) [member function]
    cls.add_method('AddInterface',
                   'uint32_t',
                   [param('ns3::Ptr< ns3::NetDevice >', 'device')],
                   is_pure_virtual=True, is_virtual=True)
    ## ipv6.h (module 'internet'): ns3::Ipv6InterfaceAddress ns3::Ipv6::GetAddress(uint32_t interface, uint32_t addressIndex) const [member function]
    cls.add_method('GetAddress',
                   'ns3::Ipv6InterfaceAddress',
                   [param('uint32_t', 'interface'), param('uint32_t', 'addressIndex')],
                   is_pure_virtual=True, is_const=True, is_virtual=True)
    ## ipv6.h (module 'internet'): int32_t ns3::Ipv6::GetInterfaceForAddress(ns3::Ipv6Address address) const [member function]
    cls.add_method('GetInterfaceForAddress',
                   'int32_t',
                   [param('ns3::Ipv6Address', 'address')],
                   is_pure_virtual=True, is_const=True, is_virtual=True)
    ## ipv6.h (module 'internet'): int32_t ns3::Ipv6::GetInterfaceForDevice(ns3::Ptr<const ns3::NetDevice> device) const [member function]
    cls.add_method('GetInterfaceForDevice',
                   'int32_t',
                   [param('ns3::Ptr< ns3::NetDevice const >', 'device')],
                   is_pure_virtual=True, is_const=True, is_virtual=True)
    ## ipv6.h (module 'internet'): int32_t ns3::Ipv6::GetInterfaceForPrefix(ns3::Ipv6Address address, ns3::Ipv6Prefix mask) const [member function]
    cls.add_method('GetInterfaceForPrefix',
                   'int32_t',
                   [param('ns3::Ipv6Address', 'address'), param('ns3::Ipv6Prefix', 'mask')],
                   is_pure_virtual=True, is_const=True, is_virtual=True)
    ## ipv6.h (module 'internet'): uint16_t ns3::Ipv6::GetMetric(uint32_t interface) const [member function]
    cls.add_method('GetMetric',
                   'uint16_t',
                   [param('uint32_t', 'interface')],
                   is_pure_virtual=True, is_const=True, is_virtual=True)
    ## ipv6.h (module 'internet'): uint16_t ns3::Ipv6::GetMtu(uint32_t interface) const [member function]
    cls.add_method('GetMtu',
                   'uint16_t',
                   [param('uint32_t', 'interface')],
                   is_pure_virtual=True, is_const=True, is_virtual=True)
    ## ipv6.h (module 'internet'): uint32_t ns3::Ipv6::GetNAddresses(uint32_t interface) const [member function]
    cls.add_method('GetNAddresses',
                   'uint32_t',
                   [param('uint32_t', 'interface')],
                   is_pure_virtual=True, is_const=True, is_virtual=True)
    ## ipv6.h (module 'internet'): uint32_t ns3::Ipv6::GetNInterfaces() const [member function]
    cls.add_method('GetNInterfaces',
                   'uint32_t',
                   [],
                   is_pure_virtual=True, is_const=True, is_virtual=True)
    ## ipv6.h (module 'internet'): ns3::Ptr<ns3::NetDevice> ns3::Ipv6::GetNetDevice(uint32_t interface) [member function]
    cls.add_method('GetNetDevice',
                   'ns3::Ptr< ns3::NetDevice >',
                   [param('uint32_t', 'interface')],
                   is_pure_virtual=True, is_virtual=True)
    ## ipv6.h (module 'internet'): ns3::Ptr<ns3::IpL4Protocol> ns3::Ipv6::GetProtocol(int protocolNumber) const [member function]
    cls.add_method('GetProtocol',
                   'ns3::Ptr< ns3::IpL4Protocol >',
                   [param('int', 'protocolNumber')],
                   is_pure_virtual=True, is_const=True, is_virtual=True)
    ## ipv6.h (module 'internet'): ns3::Ptr<ns3::IpL4Protocol> ns3::Ipv6::GetProtocol(int protocolNumber, int32_t interfaceIndex) const [member function]
    cls.add_method('GetProtocol',
                   'ns3::Ptr< ns3::IpL4Protocol >',
                   [param('int', 'protocolNumber'), param('int32_t', 'interfaceIndex')],
                   is_pure_virtual=True, is_const=True, is_virtual=True)
    ## ipv6.h (module 'internet'): ns3::Ptr<ns3::Ipv6RoutingProtocol> ns3::Ipv6::GetRoutingProtocol() const [member function]
    cls.add_method('GetRoutingProtocol',
                   'ns3::Ptr< ns3::Ipv6RoutingProtocol >',
                   [],
                   is_pure_virtual=True, is_const=True, is_virtual=True)
    ## ipv6.h (module 'internet'): static ns3::TypeId ns3::Ipv6::GetTypeId() [member function]
    cls.add_method('GetTypeId',
                   'ns3::TypeId',
                   [],
                   is_static=True)
    ## ipv6.h (module 'internet'): void ns3::Ipv6::Insert(ns3::Ptr<ns3::IpL4Protocol> protocol) [member function]
    cls.add_method('Insert',
                   'void',
                   [param('ns3::Ptr< ns3::IpL4Protocol >', 'protocol')],
                   is_pure_virtual=True, is_virtual=True)
    ## ipv6.h (module 'internet'): void ns3::Ipv6::Insert(ns3::Ptr<ns3::IpL4Protocol> protocol, uint32_t interfaceIndex) [member function]
    cls.add_method('Insert',
                   'void',
                   [param('ns3::Ptr< ns3::IpL4Protocol >', 'protocol'), param('uint32_t', 'interfaceIndex')],
                   is_pure_virtual=True, is_virtual=True)
    ## ipv6.h (module 'internet'): bool ns3::Ipv6::IsForwarding(uint32_t interface) const [member function]
    cls.add_method('IsForwarding',
                   'bool',
                   [param('uint32_t', 'interface')],
                   is_pure_virtual=True, is_const=True, is_virtual=True)
    ## ipv6.h (module 'internet'): bool ns3::Ipv6::IsUp(uint32_t interface) const [member function]
    cls.add_method('IsUp',
                   'bool',
                   [param('uint32_t', 'interface')],
                   is_pure_virtual=True, is_const=True, is_virtual=True)
    ## ipv6.h (module 'internet'): void ns3::Ipv6::RegisterExtensions() [member function]
    cls.add_method('RegisterExtensions',
                   'void',
                   [],
                   is_pure_virtual=True, is_virtual=True)
    ## ipv6.h (module 'internet'): void ns3::Ipv6::RegisterOptions() [member function]
    cls.add_method('RegisterOptions',
                   'void',
                   [],
                   is_pure_virtual=True, is_virtual=True)
    ## ipv6.h (module 'internet'): void ns3::Ipv6::Remove(ns3::Ptr<ns3::IpL4Protocol> protocol) [member function]
    cls.add_method('Remove',
                   'void',
                   [param('ns3::Ptr< ns3::IpL4Protocol >', 'protocol')],
                   is_pure_virtual=True, is_virtual=True)
    ## ipv6.h (module 'internet'): void ns3::Ipv6::Remove(ns3::Ptr<ns3::IpL4Protocol> protocol, uint32_t interfaceIndex) [member function]
    cls.add_method('Remove',
                   'void',
                   [param('ns3::Ptr< ns3::IpL4Protocol >', 'protocol'), param('uint32_t', 'interfaceIndex')],
                   is_pure_virtual=True, is_virtual=True)
    ## ipv6.h (module 'internet'): bool ns3::Ipv6::RemoveAddress(uint32_t interface, uint32_t addressIndex) [member function]
    cls.add_method('RemoveAddress',
                   'bool',
                   [param('uint32_t', 'interface'), param('uint32_t', 'addressIndex')],
                   is_pure_virtual=True, is_virtual=True)
    ## ipv6.h (module 'internet'): bool ns3::Ipv6::RemoveAddress(uint32_t interface, ns3::Ipv6Address address) [member function]
    cls.add_method('RemoveAddress',
                   'bool',
                   [param('uint32_t', 'interface'), param('ns3::Ipv6Address', 'address')],
                   is_pure_virtual=True, is_virtual=True)
    ## ipv6.h (module 'internet'): void ns3::Ipv6::Send(ns3::Ptr<ns3::Packet> packet, ns3::Ipv6Address source, ns3::Ipv6Address destination, uint8_t protocol, ns3::Ptr<ns3::Ipv6Route> route) [member function]
    cls.add_method('Send',
                   'void',
                   [param('ns3::Ptr< ns3::Packet >', 'packet'), param('ns3::Ipv6Address', 'source'), param('ns3::Ipv6Address', 'destination'), param('uint8_t', 'protocol'), param('ns3::Ptr< ns3::Ipv6Route >', 'route')],
                   is_pure_virtual=True, is_virtual=True)
    ## ipv6.h (module 'internet'): void ns3::Ipv6::SetDown(uint32_t interface) [member function]
    cls.add_method('SetDown',
                   'void',
                   [param('uint32_t', 'interface')],
                   is_pure_virtual=True, is_virtual=True)
    ## ipv6.h (module 'internet'): void ns3::Ipv6::SetForwarding(uint32_t interface, bool val) [member function]
    cls.add_method('SetForwarding',
                   'void',
                   [param('uint32_t', 'interface'), param('bool', 'val')],
                   is_pure_virtual=True, is_virtual=True)
    ## ipv6.h (module 'internet'): void ns3::Ipv6::SetMetric(uint32_t interface, uint16_t metric) [member function]
    cls.add_method('SetMetric',
                   'void',
                   [param('uint32_t', 'interface'), param('uint16_t', 'metric')],
                   is_pure_virtual=True, is_virtual=True)
    ## ipv6.h (module 'internet'): void ns3::Ipv6::SetPmtu(ns3::Ipv6Address dst, uint32_t pmtu) [member function]
    cls.add_method('SetPmtu',
                   'void',
                   [param('ns3::Ipv6Address', 'dst'), param('uint32_t', 'pmtu')],
                   is_pure_virtual=True, is_virtual=True)
    ## ipv6.h (module 'internet'): void ns3::Ipv6::SetRoutingProtocol(ns3::Ptr<ns3::Ipv6RoutingProtocol> routingProtocol) [member function]
    cls.add_method('SetRoutingProtocol',
                   'void',
                   [param('ns3::Ptr< ns3::Ipv6RoutingProtocol >', 'routingProtocol')],
                   is_pure_virtual=True, is_virtual=True)
    ## ipv6.h (module 'internet'): void ns3::Ipv6::SetUp(uint32_t interface) [member function]
    cls.add_method('SetUp',
                   'void',
                   [param('uint32_t', 'interface')],
                   is_pure_virtual=True, is_virtual=True)
    ## ipv6.h (module 'internet'): ns3::Ipv6Address ns3::Ipv6::SourceAddressSelection(uint32_t interface, ns3::Ipv6Address dest) [member function]
    cls.add_method('SourceAddressSelection',
                   'ns3::Ipv6Address',
                   [param('uint32_t', 'interface'), param('ns3::Ipv6Address', 'dest')],
                   is_pure_virtual=True, is_virtual=True)
    ## ipv6.h (module 'internet'): ns3::Ipv6::IF_ANY [variable]
    cls.add_static_attribute('IF_ANY', 'uint32_t const', is_const=True)
    ## ipv6.h (module 'internet'): bool ns3::Ipv6::GetIpForward() const [member function]
    cls.add_method('GetIpForward',
                   'bool',
                   [],
                   is_pure_virtual=True, is_const=True, visibility='private', is_virtual=True)
    ## ipv6.h (module 'internet'): bool ns3::Ipv6::GetMtuDiscover() const [member function]
    cls.add_method('GetMtuDiscover',
                   'bool',
                   [],
                   is_pure_virtual=True, is_const=True, visibility='private', is_virtual=True)
    ## ipv6.h (module 'internet'): void ns3::Ipv6::SetIpForward(bool forward) [member function]
    cls.add_method('SetIpForward',
                   'void',
                   [param('bool', 'forward')],
                   is_pure_virtual=True, visibility='private', is_virtual=True)
    ## ipv6.h (module 'internet'): void ns3::Ipv6::SetMtuDiscover(bool mtuDiscover) [member function]
    cls.add_method('SetMtuDiscover',
                   'void',
                   [param('bool', 'mtuDiscover')],
                   is_pure_virtual=True, visibility='private', is_virtual=True)
    return
def register_Ns3Ipv6AddressChecker_methods(root_module, cls):
    """Register bindings for ns3::Ipv6AddressChecker (ipv6-address.h, module 'network').

    The checker only exposes its default and copy constructors.
    """
    for ctor_args in ([],
                      [param('ns3::Ipv6AddressChecker const &', 'arg0')]):
        cls.add_constructor(ctor_args)
    return
def register_Ns3Ipv6AddressValue_methods(root_module, cls):
    """Register bindings for ns3::Ipv6AddressValue (ipv6-address.h, module 'network').

    Wraps an Ipv6Address inside the ns-3 AttributeValue machinery:
    constructors, Copy/serialization virtuals, and typed Get/Set accessors.
    """
    # Constructors: default, from a raw Ipv6Address, and copy.
    cls.add_constructor([])
    cls.add_constructor([param('ns3::Ipv6Address const &', 'value')])
    cls.add_constructor([param('ns3::Ipv6AddressValue const &', 'arg0')])
    # AttributeValue interface implementation.
    cls.add_method('Copy', 'ns3::Ptr< ns3::AttributeValue >', [],
                   is_const=True, is_virtual=True)
    cls.add_method('DeserializeFromString', 'bool',
                   [param('std::string', 'value'),
                    param('ns3::Ptr< ns3::AttributeChecker const >', 'checker')],
                   is_virtual=True)
    # Typed accessors for the wrapped address.
    cls.add_method('Get', 'ns3::Ipv6Address', [],
                   is_const=True)
    cls.add_method('SerializeToString', 'std::string',
                   [param('ns3::Ptr< ns3::AttributeChecker const >', 'checker')],
                   is_const=True, is_virtual=True)
    cls.add_method('Set', 'void',
                   [param('ns3::Ipv6Address const &', 'value')])
    return
def register_Ns3Ipv6FlowClassifier_methods(root_module, cls):
    """Register bindings for ns3::Ipv6FlowClassifier (ipv6-flow-classifier.h, module 'flow-monitor').

    Classifies IPv6 packets into flows and exposes per-flow lookups.
    """
    # Default constructor.
    cls.add_constructor([])
    # Classify a packet; flow and packet ids are written to the out-params.
    cls.add_method('Classify', 'bool',
                   [param('ns3::Ipv6Header const &', 'ipHeader'),
                    param('ns3::Ptr< ns3::Packet const >', 'ipPayload'),
                    param('uint32_t *', 'out_flowId'),
                    param('uint32_t *', 'out_packetId')])
    # Retrieve the five-tuple describing a known flow id.
    cls.add_method('FindFlow', 'ns3::Ipv6FlowClassifier::FiveTuple',
                   [param('uint32_t', 'flowId')],
                   is_const=True)
    # Per-flow DSCP usage counters.
    cls.add_method('GetDscpCounts',
                   'std::vector< std::pair< ns3::Ipv6Header::DscpType, unsigned int > >',
                   [param('uint32_t', 'flowId')],
                   is_const=True)
    # Dump classifier state as XML.
    cls.add_method('SerializeToXmlStream', 'void',
                   [param('std::ostream &', 'os'), param('uint16_t', 'indent')],
                   is_const=True, is_virtual=True)
    return
def register_Ns3Ipv6FlowClassifierFiveTuple_methods(root_module, cls):
    """Register bindings for ns3::Ipv6FlowClassifier::FiveTuple (ipv6-flow-classifier.h, module 'flow-monitor').

    A plain value type: comparison operators, constructors, and the five
    public data members identifying a flow.
    """
    # Ordering/equality so tuples can be used as map keys and compared.
    cls.add_binary_comparison_operator('<')
    cls.add_binary_comparison_operator('==')
    # Default and copy constructors.
    cls.add_constructor([])
    cls.add_constructor([param('ns3::Ipv6FlowClassifier::FiveTuple const &', 'arg0')])
    # Public, mutable data members (registered in declaration order).
    for member_name, member_type in (
            ('destinationAddress', 'ns3::Ipv6Address'),
            ('destinationPort', 'uint16_t'),
            ('protocol', 'uint8_t'),
            ('sourceAddress', 'ns3::Ipv6Address'),
            ('sourcePort', 'uint16_t')):
        cls.add_instance_attribute(member_name, member_type, is_const=False)
    return
def register_Ns3Ipv6FlowClassifierSortByCount_methods(root_module, cls):
    """Register bindings for ns3::Ipv6FlowClassifier::SortByCount (ipv6-flow-classifier.h, module 'flow-monitor').

    Comparator functor used to order (DSCP, count) pairs; its call
    operator is surfaced to Python as ``__call__``.
    """
    # Default and copy constructors.
    cls.add_constructor([])
    cls.add_constructor([param('ns3::Ipv6FlowClassifier::SortByCount const &', 'arg0')])
    # Both operands share the same pair type; hoist it for readability.
    pair_type = 'std::pair< ns3::Ipv6Header::DscpType, unsigned int >'
    cls.add_method('operator()', 'bool',
                   [param(pair_type, 'left'), param(pair_type, 'right')],
                   custom_name=u'__call__')
    return
def register_Ns3Ipv6FlowProbe_methods(root_module, cls):
    """Register bindings for ns3::Ipv6FlowProbe (ipv6-flow-probe.h, module 'flow-monitor').

    Per-node probe that reports IPv6 packet events to a FlowMonitor.
    """
    # Constructor wiring the probe to its monitor, classifier and node.
    cls.add_constructor([param('ns3::Ptr< ns3::FlowMonitor >', 'monitor'),
                         param('ns3::Ptr< ns3::Ipv6FlowClassifier >', 'classifier'),
                         param('ns3::Ptr< ns3::Node >', 'node')])
    # Static TypeId accessor.
    cls.add_method('GetTypeId', 'ns3::TypeId', [],
                   is_static=True)
    # Protected teardown hook.
    cls.add_method('DoDispose', 'void', [],
                   visibility='protected', is_virtual=True)
    return
def register_Ns3Ipv6L3Protocol_methods(root_module, cls):
    """Register method/attribute bindings for ns3::Ipv6L3Protocol (ipv6-l3-protocol.h, module 'internet').

    Auto-generated pybindgen registrations: each ``cls.add_*`` call below
    mirrors the C++ declaration quoted in the ``##`` comment immediately
    preceding it, so the calls themselves are intentionally left untouched.
    """
    ## ipv6-l3-protocol.h (module 'internet'): static ns3::TypeId ns3::Ipv6L3Protocol::GetTypeId() [member function]
    cls.add_method('GetTypeId',
                   'ns3::TypeId',
                   [],
                   is_static=True)
    ## ipv6-l3-protocol.h (module 'internet'): ns3::Ipv6L3Protocol::PROT_NUMBER [variable]
    cls.add_static_attribute('PROT_NUMBER', 'uint16_t const', is_const=True)
    ## ipv6-l3-protocol.h (module 'internet'): ns3::Ipv6L3Protocol::Ipv6L3Protocol() [constructor]
    cls.add_constructor([])
    ## ipv6-l3-protocol.h (module 'internet'): void ns3::Ipv6L3Protocol::SetNode(ns3::Ptr<ns3::Node> node) [member function]
    cls.add_method('SetNode',
                   'void',
                   [param('ns3::Ptr< ns3::Node >', 'node')])
    ## ipv6-l3-protocol.h (module 'internet'): void ns3::Ipv6L3Protocol::Insert(ns3::Ptr<ns3::IpL4Protocol> protocol) [member function]
    cls.add_method('Insert',
                   'void',
                   [param('ns3::Ptr< ns3::IpL4Protocol >', 'protocol')],
                   is_virtual=True)
    ## ipv6-l3-protocol.h (module 'internet'): void ns3::Ipv6L3Protocol::Insert(ns3::Ptr<ns3::IpL4Protocol> protocol, uint32_t interfaceIndex) [member function]
    cls.add_method('Insert',
                   'void',
                   [param('ns3::Ptr< ns3::IpL4Protocol >', 'protocol'), param('uint32_t', 'interfaceIndex')],
                   is_virtual=True)
    ## ipv6-l3-protocol.h (module 'internet'): void ns3::Ipv6L3Protocol::Remove(ns3::Ptr<ns3::IpL4Protocol> protocol) [member function]
    cls.add_method('Remove',
                   'void',
                   [param('ns3::Ptr< ns3::IpL4Protocol >', 'protocol')],
                   is_virtual=True)
    ## ipv6-l3-protocol.h (module 'internet'): void ns3::Ipv6L3Protocol::Remove(ns3::Ptr<ns3::IpL4Protocol> protocol, uint32_t interfaceIndex) [member function]
    cls.add_method('Remove',
                   'void',
                   [param('ns3::Ptr< ns3::IpL4Protocol >', 'protocol'), param('uint32_t', 'interfaceIndex')],
                   is_virtual=True)
    ## ipv6-l3-protocol.h (module 'internet'): ns3::Ptr<ns3::IpL4Protocol> ns3::Ipv6L3Protocol::GetProtocol(int protocolNumber) const [member function]
    cls.add_method('GetProtocol',
                   'ns3::Ptr< ns3::IpL4Protocol >',
                   [param('int', 'protocolNumber')],
                   is_const=True, is_virtual=True)
    ## ipv6-l3-protocol.h (module 'internet'): ns3::Ptr<ns3::IpL4Protocol> ns3::Ipv6L3Protocol::GetProtocol(int protocolNumber, int32_t interfaceIndex) const [member function]
    cls.add_method('GetProtocol',
                   'ns3::Ptr< ns3::IpL4Protocol >',
                   [param('int', 'protocolNumber'), param('int32_t', 'interfaceIndex')],
                   is_const=True, is_virtual=True)
    ## ipv6-l3-protocol.h (module 'internet'): ns3::Ptr<ns3::Socket> ns3::Ipv6L3Protocol::CreateRawSocket() [member function]
    cls.add_method('CreateRawSocket',
                   'ns3::Ptr< ns3::Socket >',
                   [])
    ## ipv6-l3-protocol.h (module 'internet'): void ns3::Ipv6L3Protocol::DeleteRawSocket(ns3::Ptr<ns3::Socket> socket) [member function]
    cls.add_method('DeleteRawSocket',
                   'void',
                   [param('ns3::Ptr< ns3::Socket >', 'socket')])
    ## ipv6-l3-protocol.h (module 'internet'): void ns3::Ipv6L3Protocol::SetDefaultTtl(uint8_t ttl) [member function]
    cls.add_method('SetDefaultTtl',
                   'void',
                   [param('uint8_t', 'ttl')])
    ## ipv6-l3-protocol.h (module 'internet'): void ns3::Ipv6L3Protocol::SetDefaultTclass(uint8_t tclass) [member function]
    cls.add_method('SetDefaultTclass',
                   'void',
                   [param('uint8_t', 'tclass')])
    ## ipv6-l3-protocol.h (module 'internet'): void ns3::Ipv6L3Protocol::Receive(ns3::Ptr<ns3::NetDevice> device, ns3::Ptr<const ns3::Packet> p, uint16_t protocol, ns3::Address const & from, ns3::Address const & to, ns3::NetDevice::PacketType packetType) [member function]
    cls.add_method('Receive',
                   'void',
                   [param('ns3::Ptr< ns3::NetDevice >', 'device'), param('ns3::Ptr< ns3::Packet const >', 'p'), param('uint16_t', 'protocol'), param('ns3::Address const &', 'from'), param('ns3::Address const &', 'to'), param('ns3::NetDevice::PacketType', 'packetType')])
    ## ipv6-l3-protocol.h (module 'internet'): void ns3::Ipv6L3Protocol::Send(ns3::Ptr<ns3::Packet> packet, ns3::Ipv6Address source, ns3::Ipv6Address destination, uint8_t protocol, ns3::Ptr<ns3::Ipv6Route> route) [member function]
    cls.add_method('Send',
                   'void',
                   [param('ns3::Ptr< ns3::Packet >', 'packet'), param('ns3::Ipv6Address', 'source'), param('ns3::Ipv6Address', 'destination'), param('uint8_t', 'protocol'), param('ns3::Ptr< ns3::Ipv6Route >', 'route')],
                   is_virtual=True)
    ## ipv6-l3-protocol.h (module 'internet'): void ns3::Ipv6L3Protocol::SetRoutingProtocol(ns3::Ptr<ns3::Ipv6RoutingProtocol> routingProtocol) [member function]
    cls.add_method('SetRoutingProtocol',
                   'void',
                   [param('ns3::Ptr< ns3::Ipv6RoutingProtocol >', 'routingProtocol')],
                   is_virtual=True)
    ## ipv6-l3-protocol.h (module 'internet'): ns3::Ptr<ns3::Ipv6RoutingProtocol> ns3::Ipv6L3Protocol::GetRoutingProtocol() const [member function]
    cls.add_method('GetRoutingProtocol',
                   'ns3::Ptr< ns3::Ipv6RoutingProtocol >',
                   [],
                   is_const=True, is_virtual=True)
    ## ipv6-l3-protocol.h (module 'internet'): uint32_t ns3::Ipv6L3Protocol::AddInterface(ns3::Ptr<ns3::NetDevice> device) [member function]
    cls.add_method('AddInterface',
                   'uint32_t',
                   [param('ns3::Ptr< ns3::NetDevice >', 'device')],
                   is_virtual=True)
    ## ipv6-l3-protocol.h (module 'internet'): ns3::Ptr<ns3::Ipv6Interface> ns3::Ipv6L3Protocol::GetInterface(uint32_t i) const [member function]
    cls.add_method('GetInterface',
                   'ns3::Ptr< ns3::Ipv6Interface >',
                   [param('uint32_t', 'i')],
                   is_const=True)
    ## ipv6-l3-protocol.h (module 'internet'): uint32_t ns3::Ipv6L3Protocol::GetNInterfaces() const [member function]
    cls.add_method('GetNInterfaces',
                   'uint32_t',
                   [],
                   is_const=True, is_virtual=True)
    ## ipv6-l3-protocol.h (module 'internet'): int32_t ns3::Ipv6L3Protocol::GetInterfaceForAddress(ns3::Ipv6Address addr) const [member function]
    cls.add_method('GetInterfaceForAddress',
                   'int32_t',
                   [param('ns3::Ipv6Address', 'addr')],
                   is_const=True, is_virtual=True)
    ## ipv6-l3-protocol.h (module 'internet'): int32_t ns3::Ipv6L3Protocol::GetInterfaceForPrefix(ns3::Ipv6Address addr, ns3::Ipv6Prefix mask) const [member function]
    cls.add_method('GetInterfaceForPrefix',
                   'int32_t',
                   [param('ns3::Ipv6Address', 'addr'), param('ns3::Ipv6Prefix', 'mask')],
                   is_const=True, is_virtual=True)
    ## ipv6-l3-protocol.h (module 'internet'): int32_t ns3::Ipv6L3Protocol::GetInterfaceForDevice(ns3::Ptr<const ns3::NetDevice> device) const [member function]
    cls.add_method('GetInterfaceForDevice',
                   'int32_t',
                   [param('ns3::Ptr< ns3::NetDevice const >', 'device')],
                   is_const=True, is_virtual=True)
    ## ipv6-l3-protocol.h (module 'internet'): bool ns3::Ipv6L3Protocol::AddAddress(uint32_t i, ns3::Ipv6InterfaceAddress address) [member function]
    cls.add_method('AddAddress',
                   'bool',
                   [param('uint32_t', 'i'), param('ns3::Ipv6InterfaceAddress', 'address')],
                   is_virtual=True)
    ## ipv6-l3-protocol.h (module 'internet'): ns3::Ipv6InterfaceAddress ns3::Ipv6L3Protocol::GetAddress(uint32_t interfaceIndex, uint32_t addressIndex) const [member function]
    cls.add_method('GetAddress',
                   'ns3::Ipv6InterfaceAddress',
                   [param('uint32_t', 'interfaceIndex'), param('uint32_t', 'addressIndex')],
                   is_const=True, is_virtual=True)
    ## ipv6-l3-protocol.h (module 'internet'): uint32_t ns3::Ipv6L3Protocol::GetNAddresses(uint32_t interface) const [member function]
    cls.add_method('GetNAddresses',
                   'uint32_t',
                   [param('uint32_t', 'interface')],
                   is_const=True, is_virtual=True)
    ## ipv6-l3-protocol.h (module 'internet'): bool ns3::Ipv6L3Protocol::RemoveAddress(uint32_t interfaceIndex, uint32_t addressIndex) [member function]
    cls.add_method('RemoveAddress',
                   'bool',
                   [param('uint32_t', 'interfaceIndex'), param('uint32_t', 'addressIndex')],
                   is_virtual=True)
    ## ipv6-l3-protocol.h (module 'internet'): bool ns3::Ipv6L3Protocol::RemoveAddress(uint32_t interfaceIndex, ns3::Ipv6Address address) [member function]
    cls.add_method('RemoveAddress',
                   'bool',
                   [param('uint32_t', 'interfaceIndex'), param('ns3::Ipv6Address', 'address')],
                   is_virtual=True)
    ## ipv6-l3-protocol.h (module 'internet'): void ns3::Ipv6L3Protocol::SetMetric(uint32_t i, uint16_t metric) [member function]
    cls.add_method('SetMetric',
                   'void',
                   [param('uint32_t', 'i'), param('uint16_t', 'metric')],
                   is_virtual=True)
    ## ipv6-l3-protocol.h (module 'internet'): uint16_t ns3::Ipv6L3Protocol::GetMetric(uint32_t i) const [member function]
    cls.add_method('GetMetric',
                   'uint16_t',
                   [param('uint32_t', 'i')],
                   is_const=True, is_virtual=True)
    ## ipv6-l3-protocol.h (module 'internet'): uint16_t ns3::Ipv6L3Protocol::GetMtu(uint32_t i) const [member function]
    cls.add_method('GetMtu',
                   'uint16_t',
                   [param('uint32_t', 'i')],
                   is_const=True, is_virtual=True)
    ## ipv6-l3-protocol.h (module 'internet'): void ns3::Ipv6L3Protocol::SetPmtu(ns3::Ipv6Address dst, uint32_t pmtu) [member function]
    cls.add_method('SetPmtu',
                   'void',
                   [param('ns3::Ipv6Address', 'dst'), param('uint32_t', 'pmtu')],
                   is_virtual=True)
    ## ipv6-l3-protocol.h (module 'internet'): bool ns3::Ipv6L3Protocol::IsUp(uint32_t i) const [member function]
    cls.add_method('IsUp',
                   'bool',
                   [param('uint32_t', 'i')],
                   is_const=True, is_virtual=True)
    ## ipv6-l3-protocol.h (module 'internet'): void ns3::Ipv6L3Protocol::SetUp(uint32_t i) [member function]
    cls.add_method('SetUp',
                   'void',
                   [param('uint32_t', 'i')],
                   is_virtual=True)
    ## ipv6-l3-protocol.h (module 'internet'): void ns3::Ipv6L3Protocol::SetDown(uint32_t i) [member function]
    cls.add_method('SetDown',
                   'void',
                   [param('uint32_t', 'i')],
                   is_virtual=True)
    ## ipv6-l3-protocol.h (module 'internet'): bool ns3::Ipv6L3Protocol::IsForwarding(uint32_t i) const [member function]
    cls.add_method('IsForwarding',
                   'bool',
                   [param('uint32_t', 'i')],
                   is_const=True, is_virtual=True)
    ## ipv6-l3-protocol.h (module 'internet'): void ns3::Ipv6L3Protocol::SetForwarding(uint32_t i, bool val) [member function]
    cls.add_method('SetForwarding',
                   'void',
                   [param('uint32_t', 'i'), param('bool', 'val')],
                   is_virtual=True)
    ## ipv6-l3-protocol.h (module 'internet'): ns3::Ipv6Address ns3::Ipv6L3Protocol::SourceAddressSelection(uint32_t interface, ns3::Ipv6Address dest) [member function]
    cls.add_method('SourceAddressSelection',
                   'ns3::Ipv6Address',
                   [param('uint32_t', 'interface'), param('ns3::Ipv6Address', 'dest')],
                   is_virtual=True)
    ## ipv6-l3-protocol.h (module 'internet'): ns3::Ptr<ns3::NetDevice> ns3::Ipv6L3Protocol::GetNetDevice(uint32_t i) [member function]
    cls.add_method('GetNetDevice',
                   'ns3::Ptr< ns3::NetDevice >',
                   [param('uint32_t', 'i')],
                   is_virtual=True)
    ## ipv6-l3-protocol.h (module 'internet'): ns3::Ptr<ns3::Icmpv6L4Protocol> ns3::Ipv6L3Protocol::GetIcmpv6() const [member function]
    cls.add_method('GetIcmpv6',
                   'ns3::Ptr< ns3::Icmpv6L4Protocol >',
                   [],
                   is_const=True)
    ## ipv6-l3-protocol.h (module 'internet'): void ns3::Ipv6L3Protocol::AddAutoconfiguredAddress(uint32_t interface, ns3::Ipv6Address network, ns3::Ipv6Prefix mask, uint8_t flags, uint32_t validTime, uint32_t preferredTime, ns3::Ipv6Address defaultRouter=ns3::Ipv6Address::GetZero()) [member function]
    cls.add_method('AddAutoconfiguredAddress',
                   'void',
                   [param('uint32_t', 'interface'), param('ns3::Ipv6Address', 'network'), param('ns3::Ipv6Prefix', 'mask'), param('uint8_t', 'flags'), param('uint32_t', 'validTime'), param('uint32_t', 'preferredTime'), param('ns3::Ipv6Address', 'defaultRouter', default_value='ns3::Ipv6Address::GetZero()')])
    ## ipv6-l3-protocol.h (module 'internet'): void ns3::Ipv6L3Protocol::RemoveAutoconfiguredAddress(uint32_t interface, ns3::Ipv6Address network, ns3::Ipv6Prefix mask, ns3::Ipv6Address defaultRouter) [member function]
    cls.add_method('RemoveAutoconfiguredAddress',
                   'void',
                   [param('uint32_t', 'interface'), param('ns3::Ipv6Address', 'network'), param('ns3::Ipv6Prefix', 'mask'), param('ns3::Ipv6Address', 'defaultRouter')])
    ## ipv6-l3-protocol.h (module 'internet'): void ns3::Ipv6L3Protocol::RegisterExtensions() [member function]
    cls.add_method('RegisterExtensions',
                   'void',
                   [],
                   is_virtual=True)
    ## ipv6-l3-protocol.h (module 'internet'): void ns3::Ipv6L3Protocol::RegisterOptions() [member function]
    cls.add_method('RegisterOptions',
                   'void',
                   [],
                   is_virtual=True)
    ## ipv6-l3-protocol.h (module 'internet'): void ns3::Ipv6L3Protocol::ReportDrop(ns3::Ipv6Header ipHeader, ns3::Ptr<ns3::Packet> p, ns3::Ipv6L3Protocol::DropReason dropReason) [member function]
    cls.add_method('ReportDrop',
                   'void',
                   [param('ns3::Ipv6Header', 'ipHeader'), param('ns3::Ptr< ns3::Packet >', 'p'), param('ns3::Ipv6L3Protocol::DropReason', 'dropReason')],
                   is_virtual=True)
    ## ipv6-l3-protocol.h (module 'internet'): void ns3::Ipv6L3Protocol::AddMulticastAddress(ns3::Ipv6Address address) [member function]
    cls.add_method('AddMulticastAddress',
                   'void',
                   [param('ns3::Ipv6Address', 'address')])
    ## ipv6-l3-protocol.h (module 'internet'): void ns3::Ipv6L3Protocol::AddMulticastAddress(ns3::Ipv6Address address, uint32_t interface) [member function]
    cls.add_method('AddMulticastAddress',
                   'void',
                   [param('ns3::Ipv6Address', 'address'), param('uint32_t', 'interface')])
    ## ipv6-l3-protocol.h (module 'internet'): void ns3::Ipv6L3Protocol::RemoveMulticastAddress(ns3::Ipv6Address address) [member function]
    cls.add_method('RemoveMulticastAddress',
                   'void',
                   [param('ns3::Ipv6Address', 'address')])
    ## ipv6-l3-protocol.h (module 'internet'): void ns3::Ipv6L3Protocol::RemoveMulticastAddress(ns3::Ipv6Address address, uint32_t interface) [member function]
    cls.add_method('RemoveMulticastAddress',
                   'void',
                   [param('ns3::Ipv6Address', 'address'), param('uint32_t', 'interface')])
    ## ipv6-l3-protocol.h (module 'internet'): bool ns3::Ipv6L3Protocol::IsRegisteredMulticastAddress(ns3::Ipv6Address address) const [member function]
    cls.add_method('IsRegisteredMulticastAddress',
                   'bool',
                   [param('ns3::Ipv6Address', 'address')],
                   is_const=True)
    ## ipv6-l3-protocol.h (module 'internet'): bool ns3::Ipv6L3Protocol::IsRegisteredMulticastAddress(ns3::Ipv6Address address, uint32_t interface) const [member function]
    cls.add_method('IsRegisteredMulticastAddress',
                   'bool',
                   [param('ns3::Ipv6Address', 'address'), param('uint32_t', 'interface')],
                   is_const=True)
    ## ipv6-l3-protocol.h (module 'internet'): void ns3::Ipv6L3Protocol::DoDispose() [member function]
    cls.add_method('DoDispose',
                   'void',
                   [],
                   visibility='protected', is_virtual=True)
    ## ipv6-l3-protocol.h (module 'internet'): void ns3::Ipv6L3Protocol::NotifyNewAggregate() [member function]
    cls.add_method('NotifyNewAggregate',
                   'void',
                   [],
                   visibility='protected', is_virtual=True)
    ## ipv6-l3-protocol.h (module 'internet'): void ns3::Ipv6L3Protocol::SetIpForward(bool forward) [member function]
    cls.add_method('SetIpForward',
                   'void',
                   [param('bool', 'forward')],
                   visibility='private', is_virtual=True)
    ## ipv6-l3-protocol.h (module 'internet'): bool ns3::Ipv6L3Protocol::GetIpForward() const [member function]
    cls.add_method('GetIpForward',
                   'bool',
                   [],
                   is_const=True, visibility='private', is_virtual=True)
    ## ipv6-l3-protocol.h (module 'internet'): void ns3::Ipv6L3Protocol::SetMtuDiscover(bool mtuDiscover) [member function]
    cls.add_method('SetMtuDiscover',
                   'void',
                   [param('bool', 'mtuDiscover')],
                   visibility='private', is_virtual=True)
    ## ipv6-l3-protocol.h (module 'internet'): bool ns3::Ipv6L3Protocol::GetMtuDiscover() const [member function]
    cls.add_method('GetMtuDiscover',
                   'bool',
                   [],
                   is_const=True, visibility='private', is_virtual=True)
    ## ipv6-l3-protocol.h (module 'internet'): void ns3::Ipv6L3Protocol::SetSendIcmpv6Redirect(bool sendIcmpv6Redirect) [member function]
    cls.add_method('SetSendIcmpv6Redirect',
                   'void',
                   [param('bool', 'sendIcmpv6Redirect')],
                   visibility='private', is_virtual=True)
    ## ipv6-l3-protocol.h (module 'internet'): bool ns3::Ipv6L3Protocol::GetSendIcmpv6Redirect() const [member function]
    cls.add_method('GetSendIcmpv6Redirect',
                   'bool',
                   [],
                   is_const=True, visibility='private', is_virtual=True)
    return
def register_Ns3Ipv6PmtuCache_methods(root_module, cls):
    """Register bindings for ns3::Ipv6PmtuCache (ipv6-pmtu-cache.h, module 'internet').

    Per-destination path-MTU bookkeeping with a configurable validity window.
    """
    # Copy constructor first (declaration order), then the default one.
    cls.add_constructor([param('ns3::Ipv6PmtuCache const &', 'arg0')])
    cls.add_constructor([])
    # Virtual teardown.
    cls.add_method('DoDispose', 'void', [],
                   is_virtual=True)
    # Query the cached PMTU towards a destination.
    cls.add_method('GetPmtu', 'uint32_t',
                   [param('ns3::Ipv6Address', 'dst')])
    # How long cached entries remain valid.
    cls.add_method('GetPmtuValidityTime', 'ns3::Time', [],
                   is_const=True)
    # Static TypeId accessor.
    cls.add_method('GetTypeId', 'ns3::TypeId', [],
                   is_static=True)
    # Record a PMTU entry for a destination.
    cls.add_method('SetPmtu', 'void',
                   [param('ns3::Ipv6Address', 'dst'), param('uint32_t', 'pmtu')])
    # Adjust the validity window; returns whether the value was accepted.
    cls.add_method('SetPmtuValidityTime', 'bool',
                   [param('ns3::Time', 'validity')])
    return
def register_Ns3Ipv6PrefixChecker_methods(root_module, cls):
    """Register bindings for ns3::Ipv6PrefixChecker (ipv6-address.h, module 'network').

    The checker only exposes its default and copy constructors.
    """
    for ctor_args in ([],
                      [param('ns3::Ipv6PrefixChecker const &', 'arg0')]):
        cls.add_constructor(ctor_args)
    return
def register_Ns3Ipv6PrefixValue_methods(root_module, cls):
    """Register bindings for ns3::Ipv6PrefixValue (ipv6-address.h, module 'network').

    Wraps an Ipv6Prefix inside the ns-3 AttributeValue machinery:
    constructors, Copy/serialization virtuals, and typed Get/Set accessors.
    """
    # Constructors: default, from a raw Ipv6Prefix, and copy.
    cls.add_constructor([])
    cls.add_constructor([param('ns3::Ipv6Prefix const &', 'value')])
    cls.add_constructor([param('ns3::Ipv6PrefixValue const &', 'arg0')])
    # AttributeValue interface implementation.
    cls.add_method('Copy', 'ns3::Ptr< ns3::AttributeValue >', [],
                   is_const=True, is_virtual=True)
    cls.add_method('DeserializeFromString', 'bool',
                   [param('std::string', 'value'),
                    param('ns3::Ptr< ns3::AttributeChecker const >', 'checker')],
                   is_virtual=True)
    # Typed accessors for the wrapped prefix.
    cls.add_method('Get', 'ns3::Ipv6Prefix', [],
                   is_const=True)
    cls.add_method('SerializeToString', 'std::string',
                   [param('ns3::Ptr< ns3::AttributeChecker const >', 'checker')],
                   is_const=True, is_virtual=True)
    cls.add_method('Set', 'void',
                   [param('ns3::Ipv6Prefix const &', 'value')])
    return
def register_Ns3Mac48AddressChecker_methods(root_module, cls):
    """Register bindings for ns3::Mac48AddressChecker (mac48-address.h, module 'network').

    The checker only exposes its default and copy constructors.
    """
    for ctor_args in ([],
                      [param('ns3::Mac48AddressChecker const &', 'arg0')]):
        cls.add_constructor(ctor_args)
    return
def register_Ns3Mac48AddressValue_methods(root_module, cls):
    """Register bindings for ns3::Mac48AddressValue (mac48-address.h, module 'network').

    Wraps a Mac48Address inside the ns-3 AttributeValue machinery:
    constructors, Copy/serialization virtuals, and typed Get/Set accessors.
    """
    # Constructors: default, from a raw Mac48Address, and copy.
    cls.add_constructor([])
    cls.add_constructor([param('ns3::Mac48Address const &', 'value')])
    cls.add_constructor([param('ns3::Mac48AddressValue const &', 'arg0')])
    # AttributeValue interface implementation.
    cls.add_method('Copy', 'ns3::Ptr< ns3::AttributeValue >', [],
                   is_const=True, is_virtual=True)
    cls.add_method('DeserializeFromString', 'bool',
                   [param('std::string', 'value'),
                    param('ns3::Ptr< ns3::AttributeChecker const >', 'checker')],
                   is_virtual=True)
    # Typed accessors for the wrapped address.
    cls.add_method('Get', 'ns3::Mac48Address', [],
                   is_const=True)
    cls.add_method('SerializeToString', 'std::string',
                   [param('ns3::Ptr< ns3::AttributeChecker const >', 'checker')],
                   is_const=True, is_virtual=True)
    cls.add_method('Set', 'void',
                   [param('ns3::Mac48Address const &', 'value')])
    return
def register_Ns3NetDevice_methods(root_module, cls):
    """Register method bindings for the abstract base ns3::NetDevice (net-device.h, module 'network').

    Auto-generated pybindgen registrations; nearly every method is a pure
    virtual of the NetDevice interface, and each ``cls.add_*`` call mirrors
    the C++ declaration quoted in the ``##`` comment immediately above it.
    """
    ## net-device.h (module 'network'): ns3::NetDevice::NetDevice() [constructor]
    cls.add_constructor([])
    ## net-device.h (module 'network'): ns3::NetDevice::NetDevice(ns3::NetDevice const & arg0) [constructor]
    cls.add_constructor([param('ns3::NetDevice const &', 'arg0')])
    ## net-device.h (module 'network'): void ns3::NetDevice::AddLinkChangeCallback(ns3::Callback<void, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty> callback) [member function]
    cls.add_method('AddLinkChangeCallback',
                   'void',
                   [param('ns3::Callback< void, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty >', 'callback')],
                   is_pure_virtual=True, is_virtual=True)
    ## net-device.h (module 'network'): ns3::Address ns3::NetDevice::GetAddress() const [member function]
    cls.add_method('GetAddress',
                   'ns3::Address',
                   [],
                   is_pure_virtual=True, is_const=True, is_virtual=True)
    ## net-device.h (module 'network'): ns3::Address ns3::NetDevice::GetBroadcast() const [member function]
    cls.add_method('GetBroadcast',
                   'ns3::Address',
                   [],
                   is_pure_virtual=True, is_const=True, is_virtual=True)
    ## net-device.h (module 'network'): ns3::Ptr<ns3::Channel> ns3::NetDevice::GetChannel() const [member function]
    cls.add_method('GetChannel',
                   'ns3::Ptr< ns3::Channel >',
                   [],
                   is_pure_virtual=True, is_const=True, is_virtual=True)
    ## net-device.h (module 'network'): uint32_t ns3::NetDevice::GetIfIndex() const [member function]
    cls.add_method('GetIfIndex',
                   'uint32_t',
                   [],
                   is_pure_virtual=True, is_const=True, is_virtual=True)
    ## net-device.h (module 'network'): uint16_t ns3::NetDevice::GetMtu() const [member function]
    cls.add_method('GetMtu',
                   'uint16_t',
                   [],
                   is_pure_virtual=True, is_const=True, is_virtual=True)
    ## net-device.h (module 'network'): ns3::Address ns3::NetDevice::GetMulticast(ns3::Ipv4Address multicastGroup) const [member function]
    cls.add_method('GetMulticast',
                   'ns3::Address',
                   [param('ns3::Ipv4Address', 'multicastGroup')],
                   is_pure_virtual=True, is_const=True, is_virtual=True)
    ## net-device.h (module 'network'): ns3::Address ns3::NetDevice::GetMulticast(ns3::Ipv6Address addr) const [member function]
    cls.add_method('GetMulticast',
                   'ns3::Address',
                   [param('ns3::Ipv6Address', 'addr')],
                   is_pure_virtual=True, is_const=True, is_virtual=True)
    ## net-device.h (module 'network'): ns3::Ptr<ns3::Node> ns3::NetDevice::GetNode() const [member function]
    cls.add_method('GetNode',
                   'ns3::Ptr< ns3::Node >',
                   [],
                   is_pure_virtual=True, is_const=True, is_virtual=True)
    ## net-device.h (module 'network'): static ns3::TypeId ns3::NetDevice::GetTypeId() [member function]
    cls.add_method('GetTypeId',
                   'ns3::TypeId',
                   [],
                   is_static=True)
    ## net-device.h (module 'network'): bool ns3::NetDevice::IsBridge() const [member function]
    cls.add_method('IsBridge',
                   'bool',
                   [],
                   is_pure_virtual=True, is_const=True, is_virtual=True)
    ## net-device.h (module 'network'): bool ns3::NetDevice::IsBroadcast() const [member function]
    cls.add_method('IsBroadcast',
                   'bool',
                   [],
                   is_pure_virtual=True, is_const=True, is_virtual=True)
    ## net-device.h (module 'network'): bool ns3::NetDevice::IsLinkUp() const [member function]
    cls.add_method('IsLinkUp',
                   'bool',
                   [],
                   is_pure_virtual=True, is_const=True, is_virtual=True)
    ## net-device.h (module 'network'): bool ns3::NetDevice::IsMulticast() const [member function]
    cls.add_method('IsMulticast',
                   'bool',
                   [],
                   is_pure_virtual=True, is_const=True, is_virtual=True)
    ## net-device.h (module 'network'): bool ns3::NetDevice::IsPointToPoint() const [member function]
    cls.add_method('IsPointToPoint',
                   'bool',
                   [],
                   is_pure_virtual=True, is_const=True, is_virtual=True)
    ## net-device.h (module 'network'): bool ns3::NetDevice::NeedsArp() const [member function]
    cls.add_method('NeedsArp',
                   'bool',
                   [],
                   is_pure_virtual=True, is_const=True, is_virtual=True)
    ## net-device.h (module 'network'): bool ns3::NetDevice::Send(ns3::Ptr<ns3::Packet> packet, ns3::Address const & dest, uint16_t protocolNumber) [member function]
    cls.add_method('Send',
                   'bool',
                   [param('ns3::Ptr< ns3::Packet >', 'packet'), param('ns3::Address const &', 'dest'), param('uint16_t', 'protocolNumber')],
                   is_pure_virtual=True, is_virtual=True)
    ## net-device.h (module 'network'): bool ns3::NetDevice::SendFrom(ns3::Ptr<ns3::Packet> packet, ns3::Address const & source, ns3::Address const & dest, uint16_t protocolNumber) [member function]
    cls.add_method('SendFrom',
                   'bool',
                   [param('ns3::Ptr< ns3::Packet >', 'packet'), param('ns3::Address const &', 'source'), param('ns3::Address const &', 'dest'), param('uint16_t', 'protocolNumber')],
                   is_pure_virtual=True, is_virtual=True)
    ## net-device.h (module 'network'): void ns3::NetDevice::SetAddress(ns3::Address address) [member function]
    cls.add_method('SetAddress',
                   'void',
                   [param('ns3::Address', 'address')],
                   is_pure_virtual=True, is_virtual=True)
    ## net-device.h (module 'network'): void ns3::NetDevice::SetIfIndex(uint32_t const index) [member function]
    cls.add_method('SetIfIndex',
                   'void',
                   [param('uint32_t const', 'index')],
                   is_pure_virtual=True, is_virtual=True)
    ## net-device.h (module 'network'): bool ns3::NetDevice::SetMtu(uint16_t const mtu) [member function]
    cls.add_method('SetMtu',
                   'bool',
                   [param('uint16_t const', 'mtu')],
                   is_pure_virtual=True, is_virtual=True)
    ## net-device.h (module 'network'): void ns3::NetDevice::SetNode(ns3::Ptr<ns3::Node> node) [member function]
    cls.add_method('SetNode',
                   'void',
                   [param('ns3::Ptr< ns3::Node >', 'node')],
                   is_pure_virtual=True, is_virtual=True)
    ## net-device.h (module 'network'): void ns3::NetDevice::SetPromiscReceiveCallback(ns3::NetDevice::PromiscReceiveCallback cb) [member function]
    cls.add_method('SetPromiscReceiveCallback',
                   'void',
                   [param('ns3::Callback< bool, ns3::Ptr< ns3::NetDevice >, ns3::Ptr< ns3::Packet const >, unsigned short, ns3::Address const &, ns3::Address const &, ns3::NetDevice::PacketType, ns3::empty, ns3::empty, ns3::empty >', 'cb')],
                   is_pure_virtual=True, is_virtual=True)
    ## net-device.h (module 'network'): void ns3::NetDevice::SetReceiveCallback(ns3::NetDevice::ReceiveCallback cb) [member function]
    cls.add_method('SetReceiveCallback',
                   'void',
                   [param('ns3::Callback< bool, ns3::Ptr< ns3::NetDevice >, ns3::Ptr< ns3::Packet const >, unsigned short, ns3::Address const &, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty >', 'cb')],
                   is_pure_virtual=True, is_virtual=True)
    ## net-device.h (module 'network'): bool ns3::NetDevice::SupportsSendFrom() const [member function]
    cls.add_method('SupportsSendFrom',
                   'bool',
                   [],
                   is_pure_virtual=True, is_const=True, is_virtual=True)
    return
def register_Ns3NixVector_methods(root_module, cls):
    """Register constructor/method bindings for ns3::NixVector on *cls*.

    Generated binding-registration code: each ``add_*`` call mirrors the
    C++ declaration quoted in the ``##`` comment directly above it
    (from nix-vector.h).  Call order and arguments must not be altered.
    """
    # Exposes C++ operator<< so the wrapped object is printable.
    cls.add_output_stream_operator()
    ## nix-vector.h (module 'network'): ns3::NixVector::NixVector() [constructor]
    cls.add_constructor([])
    ## nix-vector.h (module 'network'): ns3::NixVector::NixVector(ns3::NixVector const & o) [constructor]
    cls.add_constructor([param('ns3::NixVector const &', 'o')])
    ## nix-vector.h (module 'network'): void ns3::NixVector::AddNeighborIndex(uint32_t newBits, uint32_t numberOfBits) [member function]
    cls.add_method('AddNeighborIndex',
                   'void',
                   [param('uint32_t', 'newBits'), param('uint32_t', 'numberOfBits')])
    ## nix-vector.h (module 'network'): uint32_t ns3::NixVector::BitCount(uint32_t numberOfNeighbors) const [member function]
    cls.add_method('BitCount',
                   'uint32_t',
                   [param('uint32_t', 'numberOfNeighbors')],
                   is_const=True)
    ## nix-vector.h (module 'network'): ns3::Ptr<ns3::NixVector> ns3::NixVector::Copy() const [member function]
    cls.add_method('Copy',
                   'ns3::Ptr< ns3::NixVector >',
                   [],
                   is_const=True)
    ## nix-vector.h (module 'network'): uint32_t ns3::NixVector::Deserialize(uint32_t const * buffer, uint32_t size) [member function]
    cls.add_method('Deserialize',
                   'uint32_t',
                   [param('uint32_t const *', 'buffer'), param('uint32_t', 'size')])
    ## nix-vector.h (module 'network'): uint32_t ns3::NixVector::ExtractNeighborIndex(uint32_t numberOfBits) [member function]
    cls.add_method('ExtractNeighborIndex',
                   'uint32_t',
                   [param('uint32_t', 'numberOfBits')])
    ## nix-vector.h (module 'network'): uint32_t ns3::NixVector::GetRemainingBits() [member function]
    cls.add_method('GetRemainingBits',
                   'uint32_t',
                   [])
    ## nix-vector.h (module 'network'): uint32_t ns3::NixVector::GetSerializedSize() const [member function]
    cls.add_method('GetSerializedSize',
                   'uint32_t',
                   [],
                   is_const=True)
    ## nix-vector.h (module 'network'): uint32_t ns3::NixVector::Serialize(uint32_t * buffer, uint32_t maxSize) const [member function]
    cls.add_method('Serialize',
                   'uint32_t',
                   [param('uint32_t *', 'buffer'), param('uint32_t', 'maxSize')],
                   is_const=True)
    return
def register_Ns3Node_methods(root_module, cls):
    """Register constructor/method bindings for ns3::Node on *cls*.

    Generated binding-registration code: each ``add_*`` call mirrors the
    C++ declaration quoted in the ``##`` comment directly above it
    (from node.h).  Call order and arguments must not be altered.
    """
    ## node.h (module 'network'): ns3::Node::Node(ns3::Node const & arg0) [constructor]
    cls.add_constructor([param('ns3::Node const &', 'arg0')])
    ## node.h (module 'network'): ns3::Node::Node() [constructor]
    cls.add_constructor([])
    ## node.h (module 'network'): ns3::Node::Node(uint32_t systemId) [constructor]
    cls.add_constructor([param('uint32_t', 'systemId')])
    ## node.h (module 'network'): uint32_t ns3::Node::AddApplication(ns3::Ptr<ns3::Application> application) [member function]
    cls.add_method('AddApplication',
                   'uint32_t',
                   [param('ns3::Ptr< ns3::Application >', 'application')])
    ## node.h (module 'network'): uint32_t ns3::Node::AddDevice(ns3::Ptr<ns3::NetDevice> device) [member function]
    cls.add_method('AddDevice',
                   'uint32_t',
                   [param('ns3::Ptr< ns3::NetDevice >', 'device')])
    ## node.h (module 'network'): static bool ns3::Node::ChecksumEnabled() [member function]
    cls.add_method('ChecksumEnabled',
                   'bool',
                   [],
                   is_static=True)
    ## node.h (module 'network'): ns3::Ptr<ns3::Application> ns3::Node::GetApplication(uint32_t index) const [member function]
    cls.add_method('GetApplication',
                   'ns3::Ptr< ns3::Application >',
                   [param('uint32_t', 'index')],
                   is_const=True)
    ## node.h (module 'network'): ns3::Ptr<ns3::NetDevice> ns3::Node::GetDevice(uint32_t index) const [member function]
    cls.add_method('GetDevice',
                   'ns3::Ptr< ns3::NetDevice >',
                   [param('uint32_t', 'index')],
                   is_const=True)
    ## node.h (module 'network'): uint32_t ns3::Node::GetId() const [member function]
    cls.add_method('GetId',
                   'uint32_t',
                   [],
                   is_const=True)
    ## node.h (module 'network'): ns3::Time ns3::Node::GetLocalTime() const [member function]
    cls.add_method('GetLocalTime',
                   'ns3::Time',
                   [],
                   is_const=True)
    ## node.h (module 'network'): uint32_t ns3::Node::GetNApplications() const [member function]
    cls.add_method('GetNApplications',
                   'uint32_t',
                   [],
                   is_const=True)
    ## node.h (module 'network'): uint32_t ns3::Node::GetNDevices() const [member function]
    cls.add_method('GetNDevices',
                   'uint32_t',
                   [],
                   is_const=True)
    ## node.h (module 'network'): uint32_t ns3::Node::GetSystemId() const [member function]
    cls.add_method('GetSystemId',
                   'uint32_t',
                   [],
                   is_const=True)
    ## node.h (module 'network'): static ns3::TypeId ns3::Node::GetTypeId() [member function]
    cls.add_method('GetTypeId',
                   'ns3::TypeId',
                   [],
                   is_static=True)
    ## node.h (module 'network'): void ns3::Node::RegisterDeviceAdditionListener(ns3::Node::DeviceAdditionListener listener) [member function]
    cls.add_method('RegisterDeviceAdditionListener',
                   'void',
                   [param('ns3::Callback< void, ns3::Ptr< ns3::NetDevice >, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty >', 'listener')])
    ## node.h (module 'network'): void ns3::Node::RegisterProtocolHandler(ns3::Node::ProtocolHandler handler, uint16_t protocolType, ns3::Ptr<ns3::NetDevice> device, bool promiscuous=false) [member function]
    cls.add_method('RegisterProtocolHandler',
                   'void',
                   [param('ns3::Callback< void, ns3::Ptr< ns3::NetDevice >, ns3::Ptr< ns3::Packet const >, unsigned short, ns3::Address const &, ns3::Address const &, ns3::NetDevice::PacketType, ns3::empty, ns3::empty, ns3::empty >', 'handler'), param('uint16_t', 'protocolType'), param('ns3::Ptr< ns3::NetDevice >', 'device'), param('bool', 'promiscuous', default_value='false')])
    ## node.h (module 'network'): void ns3::Node::UnregisterDeviceAdditionListener(ns3::Node::DeviceAdditionListener listener) [member function]
    cls.add_method('UnregisterDeviceAdditionListener',
                   'void',
                   [param('ns3::Callback< void, ns3::Ptr< ns3::NetDevice >, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty >', 'listener')])
    ## node.h (module 'network'): void ns3::Node::UnregisterProtocolHandler(ns3::Node::ProtocolHandler handler) [member function]
    cls.add_method('UnregisterProtocolHandler',
                   'void',
                   [param('ns3::Callback< void, ns3::Ptr< ns3::NetDevice >, ns3::Ptr< ns3::Packet const >, unsigned short, ns3::Address const &, ns3::Address const &, ns3::NetDevice::PacketType, ns3::empty, ns3::empty, ns3::empty >', 'handler')])
    ## node.h (module 'network'): void ns3::Node::DoDispose() [member function]
    cls.add_method('DoDispose',
                   'void',
                   [],
                   visibility='protected', is_virtual=True)
    ## node.h (module 'network'): void ns3::Node::DoInitialize() [member function]
    cls.add_method('DoInitialize',
                   'void',
                   [],
                   visibility='protected', is_virtual=True)
    return
def register_Ns3ObjectFactoryChecker_methods(root_module, cls):
    """Register constructor bindings for ns3::ObjectFactoryChecker on *cls*.

    Registers, in order: the default constructor and the copy
    constructor (declared in object-factory.h, module 'core').
    """
    constructor_signatures = (
        [],
        [param('ns3::ObjectFactoryChecker const &', 'arg0')],
    )
    for signature in constructor_signatures:
        cls.add_constructor(signature)
    return
def register_Ns3ObjectFactoryValue_methods(root_module, cls):
    """Register constructor/method bindings for ns3::ObjectFactoryValue on *cls*.

    Generated binding-registration code: each ``add_*`` call mirrors the
    C++ declaration quoted in the ``##`` comment directly above it
    (from object-factory.h).  Call order and arguments must not be altered.
    """
    ## object-factory.h (module 'core'): ns3::ObjectFactoryValue::ObjectFactoryValue() [constructor]
    cls.add_constructor([])
    ## object-factory.h (module 'core'): ns3::ObjectFactoryValue::ObjectFactoryValue(ns3::ObjectFactory const & value) [constructor]
    cls.add_constructor([param('ns3::ObjectFactory const &', 'value')])
    ## object-factory.h (module 'core'): ns3::ObjectFactoryValue::ObjectFactoryValue(ns3::ObjectFactoryValue const & arg0) [constructor]
    cls.add_constructor([param('ns3::ObjectFactoryValue const &', 'arg0')])
    ## object-factory.h (module 'core'): ns3::Ptr<ns3::AttributeValue> ns3::ObjectFactoryValue::Copy() const [member function]
    cls.add_method('Copy',
                   'ns3::Ptr< ns3::AttributeValue >',
                   [],
                   is_const=True, is_virtual=True)
    ## object-factory.h (module 'core'): bool ns3::ObjectFactoryValue::DeserializeFromString(std::string value, ns3::Ptr<const ns3::AttributeChecker> checker) [member function]
    cls.add_method('DeserializeFromString',
                   'bool',
                   [param('std::string', 'value'), param('ns3::Ptr< ns3::AttributeChecker const >', 'checker')],
                   is_virtual=True)
    ## object-factory.h (module 'core'): ns3::ObjectFactory ns3::ObjectFactoryValue::Get() const [member function]
    cls.add_method('Get',
                   'ns3::ObjectFactory',
                   [],
                   is_const=True)
    ## object-factory.h (module 'core'): std::string ns3::ObjectFactoryValue::SerializeToString(ns3::Ptr<const ns3::AttributeChecker> checker) const [member function]
    cls.add_method('SerializeToString',
                   'std::string',
                   [param('ns3::Ptr< ns3::AttributeChecker const >', 'checker')],
                   is_const=True, is_virtual=True)
    ## object-factory.h (module 'core'): void ns3::ObjectFactoryValue::Set(ns3::ObjectFactory const & value) [member function]
    cls.add_method('Set',
                   'void',
                   [param('ns3::ObjectFactory const &', 'value')])
    return
def register_Ns3OutputStreamWrapper_methods(root_module, cls):
    """Register constructor/method bindings for ns3::OutputStreamWrapper on *cls*.

    Registers, in order: the copy constructor, a (filename, openmode)
    constructor, a constructor wrapping an existing std::ostream, and
    the GetStream() accessor (declared in output-stream-wrapper.h,
    module 'network').
    """
    # Copy constructor.
    cls.add_constructor([param('ns3::OutputStreamWrapper const &', 'arg0')])
    # OutputStreamWrapper(std::string filename, std::ios_base::openmode filemode)
    cls.add_constructor([param('std::string', 'filename'), param('std::ios_base::openmode', 'filemode')])
    # OutputStreamWrapper(std::ostream * os)
    cls.add_constructor([param('std::ostream *', 'os')])
    # std::ostream * GetStream()
    cls.add_method('GetStream', 'std::ostream *', [])
    return
def register_Ns3Packet_methods(root_module, cls):
    """Register constructor/method bindings for ns3::Packet on *cls*.

    Generated binding-registration code: each ``add_*`` call mirrors the
    C++ declaration quoted in the ``##`` comment directly above it
    (from packet.h).  Overloads (e.g. CopyData, PeekHeader, RemoveHeader)
    are registered once per signature.  Call order and arguments must
    not be altered.
    """
    # Exposes C++ operator<< so the wrapped object is printable.
    cls.add_output_stream_operator()
    ## packet.h (module 'network'): ns3::Packet::Packet() [constructor]
    cls.add_constructor([])
    ## packet.h (module 'network'): ns3::Packet::Packet(ns3::Packet const & o) [constructor]
    cls.add_constructor([param('ns3::Packet const &', 'o')])
    ## packet.h (module 'network'): ns3::Packet::Packet(uint32_t size) [constructor]
    cls.add_constructor([param('uint32_t', 'size')])
    ## packet.h (module 'network'): ns3::Packet::Packet(uint8_t const * buffer, uint32_t size, bool magic) [constructor]
    cls.add_constructor([param('uint8_t const *', 'buffer'), param('uint32_t', 'size'), param('bool', 'magic')])
    ## packet.h (module 'network'): ns3::Packet::Packet(uint8_t const * buffer, uint32_t size) [constructor]
    cls.add_constructor([param('uint8_t const *', 'buffer'), param('uint32_t', 'size')])
    ## packet.h (module 'network'): void ns3::Packet::AddAtEnd(ns3::Ptr<const ns3::Packet> packet) [member function]
    cls.add_method('AddAtEnd',
                   'void',
                   [param('ns3::Ptr< ns3::Packet const >', 'packet')])
    ## packet.h (module 'network'): void ns3::Packet::AddByteTag(ns3::Tag const & tag) const [member function]
    cls.add_method('AddByteTag',
                   'void',
                   [param('ns3::Tag const &', 'tag')],
                   is_const=True)
    ## packet.h (module 'network'): void ns3::Packet::AddHeader(ns3::Header const & header) [member function]
    cls.add_method('AddHeader',
                   'void',
                   [param('ns3::Header const &', 'header')])
    ## packet.h (module 'network'): void ns3::Packet::AddPacketTag(ns3::Tag const & tag) const [member function]
    cls.add_method('AddPacketTag',
                   'void',
                   [param('ns3::Tag const &', 'tag')],
                   is_const=True)
    ## packet.h (module 'network'): void ns3::Packet::AddPaddingAtEnd(uint32_t size) [member function]
    cls.add_method('AddPaddingAtEnd',
                   'void',
                   [param('uint32_t', 'size')])
    ## packet.h (module 'network'): void ns3::Packet::AddTrailer(ns3::Trailer const & trailer) [member function]
    cls.add_method('AddTrailer',
                   'void',
                   [param('ns3::Trailer const &', 'trailer')])
    ## packet.h (module 'network'): ns3::PacketMetadata::ItemIterator ns3::Packet::BeginItem() const [member function]
    cls.add_method('BeginItem',
                   'ns3::PacketMetadata::ItemIterator',
                   [],
                   is_const=True)
    ## packet.h (module 'network'): ns3::Ptr<ns3::Packet> ns3::Packet::Copy() const [member function]
    cls.add_method('Copy',
                   'ns3::Ptr< ns3::Packet >',
                   [],
                   is_const=True)
    ## packet.h (module 'network'): uint32_t ns3::Packet::CopyData(uint8_t * buffer, uint32_t size) const [member function]
    cls.add_method('CopyData',
                   'uint32_t',
                   [param('uint8_t *', 'buffer'), param('uint32_t', 'size')],
                   is_const=True)
    ## packet.h (module 'network'): void ns3::Packet::CopyData(std::ostream * os, uint32_t size) const [member function]
    cls.add_method('CopyData',
                   'void',
                   [param('std::ostream *', 'os'), param('uint32_t', 'size')],
                   is_const=True)
    ## packet.h (module 'network'): ns3::Ptr<ns3::Packet> ns3::Packet::CreateFragment(uint32_t start, uint32_t length) const [member function]
    cls.add_method('CreateFragment',
                   'ns3::Ptr< ns3::Packet >',
                   [param('uint32_t', 'start'), param('uint32_t', 'length')],
                   is_const=True)
    ## packet.h (module 'network'): static void ns3::Packet::EnableChecking() [member function]
    cls.add_method('EnableChecking',
                   'void',
                   [],
                   is_static=True)
    ## packet.h (module 'network'): static void ns3::Packet::EnablePrinting() [member function]
    cls.add_method('EnablePrinting',
                   'void',
                   [],
                   is_static=True)
    ## packet.h (module 'network'): bool ns3::Packet::FindFirstMatchingByteTag(ns3::Tag & tag) const [member function]
    cls.add_method('FindFirstMatchingByteTag',
                   'bool',
                   [param('ns3::Tag &', 'tag')],
                   is_const=True)
    ## packet.h (module 'network'): ns3::ByteTagIterator ns3::Packet::GetByteTagIterator() const [member function]
    cls.add_method('GetByteTagIterator',
                   'ns3::ByteTagIterator',
                   [],
                   is_const=True)
    ## packet.h (module 'network'): ns3::Ptr<ns3::NixVector> ns3::Packet::GetNixVector() const [member function]
    cls.add_method('GetNixVector',
                   'ns3::Ptr< ns3::NixVector >',
                   [],
                   is_const=True)
    ## packet.h (module 'network'): ns3::PacketTagIterator ns3::Packet::GetPacketTagIterator() const [member function]
    cls.add_method('GetPacketTagIterator',
                   'ns3::PacketTagIterator',
                   [],
                   is_const=True)
    ## packet.h (module 'network'): uint32_t ns3::Packet::GetSerializedSize() const [member function]
    cls.add_method('GetSerializedSize',
                   'uint32_t',
                   [],
                   is_const=True)
    ## packet.h (module 'network'): uint32_t ns3::Packet::GetSize() const [member function]
    cls.add_method('GetSize',
                   'uint32_t',
                   [],
                   is_const=True)
    ## packet.h (module 'network'): uint64_t ns3::Packet::GetUid() const [member function]
    cls.add_method('GetUid',
                   'uint64_t',
                   [],
                   is_const=True)
    ## packet.h (module 'network'): uint32_t ns3::Packet::PeekHeader(ns3::Header & header) const [member function]
    cls.add_method('PeekHeader',
                   'uint32_t',
                   [param('ns3::Header &', 'header')],
                   is_const=True)
    ## packet.h (module 'network'): uint32_t ns3::Packet::PeekHeader(ns3::Header & header, uint32_t size) const [member function]
    cls.add_method('PeekHeader',
                   'uint32_t',
                   [param('ns3::Header &', 'header'), param('uint32_t', 'size')],
                   is_const=True)
    ## packet.h (module 'network'): bool ns3::Packet::PeekPacketTag(ns3::Tag & tag) const [member function]
    cls.add_method('PeekPacketTag',
                   'bool',
                   [param('ns3::Tag &', 'tag')],
                   is_const=True)
    ## packet.h (module 'network'): uint32_t ns3::Packet::PeekTrailer(ns3::Trailer & trailer) [member function]
    cls.add_method('PeekTrailer',
                   'uint32_t',
                   [param('ns3::Trailer &', 'trailer')])
    ## packet.h (module 'network'): void ns3::Packet::Print(std::ostream & os) const [member function]
    cls.add_method('Print',
                   'void',
                   [param('std::ostream &', 'os')],
                   is_const=True)
    ## packet.h (module 'network'): void ns3::Packet::PrintByteTags(std::ostream & os) const [member function]
    cls.add_method('PrintByteTags',
                   'void',
                   [param('std::ostream &', 'os')],
                   is_const=True)
    ## packet.h (module 'network'): void ns3::Packet::PrintPacketTags(std::ostream & os) const [member function]
    cls.add_method('PrintPacketTags',
                   'void',
                   [param('std::ostream &', 'os')],
                   is_const=True)
    ## packet.h (module 'network'): void ns3::Packet::RemoveAllByteTags() [member function]
    cls.add_method('RemoveAllByteTags',
                   'void',
                   [])
    ## packet.h (module 'network'): void ns3::Packet::RemoveAllPacketTags() [member function]
    cls.add_method('RemoveAllPacketTags',
                   'void',
                   [])
    ## packet.h (module 'network'): void ns3::Packet::RemoveAtEnd(uint32_t size) [member function]
    cls.add_method('RemoveAtEnd',
                   'void',
                   [param('uint32_t', 'size')])
    ## packet.h (module 'network'): void ns3::Packet::RemoveAtStart(uint32_t size) [member function]
    cls.add_method('RemoveAtStart',
                   'void',
                   [param('uint32_t', 'size')])
    ## packet.h (module 'network'): uint32_t ns3::Packet::RemoveHeader(ns3::Header & header) [member function]
    cls.add_method('RemoveHeader',
                   'uint32_t',
                   [param('ns3::Header &', 'header')])
    ## packet.h (module 'network'): uint32_t ns3::Packet::RemoveHeader(ns3::Header & header, uint32_t size) [member function]
    cls.add_method('RemoveHeader',
                   'uint32_t',
                   [param('ns3::Header &', 'header'), param('uint32_t', 'size')])
    ## packet.h (module 'network'): bool ns3::Packet::RemovePacketTag(ns3::Tag & tag) [member function]
    cls.add_method('RemovePacketTag',
                   'bool',
                   [param('ns3::Tag &', 'tag')])
    ## packet.h (module 'network'): uint32_t ns3::Packet::RemoveTrailer(ns3::Trailer & trailer) [member function]
    cls.add_method('RemoveTrailer',
                   'uint32_t',
                   [param('ns3::Trailer &', 'trailer')])
    ## packet.h (module 'network'): bool ns3::Packet::ReplacePacketTag(ns3::Tag & tag) [member function]
    cls.add_method('ReplacePacketTag',
                   'bool',
                   [param('ns3::Tag &', 'tag')])
    ## packet.h (module 'network'): uint32_t ns3::Packet::Serialize(uint8_t * buffer, uint32_t maxSize) const [member function]
    cls.add_method('Serialize',
                   'uint32_t',
                   [param('uint8_t *', 'buffer'), param('uint32_t', 'maxSize')],
                   is_const=True)
    ## packet.h (module 'network'): void ns3::Packet::SetNixVector(ns3::Ptr<ns3::NixVector> nixVector) [member function]
    cls.add_method('SetNixVector',
                   'void',
                   [param('ns3::Ptr< ns3::NixVector >', 'nixVector')])
    ## packet.h (module 'network'): std::string ns3::Packet::ToString() const [member function]
    cls.add_method('ToString',
                   'std::string',
                   [],
                   is_const=True)
    return
def register_Ns3QueueItem_methods(root_module, cls):
    """Register constructor/method bindings for ns3::QueueItem on *cls*.

    Generated binding-registration code: each ``add_*`` call mirrors the
    C++ declaration quoted in the ``##`` comment directly above it
    (from queue-item.h).  Call order and arguments must not be altered.
    """
    # Exposes C++ operator<< so the wrapped object is printable.
    cls.add_output_stream_operator()
    ## queue-item.h (module 'network'): ns3::QueueItem::QueueItem(ns3::Ptr<ns3::Packet> p) [constructor]
    cls.add_constructor([param('ns3::Ptr< ns3::Packet >', 'p')])
    ## queue-item.h (module 'network'): ns3::Ptr<ns3::Packet> ns3::QueueItem::GetPacket() const [member function]
    cls.add_method('GetPacket',
                   'ns3::Ptr< ns3::Packet >',
                   [],
                   is_const=True)
    ## queue-item.h (module 'network'): uint32_t ns3::QueueItem::GetSize() const [member function]
    cls.add_method('GetSize',
                   'uint32_t',
                   [],
                   is_const=True, is_virtual=True)
    ## queue-item.h (module 'network'): bool ns3::QueueItem::GetUint8Value(ns3::QueueItem::Uint8Values field, uint8_t & value) const [member function]
    cls.add_method('GetUint8Value',
                   'bool',
                   [param('ns3::QueueItem::Uint8Values', 'field'), param('uint8_t &', 'value')],
                   is_const=True, is_virtual=True)
    ## queue-item.h (module 'network'): void ns3::QueueItem::Print(std::ostream & os) const [member function]
    cls.add_method('Print',
                   'void',
                   [param('std::ostream &', 'os')],
                   is_const=True, is_virtual=True)
    return
def register_Ns3TimeValue_methods(root_module, cls):
    """Register constructor/method bindings for ns3::TimeValue on *cls*.

    Generated binding-registration code: each ``add_*`` call mirrors the
    C++ declaration quoted in the ``##`` comment directly above it
    (from nstime.h).  Call order and arguments must not be altered.
    """
    ## nstime.h (module 'core'): ns3::TimeValue::TimeValue() [constructor]
    cls.add_constructor([])
    ## nstime.h (module 'core'): ns3::TimeValue::TimeValue(ns3::Time const & value) [constructor]
    cls.add_constructor([param('ns3::Time const &', 'value')])
    ## nstime.h (module 'core'): ns3::TimeValue::TimeValue(ns3::TimeValue const & arg0) [constructor]
    cls.add_constructor([param('ns3::TimeValue const &', 'arg0')])
    ## nstime.h (module 'core'): ns3::Ptr<ns3::AttributeValue> ns3::TimeValue::Copy() const [member function]
    cls.add_method('Copy',
                   'ns3::Ptr< ns3::AttributeValue >',
                   [],
                   is_const=True, is_virtual=True)
    ## nstime.h (module 'core'): bool ns3::TimeValue::DeserializeFromString(std::string value, ns3::Ptr<const ns3::AttributeChecker> checker) [member function]
    cls.add_method('DeserializeFromString',
                   'bool',
                   [param('std::string', 'value'), param('ns3::Ptr< ns3::AttributeChecker const >', 'checker')],
                   is_virtual=True)
    ## nstime.h (module 'core'): ns3::Time ns3::TimeValue::Get() const [member function]
    cls.add_method('Get',
                   'ns3::Time',
                   [],
                   is_const=True)
    ## nstime.h (module 'core'): std::string ns3::TimeValue::SerializeToString(ns3::Ptr<const ns3::AttributeChecker> checker) const [member function]
    cls.add_method('SerializeToString',
                   'std::string',
                   [param('ns3::Ptr< ns3::AttributeChecker const >', 'checker')],
                   is_const=True, is_virtual=True)
    ## nstime.h (module 'core'): void ns3::TimeValue::Set(ns3::Time const & value) [member function]
    cls.add_method('Set',
                   'void',
                   [param('ns3::Time const &', 'value')])
    return
def register_Ns3TypeIdChecker_methods(root_module, cls):
    """Register constructor bindings for ns3::TypeIdChecker on *cls*.

    Registers, in order: the default constructor and the copy
    constructor (declared in type-id.h, module 'core').
    """
    constructor_signatures = (
        [],
        [param('ns3::TypeIdChecker const &', 'arg0')],
    )
    for signature in constructor_signatures:
        cls.add_constructor(signature)
    return
def register_Ns3TypeIdValue_methods(root_module, cls):
    """Register constructor/method bindings for ns3::TypeIdValue on *cls*.

    Generated binding-registration code: each ``add_*`` call mirrors the
    C++ declaration quoted in the ``##`` comment directly above it
    (from type-id.h).  Call order and arguments must not be altered.
    """
    ## type-id.h (module 'core'): ns3::TypeIdValue::TypeIdValue() [constructor]
    cls.add_constructor([])
    ## type-id.h (module 'core'): ns3::TypeIdValue::TypeIdValue(ns3::TypeId const & value) [constructor]
    cls.add_constructor([param('ns3::TypeId const &', 'value')])
    ## type-id.h (module 'core'): ns3::TypeIdValue::TypeIdValue(ns3::TypeIdValue const & arg0) [constructor]
    cls.add_constructor([param('ns3::TypeIdValue const &', 'arg0')])
    ## type-id.h (module 'core'): ns3::Ptr<ns3::AttributeValue> ns3::TypeIdValue::Copy() const [member function]
    cls.add_method('Copy',
                   'ns3::Ptr< ns3::AttributeValue >',
                   [],
                   is_const=True, is_virtual=True)
    ## type-id.h (module 'core'): bool ns3::TypeIdValue::DeserializeFromString(std::string value, ns3::Ptr<const ns3::AttributeChecker> checker) [member function]
    cls.add_method('DeserializeFromString',
                   'bool',
                   [param('std::string', 'value'), param('ns3::Ptr< ns3::AttributeChecker const >', 'checker')],
                   is_virtual=True)
    ## type-id.h (module 'core'): ns3::TypeId ns3::TypeIdValue::Get() const [member function]
    cls.add_method('Get',
                   'ns3::TypeId',
                   [],
                   is_const=True)
    ## type-id.h (module 'core'): std::string ns3::TypeIdValue::SerializeToString(ns3::Ptr<const ns3::AttributeChecker> checker) const [member function]
    cls.add_method('SerializeToString',
                   'std::string',
                   [param('ns3::Ptr< ns3::AttributeChecker const >', 'checker')],
                   is_const=True, is_virtual=True)
    ## type-id.h (module 'core'): void ns3::TypeIdValue::Set(ns3::TypeId const & value) [member function]
    cls.add_method('Set',
                   'void',
                   [param('ns3::TypeId const &', 'value')])
    return
def register_Ns3AddressChecker_methods(root_module, cls):
    """Register constructor bindings for ns3::AddressChecker on *cls*.

    Registers, in order: the default constructor and the copy
    constructor (declared in address.h, module 'network').
    """
    constructor_signatures = (
        [],
        [param('ns3::AddressChecker const &', 'arg0')],
    )
    for signature in constructor_signatures:
        cls.add_constructor(signature)
    return
def register_Ns3AddressValue_methods(root_module, cls):
    """Register constructor/method bindings for ns3::AddressValue on *cls*.

    Generated binding-registration code: each ``add_*`` call mirrors the
    C++ declaration quoted in the ``##`` comment directly above it
    (from address.h).  Call order and arguments must not be altered.
    """
    ## address.h (module 'network'): ns3::AddressValue::AddressValue() [constructor]
    cls.add_constructor([])
    ## address.h (module 'network'): ns3::AddressValue::AddressValue(ns3::Address const & value) [constructor]
    cls.add_constructor([param('ns3::Address const &', 'value')])
    ## address.h (module 'network'): ns3::AddressValue::AddressValue(ns3::AddressValue const & arg0) [constructor]
    cls.add_constructor([param('ns3::AddressValue const &', 'arg0')])
    ## address.h (module 'network'): ns3::Ptr<ns3::AttributeValue> ns3::AddressValue::Copy() const [member function]
    cls.add_method('Copy',
                   'ns3::Ptr< ns3::AttributeValue >',
                   [],
                   is_const=True, is_virtual=True)
    ## address.h (module 'network'): bool ns3::AddressValue::DeserializeFromString(std::string value, ns3::Ptr<const ns3::AttributeChecker> checker) [member function]
    cls.add_method('DeserializeFromString',
                   'bool',
                   [param('std::string', 'value'), param('ns3::Ptr< ns3::AttributeChecker const >', 'checker')],
                   is_virtual=True)
    ## address.h (module 'network'): ns3::Address ns3::AddressValue::Get() const [member function]
    cls.add_method('Get',
                   'ns3::Address',
                   [],
                   is_const=True)
    ## address.h (module 'network'): std::string ns3::AddressValue::SerializeToString(ns3::Ptr<const ns3::AttributeChecker> checker) const [member function]
    cls.add_method('SerializeToString',
                   'std::string',
                   [param('ns3::Ptr< ns3::AttributeChecker const >', 'checker')],
                   is_const=True, is_virtual=True)
    ## address.h (module 'network'): void ns3::AddressValue::Set(ns3::Address const & value) [member function]
    cls.add_method('Set',
                   'void',
                   [param('ns3::Address const &', 'value')])
    return
def register_Ns3CallbackImpl__Bool_Ns3Ptr__lt__ns3Socket__gt___Const_ns3Address___amp___Ns3Empty_Ns3Empty_Ns3Empty_Ns3Empty_Ns3Empty_Ns3Empty_Ns3Empty_methods(root_module, cls):
    """Register bindings for the CallbackImpl<bool, Ptr<Socket>, const Address&, ...> instantiation.

    Generated binding-registration code for one ns3::CallbackImpl template
    instantiation; ``operator()`` is exposed to Python as ``__call__``.
    Each ``add_*`` call mirrors the declaration quoted in the ``##``
    comment directly above it (from callback.h).
    """
    ## callback.h (module 'core'): ns3::CallbackImpl<bool, ns3::Ptr<ns3::Socket>, const ns3::Address &, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty>::CallbackImpl() [constructor]
    cls.add_constructor([])
    ## callback.h (module 'core'): ns3::CallbackImpl<bool, ns3::Ptr<ns3::Socket>, const ns3::Address &, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty>::CallbackImpl(ns3::CallbackImpl<bool, ns3::Ptr<ns3::Socket>, const ns3::Address &, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty> const & arg0) [constructor]
    cls.add_constructor([param('ns3::CallbackImpl< bool, ns3::Ptr< ns3::Socket >, ns3::Address const &, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty > const &', 'arg0')])
    ## callback.h (module 'core'): static std::string ns3::CallbackImpl<bool, ns3::Ptr<ns3::Socket>, const ns3::Address &, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty>::DoGetTypeid() [member function]
    cls.add_method('DoGetTypeid',
                   'std::string',
                   [],
                   is_static=True)
    ## callback.h (module 'core'): std::string ns3::CallbackImpl<bool, ns3::Ptr<ns3::Socket>, const ns3::Address &, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty>::GetTypeid() const [member function]
    cls.add_method('GetTypeid',
                   'std::string',
                   [],
                   is_const=True, is_virtual=True)
    ## callback.h (module 'core'): bool ns3::CallbackImpl<bool, ns3::Ptr<ns3::Socket>, const ns3::Address &, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty>::operator()(ns3::Ptr<ns3::Socket> arg0, ns3::Address const & arg1) [member operator]
    cls.add_method('operator()',
                   'bool',
                   [param('ns3::Ptr< ns3::Socket >', 'arg0'), param('ns3::Address const &', 'arg1')],
                   is_pure_virtual=True, is_virtual=True, custom_name=u'__call__')
    return
def register_Ns3CallbackImpl__Ns3ObjectBase___star___Ns3Empty_Ns3Empty_Ns3Empty_Ns3Empty_Ns3Empty_Ns3Empty_Ns3Empty_Ns3Empty_Ns3Empty_methods(root_module, cls):
    """Register bindings for the zero-argument CallbackImpl<ObjectBase*, ...> instantiation.

    Generated binding-registration code for one ns3::CallbackImpl template
    instantiation; ``operator()`` is exposed to Python as ``__call__``.
    Each ``add_*`` call mirrors the declaration quoted in the ``##``
    comment directly above it (from callback.h).
    """
    ## callback.h (module 'core'): ns3::CallbackImpl<ns3::ObjectBase *, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty>::CallbackImpl() [constructor]
    cls.add_constructor([])
    ## callback.h (module 'core'): ns3::CallbackImpl<ns3::ObjectBase *, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty>::CallbackImpl(ns3::CallbackImpl<ns3::ObjectBase *, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty> const & arg0) [constructor]
    cls.add_constructor([param('ns3::CallbackImpl< ns3::ObjectBase *, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty > const &', 'arg0')])
    ## callback.h (module 'core'): static std::string ns3::CallbackImpl<ns3::ObjectBase *, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty>::DoGetTypeid() [member function]
    cls.add_method('DoGetTypeid',
                   'std::string',
                   [],
                   is_static=True)
    ## callback.h (module 'core'): std::string ns3::CallbackImpl<ns3::ObjectBase *, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty>::GetTypeid() const [member function]
    cls.add_method('GetTypeid',
                   'std::string',
                   [],
                   is_const=True, is_virtual=True)
    ## callback.h (module 'core'): ns3::ObjectBase * ns3::CallbackImpl<ns3::ObjectBase *, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty>::operator()() [member operator]
    cls.add_method('operator()',
                   'ns3::ObjectBase *',
                   [],
                   is_pure_virtual=True, is_virtual=True, custom_name=u'__call__')
    return
def register_Ns3CallbackImpl__Void_Const_ns3Ipv4Header___amp___Ns3Ptr__lt__const_ns3Packet__gt___Ns3Ipv4L3ProtocolDropReason_Ns3Ptr__lt__ns3Ipv4__gt___Unsigned_int_Ns3Empty_Ns3Empty_Ns3Empty_Ns3Empty_methods(root_module, cls):
    """Register bindings for the CallbackImpl<void, Ipv4Header&, Ptr<Packet>, DropReason, Ptr<Ipv4>, uint, ...> instantiation.

    Generated binding-registration code for one ns3::CallbackImpl template
    instantiation; ``operator()`` is exposed to Python as ``__call__``.
    Each ``add_*`` call mirrors the declaration quoted in the ``##``
    comment directly above it (from callback.h).
    """
    ## callback.h (module 'core'): ns3::CallbackImpl<void, const ns3::Ipv4Header &, ns3::Ptr<const ns3::Packet>, ns3::Ipv4L3Protocol::DropReason, ns3::Ptr<ns3::Ipv4>, unsigned int, ns3::empty, ns3::empty, ns3::empty, ns3::empty>::CallbackImpl() [constructor]
    cls.add_constructor([])
    ## callback.h (module 'core'): ns3::CallbackImpl<void, const ns3::Ipv4Header &, ns3::Ptr<const ns3::Packet>, ns3::Ipv4L3Protocol::DropReason, ns3::Ptr<ns3::Ipv4>, unsigned int, ns3::empty, ns3::empty, ns3::empty, ns3::empty>::CallbackImpl(ns3::CallbackImpl<void, const ns3::Ipv4Header &, ns3::Ptr<const ns3::Packet>, ns3::Ipv4L3Protocol::DropReason, ns3::Ptr<ns3::Ipv4>, unsigned int, ns3::empty, ns3::empty, ns3::empty, ns3::empty> const & arg0) [constructor]
    cls.add_constructor([param('ns3::CallbackImpl< void, ns3::Ipv4Header const &, ns3::Ptr< ns3::Packet const >, ns3::Ipv4L3Protocol::DropReason, ns3::Ptr< ns3::Ipv4 >, unsigned int, ns3::empty, ns3::empty, ns3::empty, ns3::empty > const &', 'arg0')])
    ## callback.h (module 'core'): static std::string ns3::CallbackImpl<void, const ns3::Ipv4Header &, ns3::Ptr<const ns3::Packet>, ns3::Ipv4L3Protocol::DropReason, ns3::Ptr<ns3::Ipv4>, unsigned int, ns3::empty, ns3::empty, ns3::empty, ns3::empty>::DoGetTypeid() [member function]
    cls.add_method('DoGetTypeid',
                   'std::string',
                   [],
                   is_static=True)
    ## callback.h (module 'core'): std::string ns3::CallbackImpl<void, const ns3::Ipv4Header &, ns3::Ptr<const ns3::Packet>, ns3::Ipv4L3Protocol::DropReason, ns3::Ptr<ns3::Ipv4>, unsigned int, ns3::empty, ns3::empty, ns3::empty, ns3::empty>::GetTypeid() const [member function]
    cls.add_method('GetTypeid',
                   'std::string',
                   [],
                   is_const=True, is_virtual=True)
    ## callback.h (module 'core'): void ns3::CallbackImpl<void, const ns3::Ipv4Header &, ns3::Ptr<const ns3::Packet>, ns3::Ipv4L3Protocol::DropReason, ns3::Ptr<ns3::Ipv4>, unsigned int, ns3::empty, ns3::empty, ns3::empty, ns3::empty>::operator()(ns3::Ipv4Header const & arg0, ns3::Ptr<const ns3::Packet> arg1, ns3::Ipv4L3Protocol::DropReason arg2, ns3::Ptr<ns3::Ipv4> arg3, unsigned int arg4) [member operator]
    cls.add_method('operator()',
                   'void',
                   [param('ns3::Ipv4Header const &', 'arg0'), param('ns3::Ptr< ns3::Packet const >', 'arg1'), param('ns3::Ipv4L3Protocol::DropReason', 'arg2'), param('ns3::Ptr< ns3::Ipv4 >', 'arg3'), param('unsigned int', 'arg4')],
                   is_pure_virtual=True, is_virtual=True, custom_name=u'__call__')
    return
def register_Ns3CallbackImpl__Void_Const_ns3Ipv4Header___amp___Ns3Ptr__lt__const_ns3Packet__gt___Unsigned_int_Ns3Empty_Ns3Empty_Ns3Empty_Ns3Empty_Ns3Empty_Ns3Empty_methods(root_module, cls):
    """Register constructors, DoGetTypeid/GetTypeid and operator() for the
    CallbackImpl<void, const Ipv4Header&, Ptr<const Packet>, unsigned int>
    specialization.  Auto-generated pybindgen code -- do not hand-edit the
    type strings; regenerate instead."""
    ## callback.h (module 'core'): ns3::CallbackImpl<void, const ns3::Ipv4Header &, ns3::Ptr<const ns3::Packet>, unsigned int, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty>::CallbackImpl() [constructor]
    cls.add_constructor([])
    ## callback.h (module 'core'): ns3::CallbackImpl<void, const ns3::Ipv4Header &, ns3::Ptr<const ns3::Packet>, unsigned int, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty>::CallbackImpl(ns3::CallbackImpl<void, const ns3::Ipv4Header &, ns3::Ptr<const ns3::Packet>, unsigned int, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty> const & arg0) [constructor]
    cls.add_constructor([param('ns3::CallbackImpl< void, ns3::Ipv4Header const &, ns3::Ptr< ns3::Packet const >, unsigned int, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty > const &', 'arg0')])
    ## callback.h (module 'core'): static std::string ns3::CallbackImpl<void, const ns3::Ipv4Header &, ns3::Ptr<const ns3::Packet>, unsigned int, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty>::DoGetTypeid() [member function]
    cls.add_method('DoGetTypeid',
                   'std::string',
                   [],
                   is_static=True)
    ## callback.h (module 'core'): std::string ns3::CallbackImpl<void, const ns3::Ipv4Header &, ns3::Ptr<const ns3::Packet>, unsigned int, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty>::GetTypeid() const [member function]
    cls.add_method('GetTypeid',
                   'std::string',
                   [],
                   is_const=True, is_virtual=True)
    ## callback.h (module 'core'): void ns3::CallbackImpl<void, const ns3::Ipv4Header &, ns3::Ptr<const ns3::Packet>, unsigned int, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty>::operator()(ns3::Ipv4Header const & arg0, ns3::Ptr<const ns3::Packet> arg1, unsigned int arg2) [member operator]
    cls.add_method('operator()',
                   'void',
                   [param('ns3::Ipv4Header const &', 'arg0'), param('ns3::Ptr< ns3::Packet const >', 'arg1'), param('unsigned int', 'arg2')],
                   is_pure_virtual=True, is_virtual=True, custom_name=u'__call__')
    return
def register_Ns3CallbackImpl__Void_Const_ns3Ipv6Header___amp___Ns3Ptr__lt__const_ns3Packet__gt___Ns3Ipv6L3ProtocolDropReason_Ns3Ptr__lt__ns3Ipv6__gt___Unsigned_int_Ns3Empty_Ns3Empty_Ns3Empty_Ns3Empty_methods(root_module, cls):
    """Register constructors, DoGetTypeid/GetTypeid and operator() for the
    CallbackImpl<void, const Ipv6Header&, Ptr<const Packet>,
    Ipv6L3Protocol::DropReason, Ptr<Ipv6>, unsigned int> specialization.
    Auto-generated pybindgen code."""
    ## callback.h (module 'core'): ns3::CallbackImpl<void, const ns3::Ipv6Header &, ns3::Ptr<const ns3::Packet>, ns3::Ipv6L3Protocol::DropReason, ns3::Ptr<ns3::Ipv6>, unsigned int, ns3::empty, ns3::empty, ns3::empty, ns3::empty>::CallbackImpl() [constructor]
    cls.add_constructor([])
    ## callback.h (module 'core'): ns3::CallbackImpl<void, const ns3::Ipv6Header &, ns3::Ptr<const ns3::Packet>, ns3::Ipv6L3Protocol::DropReason, ns3::Ptr<ns3::Ipv6>, unsigned int, ns3::empty, ns3::empty, ns3::empty, ns3::empty>::CallbackImpl(ns3::CallbackImpl<void, const ns3::Ipv6Header &, ns3::Ptr<const ns3::Packet>, ns3::Ipv6L3Protocol::DropReason, ns3::Ptr<ns3::Ipv6>, unsigned int, ns3::empty, ns3::empty, ns3::empty, ns3::empty> const & arg0) [constructor]
    cls.add_constructor([param('ns3::CallbackImpl< void, ns3::Ipv6Header const &, ns3::Ptr< ns3::Packet const >, ns3::Ipv6L3Protocol::DropReason, ns3::Ptr< ns3::Ipv6 >, unsigned int, ns3::empty, ns3::empty, ns3::empty, ns3::empty > const &', 'arg0')])
    ## callback.h (module 'core'): static std::string ns3::CallbackImpl<void, const ns3::Ipv6Header &, ns3::Ptr<const ns3::Packet>, ns3::Ipv6L3Protocol::DropReason, ns3::Ptr<ns3::Ipv6>, unsigned int, ns3::empty, ns3::empty, ns3::empty, ns3::empty>::DoGetTypeid() [member function]
    cls.add_method('DoGetTypeid',
                   'std::string',
                   [],
                   is_static=True)
    ## callback.h (module 'core'): std::string ns3::CallbackImpl<void, const ns3::Ipv6Header &, ns3::Ptr<const ns3::Packet>, ns3::Ipv6L3Protocol::DropReason, ns3::Ptr<ns3::Ipv6>, unsigned int, ns3::empty, ns3::empty, ns3::empty, ns3::empty>::GetTypeid() const [member function]
    cls.add_method('GetTypeid',
                   'std::string',
                   [],
                   is_const=True, is_virtual=True)
    ## callback.h (module 'core'): void ns3::CallbackImpl<void, const ns3::Ipv6Header &, ns3::Ptr<const ns3::Packet>, ns3::Ipv6L3Protocol::DropReason, ns3::Ptr<ns3::Ipv6>, unsigned int, ns3::empty, ns3::empty, ns3::empty, ns3::empty>::operator()(ns3::Ipv6Header const & arg0, ns3::Ptr<const ns3::Packet> arg1, ns3::Ipv6L3Protocol::DropReason arg2, ns3::Ptr<ns3::Ipv6> arg3, unsigned int arg4) [member operator]
    cls.add_method('operator()',
                   'void',
                   [param('ns3::Ipv6Header const &', 'arg0'), param('ns3::Ptr< ns3::Packet const >', 'arg1'), param('ns3::Ipv6L3Protocol::DropReason', 'arg2'), param('ns3::Ptr< ns3::Ipv6 >', 'arg3'), param('unsigned int', 'arg4')],
                   is_pure_virtual=True, is_virtual=True, custom_name=u'__call__')
    return
def register_Ns3CallbackImpl__Void_Const_ns3Ipv6Header___amp___Ns3Ptr__lt__const_ns3Packet__gt___Unsigned_int_Ns3Empty_Ns3Empty_Ns3Empty_Ns3Empty_Ns3Empty_Ns3Empty_methods(root_module, cls):
    """Register constructors, DoGetTypeid/GetTypeid and operator() for the
    CallbackImpl<void, const Ipv6Header&, Ptr<const Packet>, unsigned int>
    specialization.  Auto-generated pybindgen code."""
    ## callback.h (module 'core'): ns3::CallbackImpl<void, const ns3::Ipv6Header &, ns3::Ptr<const ns3::Packet>, unsigned int, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty>::CallbackImpl() [constructor]
    cls.add_constructor([])
    ## callback.h (module 'core'): ns3::CallbackImpl<void, const ns3::Ipv6Header &, ns3::Ptr<const ns3::Packet>, unsigned int, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty>::CallbackImpl(ns3::CallbackImpl<void, const ns3::Ipv6Header &, ns3::Ptr<const ns3::Packet>, unsigned int, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty> const & arg0) [constructor]
    cls.add_constructor([param('ns3::CallbackImpl< void, ns3::Ipv6Header const &, ns3::Ptr< ns3::Packet const >, unsigned int, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty > const &', 'arg0')])
    ## callback.h (module 'core'): static std::string ns3::CallbackImpl<void, const ns3::Ipv6Header &, ns3::Ptr<const ns3::Packet>, unsigned int, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty>::DoGetTypeid() [member function]
    cls.add_method('DoGetTypeid',
                   'std::string',
                   [],
                   is_static=True)
    ## callback.h (module 'core'): std::string ns3::CallbackImpl<void, const ns3::Ipv6Header &, ns3::Ptr<const ns3::Packet>, unsigned int, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty>::GetTypeid() const [member function]
    cls.add_method('GetTypeid',
                   'std::string',
                   [],
                   is_const=True, is_virtual=True)
    ## callback.h (module 'core'): void ns3::CallbackImpl<void, const ns3::Ipv6Header &, ns3::Ptr<const ns3::Packet>, unsigned int, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty>::operator()(ns3::Ipv6Header const & arg0, ns3::Ptr<const ns3::Packet> arg1, unsigned int arg2) [member operator]
    cls.add_method('operator()',
                   'void',
                   [param('ns3::Ipv6Header const &', 'arg0'), param('ns3::Ptr< ns3::Packet const >', 'arg1'), param('unsigned int', 'arg2')],
                   is_pure_virtual=True, is_virtual=True, custom_name=u'__call__')
    return
def register_Ns3CallbackImpl__Void_Ns3Ptr__lt__const_ns3Packet__gt___Ns3Ptr__lt__ns3Ipv4__gt___Unsigned_int_Ns3Empty_Ns3Empty_Ns3Empty_Ns3Empty_Ns3Empty_Ns3Empty_methods(root_module, cls):
    """Register constructors, DoGetTypeid/GetTypeid and operator() for the
    CallbackImpl<void, Ptr<const Packet>, Ptr<Ipv4>, unsigned int>
    specialization.  Auto-generated pybindgen code."""
    ## callback.h (module 'core'): ns3::CallbackImpl<void, ns3::Ptr<const ns3::Packet>, ns3::Ptr<ns3::Ipv4>, unsigned int, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty>::CallbackImpl() [constructor]
    cls.add_constructor([])
    ## callback.h (module 'core'): ns3::CallbackImpl<void, ns3::Ptr<const ns3::Packet>, ns3::Ptr<ns3::Ipv4>, unsigned int, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty>::CallbackImpl(ns3::CallbackImpl<void, ns3::Ptr<const ns3::Packet>, ns3::Ptr<ns3::Ipv4>, unsigned int, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty> const & arg0) [constructor]
    cls.add_constructor([param('ns3::CallbackImpl< void, ns3::Ptr< ns3::Packet const >, ns3::Ptr< ns3::Ipv4 >, unsigned int, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty > const &', 'arg0')])
    ## callback.h (module 'core'): static std::string ns3::CallbackImpl<void, ns3::Ptr<const ns3::Packet>, ns3::Ptr<ns3::Ipv4>, unsigned int, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty>::DoGetTypeid() [member function]
    cls.add_method('DoGetTypeid',
                   'std::string',
                   [],
                   is_static=True)
    ## callback.h (module 'core'): std::string ns3::CallbackImpl<void, ns3::Ptr<const ns3::Packet>, ns3::Ptr<ns3::Ipv4>, unsigned int, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty>::GetTypeid() const [member function]
    cls.add_method('GetTypeid',
                   'std::string',
                   [],
                   is_const=True, is_virtual=True)
    ## callback.h (module 'core'): void ns3::CallbackImpl<void, ns3::Ptr<const ns3::Packet>, ns3::Ptr<ns3::Ipv4>, unsigned int, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty>::operator()(ns3::Ptr<const ns3::Packet> arg0, ns3::Ptr<ns3::Ipv4> arg1, unsigned int arg2) [member operator]
    cls.add_method('operator()',
                   'void',
                   [param('ns3::Ptr< ns3::Packet const >', 'arg0'), param('ns3::Ptr< ns3::Ipv4 >', 'arg1'), param('unsigned int', 'arg2')],
                   is_pure_virtual=True, is_virtual=True, custom_name=u'__call__')
    return
def register_Ns3CallbackImpl__Void_Ns3Ptr__lt__const_ns3Packet__gt___Ns3Ptr__lt__ns3Ipv6__gt___Unsigned_int_Ns3Empty_Ns3Empty_Ns3Empty_Ns3Empty_Ns3Empty_Ns3Empty_methods(root_module, cls):
    """Register constructors, DoGetTypeid/GetTypeid and operator() for the
    CallbackImpl<void, Ptr<const Packet>, Ptr<Ipv6>, unsigned int>
    specialization.  Auto-generated pybindgen code."""
    ## callback.h (module 'core'): ns3::CallbackImpl<void, ns3::Ptr<const ns3::Packet>, ns3::Ptr<ns3::Ipv6>, unsigned int, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty>::CallbackImpl() [constructor]
    cls.add_constructor([])
    ## callback.h (module 'core'): ns3::CallbackImpl<void, ns3::Ptr<const ns3::Packet>, ns3::Ptr<ns3::Ipv6>, unsigned int, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty>::CallbackImpl(ns3::CallbackImpl<void, ns3::Ptr<const ns3::Packet>, ns3::Ptr<ns3::Ipv6>, unsigned int, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty> const & arg0) [constructor]
    cls.add_constructor([param('ns3::CallbackImpl< void, ns3::Ptr< ns3::Packet const >, ns3::Ptr< ns3::Ipv6 >, unsigned int, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty > const &', 'arg0')])
    ## callback.h (module 'core'): static std::string ns3::CallbackImpl<void, ns3::Ptr<const ns3::Packet>, ns3::Ptr<ns3::Ipv6>, unsigned int, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty>::DoGetTypeid() [member function]
    cls.add_method('DoGetTypeid',
                   'std::string',
                   [],
                   is_static=True)
    ## callback.h (module 'core'): std::string ns3::CallbackImpl<void, ns3::Ptr<const ns3::Packet>, ns3::Ptr<ns3::Ipv6>, unsigned int, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty>::GetTypeid() const [member function]
    cls.add_method('GetTypeid',
                   'std::string',
                   [],
                   is_const=True, is_virtual=True)
    ## callback.h (module 'core'): void ns3::CallbackImpl<void, ns3::Ptr<const ns3::Packet>, ns3::Ptr<ns3::Ipv6>, unsigned int, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty>::operator()(ns3::Ptr<const ns3::Packet> arg0, ns3::Ptr<ns3::Ipv6> arg1, unsigned int arg2) [member operator]
    cls.add_method('operator()',
                   'void',
                   [param('ns3::Ptr< ns3::Packet const >', 'arg0'), param('ns3::Ptr< ns3::Ipv6 >', 'arg1'), param('unsigned int', 'arg2')],
                   is_pure_virtual=True, is_virtual=True, custom_name=u'__call__')
    return
def register_Ns3CallbackImpl__Void_Ns3Ptr__lt__ns3NetDevice__gt___Ns3Ptr__lt__const_ns3Packet__gt___Unsigned_short_Const_ns3Address___amp___Const_ns3Address___amp___Ns3NetDevicePacketType_Ns3Empty_Ns3Empty_Ns3Empty_methods(root_module, cls):
    """Register constructors, DoGetTypeid/GetTypeid and operator() for the
    promiscuous-receive CallbackImpl<void, Ptr<NetDevice>, Ptr<const Packet>,
    unsigned short, const Address&, const Address&, NetDevice::PacketType>
    specialization.  Auto-generated pybindgen code."""
    ## callback.h (module 'core'): ns3::CallbackImpl<void, ns3::Ptr<ns3::NetDevice>, ns3::Ptr<const ns3::Packet>, unsigned short, const ns3::Address &, const ns3::Address &, ns3::NetDevice::PacketType, ns3::empty, ns3::empty, ns3::empty>::CallbackImpl() [constructor]
    cls.add_constructor([])
    ## callback.h (module 'core'): ns3::CallbackImpl<void, ns3::Ptr<ns3::NetDevice>, ns3::Ptr<const ns3::Packet>, unsigned short, const ns3::Address &, const ns3::Address &, ns3::NetDevice::PacketType, ns3::empty, ns3::empty, ns3::empty>::CallbackImpl(ns3::CallbackImpl<void, ns3::Ptr<ns3::NetDevice>, ns3::Ptr<const ns3::Packet>, unsigned short, const ns3::Address &, const ns3::Address &, ns3::NetDevice::PacketType, ns3::empty, ns3::empty, ns3::empty> const & arg0) [constructor]
    cls.add_constructor([param('ns3::CallbackImpl< void, ns3::Ptr< ns3::NetDevice >, ns3::Ptr< ns3::Packet const >, unsigned short, ns3::Address const &, ns3::Address const &, ns3::NetDevice::PacketType, ns3::empty, ns3::empty, ns3::empty > const &', 'arg0')])
    ## callback.h (module 'core'): static std::string ns3::CallbackImpl<void, ns3::Ptr<ns3::NetDevice>, ns3::Ptr<const ns3::Packet>, unsigned short, const ns3::Address &, const ns3::Address &, ns3::NetDevice::PacketType, ns3::empty, ns3::empty, ns3::empty>::DoGetTypeid() [member function]
    cls.add_method('DoGetTypeid',
                   'std::string',
                   [],
                   is_static=True)
    ## callback.h (module 'core'): std::string ns3::CallbackImpl<void, ns3::Ptr<ns3::NetDevice>, ns3::Ptr<const ns3::Packet>, unsigned short, const ns3::Address &, const ns3::Address &, ns3::NetDevice::PacketType, ns3::empty, ns3::empty, ns3::empty>::GetTypeid() const [member function]
    cls.add_method('GetTypeid',
                   'std::string',
                   [],
                   is_const=True, is_virtual=True)
    ## callback.h (module 'core'): void ns3::CallbackImpl<void, ns3::Ptr<ns3::NetDevice>, ns3::Ptr<const ns3::Packet>, unsigned short, const ns3::Address &, const ns3::Address &, ns3::NetDevice::PacketType, ns3::empty, ns3::empty, ns3::empty>::operator()(ns3::Ptr<ns3::NetDevice> arg0, ns3::Ptr<const ns3::Packet> arg1, short unsigned int arg2, ns3::Address const & arg3, ns3::Address const & arg4, ns3::NetDevice::PacketType arg5) [member operator]
    cls.add_method('operator()',
                   'void',
                   [param('ns3::Ptr< ns3::NetDevice >', 'arg0'), param('ns3::Ptr< ns3::Packet const >', 'arg1'), param('short unsigned int', 'arg2'), param('ns3::Address const &', 'arg3'), param('ns3::Address const &', 'arg4'), param('ns3::NetDevice::PacketType', 'arg5')],
                   is_pure_virtual=True, is_virtual=True, custom_name=u'__call__')
    return
def register_Ns3CallbackImpl__Void_Ns3Ptr__lt__ns3NetDevice__gt___Ns3Empty_Ns3Empty_Ns3Empty_Ns3Empty_Ns3Empty_Ns3Empty_Ns3Empty_Ns3Empty_methods(root_module, cls):
    """Register constructors, DoGetTypeid/GetTypeid and operator() for the
    single-argument CallbackImpl<void, Ptr<NetDevice>> specialization.
    Auto-generated pybindgen code."""
    ## callback.h (module 'core'): ns3::CallbackImpl<void, ns3::Ptr<ns3::NetDevice>, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty>::CallbackImpl() [constructor]
    cls.add_constructor([])
    ## callback.h (module 'core'): ns3::CallbackImpl<void, ns3::Ptr<ns3::NetDevice>, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty>::CallbackImpl(ns3::CallbackImpl<void, ns3::Ptr<ns3::NetDevice>, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty> const & arg0) [constructor]
    cls.add_constructor([param('ns3::CallbackImpl< void, ns3::Ptr< ns3::NetDevice >, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty > const &', 'arg0')])
    ## callback.h (module 'core'): static std::string ns3::CallbackImpl<void, ns3::Ptr<ns3::NetDevice>, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty>::DoGetTypeid() [member function]
    cls.add_method('DoGetTypeid',
                   'std::string',
                   [],
                   is_static=True)
    ## callback.h (module 'core'): std::string ns3::CallbackImpl<void, ns3::Ptr<ns3::NetDevice>, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty>::GetTypeid() const [member function]
    cls.add_method('GetTypeid',
                   'std::string',
                   [],
                   is_const=True, is_virtual=True)
    ## callback.h (module 'core'): void ns3::CallbackImpl<void, ns3::Ptr<ns3::NetDevice>, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty>::operator()(ns3::Ptr<ns3::NetDevice> arg0) [member operator]
    cls.add_method('operator()',
                   'void',
                   [param('ns3::Ptr< ns3::NetDevice >', 'arg0')],
                   is_pure_virtual=True, is_virtual=True, custom_name=u'__call__')
    return
def register_Ns3CallbackImpl__Void_Ns3Ptr__lt__ns3Socket__gt___Const_ns3Address___amp___Ns3Empty_Ns3Empty_Ns3Empty_Ns3Empty_Ns3Empty_Ns3Empty_Ns3Empty_methods(root_module, cls):
    """Register constructors, DoGetTypeid/GetTypeid and operator() for the
    CallbackImpl<void, Ptr<Socket>, const Address&> specialization.
    Auto-generated pybindgen code."""
    ## callback.h (module 'core'): ns3::CallbackImpl<void, ns3::Ptr<ns3::Socket>, const ns3::Address &, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty>::CallbackImpl() [constructor]
    cls.add_constructor([])
    ## callback.h (module 'core'): ns3::CallbackImpl<void, ns3::Ptr<ns3::Socket>, const ns3::Address &, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty>::CallbackImpl(ns3::CallbackImpl<void, ns3::Ptr<ns3::Socket>, const ns3::Address &, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty> const & arg0) [constructor]
    cls.add_constructor([param('ns3::CallbackImpl< void, ns3::Ptr< ns3::Socket >, ns3::Address const &, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty > const &', 'arg0')])
    ## callback.h (module 'core'): static std::string ns3::CallbackImpl<void, ns3::Ptr<ns3::Socket>, const ns3::Address &, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty>::DoGetTypeid() [member function]
    cls.add_method('DoGetTypeid',
                   'std::string',
                   [],
                   is_static=True)
    ## callback.h (module 'core'): std::string ns3::CallbackImpl<void, ns3::Ptr<ns3::Socket>, const ns3::Address &, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty>::GetTypeid() const [member function]
    cls.add_method('GetTypeid',
                   'std::string',
                   [],
                   is_const=True, is_virtual=True)
    ## callback.h (module 'core'): void ns3::CallbackImpl<void, ns3::Ptr<ns3::Socket>, const ns3::Address &, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty>::operator()(ns3::Ptr<ns3::Socket> arg0, ns3::Address const & arg1) [member operator]
    cls.add_method('operator()',
                   'void',
                   [param('ns3::Ptr< ns3::Socket >', 'arg0'), param('ns3::Address const &', 'arg1')],
                   is_pure_virtual=True, is_virtual=True, custom_name=u'__call__')
    return
def register_Ns3CallbackImpl__Void_Ns3Ptr__lt__ns3Socket__gt___Ns3Empty_Ns3Empty_Ns3Empty_Ns3Empty_Ns3Empty_Ns3Empty_Ns3Empty_Ns3Empty_methods(root_module, cls):
    """Register constructors, DoGetTypeid/GetTypeid and operator() for the
    single-argument CallbackImpl<void, Ptr<Socket>> specialization.
    Auto-generated pybindgen code."""
    ## callback.h (module 'core'): ns3::CallbackImpl<void, ns3::Ptr<ns3::Socket>, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty>::CallbackImpl() [constructor]
    cls.add_constructor([])
    ## callback.h (module 'core'): ns3::CallbackImpl<void, ns3::Ptr<ns3::Socket>, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty>::CallbackImpl(ns3::CallbackImpl<void, ns3::Ptr<ns3::Socket>, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty> const & arg0) [constructor]
    cls.add_constructor([param('ns3::CallbackImpl< void, ns3::Ptr< ns3::Socket >, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty > const &', 'arg0')])
    ## callback.h (module 'core'): static std::string ns3::CallbackImpl<void, ns3::Ptr<ns3::Socket>, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty>::DoGetTypeid() [member function]
    cls.add_method('DoGetTypeid',
                   'std::string',
                   [],
                   is_static=True)
    ## callback.h (module 'core'): std::string ns3::CallbackImpl<void, ns3::Ptr<ns3::Socket>, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty>::GetTypeid() const [member function]
    cls.add_method('GetTypeid',
                   'std::string',
                   [],
                   is_const=True, is_virtual=True)
    ## callback.h (module 'core'): void ns3::CallbackImpl<void, ns3::Ptr<ns3::Socket>, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty>::operator()(ns3::Ptr<ns3::Socket> arg0) [member operator]
    cls.add_method('operator()',
                   'void',
                   [param('ns3::Ptr< ns3::Socket >', 'arg0')],
                   is_pure_virtual=True, is_virtual=True, custom_name=u'__call__')
    return
def register_Ns3CallbackImpl__Void_Ns3Ptr__lt__ns3Socket__gt___Unsigned_int_Ns3Empty_Ns3Empty_Ns3Empty_Ns3Empty_Ns3Empty_Ns3Empty_Ns3Empty_methods(root_module, cls):
    """Register constructors, DoGetTypeid/GetTypeid and operator() for the
    CallbackImpl<void, Ptr<Socket>, unsigned int> specialization.
    Auto-generated pybindgen code."""
    ## callback.h (module 'core'): ns3::CallbackImpl<void, ns3::Ptr<ns3::Socket>, unsigned int, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty>::CallbackImpl() [constructor]
    cls.add_constructor([])
    ## callback.h (module 'core'): ns3::CallbackImpl<void, ns3::Ptr<ns3::Socket>, unsigned int, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty>::CallbackImpl(ns3::CallbackImpl<void, ns3::Ptr<ns3::Socket>, unsigned int, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty> const & arg0) [constructor]
    cls.add_constructor([param('ns3::CallbackImpl< void, ns3::Ptr< ns3::Socket >, unsigned int, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty > const &', 'arg0')])
    ## callback.h (module 'core'): static std::string ns3::CallbackImpl<void, ns3::Ptr<ns3::Socket>, unsigned int, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty>::DoGetTypeid() [member function]
    cls.add_method('DoGetTypeid',
                   'std::string',
                   [],
                   is_static=True)
    ## callback.h (module 'core'): std::string ns3::CallbackImpl<void, ns3::Ptr<ns3::Socket>, unsigned int, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty>::GetTypeid() const [member function]
    cls.add_method('GetTypeid',
                   'std::string',
                   [],
                   is_const=True, is_virtual=True)
    ## callback.h (module 'core'): void ns3::CallbackImpl<void, ns3::Ptr<ns3::Socket>, unsigned int, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty>::operator()(ns3::Ptr<ns3::Socket> arg0, unsigned int arg1) [member operator]
    cls.add_method('operator()',
                   'void',
                   [param('ns3::Ptr< ns3::Socket >', 'arg0'), param('unsigned int', 'arg1')],
                   is_pure_virtual=True, is_virtual=True, custom_name=u'__call__')
    return
def register_Ns3QueueDiscItem_methods(root_module, cls):
    """Register the Python bindings for ns3::QueueDiscItem: constructor,
    address/protocol/tx-queue accessors, timestamp accessors, and the
    virtual AddHeader/Print/Mark/Hash methods.  Auto-generated pybindgen
    code."""
    ## queue-item.h (module 'network'): ns3::QueueDiscItem::QueueDiscItem(ns3::Ptr<ns3::Packet> p, ns3::Address const & addr, uint16_t protocol) [constructor]
    cls.add_constructor([param('ns3::Ptr< ns3::Packet >', 'p'), param('ns3::Address const &', 'addr'), param('uint16_t', 'protocol')])
    ## queue-item.h (module 'network'): ns3::Address ns3::QueueDiscItem::GetAddress() const [member function]
    cls.add_method('GetAddress',
                   'ns3::Address',
                   [],
                   is_const=True)
    ## queue-item.h (module 'network'): uint16_t ns3::QueueDiscItem::GetProtocol() const [member function]
    cls.add_method('GetProtocol',
                   'uint16_t',
                   [],
                   is_const=True)
    ## queue-item.h (module 'network'): uint8_t ns3::QueueDiscItem::GetTxQueueIndex() const [member function]
    cls.add_method('GetTxQueueIndex',
                   'uint8_t',
                   [],
                   is_const=True)
    ## queue-item.h (module 'network'): void ns3::QueueDiscItem::SetTxQueueIndex(uint8_t txq) [member function]
    cls.add_method('SetTxQueueIndex',
                   'void',
                   [param('uint8_t', 'txq')])
    ## queue-item.h (module 'network'): ns3::Time ns3::QueueDiscItem::GetTimeStamp() const [member function]
    cls.add_method('GetTimeStamp',
                   'ns3::Time',
                   [],
                   is_const=True)
    ## queue-item.h (module 'network'): void ns3::QueueDiscItem::SetTimeStamp(ns3::Time t) [member function]
    cls.add_method('SetTimeStamp',
                   'void',
                   [param('ns3::Time', 't')])
    ## queue-item.h (module 'network'): void ns3::QueueDiscItem::AddHeader() [member function]
    cls.add_method('AddHeader',
                   'void',
                   [],
                   is_pure_virtual=True, is_virtual=True)
    ## queue-item.h (module 'network'): void ns3::QueueDiscItem::Print(std::ostream & os) const [member function]
    cls.add_method('Print',
                   'void',
                   [param('std::ostream &', 'os')],
                   is_const=True, is_virtual=True)
    ## queue-item.h (module 'network'): bool ns3::QueueDiscItem::Mark() [member function]
    cls.add_method('Mark',
                   'bool',
                   [],
                   is_pure_virtual=True, is_virtual=True)
    ## queue-item.h (module 'network'): uint32_t ns3::QueueDiscItem::Hash(uint32_t perturbation=0) const [member function]
    cls.add_method('Hash',
                   'uint32_t',
                   [param('uint32_t', 'perturbation', default_value='0')],
                   is_const=True, is_virtual=True)
    return
def register_Ns3HashImplementation_methods(root_module, cls):
    """Register the abstract ns3::Hash::Implementation base: constructors,
    pure-virtual GetHash32/clear and the virtual GetHash64 default.
    Auto-generated pybindgen code."""
    ## hash-function.h (module 'core'): ns3::Hash::Implementation::Implementation(ns3::Hash::Implementation const & arg0) [constructor]
    cls.add_constructor([param('ns3::Hash::Implementation const &', 'arg0')])
    ## hash-function.h (module 'core'): ns3::Hash::Implementation::Implementation() [constructor]
    cls.add_constructor([])
    ## hash-function.h (module 'core'): uint32_t ns3::Hash::Implementation::GetHash32(char const * buffer, std::size_t const size) [member function]
    cls.add_method('GetHash32',
                   'uint32_t',
                   [param('char const *', 'buffer'), param('std::size_t const', 'size')],
                   is_pure_virtual=True, is_virtual=True)
    ## hash-function.h (module 'core'): uint64_t ns3::Hash::Implementation::GetHash64(char const * buffer, std::size_t const size) [member function]
    cls.add_method('GetHash64',
                   'uint64_t',
                   [param('char const *', 'buffer'), param('std::size_t const', 'size')],
                   is_virtual=True)
    ## hash-function.h (module 'core'): void ns3::Hash::Implementation::clear() [member function]
    cls.add_method('clear',
                   'void',
                   [],
                   is_pure_virtual=True, is_virtual=True)
    return
def register_Ns3HashFunctionFnv1a_methods(root_module, cls):
    """Register ns3::Hash::Function::Fnv1a: constructors plus the virtual
    GetHash32/GetHash64/clear overrides.  Auto-generated pybindgen code."""
    ## hash-fnv.h (module 'core'): ns3::Hash::Function::Fnv1a::Fnv1a(ns3::Hash::Function::Fnv1a const & arg0) [constructor]
    cls.add_constructor([param('ns3::Hash::Function::Fnv1a const &', 'arg0')])
    ## hash-fnv.h (module 'core'): ns3::Hash::Function::Fnv1a::Fnv1a() [constructor]
    cls.add_constructor([])
    ## hash-fnv.h (module 'core'): uint32_t ns3::Hash::Function::Fnv1a::GetHash32(char const * buffer, size_t const size) [member function]
    cls.add_method('GetHash32',
                   'uint32_t',
                   [param('char const *', 'buffer'), param('size_t const', 'size')],
                   is_virtual=True)
    ## hash-fnv.h (module 'core'): uint64_t ns3::Hash::Function::Fnv1a::GetHash64(char const * buffer, size_t const size) [member function]
    cls.add_method('GetHash64',
                   'uint64_t',
                   [param('char const *', 'buffer'), param('size_t const', 'size')],
                   is_virtual=True)
    ## hash-fnv.h (module 'core'): void ns3::Hash::Function::Fnv1a::clear() [member function]
    cls.add_method('clear',
                   'void',
                   [],
                   is_virtual=True)
    return
def register_Ns3HashFunctionHash32_methods(root_module, cls):
    """Register ns3::Hash::Function::Hash32: copy constructor, the
    function-pointer constructor, and the virtual GetHash32/clear
    overrides (no GetHash64 in this wrapper).  Auto-generated pybindgen
    code."""
    ## hash-function.h (module 'core'): ns3::Hash::Function::Hash32::Hash32(ns3::Hash::Function::Hash32 const & arg0) [constructor]
    cls.add_constructor([param('ns3::Hash::Function::Hash32 const &', 'arg0')])
    ## hash-function.h (module 'core'): ns3::Hash::Function::Hash32::Hash32(ns3::Hash::Hash32Function_ptr hp) [constructor]
    cls.add_constructor([param('ns3::Hash::Hash32Function_ptr', 'hp')])
    ## hash-function.h (module 'core'): uint32_t ns3::Hash::Function::Hash32::GetHash32(char const * buffer, std::size_t const size) [member function]
    cls.add_method('GetHash32',
                   'uint32_t',
                   [param('char const *', 'buffer'), param('std::size_t const', 'size')],
                   is_virtual=True)
    ## hash-function.h (module 'core'): void ns3::Hash::Function::Hash32::clear() [member function]
    cls.add_method('clear',
                   'void',
                   [],
                   is_virtual=True)
    return
def register_Ns3HashFunctionHash64_methods(root_module, cls):
    """Register ns3::Hash::Function::Hash64: copy constructor, the
    function-pointer constructor, and the virtual GetHash32/GetHash64/clear
    overrides.  Auto-generated pybindgen code."""
    ## hash-function.h (module 'core'): ns3::Hash::Function::Hash64::Hash64(ns3::Hash::Function::Hash64 const & arg0) [constructor]
    cls.add_constructor([param('ns3::Hash::Function::Hash64 const &', 'arg0')])
    ## hash-function.h (module 'core'): ns3::Hash::Function::Hash64::Hash64(ns3::Hash::Hash64Function_ptr hp) [constructor]
    cls.add_constructor([param('ns3::Hash::Hash64Function_ptr', 'hp')])
    ## hash-function.h (module 'core'): uint32_t ns3::Hash::Function::Hash64::GetHash32(char const * buffer, std::size_t const size) [member function]
    cls.add_method('GetHash32',
                   'uint32_t',
                   [param('char const *', 'buffer'), param('std::size_t const', 'size')],
                   is_virtual=True)
    ## hash-function.h (module 'core'): uint64_t ns3::Hash::Function::Hash64::GetHash64(char const * buffer, std::size_t const size) [member function]
    cls.add_method('GetHash64',
                   'uint64_t',
                   [param('char const *', 'buffer'), param('std::size_t const', 'size')],
                   is_virtual=True)
    ## hash-function.h (module 'core'): void ns3::Hash::Function::Hash64::clear() [member function]
    cls.add_method('clear',
                   'void',
                   [],
                   is_virtual=True)
    return
def register_Ns3HashFunctionMurmur3_methods(root_module, cls):
    """Register ns3::Hash::Function::Murmur3: constructors plus the virtual
    GetHash32/GetHash64/clear overrides.  Auto-generated pybindgen code."""
    ## hash-murmur3.h (module 'core'): ns3::Hash::Function::Murmur3::Murmur3(ns3::Hash::Function::Murmur3 const & arg0) [constructor]
    cls.add_constructor([param('ns3::Hash::Function::Murmur3 const &', 'arg0')])
    ## hash-murmur3.h (module 'core'): ns3::Hash::Function::Murmur3::Murmur3() [constructor]
    cls.add_constructor([])
    ## hash-murmur3.h (module 'core'): uint32_t ns3::Hash::Function::Murmur3::GetHash32(char const * buffer, std::size_t const size) [member function]
    cls.add_method('GetHash32',
                   'uint32_t',
                   [param('char const *', 'buffer'), param('std::size_t const', 'size')],
                   is_virtual=True)
    ## hash-murmur3.h (module 'core'): uint64_t ns3::Hash::Function::Murmur3::GetHash64(char const * buffer, std::size_t const size) [member function]
    cls.add_method('GetHash64',
                   'uint64_t',
                   [param('char const *', 'buffer'), param('std::size_t const', 'size')],
                   is_virtual=True)
    ## hash-murmur3.h (module 'core'): void ns3::Hash::Function::Murmur3::clear() [member function]
    cls.add_method('clear',
                   'void',
                   [],
                   is_virtual=True)
    return
def register_functions(root_module):
    """Register free functions, dispatching per C++ namespace.

    Each nested ns-3 namespace gets its own registrar function; the
    namespace wrapper object is created here and handed to it.
    """
    for namespace_name, registrar in (
        ('FatalImpl', register_functions_ns3_FatalImpl),
        ('Hash', register_functions_ns3_Hash),
        ('TracedValueCallback', register_functions_ns3_TracedValueCallback),
    ):
        registrar(root_module.add_cpp_namespace(namespace_name), root_module)
    return
def register_functions_ns3_FatalImpl(module, root_module):
    """No free functions to register in the ns3::FatalImpl namespace."""
    return None
def register_functions_ns3_Hash(module, root_module):
    """Register ns3::Hash free functions, descending into ns3::Hash::Function."""
    function_namespace = module.add_cpp_namespace('Function')
    register_functions_ns3_Hash_Function(function_namespace, root_module)
    return
def register_functions_ns3_Hash_Function(module, root_module):
    """No free functions to register in the ns3::Hash::Function namespace."""
    return None
def register_functions_ns3_TracedValueCallback(module, root_module):
    """No free functions to register in the ns3::TracedValueCallback namespace."""
    return None
def main():
    """Build the full module description and emit binding code to stdout."""
    # The sink is created first so any diagnostics from registration do not
    # interleave with generated output ordering assumptions.
    sink = FileCodeSink(sys.stdout)
    root_module = module_init()
    register_types(root_module)
    register_methods(root_module)
    register_functions(root_module)
    root_module.generate(sink)
# Allow running this binding generator directly as a script.
if __name__ == '__main__':
    main()
| 69.094568 | 934 | 0.631931 | [
"MIT"
] | rahul0324/Upgrade-AQM-Evaluation-Suite-of-ns-3 | ns-3-dev-git/src/flow-monitor/bindings/modulegen__gcc_ILP32.py | 530,439 | Python |
"""Generic Z-Wave Entity Classes."""
import copy
import logging
from openzwavemqtt.const import (
EVENT_INSTANCE_STATUS_CHANGED,
EVENT_VALUE_CHANGED,
OZW_READY_STATES,
CommandClass,
ValueIndex,
)
from openzwavemqtt.models.node import OZWNode
from openzwavemqtt.models.value import OZWValue
from homeassistant.const import ATTR_NAME, ATTR_SW_VERSION, ATTR_VIA_DEVICE
from homeassistant.core import callback
from homeassistant.helpers.dispatcher import (
async_dispatcher_connect,
async_dispatcher_send,
)
from homeassistant.helpers.entity import DeviceInfo, Entity
from . import const
from .const import DOMAIN, PLATFORMS
from .discovery import check_node_schema, check_value_schema
_LOGGER = logging.getLogger(__name__)
OZW_READY_STATES_VALUES = {st.value for st in OZW_READY_STATES}
class ZWaveDeviceEntityValues:
"""Manages entity access to the underlying Z-Wave value objects."""
def __init__(self, hass, options, schema, primary_value):
"""Initialize the values object with the passed entity schema."""
self._hass = hass
self._entity_created = False
self._schema = copy.deepcopy(schema)
self._values = {}
self.options = options
# Go through values listed in the discovery schema, initialize them,
# and add a check to the schema to make sure the Instance matches.
for name, disc_settings in self._schema[const.DISC_VALUES].items():
self._values[name] = None
disc_settings[const.DISC_INSTANCE] = (primary_value.instance,)
self._values[const.DISC_PRIMARY] = primary_value
self._node = primary_value.node
self._schema[const.DISC_NODE_ID] = [self._node.node_id]
def async_setup(self):
"""Set up values instance."""
# Check values that have already been discovered for node
# and see if they match the schema and need added to the entity.
for value in self._node.values():
self.async_check_value(value)
# Check if all the _required_ values in the schema are present and
# create the entity.
self._async_check_entity_ready()
def __getattr__(self, name):
"""Get the specified value for this entity."""
return self._values.get(name, None)
def __iter__(self):
"""Allow iteration over all values."""
return iter(self._values.values())
def __contains__(self, name):
"""Check if the specified name/key exists in the values."""
return name in self._values
@callback
def async_check_value(self, value):
"""Check if the new value matches a missing value for this entity.
If a match is found, it is added to the values mapping.
"""
# Make sure the node matches the schema for this entity.
if not check_node_schema(value.node, self._schema):
return
# Go through the possible values for this entity defined by the schema.
for name, name_value in self._values.items():
# Skip if it's already been added.
if name_value is not None:
continue
# Skip if the value doesn't match the schema.
if not check_value_schema(value, self._schema[const.DISC_VALUES][name]):
continue
# Add value to mapping.
self._values[name] = value
# If the entity has already been created, notify it of the new value.
if self._entity_created:
async_dispatcher_send(
self._hass, f"{DOMAIN}_{self.values_id}_value_added"
)
# Check if entity has all required values and create the entity if needed.
self._async_check_entity_ready()
@callback
def _async_check_entity_ready(self):
"""Check if all required values are discovered and create entity."""
# Abort if the entity has already been created
if self._entity_created:
return
# Go through values defined in the schema and abort if a required value is missing.
for name, disc_settings in self._schema[const.DISC_VALUES].items():
if self._values[name] is None and not disc_settings.get(
const.DISC_OPTIONAL
):
return
# We have all the required values, so create the entity.
component = self._schema[const.DISC_COMPONENT]
_LOGGER.debug(
"Adding Node_id=%s Generic_command_class=%s, "
"Specific_command_class=%s, "
"Command_class=%s, Index=%s, Value type=%s, "
"Genre=%s as %s",
self._node.node_id,
self._node.node_generic,
self._node.node_specific,
self.primary.command_class,
self.primary.index,
self.primary.type,
self.primary.genre,
component,
)
self._entity_created = True
if component in PLATFORMS:
async_dispatcher_send(self._hass, f"{DOMAIN}_new_{component}", self)
@property
def values_id(self):
"""Identification for this values collection."""
return create_value_id(self.primary)
class ZWaveDeviceEntity(Entity):
"""Generic Entity Class for a Z-Wave Device."""
def __init__(self, values):
"""Initialize a generic Z-Wave device entity."""
self.values = values
self.options = values.options
@callback
def on_value_update(self):
"""Call when a value is added/updated in the entity EntityValues Collection.
To be overridden by platforms needing this event.
"""
async def async_added_to_hass(self):
"""Call when entity is added."""
# Add dispatcher and OZW listeners callbacks.
# Add to on_remove so they will be cleaned up on entity removal.
self.async_on_remove(
self.options.listen(EVENT_VALUE_CHANGED, self._value_changed)
)
self.async_on_remove(
self.options.listen(EVENT_INSTANCE_STATUS_CHANGED, self._instance_updated)
)
self.async_on_remove(
async_dispatcher_connect(
self.hass, const.SIGNAL_DELETE_ENTITY, self._delete_callback
)
)
self.async_on_remove(
async_dispatcher_connect(
self.hass,
f"{DOMAIN}_{self.values.values_id}_value_added",
self._value_added,
)
)
@property
def device_info(self) -> DeviceInfo:
"""Return device information for the device registry."""
node = self.values.primary.node
node_instance = self.values.primary.instance
dev_id = create_device_id(node, self.values.primary.instance)
node_firmware = node.get_value(
CommandClass.VERSION, ValueIndex.VERSION_APPLICATION
)
device_info = DeviceInfo(
identifiers={(DOMAIN, dev_id)},
name=create_device_name(node),
manufacturer=node.node_manufacturer_name,
model=node.node_product_name,
)
if node_firmware is not None:
device_info[ATTR_SW_VERSION] = node_firmware.value
# device with multiple instances is split up into virtual devices for each instance
if node_instance > 1:
parent_dev_id = create_device_id(node)
device_info[ATTR_NAME] += f" - Instance {node_instance}"
device_info[ATTR_VIA_DEVICE] = (DOMAIN, parent_dev_id)
return device_info
@property
def extra_state_attributes(self):
"""Return the device specific state attributes."""
return {const.ATTR_NODE_ID: self.values.primary.node.node_id}
@property
def name(self):
"""Return the name of the entity."""
node = self.values.primary.node
return f"{create_device_name(node)}: {self.values.primary.label}"
@property
def unique_id(self):
"""Return the unique_id of the entity."""
return self.values.values_id
@property
def available(self) -> bool:
"""Return entity availability."""
# Use OZW Daemon status for availability.
instance_status = self.values.primary.ozw_instance.get_status()
return instance_status and instance_status.status in OZW_READY_STATES_VALUES
@callback
def _value_changed(self, value):
"""Call when a value from ZWaveDeviceEntityValues is changed.
Should not be overridden by subclasses.
"""
if value.value_id_key in (v.value_id_key for v in self.values if v):
self.on_value_update()
self.async_write_ha_state()
@callback
def _value_added(self):
"""Call when a value from ZWaveDeviceEntityValues is added.
Should not be overridden by subclasses.
"""
self.on_value_update()
@callback
def _instance_updated(self, new_status):
"""Call when the instance status changes.
Should not be overridden by subclasses.
"""
self.on_value_update()
self.async_write_ha_state()
@property
def should_poll(self):
"""No polling needed."""
return False
async def _delete_callback(self, values_id):
"""Remove this entity."""
if not self.values:
return # race condition: delete already requested
if values_id == self.values.values_id:
await self.async_remove(force_remove=True)
def create_device_name(node: OZWNode):
"""Generate sensible (short) default device name from a OZWNode."""
# Prefer custom name set by OZWAdmin if present
if node.node_name:
return node.node_name
# Prefer short devicename from metadata if present
if node.meta_data and node.meta_data.get("Name"):
return node.meta_data["Name"]
# Fallback to productname or devicetype strings
if node.node_product_name:
return node.node_product_name
if node.node_device_type_string:
return node.node_device_type_string
if node.node_specific_string:
return node.node_specific_string
# Last resort: use Node id (should never happen, but just in case)
return f"Node {node.id}"
def create_device_id(node: OZWNode, node_instance: int = 1):
"""Generate unique device_id from a OZWNode."""
ozw_instance = node.parent.id
dev_id = f"{ozw_instance}.{node.node_id}.{node_instance}"
return dev_id
def create_value_id(value: OZWValue):
"""Generate unique value_id from an OZWValue."""
# [OZW_INSTANCE_ID]-[NODE_ID]-[VALUE_ID_KEY]
return f"{value.node.parent.id}-{value.node.id}-{value.value_id_key}"
| 35.131148 | 91 | 0.650863 | [
"Apache-2.0"
] | 2Fake/core | homeassistant/components/ozw/entity.py | 10,715 | Python |
"""App initialization file. Instantiates app, database, login_manager. Registers view blueprints. Defines user_loader callback for LoginManager."""
from flask import Flask
from flask_login import LoginManager
from flask_wtf.csrf import CSRFProtect
from sqlalchemy import create_engine
from sqlalchemy.ext.declarative import declarative_base
from sqlalchemy.orm import scoped_session, sessionmaker
from database import init_db, session
from models import Base, Category, Item, User
from views.auth import authModule
from views.categories import categoryModule
from views.items import itemModule
from views.site import siteModule
login_manager = LoginManager()
app = Flask(__name__)
login_manager.init_app(app)
csrf = CSRFProtect(app)
init_db()
@login_manager.user_loader
def load_user(userid):
user = session.query(User).filter_by(id=userid).first()
print "Trying to load %s" % user
if user:
return user
else:
return None
@app.teardown_appcontext
def shutdown_session(exception=None):
session.remove()
app.register_blueprint(categoryModule)
app.register_blueprint(itemModule)
app.register_blueprint(authModule)
app.register_blueprint(siteModule)
| 25.468085 | 150 | 0.798663 | [
"MIT"
] | YouKnowBagu/item-catalog | app/__init__.py | 1,197 | Python |
# coding=utf-8
# *** WARNING: this file was generated by the Pulumi SDK Generator. ***
# *** Do not edit by hand unless you're certain you know what you are doing! ***
import warnings
import pulumi
import pulumi.runtime
from typing import Any, Mapping, Optional, Sequence, Union
from .. import _utilities, _tables
from . import outputs
__all__ = [
'GetEventChannelResult',
'AwaitableGetEventChannelResult',
'get_event_channel',
]
@pulumi.output_type
class GetEventChannelResult:
"""
Event Channel.
"""
def __init__(__self__, destination=None, expiration_time_if_not_activated_utc=None, filter=None, id=None, name=None, partner_topic_friendly_description=None, partner_topic_readiness_state=None, provisioning_state=None, source=None, type=None):
if destination and not isinstance(destination, dict):
raise TypeError("Expected argument 'destination' to be a dict")
pulumi.set(__self__, "destination", destination)
if expiration_time_if_not_activated_utc and not isinstance(expiration_time_if_not_activated_utc, str):
raise TypeError("Expected argument 'expiration_time_if_not_activated_utc' to be a str")
pulumi.set(__self__, "expiration_time_if_not_activated_utc", expiration_time_if_not_activated_utc)
if filter and not isinstance(filter, dict):
raise TypeError("Expected argument 'filter' to be a dict")
pulumi.set(__self__, "filter", filter)
if id and not isinstance(id, str):
raise TypeError("Expected argument 'id' to be a str")
pulumi.set(__self__, "id", id)
if name and not isinstance(name, str):
raise TypeError("Expected argument 'name' to be a str")
pulumi.set(__self__, "name", name)
if partner_topic_friendly_description and not isinstance(partner_topic_friendly_description, str):
raise TypeError("Expected argument 'partner_topic_friendly_description' to be a str")
pulumi.set(__self__, "partner_topic_friendly_description", partner_topic_friendly_description)
if partner_topic_readiness_state and not isinstance(partner_topic_readiness_state, str):
raise TypeError("Expected argument 'partner_topic_readiness_state' to be a str")
pulumi.set(__self__, "partner_topic_readiness_state", partner_topic_readiness_state)
if provisioning_state and not isinstance(provisioning_state, str):
raise TypeError("Expected argument 'provisioning_state' to be a str")
pulumi.set(__self__, "provisioning_state", provisioning_state)
if source and not isinstance(source, dict):
raise TypeError("Expected argument 'source' to be a dict")
pulumi.set(__self__, "source", source)
if type and not isinstance(type, str):
raise TypeError("Expected argument 'type' to be a str")
pulumi.set(__self__, "type", type)
@property
@pulumi.getter
def destination(self) -> Optional['outputs.EventChannelDestinationResponse']:
"""
Represents the destination of an event channel.
"""
return pulumi.get(self, "destination")
@property
@pulumi.getter(name="expirationTimeIfNotActivatedUtc")
def expiration_time_if_not_activated_utc(self) -> Optional[str]:
"""
Expiration time of the event channel. If this timer expires while the corresponding partner topic is never activated,
the event channel and corresponding partner topic are deleted.
"""
return pulumi.get(self, "expiration_time_if_not_activated_utc")
@property
@pulumi.getter
def filter(self) -> Optional['outputs.EventChannelFilterResponse']:
"""
Information about the filter for the event channel.
"""
return pulumi.get(self, "filter")
@property
@pulumi.getter
def id(self) -> str:
"""
Fully qualified identifier of the resource.
"""
return pulumi.get(self, "id")
@property
@pulumi.getter
def name(self) -> str:
"""
Name of the resource
"""
return pulumi.get(self, "name")
@property
@pulumi.getter(name="partnerTopicFriendlyDescription")
def partner_topic_friendly_description(self) -> Optional[str]:
"""
Friendly description about the topic. This can be set by the publisher/partner to show custom description for the customer partner topic.
This will be helpful to remove any ambiguity of the origin of creation of the partner topic for the customer.
"""
return pulumi.get(self, "partner_topic_friendly_description")
@property
@pulumi.getter(name="partnerTopicReadinessState")
def partner_topic_readiness_state(self) -> str:
"""
The readiness state of the corresponding partner topic.
"""
return pulumi.get(self, "partner_topic_readiness_state")
@property
@pulumi.getter(name="provisioningState")
def provisioning_state(self) -> str:
"""
Provisioning state of the event channel.
"""
return pulumi.get(self, "provisioning_state")
@property
@pulumi.getter
def source(self) -> Optional['outputs.EventChannelSourceResponse']:
"""
Source of the event channel. This represents a unique resource in the partner's resource model.
"""
return pulumi.get(self, "source")
@property
@pulumi.getter
def type(self) -> str:
"""
Type of the resource
"""
return pulumi.get(self, "type")
class AwaitableGetEventChannelResult(GetEventChannelResult):
# pylint: disable=using-constant-test
def __await__(self):
if False:
yield self
return GetEventChannelResult(
destination=self.destination,
expiration_time_if_not_activated_utc=self.expiration_time_if_not_activated_utc,
filter=self.filter,
id=self.id,
name=self.name,
partner_topic_friendly_description=self.partner_topic_friendly_description,
partner_topic_readiness_state=self.partner_topic_readiness_state,
provisioning_state=self.provisioning_state,
source=self.source,
type=self.type)
def get_event_channel(event_channel_name: Optional[str] = None,
partner_namespace_name: Optional[str] = None,
resource_group_name: Optional[str] = None,
opts: Optional[pulumi.InvokeOptions] = None) -> AwaitableGetEventChannelResult:
"""
Event Channel.
API Version: 2020-04-01-preview.
:param str event_channel_name: Name of the event channel.
:param str partner_namespace_name: Name of the partner namespace.
:param str resource_group_name: The name of the resource group within the user's subscription.
"""
__args__ = dict()
__args__['eventChannelName'] = event_channel_name
__args__['partnerNamespaceName'] = partner_namespace_name
__args__['resourceGroupName'] = resource_group_name
if opts is None:
opts = pulumi.InvokeOptions()
if opts.version is None:
opts.version = _utilities.get_version()
__ret__ = pulumi.runtime.invoke('azure-nextgen:eventgrid:getEventChannel', __args__, opts=opts, typ=GetEventChannelResult).value
return AwaitableGetEventChannelResult(
destination=__ret__.destination,
expiration_time_if_not_activated_utc=__ret__.expiration_time_if_not_activated_utc,
filter=__ret__.filter,
id=__ret__.id,
name=__ret__.name,
partner_topic_friendly_description=__ret__.partner_topic_friendly_description,
partner_topic_readiness_state=__ret__.partner_topic_readiness_state,
provisioning_state=__ret__.provisioning_state,
source=__ret__.source,
type=__ret__.type)
| 41.557895 | 247 | 0.691109 | [
"Apache-2.0"
] | pulumi/pulumi-azure-nextgen | sdk/python/pulumi_azure_nextgen/eventgrid/get_event_channel.py | 7,896 | Python |
# -*- coding: utf-8 -*-
# Copyright (c) 2014 The Chromium OS Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
"""Test the archive_lib module."""
from __future__ import print_function
import mock
from chromite.cbuildbot import archive_lib
from chromite.cbuildbot import cbuildbot_run
from chromite.lib import config_lib
from chromite.lib import config_lib_unittest
from chromite.lib import cros_test_lib
from chromite.lib import parallel_unittest
DEFAULT_ARCHIVE_PREFIX = 'bogus_bucket/TheArchiveBase'
DEFAULT_ARCHIVE_BASE = 'gs://%s' % DEFAULT_ARCHIVE_PREFIX
DEFAULT_BUILDROOT = '/tmp/foo/bar/buildroot'
DEFAULT_BUILDNUMBER = 12345
DEFAULT_BRANCH = 'TheBranch'
DEFAULT_CHROME_BRANCH = 'TheChromeBranch'
DEFAULT_VERSION_STRING = 'TheVersionString'
DEFAULT_BOARD = 'TheBoard'
DEFAULT_BOT_NAME = 'TheCoolBot'
# Access to protected member.
# pylint: disable=W0212
DEFAULT_OPTIONS = cros_test_lib.EasyAttr(
archive_base=DEFAULT_ARCHIVE_BASE,
buildroot=DEFAULT_BUILDROOT,
buildnumber=DEFAULT_BUILDNUMBER,
buildbot=True,
branch=DEFAULT_BRANCH,
remote_trybot=False,
debug=False,
)
DEFAULT_CONFIG = config_lib.BuildConfig(
name=DEFAULT_BOT_NAME,
master=True,
boards=[DEFAULT_BOARD],
child_configs=[config_lib.BuildConfig(name='foo'),
config_lib.BuildConfig(name='bar'),
],
gs_path=config_lib.GS_PATH_DEFAULT
)
def _ExtendDefaultOptions(**kwargs):
"""Extend DEFAULT_OPTIONS with keys/values in kwargs."""
options_kwargs = DEFAULT_OPTIONS.copy()
options_kwargs.update(kwargs)
return cros_test_lib.EasyAttr(**options_kwargs)
def _ExtendDefaultConfig(**kwargs):
"""Extend DEFAULT_CONFIG with keys/values in kwargs."""
config_kwargs = DEFAULT_CONFIG.copy()
config_kwargs.update(kwargs)
return config_lib.BuildConfig(**config_kwargs)
def _NewBuilderRun(options=None, config=None):
"""Create a BuilderRun objection from options and config values.
Args:
options: Specify options or default to DEFAULT_OPTIONS.
config: Specify build config or default to DEFAULT_CONFIG.
Returns:
BuilderRun object.
"""
manager = parallel_unittest.FakeMultiprocessManager()
options = options or DEFAULT_OPTIONS
config = config or DEFAULT_CONFIG
site_config = config_lib_unittest.MockSiteConfig()
site_config[config.name] = config
return cbuildbot_run.BuilderRun(options, site_config, config, manager)
class GetBaseUploadURITest(cros_test_lib.TestCase):
"""Test the GetBaseUploadURI function."""
ARCHIVE_BASE = '/tmp/the/archive/base'
BOT_ID = 'TheNewBotId'
def setUp(self):
self.cfg = DEFAULT_CONFIG
def _GetBaseUploadURI(self, *args, **kwargs):
"""Test GetBaseUploadURI with archive_base and no bot_id."""
return archive_lib.GetBaseUploadURI(self.cfg, *args, **kwargs)
def testArchiveBase(self):
expected_result = '%s/%s' % (self.ARCHIVE_BASE, DEFAULT_BOT_NAME)
result = self._GetBaseUploadURI(archive_base=self.ARCHIVE_BASE)
self.assertEqual(expected_result, result)
def testArchiveBaseBotId(self):
expected_result = '%s/%s' % (self.ARCHIVE_BASE, self.BOT_ID)
result = self._GetBaseUploadURI(archive_base=self.ARCHIVE_BASE,
bot_id=self.BOT_ID)
self.assertEqual(expected_result, result)
def testBotId(self):
expected_result = ('%s/%s' %
(config_lib.GetConfig().params.ARCHIVE_URL,
self.BOT_ID))
result = self._GetBaseUploadURI(bot_id=self.BOT_ID)
self.assertEqual(expected_result, result)
def testDefaultGSPath(self):
"""Test GetBaseUploadURI with default gs_path value in config."""
self.cfg = _ExtendDefaultConfig(gs_path=config_lib.GS_PATH_DEFAULT)
# Test without bot_id.
expected_result = ('%s/%s' %
(config_lib.GetConfig().params.ARCHIVE_URL,
DEFAULT_BOT_NAME))
result = self._GetBaseUploadURI()
self.assertEqual(expected_result, result)
# Test with bot_id.
expected_result = ('%s/%s' %
(config_lib.GetConfig().params.ARCHIVE_URL,
self.BOT_ID))
result = self._GetBaseUploadURI(bot_id=self.BOT_ID)
self.assertEqual(expected_result, result)
def testOverrideGSPath(self):
"""Test GetBaseUploadURI with default gs_path value in config."""
self.cfg = _ExtendDefaultConfig(gs_path='gs://funkytown/foo/bar')
# Test without bot_id.
expected_result = 'gs://funkytown/foo/bar/TheCoolBot'
result = self._GetBaseUploadURI()
self.assertEqual(expected_result, result)
# Test with bot_id.
expected_result = 'gs://funkytown/foo/bar/TheNewBotId'
result = self._GetBaseUploadURI(bot_id=self.BOT_ID)
self.assertEqual(expected_result, result)
class ArchiveTest(cros_test_lib.TestCase):
"""Test the Archive class."""
_VERSION = '6543.2.1'
def _GetAttributeValue(self, attr, options=None, config=None):
with mock.patch.object(cbuildbot_run._BuilderRunBase, 'GetVersion') as m:
m.return_value = self._VERSION
run = _NewBuilderRun(options, config)
return getattr(run.GetArchive(), attr)
def testVersion(self):
value = self._GetAttributeValue('version')
self.assertEqual(self._VERSION, value)
def testVersionNotReady(self):
run = _NewBuilderRun()
self.assertRaises(AttributeError, getattr, run, 'version')
def testArchivePathTrybot(self):
options = _ExtendDefaultOptions(buildbot=False)
value = self._GetAttributeValue('archive_path', options=options)
expected_value = ('%s/%s/%s/%s' %
(DEFAULT_BUILDROOT,
archive_lib.Archive._TRYBOT_ARCHIVE,
DEFAULT_BOT_NAME,
self._VERSION))
self.assertEqual(expected_value, value)
def testArchivePathBuildbot(self):
value = self._GetAttributeValue('archive_path')
expected_value = ('%s/%s/%s/%s' %
(DEFAULT_BUILDROOT,
archive_lib.Archive._BUILDBOT_ARCHIVE,
DEFAULT_BOT_NAME,
self._VERSION))
self.assertEqual(expected_value, value)
def testUploadUri(self):
value = self._GetAttributeValue('upload_url')
expected_value = '%s/%s/%s' % (DEFAULT_ARCHIVE_BASE,
DEFAULT_BOT_NAME,
self._VERSION)
self.assertEqual(expected_value, value)
def testDownloadURLBuildbot(self):
value = self._GetAttributeValue('download_url')
expected_value = ('%s%s/%s/%s' %
(archive_lib.gs.PRIVATE_BASE_HTTPS_DOWNLOAD_URL,
DEFAULT_ARCHIVE_PREFIX,
DEFAULT_BOT_NAME,
self._VERSION))
self.assertEqual(expected_value, value)
def testDownloadURLFileBuildbot(self):
value = self._GetAttributeValue('download_url_file')
expected_value = ('%s%s/%s/%s' %
(archive_lib.gs.PRIVATE_BASE_HTTPS_URL,
DEFAULT_ARCHIVE_PREFIX,
DEFAULT_BOT_NAME,
self._VERSION))
self.assertEqual(expected_value, value)
| 34.311321 | 77 | 0.688617 | [
"BSD-3-Clause"
] | FLOSSBoxIN/src | third_party/chromite/cbuildbot/archive_lib_unittest.py | 7,274 | Python |
"""This module contains the general information for AdaptorEthCompQueueProfile ManagedObject."""
from ...ucscmo import ManagedObject
from ...ucsccoremeta import UcscVersion, MoPropertyMeta, MoMeta
from ...ucscmeta import VersionMeta
class AdaptorEthCompQueueProfileConsts():
pass
class AdaptorEthCompQueueProfile(ManagedObject):
"""This is AdaptorEthCompQueueProfile class."""
consts = AdaptorEthCompQueueProfileConsts()
naming_props = set([])
mo_meta = MoMeta("AdaptorEthCompQueueProfile", "adaptorEthCompQueueProfile", "eth-comp-q", VersionMeta.Version111a, "InputOutput", 0x1f, [], ["admin", "ls-config-policy", "ls-network", "ls-server-policy"], [u'adaptorHostEthIfProfile', u'adaptorUsnicConnDef'], [], ["Get", "Set"])
prop_meta = {
"child_action": MoPropertyMeta("child_action", "childAction", "string", VersionMeta.Version111a, MoPropertyMeta.INTERNAL, None, None, None, r"""((deleteAll|ignore|deleteNonPresent),){0,2}(deleteAll|ignore|deleteNonPresent){0,1}""", [], []),
"count": MoPropertyMeta("count", "count", "ushort", VersionMeta.Version111a, MoPropertyMeta.READ_WRITE, 0x2, None, None, None, [], ["1-2000"]),
"dn": MoPropertyMeta("dn", "dn", "string", VersionMeta.Version111a, MoPropertyMeta.READ_ONLY, 0x4, 0, 256, None, [], []),
"ring_size": MoPropertyMeta("ring_size", "ringSize", "ushort", VersionMeta.Version111a, MoPropertyMeta.READ_ONLY, None, None, None, None, [], ["1-1"]),
"rn": MoPropertyMeta("rn", "rn", "string", VersionMeta.Version111a, MoPropertyMeta.READ_ONLY, 0x8, 0, 256, None, [], []),
"status": MoPropertyMeta("status", "status", "string", VersionMeta.Version111a, MoPropertyMeta.READ_WRITE, 0x10, None, None, r"""((removed|created|modified|deleted),){0,3}(removed|created|modified|deleted){0,1}""", [], []),
}
prop_map = {
"childAction": "child_action",
"count": "count",
"dn": "dn",
"ringSize": "ring_size",
"rn": "rn",
"status": "status",
}
def __init__(self, parent_mo_or_dn, **kwargs):
self._dirty_mask = 0
self.child_action = None
self.count = None
self.ring_size = None
self.status = None
ManagedObject.__init__(self, "AdaptorEthCompQueueProfile", parent_mo_or_dn, **kwargs)
| 49.446809 | 283 | 0.669105 | [
"Apache-2.0"
] | depereo/ucscsdk | ucscsdk/mometa/adaptor/AdaptorEthCompQueueProfile.py | 2,324 | Python |
from userbot import bot, logger
from telethon import TelegramClient, events
from config import user
from telethon.tl.functions.users import GetFullUserRequest
@bot.on(events.NewMessage(**user))
async def getUser(event):
logger.info("user plugin is called")
pattern_string = event.pattern_match.string
entity = pattern_string[pattern_string.find("(")+1:pattern_string.find(")")]
logger.info(f"entity to search - {entity}")
try:
info = await bot(GetFullUserRequest(entity))
await event.respond(f"""
Username - `{info.user.username}`
{"User is a bot" if info.user.bot else "user is not a bot"}
{"User is restricted for " + info.user.restriction_reason if info.user.restricted else "User is not restricted"}
Name - {info.user.first_name} {info.user.last_name if info.user.last_name else ""}
Status - `{info.about}`
id - {info.user.id}
{info.common_chats_count} groups common with me
{"I have blocked this user" if info.blocked else "I have not blocked this user"}
""")
except Exception:
await event.respond(f"Cannot find entity with `{entity}`") | 42 | 113 | 0.725275 | [
"MIT"
] | fosslife/grambot | plugins/user.py | 1,092 | Python |
# -*- coding: utf-8 -*-
from south.utils import datetime_utils as datetime
from south.db import db
from south.v2 import SchemaMigration
from django.db import models
class Migration(SchemaMigration):
def forwards(self, orm):
# Adding field 'Comment.article_version'
db.add_column(u'wiking_comment', 'article_version',
self.gf('django.db.models.fields.related.ForeignKey')(default=-1, related_name='comments', to=orm['wiking.ArticleVersion']),
keep_default=False)
def backwards(self, orm):
# Deleting field 'Comment.article_version'
db.delete_column(u'wiking_comment', 'article_version_id')
models = {
u'auth.group': {
'Meta': {'object_name': 'Group'},
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '80'}),
'permissions': ('django.db.models.fields.related.ManyToManyField', [], {'to': u"orm['auth.Permission']", 'symmetrical': 'False', 'blank': 'True'})
},
u'auth.permission': {
'Meta': {'ordering': "(u'content_type__app_label', u'content_type__model', u'codename')", 'unique_together': "((u'content_type', u'codename'),)", 'object_name': 'Permission'},
'codename': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'content_type': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['contenttypes.ContentType']"}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '50'})
},
u'auth.user': {
'Meta': {'object_name': 'User'},
'date_joined': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'email': ('django.db.models.fields.EmailField', [], {'max_length': '75', 'blank': 'True'}),
'first_name': ('django.db.models.fields.CharField', [], {'max_length': '30', 'blank': 'True'}),
'groups': ('django.db.models.fields.related.ManyToManyField', [], {'to': u"orm['auth.Group']", 'symmetrical': 'False', 'blank': 'True'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'is_active': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'is_staff': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'is_superuser': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'last_login': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'last_name': ('django.db.models.fields.CharField', [], {'max_length': '30', 'blank': 'True'}),
'password': ('django.db.models.fields.CharField', [], {'max_length': '128'}),
'user_permissions': ('django.db.models.fields.related.ManyToManyField', [], {'to': u"orm['auth.Permission']", 'symmetrical': 'False', 'blank': 'True'}),
'username': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '30'})
},
u'contenttypes.contenttype': {
'Meta': {'ordering': "('name',)", 'unique_together': "(('app_label', 'model'),)", 'object_name': 'ContentType', 'db_table': "'django_content_type'"},
'app_label': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'model': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '100'})
},
'wiking.article': {
'Meta': {'object_name': 'Article'},
'created_at': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
'head': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'article'", 'to': "orm['wiking.ArticleVersion']"}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'owner': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'created_articles'", 'to': u"orm['auth.User']"}),
'parent': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'childs'", 'null': 'True', 'to': "orm['wiking.Article']"}),
'slug': ('django.db.models.fields.CharField', [], {'max_length': '255'})
},
'wiking.articleversion': {
'Meta': {'object_name': 'ArticleVersion'},
'author': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['auth.User']"}),
'comment': ('django.db.models.fields.TextField', [], {'max_length': '255', 'blank': 'True'}),
'content': ('django.db.models.fields.TextField', [], {}),
'created_at': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'title': ('django.db.models.fields.CharField', [], {'max_length': '255'}),
'version': ('django.db.models.fields.PositiveIntegerField', [], {'default': '1'})
},
'wiking.comment': {
'Meta': {'object_name': 'Comment'},
'article_version': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'comments'", 'to': "orm['wiking.ArticleVersion']"}),
'author': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['auth.User']"}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'text': ('django.db.models.fields.TextField', [], {'max_length': '1000'})
}
}
complete_apps = ['wiking'] | 68.103448 | 187 | 0.571983 | [
"Apache-2.0"
] | lenarhus/opengift.io | wiking/migrations/0004_auto__add_field_comment_article_version.py | 5,925 | Python |
#!/usr/bin/python3
# vim: tabstop=8 expandtab shiftwidth=4 softtabstop=4
"""
Manage database of SV JJ device fab/measurement parameters
File format: SQLite
Tables: barrier (dep structure), shape, josephson (measured params),
trend (fitted Jc, RnA, IcRn)
BB, 2015
"""
import sqlite3
# Restrict table or column name for security
def scrub(table_name):
    """Return *table_name* with every character that is not alphanumeric
    or an underscore removed (cheap guard against SQL injection in
    identifiers, which cannot be passed as '?' parameters)."""
    safe_chars = [ch for ch in table_name if ch.isalnum() or ch == '_']
    return ''.join(safe_chars)
# Build string: 'column1=?,column2=?,...'
def assignform(collist):
    """Return ' col1=?,col2=?,...' for the given column names.

    The leading space lets callers concatenate the result directly after
    SQL keywords such as SET or WHERE.  An empty *collist* yields ' '.
    """
    return ' ' + ','.join('%s=?' % col for col in collist)
class SVJJDB():
    """SQLite-backed store of SV JJ device fab/measurement parameters.

    Tables managed:
      barrier   -- deposition structure per (wafer, chip)
      shape     -- device geometry per (wafer, chip, device)
      josephson -- measured junction parameters per (wafer, chip, device)
    """

    def __init__(self, filename='svjj.db'):
        """Open (or create) the SQLite file *filename* and keep a cursor."""
        # Column layout of each table, in CREATE TABLE order.
        self.colnames = {
            'barrier': ['wafer', 'chip', 'structure', 'fm1_name',
                'fm1_thickness', 'fm2_name', 'fm2_thickness'],
            'shape': ['wafer', 'chip', 'device', 'shape', 'dim1', 'dim2'],
            'josephson': ['wafer', 'chip', 'device', 'temperature',
                'ic_p', 'ic_ap', 'r_p', 'r_ap'],
            #'trend': ['wafer', 'structure', 'jc_p', 'jc_ap',
            #    'fm1_thickness', 'fm2_name', 'fm2_thickness'],
            }
        # Python-side type tag of every column, used for input conversion.
        self.datatypes = {'wafer': 'string', 'chip':'string',
            'structure': 'string','shape':'string', 'fm1_name': 'string',
            'fm2_name': 'string', 'fm1_thickness': 'float',
            'fm2_thickness': 'float', 'device': 'string', 'dim1': 'float',
            'dim2': 'float', 'temperature': 'float', 'ic_p': 'float',
            'ic_ap': 'float', 'r_p': 'float', 'r_ap': 'float'}
        # Default values offered at interactive prompts.
        self.val0 = {'wafer': 'B150323a', 'chip': '56',
            'structure': 'Fe/Cu/Ni/Cu', 'shape':'ellipse', 'fm1_name': 'Fe',
            'fm2_name': 'Ni', 'fm1_thickness': '1e-9',
            'fm2_thickness': '2.4e-9', 'device': 'A01', 'dim1': '1e-6',
            'dim2': '1e-6', 'temperature': 4, 'ic_p': '10e-6',
            'ic_ap': '5e-6', 'r_p': '1', 'r_ap': '1'}

        self.conn = sqlite3.connect(filename)
        self.c = self.conn.cursor()

    def create_tables(self):
        """Create the barrier, shape and josephson tables (must not exist)."""
        # Create barrier structure table
        self.c.execute('''CREATE TABLE barrier
                (wafer text, chip text, structure text,
                fm1_name text, fm1_thickness real,
                fm2_name text, fm2_thickness real)''')

        # Create device shape table
        self.c.execute('''CREATE TABLE shape
                (wafer text, chip text, device text,
                shape text, dim1 real, dim2 real)''')

        # Create josephson measurement result table
        self.c.execute('''CREATE TABLE josephson
                (wafer text, chip text, device text, temperature real,
                ic_p real, ic_ap real, r_p real, r_ap real)''')

    def close(self, save=True):
        """Close the connection; commit pending changes unless save=False."""
        if save: self.conn.commit() # save
        self.conn.close()

    # Insert a row in any managed table
    def insert_row(self, table, arg):
        """Insert one row into *table*; *arg* supplies a value per column."""
        s1 = 'INSERT INTO %s VALUES ' % scrub(table)
        # One '?' placeholder per element of arg.
        s2 = '(' + '?,'*(len(arg)-1) + '?)'
        self.c.execute(s1+s2, arg)

    def print_table(self, table):
        """Dump column names and every row of *table* to stdout."""
        print(self.colnames[table])
        for row in self.c.execute('SELECT * FROM %s'%scrub(table)):
            print(row)

    def delete_row(self, table, args):
        """Delete rows matched by *args*: (wafer, chip) for the barrier
        table, (wafer, chip, device) for shape/josephson.  Unknown table
        names are reported on stdout (best-effort, no exception)."""
        if table == 'barrier':
            self.c.execute('DELETE FROM %s WHERE wafer=? AND chip=?'
                    % scrub(table), args)
        elif table == 'shape' or table == 'josephson':
            self.c.execute('DELETE FROM %s WHERE '
                    'wafer=? AND chip=? AND device=?' % scrub(table), args)
        else:
            print('No table name: %s' % table)

    def update_row(self, table, vals, **newassign):
        """Build (and currently only print) an UPDATE statement.

        vals: new values for every column of *table*, in self.colnames order.
        newassign: column=value pairs selecting the rows to update.

        NOTE(review): the original body referenced undefined names
        (matchcols/matchvals) and raised NameError on every call; they are
        now derived from **newassign.  The execute call is kept disabled to
        preserve the original print-only behaviour -- confirm intent before
        enabling it.
        """
        matchcols = list(newassign.keys())
        matchvals = tuple(newassign.values())
        s1 = 'UPDATE %s' % scrub(table)
        s2 = ' SET' + assignform(self.colnames[table])
        s3 = ' WHERE' + assignform(matchcols)
        print(s1+s2+s3)
        #self.c.execute(s1 + s2 + s3, tuple(vals) + matchvals)
# Derived class for interactive shell execution
class SVJJDBInteract(SVJJDB):
#def create_db(self, *arg):
# self.create_tables(*arg) # pass filename
def print(self, table):
self.print_table(table)
# Get inputs from argument or interactively
# Use val0 as default for interactive case
def input_param(self, key, val0='0', **kwargs):
interact = kwargs.get('interact', True)
datatype = kwargs.get('datatype', 'string')
if interact:
msg = input(key + '? [%s] '%str(val0))
if msg == '': msg = val0 # empty input means default
if datatype == 'string': val = msg
if datatype == 'int': val = int(msg)
if datatype == 'float': val = float(msg)
else:
val = val0
return val
def insert(self, table):
vals = ()
for col in self.colnames[table]:
vals = vals + (self.input_param(col, self.val0[col],\
datatype=self.datatypes[col], interact=True),)
self.insert_row(table, vals)
def delete(self, table, *args):
self.delete_row(table, args)
# *args = wafer, chip, [device]
def update(self, table, **newassign):
# Load previous values as default (val0)
# User input
vals = ()
for col in self.colnames[table]:
vals = vals + (self.input_param(col, self.val0[col],\
datatype=self.datatypes[col], interact=True),)
self.update_row(table, vals, **newassign)
# Pass on any SQL statement
def execsql(self, *cmd):
self.c.execute(cmd[0])
def args2kwargs(args):
l = []
for arg in args:
l += [arg.split('=')]
return dict(l)
# main shell interface (run SVJJDBInteract class)
def app(argv):
"""Execute in system shell
"""
if len(argv) < 2:
print("Usage: python %s <command> <table> [<column1>=<value1> [...]]\n"
" <command>: print, insert, delete, or edit\n"
" <table>: barrier, shape, or josephson\n" % argv[0])
sys.exit(0)
db = SVJJDBInteract()
# Fixed arguments
funcname = argv[1]
table = argv[2]
# Convert extra to keyword arguments
kwargs = args2kwargs(argv[3:])
getattr(db, funcname)(table, **kwargs)
db.close()
# simple test run
def app2(argv):
db = SVJJDB()
#db.create_tables()
db.insert_row('barrier', ('B150413', '22', 'Fe/Cu/Ni/Cu', 'Fe', 1e-9,\
'Ni', 2.4e-9))
db.print_table('barrier', 'chip')
db.close()
if __name__ == '__main__':
import sys
print(sys.version)
app(sys.argv)
print('Bye!')
| 33.970588 | 80 | 0.540404 | [
"MIT"
] | bebaek/cryomem | cryomem/cmtools/lib/old/sql_svjj_old2.py | 6,930 | Python |
import json
import yaml
from jsonschema import validate
import os
configuration_file = os.environ['SC_ENABLER_CONF']
with open(configuration_file, 'r') as conf_file:
input_config = yaml.safe_load(conf_file)
with open("./input_schema_validator.json", 'r') as schema_file:
schema = json.load(schema_file)
def test_input_params():
validate(instance=input_config, schema=schema)
| 23.058824 | 63 | 0.772959 | [
"Apache-2.0"
] | cszelesbbs/servicecatalogenabler | test_validate_input_file.py | 392 | Python |
import logging
import os.path
from copy import deepcopy
import pkg_resources
import salt.config
import salt.loader
import salt.modules.boto_elb as boto_elb
import salt.utils.versions
from tests.support.mixins import LoaderModuleMockMixin
from tests.support.runtests import RUNTIME_VARS
from tests.support.unit import TestCase, skipIf
# pylint: disable=import-error
try:
import boto
boto.ENDPOINTS_PATH = os.path.join(
RUNTIME_VARS.TESTS_DIR, "unit/files/endpoints.json"
)
import boto.ec2.elb
HAS_BOTO = True
except ImportError:
HAS_BOTO = False
try:
from moto import mock_ec2_deprecated, mock_elb_deprecated
HAS_MOTO = True
except ImportError:
HAS_MOTO = False
def mock_ec2_deprecated(self):
"""
if the mock_ec2_deprecated function is not available due to import failure
this replaces the decorated function with stub_function.
Allows boto_elb unit tests to use the @mock_ec2_deprecated decorator
without a "NameError: name 'mock_ec2_deprecated' is not defined" error.
"""
def stub_function(self):
pass
return stub_function
def mock_elb_deprecated(self):
"""
if the mock_elb_deprecated function is not available due to import failure
this replaces the decorated function with stub_function.
Allows boto_elb unit tests to use the @mock_elb_deprecated decorator
without a "NameError: name 'mock_elb_deprecated' is not defined" error.
"""
def stub_function(self):
pass
return stub_function
# pylint: enable=import-error
log = logging.getLogger(__name__)
region = "us-east-1"
access_key = "GKTADJGHEIQSXMKKRBJ08H"
secret_key = "askdjghsdfjkghWupUjasdflkdfklgjsdfjajkghs"
conn_parameters = {
"region": region,
"key": access_key,
"keyid": secret_key,
"profile": {},
}
boto_conn_parameters = {
"aws_access_key_id": access_key,
"aws_secret_access_key": secret_key,
}
instance_parameters = {"instance_type": "t1.micro"}
required_moto = "0.3.7"
required_moto_py3 = "1.0.1"
def _has_required_moto():
"""
Returns True or False depending on if ``moto`` is installed and at the correct version,
depending on what version of Python is running these tests.
"""
if not HAS_MOTO:
return False
else:
moto_version = salt.utils.versions.LooseVersion(
pkg_resources.get_distribution("moto").version
)
if moto_version < salt.utils.versions.LooseVersion(required_moto):
return False
elif moto_version < salt.utils.versions.LooseVersion(required_moto_py3):
return False
return True
@skipIf(HAS_BOTO is False, "The boto module must be installed.")
@skipIf(HAS_MOTO is False, "The moto module must be installed.")
@skipIf(
_has_required_moto() is False,
"The moto module must be >= to {} for "
"PY2 or {} for PY3.".format(required_moto, required_moto_py3),
)
class BotoElbTestCase(TestCase, LoaderModuleMockMixin):
"""
TestCase for salt.modules.boto_elb module
"""
def setup_loader_modules(self):
opts = salt.config.DEFAULT_MASTER_OPTS.copy()
utils = salt.loader.utils(
opts, whitelist=["boto", "args", "systemd", "path", "platform"]
)
funcs = salt.loader.minion_mods(opts, utils=utils)
return {boto_elb: {"__opts__": opts, "__utils__": utils, "__salt__": funcs}}
def setUp(self):
TestCase.setUp(self)
# __virtual__ must be caller in order for _get_conn to be injected
boto_elb.__virtual__()
@mock_ec2_deprecated
@mock_elb_deprecated
def test_register_instances_valid_id_result_true(self):
"""
tests that given a valid instance id and valid ELB that
register_instances returns True.
"""
conn_ec2 = boto.ec2.connect_to_region(region, **boto_conn_parameters)
conn_elb = boto.ec2.elb.connect_to_region(region, **boto_conn_parameters)
zones = [zone.name for zone in conn_ec2.get_all_zones()]
elb_name = "TestRegisterInstancesValidIdResult"
conn_elb.create_load_balancer(elb_name, zones, [(80, 80, "http")])
reservations = conn_ec2.run_instances("ami-08389d60")
register_result = boto_elb.register_instances(
elb_name, reservations.instances[0].id, **conn_parameters
)
self.assertEqual(True, register_result)
@mock_ec2_deprecated
@mock_elb_deprecated
def test_register_instances_valid_id_string(self):
"""
tests that given a string containing a instance id and valid ELB that
register_instances adds the given instance to an ELB
"""
conn_ec2 = boto.ec2.connect_to_region(region, **boto_conn_parameters)
conn_elb = boto.ec2.elb.connect_to_region(region, **boto_conn_parameters)
zones = [zone.name for zone in conn_ec2.get_all_zones()]
elb_name = "TestRegisterInstancesValidIdResult"
conn_elb.create_load_balancer(elb_name, zones, [(80, 80, "http")])
reservations = conn_ec2.run_instances("ami-08389d60")
boto_elb.register_instances(
elb_name, reservations.instances[0].id, **conn_parameters
)
load_balancer_refreshed = conn_elb.get_all_load_balancers(elb_name)[0]
registered_instance_ids = [
instance.id for instance in load_balancer_refreshed.instances
]
log.debug(load_balancer_refreshed.instances)
self.assertEqual([reservations.instances[0].id], registered_instance_ids)
@mock_ec2_deprecated
@mock_elb_deprecated
def test_deregister_instances_valid_id_result_true(self):
"""
tests that given an valid id the boto_elb deregister_instances method
removes exactly one of a number of ELB registered instances
"""
conn_ec2 = boto.ec2.connect_to_region(region, **boto_conn_parameters)
conn_elb = boto.ec2.elb.connect_to_region(region, **boto_conn_parameters)
zones = [zone.name for zone in conn_ec2.get_all_zones()]
elb_name = "TestDeregisterInstancesValidIdResult"
load_balancer = conn_elb.create_load_balancer(
elb_name, zones, [(80, 80, "http")]
)
reservations = conn_ec2.run_instances("ami-08389d60")
load_balancer.register_instances(reservations.instances[0].id)
deregister_result = boto_elb.deregister_instances(
elb_name, reservations.instances[0].id, **conn_parameters
)
self.assertEqual(True, deregister_result)
@mock_ec2_deprecated
@mock_elb_deprecated
def test_deregister_instances_valid_id_string(self):
"""
tests that given an valid id the boto_elb deregister_instances method
removes exactly one of a number of ELB registered instances
"""
conn_ec2 = boto.ec2.connect_to_region(region, **boto_conn_parameters)
conn_elb = boto.ec2.elb.connect_to_region(region, **boto_conn_parameters)
zones = [zone.name for zone in conn_ec2.get_all_zones()]
elb_name = "TestDeregisterInstancesValidIdString"
load_balancer = conn_elb.create_load_balancer(
elb_name, zones, [(80, 80, "http")]
)
reservations = conn_ec2.run_instances("ami-08389d60", min_count=2)
all_instance_ids = [instance.id for instance in reservations.instances]
load_balancer.register_instances(all_instance_ids)
boto_elb.deregister_instances(
elb_name, reservations.instances[0].id, **conn_parameters
)
load_balancer_refreshed = conn_elb.get_all_load_balancers(elb_name)[0]
expected_instances = deepcopy(all_instance_ids)
expected_instances.remove(reservations.instances[0].id)
actual_instances = [
instance.id for instance in load_balancer_refreshed.instances
]
self.assertEqual(actual_instances, expected_instances)
@mock_ec2_deprecated
@mock_elb_deprecated
def test_deregister_instances_valid_id_list(self):
"""
tests that given an valid ids in the form of a list that the boto_elb
deregister_instances all members of the given list
"""
conn_ec2 = boto.ec2.connect_to_region(region, **boto_conn_parameters)
conn_elb = boto.ec2.elb.connect_to_region(region, **boto_conn_parameters)
zones = [zone.name for zone in conn_ec2.get_all_zones()]
elb_name = "TestDeregisterInstancesValidIdList"
load_balancer = conn_elb.create_load_balancer(
elb_name, zones, [(80, 80, "http")]
)
reservations = conn_ec2.run_instances("ami-08389d60", min_count=3)
all_instance_ids = [instance.id for instance in reservations.instances]
load_balancer.register_instances(all_instance_ids)
# reservations.instances[:-1] refers to all instances except list
# instance
deregister_instances = [instance.id for instance in reservations.instances[:-1]]
expected_instances = [reservations.instances[-1].id]
boto_elb.deregister_instances(elb_name, deregister_instances, **conn_parameters)
load_balancer_refreshed = conn_elb.get_all_load_balancers(elb_name)[0]
actual_instances = [
instance.id for instance in load_balancer_refreshed.instances
]
self.assertEqual(actual_instances, expected_instances)
| 38.279352 | 91 | 0.696563 | [
"Apache-2.0"
] | Anujsahu902/salt | tests/unit/modules/test_boto_elb.py | 9,455 | Python |
from six.moves.urllib_parse import quote
from jet_bridge_base import settings
from jet_bridge_base.responses.base import Response
from jet_bridge_base.responses.redirect import RedirectResponse
from jet_bridge_base.status import HTTP_400_BAD_REQUEST
from jet_bridge_base.views.base.api import APIView
class RegisterView(APIView):
def get(self, *args, **kwargs):
if not settings.PROJECT:
return Response('Project name is not set', status=HTTP_400_BAD_REQUEST)
if not settings.TOKEN:
return Response('Project token is not set', status=HTTP_400_BAD_REQUEST)
token = self.request.get_argument('token', '')
install_type = self.request.get_argument('install_type', '')
if settings.WEB_BASE_URL.startswith('https') and not self.request.full_url().startswith('https'):
web_base_url = 'http{}'.format(settings.WEB_BASE_URL[5:])
else:
web_base_url = settings.WEB_BASE_URL
if token:
url = '{}/projects/register/{}'.format(web_base_url, token)
else:
url = '{}/projects/register'.format(web_base_url)
parameters = [
['project', settings.PROJECT],
['referrer', self.request.full_url().encode('utf8')],
]
if install_type:
parameters.append(['install_type', install_type])
query_string = '&'.join(map(lambda x: '{}={}'.format(x[0], quote(x[1])), parameters))
return RedirectResponse('%s?%s' % (url, query_string))
| 35.488372 | 105 | 0.659895 | [
"MIT"
] | RamsesMartinez/jet-bridge | packages/jet_bridge_base/jet_bridge_base/views/register.py | 1,526 | Python |
import datetime
from app import db
class BucketList(db.Model):
id = db.Column(db.Integer, primary_key=True, autoincrement=True)
name = db.Column(db.String(100), unique=True)
description = db.Column(db.Text, nullable=True)
interests = db.Column(db.String(120), nullable=True)
date_created = db.Column(db.DateTime, default=datetime.datetime.utcnow())
date_modified = db.Column(db.DateTime)
created_by = db.Column(db.Integer, db.ForeignKey('user.id'), nullable=False)
items = db.relationship('Item', backref='bucket_list_items', lazy='dynamic')
def __repr__(self):
return "<Bucketlist {}>".format(self.name)
class Item(db.Model):
id = db.Column(db.Integer, primary_key=True, autoincrement=True)
name = db.Column(db.String(100), unique=True)
description = db.Column(db.Text)
status = db.Column(db.Text)
date_accomplished = db.Column(db.DateTime)
date_created = db.Column(db.DateTime, default=datetime.datetime.utcnow())
date_modified = db.Column(db.DateTime)
bucketlists = db.Column(db.Integer, db.ForeignKey('bucket_list.id'), nullable=False)
def __repr__(self):
return "<Items {}>".format(self.name)
| 38.451613 | 88 | 0.703859 | [
"MIT"
] | SerryJohns/bucket-list | app/api/v1/models/bucketlist.py | 1,192 | Python |
import logging
import os
import subprocess
import tempfile
from argparse import Namespace
from pathlib import Path
from .error import EvalError
from .manifest import Repo, load_manifest, update_lock_file
from .path import EVALREPO_PATH, LOCK_PATH, MANIFEST_PATH, nixpkgs_path
from .prefetch import prefetch
logger = logging.getLogger(__name__)
def eval_repo(repo: Repo, repo_path: Path) -> None:
with tempfile.TemporaryDirectory() as d:
eval_path = Path(d).joinpath("default.nix")
with open(eval_path, "w") as f:
f.write(
f"""
with import <nixpkgs> {{}};
import {EVALREPO_PATH} {{
name = "{repo.name}";
url = "{repo.url}";
src = {repo_path.joinpath(repo.file)};
inherit pkgs lib;
}}
"""
)
# fmt: off
cmd = [
"nix-env",
"-f", str(eval_path),
"-qa", "*",
"--meta",
"--xml",
"--allowed-uris", "https://static.rust-lang.org",
"--option", "restrict-eval", "true",
"--option", "allow-import-from-derivation", "true",
"--drv-path",
"--show-trace",
"-I", f"nixpkgs={nixpkgs_path()}",
"-I", str(repo_path),
"-I", str(eval_path),
"-I", str(EVALREPO_PATH),
]
# fmt: on
logger.info(f"Evaluate repository {repo.name}")
env = dict(PATH=os.environ["PATH"], NIXPKGS_ALLOW_UNSUPPORTED_SYSTEM="1")
proc = subprocess.Popen(cmd, env=env, stdout=subprocess.DEVNULL)
try:
res = proc.wait(10)
except subprocess.TimeoutExpired:
raise EvalError(f"evaluation for {repo.name} timed out of after 10 seconds")
if res != 0:
raise EvalError(f"{repo.name} does not evaluate:\n$ {' '.join(cmd)}")
def update(repo: Repo) -> Repo:
repo, locked_version, repo_path = prefetch(repo)
if repo_path:
eval_repo(repo, repo_path)
repo.locked_version = locked_version
return repo
def update_command(args: Namespace) -> None:
logging.basicConfig(level=logging.INFO)
manifest = load_manifest(MANIFEST_PATH, LOCK_PATH)
for repo in manifest.repos:
try:
update(repo)
except EvalError as err:
if repo.locked_version is None:
# likely a repository added in a pull request, make it fatal then
logger.error(
f"repository {repo.name} failed to evaluate: {err}. This repo is not yet in our lock file!!!!"
)
raise
# Do not print stack traces
logger.error(f"repository {repo.name} failed to evaluate: {err}")
except Exception:
# for non-evaluation errors we want the stack trace
logger.exception(f"Failed to updated repository {repo.name}")
update_lock_file(manifest.repos, LOCK_PATH)
| 31.212766 | 114 | 0.582822 | [
"MIT"
] | nixos-users/NUR | ci/nur/update.py | 2,934 | Python |
import json
from videomaker.functions.packageData import packageData
from videomaker.functions.addPreset import addOption
def savePreset(focus):
preset = packageData(focus, verify=False)
with open("./presets/{0}.json".format(preset["subredditName"]), "w+") as out:
json.dump(preset, out, indent=4)
addOption(focus, "./presets/{0}.json".format(preset["subredditName"]), preset["subredditName"])
| 41.5 | 99 | 0.73253 | [
"MIT"
] | TheTimebike/VideoMaker-Studio | videomaker/functions/savePreset.py | 415 | Python |
import torch
import torch.nn as nn
import torch.nn.functional as F
from torch.autograd import Variable
import framework.configbase
import math
import time
import numpy as np
from modules.transformer_encoder import Encoder
from modules.transformer_decoder import Decoder
decay1 = [(i+1)*20**(-1) for i in range(20)]
decay2 = [1-(i+1)*50**(-1) for i in range(50)]
class TransformerConfig(framework.configbase.ModuleConfig):
def __init__(self):
super(TransformerConfig, self).__init__()
self.vocab = 0
self.max_words_in_sent = 150
self.ft_dim = 4096
self.d_model = 512
self.enc_n_layers = 3
self.dec_n_layers = 3
self.heads = 8
self.dropout = 0.1
self.keyframes = False
self.rl = False
self.document_freq = None
class Transformer(nn.Module):
def __init__(self, config):
super(Transformer, self).__init__()
self.config = config
self.encoder = Encoder(self.config.ft_dim, self.config.d_model, self.config.enc_n_layers, self.config.heads, self.config.dropout, self.config.keyframes, act=True)
self.decoder = Decoder(self.config.vocab, self.config.d_model, self.config.dec_n_layers, self.config.heads, self.config.dropout, act=True)
self.dropout = nn.Dropout(self.config.dropout)
self.logit = nn.Linear(self.config.d_model, self.config.vocab)
self.logit.weight = self.decoder.embed.embed.weight
self.remove_gate = nn.Linear(self.config.d_model, 1)
self.add_gate = nn.Linear(self.config.d_model, 1)
self.q_linear = nn.Linear(self.config.d_model, self.config.d_model, bias=False)
self.next_attn = nn.Linear(2*self.config.d_model, 1)
self.init_weights()
def init_weights(self,):
for p in self.parameters():
if p.dim() > 1:
nn.init.xavier_uniform_(p)
def forward(self, src, trg, src_mask, trg_mask):
e_outputs, org_key, select = self.encoder(src, src_mask)
add_state = torch.tensor(decay2[:e_outputs.size(1)]+[0]*max(0,e_outputs.size(1)-50)).cuda().unsqueeze(0).unsqueeze(-1)
memory_bank = e_outputs * add_state
d_output, attn_weights = [], []
for i in range(1, trg.size(1)+1):
word, attn, _ = self.decoder(trg[:,i-1].unsqueeze(1), memory_bank, src_mask, trg_mask[:,i-1,i-1].unsqueeze(1), step=i-1)
d_output.append(word[:,-1])
attn_weights.append(attn[:,:,-1].mean(dim=1))
memory_bank, add_state = self.update_memory(memory_bank, add_state, e_outputs, attn_weights[-20:], d_output[-20:])
output = self.logit(torch.cat([_.unsqueeze(1) for _ in d_output], 1))
return output, org_key, select
def update_memory(self, memory_bank, add_state, e_outputs, attn, query_s):
remove_prob = torch.sigmoid(self.remove_gate(query_s[-1])).unsqueeze(-1)
add_prob = torch.sigmoid(self.add_gate(query_s[-1])).unsqueeze(-1)
temp = torch.softmax(torch.tensor(decay1[20-len(attn):]).cuda(), dim=-1)
attn = sum([attn[i]*temp[i] for i in range(len(attn))]).unsqueeze(-1)
# remove for diversity
query_s = sum([query_s[i]*temp[i] for i in range(len(query_s))])
sim = torch.sigmoid(torch.matmul(memory_bank, self.q_linear(query_s).unsqueeze(-1)))
memory_bank = memory_bank * (1 - remove_prob * attn * sim)
# add for coherence
last_ctx = (e_outputs * attn).sum(dim=1, keepdim=True)
next_attn = torch.sigmoid(self.next_attn(torch.cat([e_outputs,last_ctx.expand_as(e_outputs)], dim=-1)))
memory_bank = memory_bank + e_outputs * (1-add_state) * (add_prob*next_attn)
add_state = add_state + (1-add_state) * (add_prob*next_attn)
return memory_bank, add_state
def sample(self, src, src_mask, decoding='greedy'):
init_tok = 2
eos_tok = 3
if self.config.keyframes:
e_outputs, src_mask = self.encoder.get_keyframes(src, src_mask)
else:
e_outputs, _, _ = self.encoder(src, src_mask)
add_state = torch.tensor(decay2[:e_outputs.size(1)]+[0]*max(0,e_outputs.size(1)-50)).cuda().unsqueeze(0).unsqueeze(-1)
memory_bank = e_outputs * add_state
outputs = torch.ones(src.size(0), 1).fill_(init_tok).long().cuda()
seqLogprobs = torch.zeros(src.size(0), 60).cuda()
attn_weights, d_output = [], []
for i in range(1, 60):
trg_mask = self.nopeak_mask(i)
word, attn, _ = self.decoder(outputs[:,-1].unsqueeze(1), memory_bank, src_mask, trg_mask[:,-1,-1].unsqueeze(1), step=i-1)
attn_weights.append(attn[:,:,-1].mean(dim=1))
d_output.append(word[:,-1])
out = self.logit(word)
logprobs = F.log_softmax(out[:,-1], dim=-1)
if decoding == 'greedy':
_, next_word = torch.max(logprobs, dim=1)
next_word = next_word.unsqueeze(-1)
else:
probs = torch.exp(logprobs.data).cpu()
next_word = torch.multinomial(probs, 1).cuda()
seqLogprobs[:,i] = logprobs.gather(1, next_word).view(-1)
outputs = torch.cat([outputs, next_word], dim=1)
memory_bank, add_state = self.update_memory(memory_bank, add_state, e_outputs, attn_weights[-20:], d_output[-20:])
attn_weights = torch.cat([_.unsqueeze(1) for _ in attn_weights], dim=1)
return outputs, seqLogprobs, attn_weights
def nopeak_mask(self, size):
np_mask = np.triu(np.ones((1, size, size)), k=1).astype('uint8')
np_mask = Variable(torch.from_numpy(np_mask) == 0).cuda()
return np_mask
| 44.762712 | 166 | 0.683453 | [
"MIT"
] | riokt/video-paragraph | modules/transformer.py | 5,282 | Python |
def par_UCT(rootstate, rootnode, itermax):
print('hi')
for i in range(0):
node = rootnode
state = rootstate.clone()
# Select
while node.untriedMoves == [] and node.childNodes != []: # node is fully expanded and non-terminal
node = node.UCTSelectChild()
state.do_move(node.move)
# Expand
if node.untriedMoves != []: # if we can expand (i.e. state/node is non-terminal)
m = random.choice(node.untriedMoves)
state.do_move(m)
node = node.AddChild(m,state) # add child and descend tree
# Rollout - this can often be made orders of magnitude quicker using a state.GetRandomMove() function
while state.get_moves() != []: # while state is non-terminal
# print('---------')
# print(state.credits)
# print(state._get_player_turn())
# print(state.get_moves())
# print(state.moves_taken)
# probs = [1 for x in state.get_moves()]
# if(5 in state.get_moves()):
# probs[-1] -= .5
state.do_move(random.choice(state.get_moves()))
# Backpropagate
while node != None: # backpropagate from the expanded node and work back to the root node
node.Update(state.get_result(node.playerJustMoved)) # state is terminal. Update node with result from POV of node.playerJustMoved
node = node.parentNode
# Output some information about the tree - can be omitted
if (verbose): print(rootnode.TreeToString(0))
else:
# print(rootnode.ChildrenToString())
pass
# determine general performance of hand
return sorted(rootnode.childNodes, key = lambda c: c.visits)[-1].move | 44.209302 | 145 | 0.557601 | [
"MIT"
] | guy477/Poker | Version2/ivan/rlcard/rlcard/agents/mcmphelp.py | 1,901 | Python |
import pytrec_eval
from repro_eval.util import trim, break_ties
from repro_eval.measure.statistics import ttest
from repro_eval.measure.overall_effects import ER, deltaRI
from repro_eval.measure.document_order import ktau_union as ktu, RBO
from repro_eval.measure.effectiveness import rmse as RMSE, nrmse as nRMSE
from repro_eval.config import ERR_MSG
class Evaluator(object):
"""
An abstract evaluator that holds the original baseline and advanced run as well as
the reproduced/replicated baseline and advanced run.
"""
def __init__(self, **kwargs):
self.qrel_orig_path = kwargs.get('qrel_orig_path', None)
self.run_b_orig_path = kwargs.get('run_b_orig_path', None)
self.run_a_orig_path = kwargs.get('run_a_orig_path', None)
self.run_b_rep_path = kwargs.get('run_b_rep_path', None)
self.run_a_rep_path = kwargs.get('run_a_rep_path', None)
self.run_b_orig = None
self.run_a_orig = None
self.run_b_rep = None
self.run_a_rep = None
self.run_b_orig_score = None
self.run_a_orig_score = None
self.run_b_rep_score = None
self.run_a_rep_score = None
if self.qrel_orig_path:
with open(self.qrel_orig_path, 'r') as f_qrel:
qrel_orig = pytrec_eval.parse_qrel(f_qrel)
self.rel_eval = pytrec_eval.RelevanceEvaluator(qrel_orig, pytrec_eval.supported_measures)
if self.run_b_orig_path:
with open(self.run_b_orig_path, 'r') as f_run:
self.run_b_orig = pytrec_eval.parse_run(f_run)
self.run_b_orig = {t: self.run_b_orig[t] for t in sorted(self.run_b_orig)}
if self.run_a_orig_path:
with open(self.run_a_orig_path, 'r') as f_run:
self.run_a_orig = pytrec_eval.parse_run(f_run)
self.run_a_orig = {t: self.run_a_orig[t] for t in sorted(self.run_a_orig)}
if self.run_b_rep_path:
with open(self.run_b_rep_path, 'r') as f_run:
self.run_b_rep = pytrec_eval.parse_run(f_run)
self.run_b_rep = {t: self.run_b_rep[t] for t in sorted(self.run_b_rep)}
if self.run_a_rep_path:
with open(self.run_a_rep_path, 'r') as f_run:
self.run_a_rep = pytrec_eval.parse_run(f_run)
self.run_a_rep = {t: self.run_a_rep[t] for t in sorted(self.run_a_rep)}
def trim(self, t=None, run=None):
"""
Trims all runs of the Evaluator to the length specified by the threshold value t.
@param t: Threshold parameter or number of top-k documents to be considered.
@param run: If run is not None, only the provided run will be trimmed.
"""
if run:
run = break_ties(run)
if t:
trim(run, thresh=t)
else:
trim(run)
return
if self.run_b_orig:
self.run_b_orig = break_ties(self.run_b_orig)
if t:
trim(self.run_b_orig, thresh=t)
else:
trim(self.run_b_orig)
if self.run_a_orig:
self.run_a_orig = break_ties(self.run_a_orig)
if t:
trim(self.run_a_orig, thresh=t)
else:
trim(self.run_a_orig)
if self.run_b_rep:
self.run_b_rep = break_ties(self.run_b_rep)
if t:
trim(self.run_b_rep, thresh=t)
else:
trim(self.run_b_rep)
if self.run_a_rep:
self.run_a_rep = break_ties(self.run_a_rep)
if t:
trim(self.run_a_rep, thresh=t)
else:
trim(self.run_a_rep)
def evaluate(self, run=None):
"""
Evaluates the original baseline and advanced run if available.
@param run: Reproduced or replicated run that will be evaluated.
"""
if self.run_b_orig:
self.run_b_orig = break_ties(self.run_b_orig)
self.run_b_orig_score = self.rel_eval.evaluate(self.run_b_orig)
if self.run_a_orig:
self.run_a_orig = break_ties(self.run_a_orig)
self.run_a_orig_score = self.rel_eval.evaluate(self.run_a_orig)
def er(self, run_b_score=None, run_a_score=None, run_b_path=None, run_a_path=None, print_feedback=False):
"""
Determines the Effect Ratio (ER) according to the following paper:
Timo Breuer, Nicola Ferro, Norbert Fuhr, Maria Maistro, Tetsuya Sakai, Philipp Schaer, Ian Soboroff.
How to Measure the Reproducibility of System-oriented IR Experiments.
Proceedings of SIGIR, pages 349-358, 2020.
The ER value is determined by the ratio between the mean improvements
of the original and reproduced/replicated experiments.
@param run_b_score: Scores of the baseline run,
if not provided the scores of the RpdEvaluator object will be used instead.
@param run_a_score: Scores of the advanced run,
if not provided the scores of the RpdEvaluator object will be used instead.
@param print_feedback: Boolean value indicating if feedback on progress should be printed.
@return: Dictionary containing the ER values for the specified run combination.
"""
if print_feedback:
print('Determining Effect Ratio (ER)')
if self.run_b_orig_score and self.run_a_orig_score and run_b_path and run_a_path:
with open(run_b_path, 'r') as b_run, open(run_a_path, 'r') as a_run:
run_b_rep = pytrec_eval.parse_run(b_run)
run_b_rep = {t: run_b_rep[t] for t in sorted(run_b_rep)}
run_b_rep_score = self.rel_eval_rpl.evaluate(run_b_rep) if hasattr(self, 'rel_eval_rpl') else self.rel_eval.evaluate(run_b_rep)
run_a_rep = pytrec_eval.parse_run(a_run)
run_a_rep = {t: run_a_rep[t] for t in sorted(run_a_rep)}
run_a_rep_score = self.rel_eval_rpl.evaluate(run_a_rep) if hasattr(self, 'rel_eval_rpl') else self.rel_eval.evaluate(run_a_rep)
return ER(orig_score_b=self.run_b_orig_score, orig_score_a=self.run_a_orig_score,
rep_score_b=run_b_rep_score, rep_score_a=run_a_rep_score, pbar=print_feedback)
if self.run_b_orig_score and self.run_a_orig_score and run_b_score and run_a_score:
return ER(orig_score_b=self.run_b_orig_score, orig_score_a=self.run_a_orig_score,
rep_score_b=run_b_score, rep_score_a=run_a_score, pbar=print_feedback)
if self.run_b_orig_score and self.run_a_orig_score and self.run_b_rep_score and self.run_a_rep_score:
return ER(orig_score_b=self.run_b_orig_score, orig_score_a=self.run_a_orig_score,
rep_score_b=self.run_b_rep_score, rep_score_a=self.run_a_rep_score, pbar=print_feedback)
else:
print(ERR_MSG)
def dri(self, run_b_score=None, run_a_score=None, run_b_path=None, run_a_path=None, print_feedback=False):
"""
Determines the Delta Relative Improvement (DeltaRI) according to the following paper:
Timo Breuer, Nicola Ferro, Norbert Fuhr, Maria Maistro, Tetsuya Sakai, Philipp Schaer, Ian Soboroff.
How to Measure the Reproducibility of System-oriented IR Experiments.
Proceedings of SIGIR, pages 349-358, 2020.
The DeltaRI value is determined by the difference between the relative improvements
of the original and reproduced/replicated experiments.
@param run_b_score: Scores of the baseline run,
if not provided the scores of the RpdEvaluator object will be used instead.
@param run_a_score: Scores of the advanced run,
if not provided the scores of the RpdEvaluator object will be used instead.
@param print_feedback: Boolean value indicating if feedback on progress should be printed.
@return: Dictionary containing the DRI values for the specified run combination.
"""
if print_feedback:
print('Determining Delta Relative Improvement (DRI)')
if self.run_b_orig_score and self.run_a_orig_score and run_b_path and run_a_path:
with open(run_b_path, 'r') as b_run, open(run_a_path, 'r') as a_run:
run_b_rep = pytrec_eval.parse_run(b_run)
run_b_rep = {t: run_b_rep[t] for t in sorted(run_b_rep)}
run_b_rep_score = self.rel_eval_rpl.evaluate(run_b_rep) if hasattr(self, 'rel_eval_rpl') else self.rel_eval.evaluate(run_b_rep)
run_a_rep = pytrec_eval.parse_run(a_run)
run_a_rep = {t: run_a_rep[t] for t in sorted(run_a_rep)}
run_a_rep_score = self.rel_eval_rpl.evaluate(run_a_rep) if hasattr(self, 'rel_eval_rpl') else self.rel_eval.evaluate(run_a_rep)
return deltaRI(orig_score_b=self.run_b_orig_score, orig_score_a=self.run_a_orig_score,
rep_score_b=run_b_rep_score, rep_score_a=run_a_rep_score, pbar=print_feedback)
if self.run_b_orig_score and self.run_a_orig_score and run_b_score and run_a_score:
return deltaRI(orig_score_b=self.run_b_orig_score, orig_score_a=self.run_a_orig_score,
rep_score_b=run_b_score, rep_score_a=run_a_score, pbar=print_feedback)
if self.run_b_orig_score and self.run_a_orig_score and self.run_b_rep_score and self.run_a_rep_score:
return deltaRI(orig_score_b=self.run_b_orig_score, orig_score_a=self.run_a_orig_score,
rep_score_b=self.run_b_rep_score, rep_score_a=self.run_a_rep_score, pbar=print_feedback)
else:
print(ERR_MSG)
def _ttest(self, rpd=True, run_b_score=None, run_a_score=None, print_feedback=False):
    """
    Conducts either a paired (reproducibility) or unpaired (replicability) two-sided t-test according to the following paper:
    Timo Breuer, Nicola Ferro, Norbert Fuhr, Maria Maistro, Tetsuya Sakai, Philipp Schaer, Ian Soboroff.
    How to Measure the Reproducibility of System-oriented IR Experiments.
    Proceedings of SIGIR, pages 349-358, 2020.
    @param rpd: Boolean indicating if the evaluated runs are reproduced.
    @param run_b_score: Scores of the baseline run,
    if not provided the scores of the evaluator object will be used instead.
    @param run_a_score: Scores of the advanced run,
    if not provided the scores of the evaluator object will be used instead.
    @param print_feedback: Boolean value indicating if feedback on progress should be printed.
    @return: Dictionary with p-values that compare the score distributions of the baseline and advanced run.
    """
    # Without original baseline scores and at least one set of baseline
    # replica scores there is nothing to compare.
    if not (self.run_b_orig_score and (self.run_b_rep_score or run_b_score)):
        print(ERR_MSG)
        return

    def _test(orig_score, rep_score):
        # Shared kwargs for every pairwise comparison below.
        return ttest(orig_score, rep_score, rpd=rpd, pbar=print_feedback)

    # Explicitly supplied scores take precedence over the stored ones.
    if run_b_score and run_a_score:
        if print_feedback:
            print('Determining p-values of t-test for baseline and advanced run.')
        return {'baseline': _test(self.run_b_orig_score, run_b_score),
                'advanced': _test(self.run_a_orig_score, run_a_score)}
    if run_b_score:
        if print_feedback:
            print('Determining p-values of t-test for baseline run.')
        return {'baseline': _test(self.run_b_orig_score, run_b_score)}
    # Fall back to the scores stored on the evaluator object.
    if self.run_a_orig_score and self.run_a_rep_score:
        if print_feedback:
            print('Determining p-values of t-test for baseline and advanced run.')
        return {'baseline': _test(self.run_b_orig_score, self.run_b_rep_score),
                'advanced': _test(self.run_a_orig_score, self.run_a_rep_score)}
    if print_feedback:
        print('Determining p-values of t-test for baseline run.')
    return {'baseline': _test(self.run_b_orig_score, self.run_b_rep_score)}
class RpdEvaluator(Evaluator):
    """
    The Reproducibility Evaluator is used for quantifying the different levels of reproduction for runs that were
    derived from the same test collection used in the original experiment.
    """

    @staticmethod
    def _parse_run_from_path(path):
        """
        Read a TREC run file and return it as a dictionary with topics in sorted order.
        @param path: Path to the run file.
        @return: Dictionary mapping topic ids to document rankings, sorted by topic id.
        """
        with open(path, 'r') as f_run:
            run = pytrec_eval.parse_run(f_run)
        return {t: run[t] for t in sorted(run)}

    def evaluate(self, run=None):
        """
        Evaluates the scores of the original and reproduced baseline and advanced runs.
        If a (reproduced) run is provided only this one will be evaluated and a dictionary with the corresponding
        scores is returned.
        @param run: A reproduced run. If not specified, the original and reproduced runs of the RpdEvaluator will
        be used instead.
        @return: If run is specified, a dictionary with the corresponding scores is returned.
        """
        if run:
            return self.rel_eval.evaluate(run)
        super(RpdEvaluator, self).evaluate()
        if self.run_b_rep:
            # Break score ties deterministically before evaluating.
            self.run_b_rep = break_ties(self.run_b_rep)
            self.run_b_rep_score = self.rel_eval.evaluate(self.run_b_rep)
        if self.run_a_rep:
            self.run_a_rep = break_ties(self.run_a_rep)
            self.run_a_rep_score = self.rel_eval.evaluate(self.run_a_rep)

    def ktau_union(self, run_b_rep=None, run_a_rep=None, run_b_path=None, run_a_path=None, print_feedback=False):
        """
        Determines Kendall's tau Union (KTU) between the original and reproduced document orderings
        according to the following paper:
        Timo Breuer, Nicola Ferro, Norbert Fuhr, Maria Maistro, Tetsuya Sakai, Philipp Schaer, Ian Soboroff.
        How to Measure the Reproducibility of System-oriented IR Experiments.
        Proceedings of SIGIR, pages 349-358, 2020.
        @param run_b_rep: Scores of the baseline run,
        if not provided the scores of the RpdEvaluator object will be used instead.
        @param run_a_rep: Scores of the advanced run,
        if not provided the scores of the RpdEvaluator object will be used instead.
        @param run_b_path: Path to another reproduced baseline run,
        if not provided the reproduced baseline run of the RpdEvaluator object will be used instead.
        @param run_a_path: Path to another reproduced advanced run,
        if not provided the reproduced advanced run of the RpdEvaluator object will be used instead.
        @param print_feedback: Boolean value indicating if feedback on progress should be printed.
        @return: Dictionary with KTU values that compare the document orderings of the original and reproduced runs.
        """
        # Precedence: run files from paths > explicitly passed runs > stored runs.
        if self.run_b_orig and run_b_path:
            if self.run_a_orig and run_a_path:
                if print_feedback:
                    print("Determining Kendall's tau Union (KTU) for baseline and advanced run.")
                run_b_rep = self._parse_run_from_path(run_b_path)
                run_a_rep = self._parse_run_from_path(run_a_path)
                return {'baseline': ktu(self.run_b_orig, run_b_rep, pbar=print_feedback),
                        'advanced': ktu(self.run_a_orig, run_a_rep, pbar=print_feedback)}
            else:
                if print_feedback:
                    print("Determining Kendall's tau Union (KTU) for baseline run.")
                run_b_rep = self._parse_run_from_path(run_b_path)
                return {'baseline': ktu(self.run_b_orig, run_b_rep, pbar=print_feedback)}
        if self.run_b_orig and run_b_rep:
            if self.run_a_orig and run_a_rep:
                if print_feedback:
                    print("Determining Kendall's tau Union (KTU) for baseline and advanced run.")
                return {'baseline': ktu(self.run_b_orig, run_b_rep, pbar=print_feedback),
                        'advanced': ktu(self.run_a_orig, run_a_rep, pbar=print_feedback)}
            else:
                if print_feedback:
                    print("Determining Kendall's tau Union (KTU) for baseline run.")
                return {'baseline': ktu(self.run_b_orig, run_b_rep, pbar=print_feedback)}
        if self.run_b_orig and self.run_b_rep:
            if self.run_a_orig and self.run_a_rep:
                if print_feedback:
                    print("Determining Kendall's tau Union (KTU) for baseline and advanced run.")
                return {'baseline': ktu(self.run_b_orig, self.run_b_rep, pbar=print_feedback),
                        'advanced': ktu(self.run_a_orig, self.run_a_rep, pbar=print_feedback)}
            else:
                if print_feedback:
                    print("Determining Kendall's tau Union (KTU) for baseline run.")
                return {'baseline': ktu(self.run_b_orig, self.run_b_rep, pbar=print_feedback)}
        else:
            print(ERR_MSG)

    def rbo(self, run_b_rep=None, run_a_rep=None, run_b_path=None, run_a_path=None, print_feedback=False, misinfo=True):
        """
        Determines the Rank-Biased Overlap (RBO) between the original and reproduced document orderings
        according to the following paper:
        Timo Breuer, Nicola Ferro, Norbert Fuhr, Maria Maistro, Tetsuya Sakai, Philipp Schaer, Ian Soboroff.
        How to Measure the Reproducibility of System-oriented IR Experiments.
        Proceedings of SIGIR, pages 349-358, 2020.
        @param run_b_rep: Scores of the baseline run,
        if not provided the scores of the RpdEvaluator object will be used instead.
        @param run_a_rep: Scores of the advanced run,
        if not provided the scores of the RpdEvaluator object will be used instead.
        @param run_b_path: Path to another reproduced baseline run,
        if not provided the reproduced baseline run of the RpdEvaluator object will be used instead.
        @param run_a_path: Path to another reproduced advanced run,
        if not provided the reproduced advanced run of the RpdEvaluator object will be used instead.
        @param print_feedback: Boolean value indicating if feedback on progress should be printed.
        @param misinfo: Use the RBO implementation that is also used in the TREC Health Misinformation Track.
        See also: https://github.com/claclark/Compatibility
        @return: Dictionary with RBO values that compare the document orderings of the original and reproduced runs.
        """
        if self.run_b_orig and run_b_path:
            if self.run_a_orig and run_a_path:
                if print_feedback:
                    print("Determining Rank-biased Overlap (RBO) for baseline and advanced run.")
                run_b_rep = self._parse_run_from_path(run_b_path)
                run_a_rep = self._parse_run_from_path(run_a_path)
                return {'baseline': RBO(self.run_b_orig, run_b_rep, pbar=print_feedback, misinfo=misinfo),
                        'advanced': RBO(self.run_a_orig, run_a_rep, pbar=print_feedback, misinfo=misinfo)}
            else:
                if print_feedback:
                    print("Determining Rank-biased Overlap (RBO) for baseline run.")
                run_b_rep = self._parse_run_from_path(run_b_path)
                return {'baseline': RBO(self.run_b_orig, run_b_rep, pbar=print_feedback, misinfo=misinfo)}
        if self.run_b_orig and run_b_rep:
            if self.run_a_orig and run_a_rep:
                if print_feedback:
                    print("Determining Rank-biased Overlap (RBO) for baseline and advanced run.")
                return {'baseline': RBO(self.run_b_orig, run_b_rep, pbar=print_feedback, misinfo=misinfo),
                        'advanced': RBO(self.run_a_orig, run_a_rep, pbar=print_feedback, misinfo=misinfo)}
            else:
                if print_feedback:
                    print("Determining Rank-biased Overlap (RBO) for baseline run.")
                return {'baseline': RBO(self.run_b_orig, run_b_rep, pbar=print_feedback, misinfo=misinfo)}
        if self.run_b_orig and self.run_b_rep:
            if self.run_a_orig and self.run_a_rep:
                if print_feedback:
                    print("Determining Rank-biased Overlap (RBO) for baseline and advanced run.")
                return {'baseline': RBO(self.run_b_orig, self.run_b_rep, pbar=print_feedback, misinfo=misinfo),
                        'advanced': RBO(self.run_a_orig, self.run_a_rep, pbar=print_feedback, misinfo=misinfo)}
            else:
                if print_feedback:
                    print("Determining Rank-biased Overlap (RBO) for baseline run.")
                return {'baseline': RBO(self.run_b_orig, self.run_b_rep, pbar=print_feedback, misinfo=misinfo)}
        else:
            print(ERR_MSG)

    def rmse(self, run_b_score=None, run_a_score=None, run_b_path=None, run_a_path=None, print_feedback=False):
        """
        Determines the Root Mean Square Error (RMSE) according to the following paper:
        Timo Breuer, Nicola Ferro, Norbert Fuhr, Maria Maistro, Tetsuya Sakai, Philipp Schaer, Ian Soboroff.
        How to Measure the Reproducibility of System-oriented IR Experiments.
        Proceedings of SIGIR, pages 349-358, 2020.
        @param run_b_score: Scores of the baseline run,
        if not provided the scores of the RpdEvaluator object will be used instead.
        @param run_a_score: Scores of the advanced run,
        if not provided the scores of the RpdEvaluator object will be used instead.
        @param run_b_path: Path to another reproduced baseline run,
        if not provided the reproduced baseline run of the RpdEvaluator object will be used instead.
        @param run_a_path: Path to another reproduced advanced run,
        if not provided the reproduced advanced run of the RpdEvaluator object will be used instead.
        @param print_feedback: Boolean value indicating if feedback on progress should be printed.
        @return: Dictionary with RMSE values that measure the closeness
        between the topics scores of the original and reproduced runs.
        """
        if self.run_b_orig and run_b_path:
            if self.run_a_orig and run_a_path:
                if print_feedback:
                    print("Determining Root Mean Square Error (RMSE) for baseline and advanced run.")
                run_b_rep_score = self.rel_eval.evaluate(self._parse_run_from_path(run_b_path))
                run_a_rep_score = self.rel_eval.evaluate(self._parse_run_from_path(run_a_path))
                return {'baseline': RMSE(self.run_b_orig_score, run_b_rep_score, pbar=print_feedback),
                        'advanced': RMSE(self.run_a_orig_score, run_a_rep_score, pbar=print_feedback)}
            else:
                if print_feedback:
                    print("Determining Root Mean Square Error (RMSE) for baseline run.")
                run_b_rep_score = self.rel_eval.evaluate(self._parse_run_from_path(run_b_path))
                return {'baseline': RMSE(self.run_b_orig_score, run_b_rep_score, pbar=print_feedback)}
        if self.run_b_orig_score and run_b_score:
            if self.run_a_orig_score and run_a_score:
                if print_feedback:
                    print("Determining Root Mean Square Error (RMSE) for baseline and advanced run.")
                return {'baseline': RMSE(self.run_b_orig_score, run_b_score, pbar=print_feedback),
                        'advanced': RMSE(self.run_a_orig_score, run_a_score, pbar=print_feedback)}
            else:
                if print_feedback:
                    print("Determining Root Mean Square Error (RMSE) for baseline run.")
                return {'baseline': RMSE(self.run_b_orig_score, run_b_score, pbar=print_feedback)}
        if self.run_b_orig_score and self.run_b_rep_score:
            if self.run_a_orig_score and self.run_a_rep_score:
                if print_feedback:
                    print("Determining Root Mean Square Error (RMSE) for baseline and advanced run.")
                return {'baseline': RMSE(self.run_b_orig_score, self.run_b_rep_score, pbar=print_feedback),
                        'advanced': RMSE(self.run_a_orig_score, self.run_a_rep_score, pbar=print_feedback)}
            else:
                if print_feedback:
                    print("Determining Root Mean Square Error (RMSE) for baseline run.")
                return {'baseline': RMSE(self.run_b_orig_score, self.run_b_rep_score, pbar=print_feedback)}
        else:
            print(ERR_MSG)

    def nrmse(self, run_b_score=None, run_a_score=None, run_b_path=None, run_a_path=None, print_feedback=False):
        """
        Determines the normalized Root Mean Square Error (RMSE).
        @param run_b_score: Scores of the baseline run,
        if not provided the scores of the RpdEvaluator object will be used instead.
        @param run_a_score: Scores of the advanced run,
        if not provided the scores of the RpdEvaluator object will be used instead.
        @param run_b_path: Path to another reproduced baseline run,
        if not provided the reproduced baseline run of the RpdEvaluator object will be used instead.
        @param run_a_path: Path to another reproduced advanced run,
        if not provided the reproduced advanced run of the RpdEvaluator object will be used instead.
        @param print_feedback: Boolean value indicating if feedback on progress should be printed.
        @return: Dictionary with nRMSE values that measure the closeness
        between the topics scores of the original and reproduced runs.
        """
        if self.run_b_orig and run_b_path:
            if self.run_a_orig and run_a_path:
                if print_feedback:
                    print("Determining normalized Root Mean Square Error (RMSE) for baseline and advanced run.")
                run_b_rep_score = self.rel_eval.evaluate(self._parse_run_from_path(run_b_path))
                run_a_rep_score = self.rel_eval.evaluate(self._parse_run_from_path(run_a_path))
                return {'baseline': nRMSE(self.run_b_orig_score, run_b_rep_score, pbar=print_feedback),
                        'advanced': nRMSE(self.run_a_orig_score, run_a_rep_score, pbar=print_feedback)}
            else:
                if print_feedback:
                    print("Determining normalized Root Mean Square Error (RMSE) for baseline run.")
                run_b_rep_score = self.rel_eval.evaluate(self._parse_run_from_path(run_b_path))
                return {'baseline': nRMSE(self.run_b_orig_score, run_b_rep_score, pbar=print_feedback)}
        if self.run_b_orig_score and run_b_score:
            if self.run_a_orig_score and run_a_score:
                if print_feedback:
                    print("Determining normalized Root Mean Square Error (RMSE) for baseline and advanced run.")
                return {'baseline': nRMSE(self.run_b_orig_score, run_b_score, pbar=print_feedback),
                        'advanced': nRMSE(self.run_a_orig_score, run_a_score, pbar=print_feedback)}
            else:
                if print_feedback:
                    print("Determining normalized Root Mean Square Error (RMSE) for baseline run.")
                return {'baseline': nRMSE(self.run_b_orig_score, run_b_score, pbar=print_feedback)}
        if self.run_b_orig_score and self.run_b_rep_score:
            if self.run_a_orig_score and self.run_a_rep_score:
                if print_feedback:
                    # NOTE: message previously omitted "normalized", unlike the
                    # sibling branches of this method; fixed for consistency.
                    print("Determining normalized Root Mean Square Error (RMSE) for baseline and advanced run.")
                return {'baseline': nRMSE(self.run_b_orig_score, self.run_b_rep_score, pbar=print_feedback),
                        'advanced': nRMSE(self.run_a_orig_score, self.run_a_rep_score, pbar=print_feedback)}
            else:
                if print_feedback:
                    print("Determining normalized Root Mean Square Error (RMSE) for baseline run.")
                return {'baseline': nRMSE(self.run_b_orig_score, self.run_b_rep_score, pbar=print_feedback)}
        else:
            print(ERR_MSG)

    def ttest(self, run_b_score=None, run_a_score=None, run_b_path=None, run_a_path=None, print_feedback=False):
        """
        Conducts a paired two-tailed t-test for reproduced runs that were derived from the same test collection
        as in the original experiment.
        @param run_b_score: Scores of the baseline run,
        if not provided the scores of the RpdEvaluator object will be used instead.
        @param run_a_score: Scores of the advanced run,
        if not provided the scores of the RpdEvaluator object will be used instead.
        @param run_b_path: Path to another reproduced baseline run,
        if not provided the reproduced baseline run of the RpdEvaluator object will be used instead.
        @param run_a_path: Path to another reproduced advanced run,
        if not provided the reproduced advanced run of the RpdEvaluator object will be used instead.
        @param print_feedback: Boolean value indicating if feedback on progress should be printed.
        @return: Dictionary with p-values that compare the score distributions of the baseline and advanced run.
        """
        if run_b_path:
            run_b_rep_score = self.rel_eval.evaluate(self._parse_run_from_path(run_b_path))
            if run_a_path:
                run_a_rep_score = self.rel_eval.evaluate(self._parse_run_from_path(run_a_path))
                return self._ttest(run_b_score=run_b_rep_score, run_a_score=run_a_rep_score, print_feedback=print_feedback)
            return self._ttest(run_b_score=run_b_rep_score, run_a_score=None, print_feedback=print_feedback)
        return self._ttest(run_b_score=run_b_score, run_a_score=run_a_score, print_feedback=print_feedback)
class RplEvaluator(Evaluator):
    """
    The Replicability Evaluator is used for quantifying the different levels of replication for runs that were
    derived from a test collection not used in the original experiment.
    """

    def __init__(self, **kwargs):
        super(RplEvaluator, self).__init__(**kwargs)
        # A replicated experiment is scored against its own (new) qrels file.
        self.qrel_rpl_path = kwargs.get('qrel_rpl_path', None)
        if self.qrel_rpl_path:
            with open(self.qrel_rpl_path, 'r') as f_qrel:
                qrel_rpl = pytrec_eval.parse_qrel(f_qrel)
            self.rel_eval_rpl = pytrec_eval.RelevanceEvaluator(qrel_rpl, pytrec_eval.supported_measures)

    def evaluate(self, run=None):
        """
        Evaluates the scores of the original and replicated baseline and advanced runs.
        If a (replicated) run is provided only this one will be evaluated and a dictionary with the corresponding
        scores is returned.
        @param run: A replicated run. If not specified, the original and replicated runs of the RplEvaluator will
        be used instead.
        @return: If run is specified, a dictionary with the corresponding scores is returned.
        """
        if run:
            return self.rel_eval_rpl.evaluate(run)
        super(RplEvaluator, self).evaluate()
        # Break ties and score both replicated runs against the new qrels.
        for rep_attr, score_attr in (('run_b_rep', 'run_b_rep_score'),
                                     ('run_a_rep', 'run_a_rep_score')):
            rep_run = getattr(self, rep_attr)
            if rep_run:
                rep_run = break_ties(rep_run)
                setattr(self, rep_attr, rep_run)
                setattr(self, score_attr, self.rel_eval_rpl.evaluate(rep_run))

    def ttest(self, run_b_score=None, run_a_score=None, run_b_path=None, run_a_path=None, print_feedback=False):
        """
        Conducts an un-paired two-tailed t-test for replicated runs that were derived from a test collection
        not used in the original experiment.
        @param run_b_score: Scores of the baseline run,
        if not provided the scores of the RplEvaluator object will be used instead.
        @param run_a_score: Scores of the advanced run,
        if not provided the scores of the RplEvaluator object will be used instead.
        @param run_b_path: Path to another replicated baseline run,
        if not provided the replicated baseline run of the RplEvaluator object will be used instead.
        @param run_a_path: Path to another replicated advanced run,
        if not provided the replicated advanced run of the RplEvaluator object will be used instead.
        @param print_feedback: Boolean value indicating if feedback on progress should be printed.
        @return: Dictionary with p-values that compare the score distributions of the baseline and advanced run.
        """
        if not run_b_path:
            # No run files given: delegate directly with the provided scores.
            return self._ttest(rpd=False, run_b_score=run_b_score, run_a_score=run_a_score, print_feedback=print_feedback)
        with open(run_b_path, 'r') as b_run:
            parsed_b = pytrec_eval.parse_run(b_run)
        parsed_b = {topic: parsed_b[topic] for topic in sorted(parsed_b)}
        score_b = self.rel_eval_rpl.evaluate(parsed_b)
        score_a = None
        if run_a_path:
            with open(run_a_path, 'r') as a_run:
                parsed_a = pytrec_eval.parse_run(a_run)
            parsed_a = {topic: parsed_a[topic] for topic in sorted(parsed_a)}
            score_a = self.rel_eval_rpl.evaluate(parsed_a)
        return self._ttest(rpd=False, run_b_score=score_b, run_a_score=score_a, print_feedback=print_feedback)
| 59.723856 | 143 | 0.643019 | [
"MIT"
] | irgroup/repro_eval | repro_eval/Evaluator.py | 36,551 | Python |
"""
Module for applying conditional formatting to
DataFrames and Series.
"""
from collections import defaultdict
from contextlib import contextmanager
import copy
from functools import partial
from itertools import product
from uuid import uuid1
import numpy as np
from pandas.compat import range
from pandas.util._decorators import Appender
from pandas.core.dtypes.common import is_float, is_string_like
from pandas.core.dtypes.generic import ABCSeries
import pandas as pd
from pandas.api.types import is_dict_like, is_list_like
import pandas.core.common as com
from pandas.core.config import get_option
from pandas.core.generic import _shared_docs
from pandas.core.indexing import _maybe_numeric_slice, _non_reducing_slice
try:
from jinja2 import (
PackageLoader, Environment, ChoiceLoader, FileSystemLoader
)
except ImportError:
raise ImportError("pandas.Styler requires jinja2. "
"Please install with `conda install Jinja2`\n"
"or `pip install Jinja2`")
try:
import matplotlib.pyplot as plt
from matplotlib import colors
has_mpl = True
except ImportError:
has_mpl = False
no_mpl_message = "{0} requires matplotlib."
@contextmanager
def _mpl(func):
    """Yield ``(pyplot, colors)`` for *func*, or raise ImportError naming it
    when matplotlib is unavailable."""
    if not has_mpl:
        raise ImportError(no_mpl_message.format(func.__name__))
    yield plt, colors
class Styler(object):
"""
Helps style a DataFrame or Series according to the data with HTML and CSS.
Parameters
----------
data : Series or DataFrame
precision : int
precision to round floats to, defaults to pd.options.display.precision
table_styles : list-like, default None
list of {selector: (attr, value)} dicts; see Notes
uuid : str, default None
a unique identifier to avoid CSS collisions; generated automatically
caption : str, default None
caption to attach to the table
cell_ids : bool, default True
If True, each cell will have an ``id`` attribute in their HTML tag.
The ``id`` takes the form ``T_<uuid>_row<num_row>_col<num_col>``
where ``<uuid>`` is the unique identifier, ``<num_row>`` is the row
number and ``<num_col>`` is the column number.
Attributes
----------
env : Jinja2 Environment
template : Jinja2 Template
loader : Jinja2 Loader
See Also
--------
pandas.DataFrame.style
Notes
-----
Most styling will be done by passing style functions into
``Styler.apply`` or ``Styler.applymap``. Style functions should
return values with strings containing CSS ``'attr: value'`` that will
be applied to the indicated cells.
If using in the Jupyter notebook, Styler has defined a ``_repr_html_``
to automatically render itself. Otherwise call Styler.render to get
the generated HTML.
CSS classes are attached to the generated HTML
* Index and Column names include ``index_name`` and ``level<k>``
where `k` is its level in a MultiIndex
* Index label cells include
* ``row_heading``
* ``row<n>`` where `n` is the numeric position of the row
* ``level<k>`` where `k` is the level in a MultiIndex
* Column label cells include
* ``col_heading``
* ``col<n>`` where `n` is the numeric position of the column
* ``evel<k>`` where `k` is the level in a MultiIndex
* Blank cells include ``blank``
* Data cells include ``data``
"""
loader = PackageLoader("pandas", "io/formats/templates")
env = Environment(
loader=loader,
trim_blocks=True,
)
template = env.get_template("html.tpl")
def __init__(self, data, precision=None, table_styles=None, uuid=None,
caption=None, table_attributes=None, cell_ids=True):
self.ctx = defaultdict(list)
self._todo = []
if not isinstance(data, (pd.Series, pd.DataFrame)):
raise TypeError("``data`` must be a Series or DataFrame")
if data.ndim == 1:
data = data.to_frame()
if not data.index.is_unique or not data.columns.is_unique:
raise ValueError("style is not supported for non-unique indices.")
self.data = data
self.index = data.index
self.columns = data.columns
self.uuid = uuid
self.table_styles = table_styles
self.caption = caption
if precision is None:
precision = get_option('display.precision')
self.precision = precision
self.table_attributes = table_attributes
self.hidden_index = False
self.hidden_columns = []
self.cell_ids = cell_ids
# display_funcs maps (row, col) -> formatting function
def default_display_func(x):
if is_float(x):
return '{:>.{precision}g}'.format(x, precision=self.precision)
else:
return x
self._display_funcs = defaultdict(lambda: default_display_func)
def _repr_html_(self):
"""
Hooks into Jupyter notebook rich display system.
"""
return self.render()
@Appender(_shared_docs['to_excel'] % dict(
axes='index, columns', klass='Styler',
axes_single_arg="{0 or 'index', 1 or 'columns'}",
optional_by="""
by : str or list of str
Name or list of names which refer to the axis items.""",
versionadded_to_excel='\n .. versionadded:: 0.20'))
def to_excel(self, excel_writer, sheet_name='Sheet1', na_rep='',
float_format=None, columns=None, header=True, index=True,
index_label=None, startrow=0, startcol=0, engine=None,
merge_cells=True, encoding=None, inf_rep='inf', verbose=True,
freeze_panes=None):
from pandas.io.formats.excel import ExcelFormatter
formatter = ExcelFormatter(self, na_rep=na_rep, cols=columns,
header=header,
float_format=float_format, index=index,
index_label=index_label,
merge_cells=merge_cells,
inf_rep=inf_rep)
formatter.write(excel_writer, sheet_name=sheet_name, startrow=startrow,
startcol=startcol, freeze_panes=freeze_panes,
engine=engine)
    def _translate(self):
        """
        Convert the DataFrame in `self.data` and the attrs from `_build_styles`
        into a dictionary of {head, body, uuid, cellstyle}.
        """
        # Snapshot the state that feeds the Jinja template.
        table_styles = self.table_styles or []
        caption = self.caption
        ctx = self.ctx
        precision = self.precision
        hidden_index = self.hidden_index
        hidden_columns = self.hidden_columns
        # Dashes are invalid in CSS identifiers, hence the replace.
        uuid = self.uuid or str(uuid1()).replace("-", "_")
        # CSS class names attached to the generated HTML elements.
        ROW_HEADING_CLASS = "row_heading"
        COL_HEADING_CLASS = "col_heading"
        INDEX_NAME_CLASS = "index_name"
        DATA_CLASS = "data"
        BLANK_CLASS = "blank"
        BLANK_VALUE = ""

        def format_attr(pair):
            # Render one HTML attribute, e.g. {'key': 'colspan', 'value': 2}.
            return "{key}={value}".format(**pair)

        # for sparsifying a MultiIndex
        idx_lengths = _get_level_lengths(self.index)
        col_lengths = _get_level_lengths(self.columns, hidden_columns)

        cell_context = dict()

        n_rlvls = self.data.index.nlevels
        n_clvls = self.data.columns.nlevels
        rlabels = self.data.index.tolist()
        clabels = self.data.columns.tolist()

        # Normalize flat indexes to the nested-list shape of a MultiIndex so
        # the loops below can treat both cases uniformly.
        if n_rlvls == 1:
            rlabels = [[x] for x in rlabels]
        if n_clvls == 1:
            clabels = [[x] for x in clabels]
        clabels = list(zip(*clabels))

        cellstyle = []
        head = []

        # Build one header row per column-index level.
        for r in range(n_clvls):
            # Blank for Index columns...
            row_es = [{"type": "th",
                       "value": BLANK_VALUE,
                       "display_value": BLANK_VALUE,
                       "is_visible": not hidden_index,
                       "class": " ".join([BLANK_CLASS])}] * (n_rlvls - 1)

            # ... except maybe the last for columns.names
            name = self.data.columns.names[r]
            cs = [BLANK_CLASS if name is None else INDEX_NAME_CLASS,
                  "level{lvl}".format(lvl=r)]
            name = BLANK_VALUE if name is None else name
            row_es.append({"type": "th",
                           "value": name,
                           "display_value": name,
                           "class": " ".join(cs),
                           "is_visible": not hidden_index})

            if clabels:
                for c, value in enumerate(clabels[r]):
                    cs = [COL_HEADING_CLASS, "level{lvl}".format(lvl=r),
                          "col{col}".format(col=c)]
                    cs.extend(cell_context.get(
                        "col_headings", {}).get(r, {}).get(c, []))
                    es = {
                        "type": "th",
                        "value": value,
                        "display_value": value,
                        "class": " ".join(cs),
                        "is_visible": _is_visible(c, r, col_lengths),
                    }
                    # Sparsified MultiIndex cells span multiple columns.
                    colspan = col_lengths.get((r, c), 0)
                    if colspan > 1:
                        es["attributes"] = [
                            format_attr({"key": "colspan", "value": colspan})
                        ]
                    row_es.append(es)
                head.append(row_es)

        # Extra header row for the index names, when any exist and the index
        # is not hidden.
        if (self.data.index.names and
                com._any_not_none(*self.data.index.names) and
                not hidden_index):
            index_header_row = []

            for c, name in enumerate(self.data.index.names):
                cs = [INDEX_NAME_CLASS,
                      "level{lvl}".format(lvl=c)]
                name = '' if name is None else name
                index_header_row.append({"type": "th", "value": name,
                                         "class": " ".join(cs)})

            index_header_row.extend(
                [{"type": "th",
                  "value": BLANK_VALUE,
                  "class": " ".join([BLANK_CLASS])
                  }] * (len(clabels[0]) - len(hidden_columns)))

            head.append(index_header_row)

        body = []
        for r, idx in enumerate(self.data.index):
            row_es = []
            # Row-heading cells, one per index level.
            for c, value in enumerate(rlabels[r]):
                rid = [ROW_HEADING_CLASS, "level{lvl}".format(lvl=c),
                       "row{row}".format(row=r)]
                es = {
                    "type": "th",
                    "is_visible": (_is_visible(r, c, idx_lengths) and
                                   not hidden_index),
                    "value": value,
                    "display_value": value,
                    "id": "_".join(rid[1:]),
                    "class": " ".join(rid)
                }
                rowspan = idx_lengths.get((c, r), 0)
                if rowspan > 1:
                    es["attributes"] = [
                        format_attr({"key": "rowspan", "value": rowspan})
                    ]
                row_es.append(es)

            # Data cells for this row.
            for c, col in enumerate(self.data.columns):
                cs = [DATA_CLASS, "row{row}".format(row=r),
                      "col{col}".format(col=c)]
                cs.extend(cell_context.get("data", {}).get(r, {}).get(c, []))
                formatter = self._display_funcs[(r, c)]
                value = self.data.iloc[r, c]
                row_dict = {"type": "td",
                            "value": value,
                            "class": " ".join(cs),
                            "display_value": formatter(value),
                            "is_visible": (c not in hidden_columns)}
                # only add an id if the cell has a style
                if (self.cell_ids or
                        not(len(ctx[r, c]) == 1 and ctx[r, c][0] == '')):
                    row_dict["id"] = "_".join(cs[1:])
                row_es.append(row_dict)
                props = []
                for x in ctx[r, c]:
                    # have to handle empty styles like ['']
                    if x.count(":"):
                        props.append(x.split(":"))
                    else:
                        props.append(['', ''])
                cellstyle.append({'props': props,
                                  'selector': "row{row}_col{col}"
                                  .format(row=r, col=c)})
            body.append(row_es)

        table_attr = self.table_attributes
        # Tag the table so MathJax leaves cell text alone when the option
        # disables MathJax rendering.
        use_mathjax = get_option("display.html.use_mathjax")
        if not use_mathjax:
            table_attr = table_attr or ''
            if 'class="' in table_attr:
                table_attr = table_attr.replace('class="',
                                                'class="tex2jax_ignore ')
            else:
                table_attr += ' class="tex2jax_ignore"'

        return dict(head=head, cellstyle=cellstyle, body=body, uuid=uuid,
                    precision=precision, table_styles=table_styles,
                    caption=caption, table_attributes=table_attr)
def format(self, formatter, subset=None):
"""
Format the text display value of cells.
.. versionadded:: 0.18.0
Parameters
----------
formatter : str, callable, or dict
subset : IndexSlice
An argument to ``DataFrame.loc`` that restricts which elements
``formatter`` is applied to.
Returns
-------
self : Styler
Notes
-----
``formatter`` is either an ``a`` or a dict ``{column name: a}`` where
``a`` is one of
- str: this will be wrapped in: ``a.format(x)``
- callable: called with the value of an individual cell
The default display value for numeric values is the "general" (``g``)
format with ``pd.options.display.precision`` precision.
Examples
--------
>>> df = pd.DataFrame(np.random.randn(4, 2), columns=['a', 'b'])
>>> df.style.format("{:.2%}")
>>> df['c'] = ['a', 'b', 'c', 'd']
>>> df.style.format({'c': str.upper})
"""
if subset is None:
row_locs = range(len(self.data))
col_locs = range(len(self.data.columns))
else:
subset = _non_reducing_slice(subset)
if len(subset) == 1:
subset = subset, self.data.columns
sub_df = self.data.loc[subset]
row_locs = self.data.index.get_indexer_for(sub_df.index)
col_locs = self.data.columns.get_indexer_for(sub_df.columns)
if is_dict_like(formatter):
for col, col_formatter in formatter.items():
# formatter must be callable, so '{}' are converted to lambdas
col_formatter = _maybe_wrap_formatter(col_formatter)
col_num = self.data.columns.get_indexer_for([col])[0]
for row_num in row_locs:
self._display_funcs[(row_num, col_num)] = col_formatter
else:
# single scalar to format all cells with
locs = product(*(row_locs, col_locs))
for i, j in locs:
formatter = _maybe_wrap_formatter(formatter)
self._display_funcs[(i, j)] = formatter
return self
def render(self, **kwargs):
    """
    Render the built up styles to HTML.

    Parameters
    ----------
    `**kwargs` : Any additional keyword arguments are passed through
    to ``self.template.render``. This is useful when you need to provide
    additional variables for a custom template.

    .. versionadded:: 0.20

    Returns
    -------
    rendered : str
        The rendered HTML.

    Notes
    -----
    ``Styler`` objects define ``_repr_html_``, which calls
    ``self.render()`` automatically when the object is the last item in
    a Notebook cell.  When calling ``render()`` directly, wrap the
    result in ``IPython.display.HTML`` to view it in the notebook.

    Pandas uses the following keys in render; arguments passed in
    ``**kwargs`` take precedence, so think carefully before overriding
    them: head, cellstyle, body, uuid, precision, table_styles, caption,
    table_attributes.
    """
    self._compute()
    # TODO: namespace all the pandas keys
    translated = self._translate()
    # Drop style entries whose property lists are effectively empty
    # (e.g. [['', '']]); every cell still carries a class, but empty
    # rules would only bloat the rendered output.
    translated['cellstyle'] = [
        style for style in translated['cellstyle']
        if any(any(prop) for prop in style['props'])
    ]
    translated.update(kwargs)
    return self.template.render(**translated)
def _update_ctx(self, attrs):
    """
    Update the state of the Styler.

    Collects a mapping of {index_label: ['<property>: <value>']}.

    attrs : Series or DataFrame
        Should contain strings of '<property>: <value>;<prop2>: <val2>'.
        Whitespace shouldn't matter and a trailing ';' is tolerated.
    """
    for row_label, row in attrs.iterrows():
        for col_label, css_string in row.iteritems():
            i = self.index.get_indexer([row_label])[0]
            j = self.columns.get_indexer([col_label])[0]
            # Split 'prop: val;prop2: val2' into individual declarations.
            self.ctx[(i, j)].extend(css_string.rstrip(";").split(";"))
def _copy(self, deepcopy=False):
    """Return a new Styler sharing (or deep-copying) this one's state.

    ``ctx`` (accumulated styles) and ``_todo`` (queued style functions)
    are shared when ``deepcopy`` is False, duplicated otherwise.
    """
    clone = Styler(self.data, precision=self.precision,
                   caption=self.caption, uuid=self.uuid,
                   table_styles=self.table_styles)
    copier = copy.deepcopy if deepcopy else (lambda obj: obj)
    clone.ctx = copier(self.ctx)
    clone._todo = copier(self._todo)
    return clone
def __copy__(self):
    """
    Shallow copy: shares ``ctx`` and ``_todo`` with the original.

    NOTE(review): the previous docstring said "Deep copy by default",
    but ``_copy`` is invoked with ``deepcopy=False`` here; the deep case
    is handled by ``__deepcopy__``.
    """
    return self._copy(deepcopy=False)
def __deepcopy__(self, memo):
    # Deep copy: _copy duplicates ctx and _todo via copy.deepcopy.
    return self._copy(deepcopy=True)
def clear(self):
    """
    Reset the styler, removing any previously applied styles.

    Returns None.
    """
    # Drop both the queued style functions and any computed styles.
    self._todo = []
    self.ctx.clear()
def _compute(self):
    """
    Execute the style functions built up in ``self._todo``.

    Relies on the convention that all style functions go through
    ``.apply`` or ``.applymap``, which append tuples of
    (method-getter, args, kwargs); the getter resolves the bound
    method on this instance.
    """
    result = self
    for method_getter, args, kwargs in self._todo:
        result = method_getter(self)(*args, **kwargs)
    return result
def _apply(self, func, axis=0, subset=None, **kwargs):
    # Core worker behind ``Styler.apply``: run ``func`` over the selected
    # region and merge the returned CSS strings into ``self.ctx``.
    subset = slice(None) if subset is None else subset
    subset = _non_reducing_slice(subset)
    data = self.data.loc[subset]
    if axis is not None:
        # Column-wise (axis=0) or row-wise (axis=1): DataFrame.apply with
        # result_type='expand' turns the per-row/column lists of CSS
        # strings back into a frame aligned with ``data``.
        result = data.apply(func, axis=axis,
                            result_type='expand', **kwargs)
        result.columns = data.columns
    else:
        # Table-wise: ``func`` must itself return an aligned DataFrame.
        result = func(data, **kwargs)
        if not isinstance(result, pd.DataFrame):
            raise TypeError(
                "Function {func!r} must return a DataFrame when "
                "passed to `Styler.apply` with axis=None"
                .format(func=func))
        if not (result.index.equals(data.index) and
                result.columns.equals(data.columns)):
            msg = ('Result of {func!r} must have identical index and '
                   'columns as the input'.format(func=func))
            raise ValueError(msg)

    # Shape check applies to both branches before styles are recorded.
    result_shape = result.shape
    expected_shape = self.data.loc[subset].shape
    if result_shape != expected_shape:
        msg = ("Function {func!r} returned the wrong shape.\n"
               "Result has shape: {res}\n"
               "Expected shape: {expect}".format(func=func,
                                                 res=result.shape,
                                                 expect=expected_shape))
        raise ValueError(msg)
    self._update_ctx(result)
    return self
def apply(self, func, axis=0, subset=None, **kwargs):
    """
    Apply a function column-wise, row-wise, or table-wise, updating the
    HTML representation with the result.

    Parameters
    ----------
    func : function
        Takes a Series or DataFrame (depending on ``axis``) and returns
        an object of the same shape.  With ``axis=None`` it must return
        a DataFrame with identical index and column labels.
    axis : int, str or None
        Apply per column (``axis=0`` or ``'index'``), per row
        (``axis=1`` or ``'columns'``), or to the entire DataFrame at
        once (``axis=None``).
    subset : IndexSlice
        A valid indexer to limit ``data`` to *before* applying the
        function. Consider using a pandas.IndexSlice.
    kwargs : dict
        Passed along to ``func``.

    Returns
    -------
    self : Styler

    Notes
    -----
    The output shape of ``func`` should match the input:
    ``func(x).shape == x.shape``.  Similar to ``DataFrame.apply``,
    except that ``axis=None`` applies the function to the entire
    DataFrame at once rather than column- or row-wise.

    Examples
    --------
    >>> def highlight_max(x):
    ...     return ['background-color: yellow' if v == x.max() else ''
    ...             for v in x]
    >>> df = pd.DataFrame(np.random.randn(5, 2))
    >>> df.style.apply(highlight_max)
    """
    # Defer execution: queue the call for _compute / render time.
    queued_args = (func, axis, subset)
    self._todo.append(
        (lambda styler: getattr(styler, '_apply'), queued_args, kwargs))
    return self
def _applymap(self, func, subset=None, **kwargs):
    """Elementwise worker for ``applymap``; merges results into ``ctx``."""
    bound = partial(func, **kwargs)  # applymap doesn't take kwargs?
    region = pd.IndexSlice[:] if subset is None else subset
    region = _non_reducing_slice(region)
    self._update_ctx(self.data.loc[region].applymap(bound))
    return self
def applymap(self, func, subset=None, **kwargs):
    """
    Apply a function elementwise, updating the HTML representation with
    the result.

    Parameters
    ----------
    func : function
        Takes a scalar and returns a scalar (a CSS string).
    subset : IndexSlice
        A valid indexer to limit ``data`` to *before* applying the
        function. Consider using a pandas.IndexSlice.
    kwargs : dict
        Passed along to ``func``.

    Returns
    -------
    self : Styler

    See Also
    --------
    Styler.where
    """
    # Defer execution: queue the call for _compute / render time.
    queued = (lambda styler: getattr(styler, '_applymap'),
              (func, subset), kwargs)
    self._todo.append(queued)
    return self
def where(self, cond, value, other=None, subset=None, **kwargs):
    """
    Apply a style elementwise, selecting between two CSS strings based
    on the boolean returned by ``cond``.

    .. versionadded:: 0.21.0

    Parameters
    ----------
    cond : callable
        ``cond`` should take a scalar and return a boolean.
    value : str
        Applied when ``cond`` returns true.
    other : str
        Applied when ``cond`` returns false; defaults to ''.
    subset : IndexSlice
        A valid indexer to limit ``data`` to *before* applying the
        function. Consider using a pandas.IndexSlice.
    kwargs : dict
        Passed along to ``cond``.

    Returns
    -------
    self : Styler

    See Also
    --------
    Styler.applymap
    """
    if other is None:
        other = ''
    # BUG FIX: forward kwargs to `cond` (as documented) rather than to
    # applymap — previously _applymap partial-applied them onto the
    # one-argument lambda, raising TypeError whenever kwargs were given.
    return self.applymap(
        lambda val: value if cond(val, **kwargs) else other,
        subset=subset)
def set_precision(self, precision):
    """
    Set the precision used to render numeric values.

    Parameters
    ----------
    precision : int

    Returns
    -------
    self : Styler
    """
    self.precision = precision
    return self
def set_table_attributes(self, attributes):
    """
    Set the table attributes — the items that show up in the opening
    ``<table>`` tag in addition to the automatic (by default) id.

    Parameters
    ----------
    attributes : string

    Returns
    -------
    self : Styler

    Examples
    --------
    >>> df = pd.DataFrame(np.random.randn(10, 4))
    >>> df.style.set_table_attributes('class="pure-table"')
    # ... <table class="pure-table"> ...
    """
    self.table_attributes = attributes
    return self
def export(self):
    """
    Export the queued style functions applied to the current Styler so
    they can be replayed on another one via ``Styler.use``.

    Returns
    -------
    styles : list

    See Also
    --------
    Styler.use
    """
    return self._todo
def use(self, styles):
    """
    Append styles to the current Styler, typically taken from another
    one via ``Styler.export``.

    Parameters
    ----------
    styles : list
        List of style functions.

    Returns
    -------
    self : Styler

    See Also
    --------
    Styler.export
    """
    self._todo += styles
    return self
def set_uuid(self, uuid):
    """
    Set the uuid for a Styler.

    Parameters
    ----------
    uuid : str

    Returns
    -------
    self : Styler
    """
    self.uuid = uuid
    return self
def set_caption(self, caption):
    """
    Set the caption on a Styler.

    Parameters
    ----------
    caption : str

    Returns
    -------
    self : Styler
    """
    self.caption = caption
    return self
def set_table_styles(self, table_styles):
    """
    Set the table styles on a Styler; these are placed in a ``<style>``
    tag before the generated HTML table.

    Parameters
    ----------
    table_styles : list
        Each entry is a dictionary with ``selector`` and ``props`` keys:
        ``selector`` is a CSS selector the style applies to
        (automatically prefixed by the table's UUID) and ``props`` is a
        list of ``(attribute, value)`` tuples.

    Returns
    -------
    self : Styler

    Examples
    --------
    >>> df = pd.DataFrame(np.random.randn(10, 4))
    >>> df.style.set_table_styles(
    ...     [{'selector': 'tr:hover',
    ...       'props': [('background-color', 'yellow')]}]
    ... )
    """
    self.table_styles = table_styles
    return self
def hide_index(self):
    """
    Hide any indices from rendering.

    .. versionadded:: 0.23.0

    Returns
    -------
    self : Styler
    """
    self.hidden_index = True
    return self
def hide_columns(self, subset):
    """
    Hide columns from rendering.

    .. versionadded:: 0.23.0

    Parameters
    ----------
    subset : IndexSlice
        An argument to ``DataFrame.loc`` that identifies which columns
        are hidden.

    Returns
    -------
    self : Styler
    """
    hidden_df = self.data.loc[_non_reducing_slice(subset)]
    # Record positional indices; rendering skips these columns.
    self.hidden_columns = self.columns.get_indexer_for(hidden_df.columns)
    return self
# -----------------------------------------------------------------------
# A collection of "builtin" styles
# -----------------------------------------------------------------------
@staticmethod
def _highlight_null(v, null_color):
    """Per-cell CSS: shade the background when the value is missing."""
    if pd.isna(v):
        return 'background-color: {color}'.format(color=null_color)
    return ''
def highlight_null(self, null_color='red'):
    """
    Shade the background ``null_color`` for missing values.

    Parameters
    ----------
    null_color : str

    Returns
    -------
    self : Styler
    """
    self.applymap(self._highlight_null, null_color=null_color)
    return self
def background_gradient(self, cmap='PuBu', low=0, high=0, axis=0,
                        subset=None, text_color_threshold=0.408):
    """
    Color the background in a gradient according to the data in each
    column (optionally row).  Requires matplotlib.

    Parameters
    ----------
    cmap : str or colormap
        Matplotlib colormap.
    low, high : float
        Compress the range by these values.
    axis : int or str
        1 or 'columns' for columnwise, 0 or 'index' for rowwise.
    subset : IndexSlice
        A valid slice for ``data`` to limit the style application to.
    text_color_threshold : float or int
        Luminance threshold for determining text color, from 0 to 1:
        0 = all text dark colored, 1 = all text light colored.

        .. versionadded:: 0.24.0

    Returns
    -------
    self : Styler

    Raises
    ------
    ValueError
        If ``text_color_threshold`` is not a value from 0 to 1.

    Notes
    -----
    Set ``text_color_threshold`` or tune ``low`` and ``high`` to keep
    the text legible by not using the entire range of the color map.
    The data range is extended by ``low * (x.max() - x.min())`` and
    ``high * (x.max() - x.min())`` before normalizing.
    """
    # Only numeric cells can be mapped onto the colormap.
    region = _non_reducing_slice(_maybe_numeric_slice(self.data, subset))
    self.apply(self._background_gradient, cmap=cmap, subset=region,
               axis=axis, low=low, high=high,
               text_color_threshold=text_color_threshold)
    return self
@staticmethod
def _background_gradient(s, cmap='PuBu', low=0, high=0,
                         text_color_threshold=0.408):
    """
    Color background in a range according to the data.

    Returns per-cell CSS setting both the background color (from the
    colormap) and a contrasting text color chosen by relative luminance.
    """
    if (not isinstance(text_color_threshold, (float, int)) or
            not 0 <= text_color_threshold <= 1):
        msg = "`text_color_threshold` must be a value from 0 to 1."
        raise ValueError(msg)

    with _mpl(Styler.background_gradient) as (plt, colors):
        smin = s.values.min()
        smax = s.values.max()
        rng = smax - smin
        # extend lower / upper bounds, compresses color range
        norm = colors.Normalize(smin - (rng * low), smax + (rng * high))
        # matplotlib colors.Normalize modifies inplace?
        # https://github.com/matplotlib/matplotlib/issues/5427
        rgbas = plt.cm.get_cmap(cmap)(norm(s.values))

        def relative_luminance(rgba):
            """
            Calculate relative luminance of a color.

            The calculation adheres to the W3C standards
            (https://www.w3.org/WAI/GL/wiki/Relative_luminance)

            Parameters
            ----------
            color : rgb or rgba tuple

            Returns
            -------
            float
                The relative luminance as a value from 0 to 1
            """
            r, g, b = (
                # BUG FIX: gamma expansion is ((x + 0.055) / 1.055) ** 2.4
                # per W3C; previously the exponent bound only to 1.055
                # (operator precedence), skewing all luminance values.
                x / 12.92 if x <= 0.03928 else ((x + 0.055) / 1.055) ** 2.4
                for x in rgba[:3]
            )
            return 0.2126 * r + 0.7152 * g + 0.0722 * b

        def css(rgba):
            dark = relative_luminance(rgba) < text_color_threshold
            text_color = '#f1f1f1' if dark else '#000000'
            return 'background-color: {b};color: {c};'.format(
                b=colors.rgb2hex(rgba), c=text_color
            )

        if s.ndim == 1:
            return [css(rgba) for rgba in rgbas]
        else:
            return pd.DataFrame(
                [[css(rgba) for rgba in row] for row in rgbas],
                index=s.index, columns=s.columns
            )
def set_properties(self, subset=None, **kwargs):
    """
    Convenience method for setting one or more non-data-dependent
    properties on each cell.

    Parameters
    ----------
    subset : IndexSlice
        A valid slice for ``data`` to limit the style application to.
    kwargs : dict
        property: value pairs to be set for each cell.

    Returns
    -------
    self : Styler

    Examples
    --------
    >>> df = pd.DataFrame(np.random.randn(10, 4))
    >>> df.style.set_properties(color="white", align="right")
    >>> df.style.set_properties(**{'background-color': 'yellow'})
    """
    css = ';'.join('{p}: {v}'.format(p=p, v=v) for p, v in kwargs.items())
    # Same constant CSS string for every cell, regardless of its value.
    return self.applymap(lambda _: css, subset=subset)
@staticmethod
def _bar(s, align, colors, width=100, vmin=None, vmax=None):
    """
    Draw bar chart in dataframe cells.

    Returns per-cell CSS linear-gradient strings; ``s`` may be a Series
    (one row/column) or a DataFrame (when applied with axis=None).
    """
    # Get input value range.
    smin = s.min() if vmin is None else vmin
    if isinstance(smin, ABCSeries):
        smin = smin.min()
    smax = s.max() if vmax is None else vmax
    if isinstance(smax, ABCSeries):
        smax = smax.max()
    if align == 'mid':
        smin = min(0, smin)
        smax = max(0, smax)
    elif align == 'zero':
        # For "zero" mode, we want the range to be symmetrical around zero.
        smax = max(abs(smin), abs(smax))
        smin = -smax
    # Transform to percent-range of linear-gradient.
    # The +1e-12 guards against division by zero when smin == smax.
    normed = width * (s.values - smin) / (smax - smin + 1e-12)
    zero = -width * smin / (smax - smin + 1e-12)

    def css_bar(start, end, color):
        """
        Generate CSS code to draw a bar from start to end.
        """
        css = 'width: 10em; height: 80%;'
        if end > start:
            css += 'background: linear-gradient(90deg,'
            if start > 0:
                css += ' transparent {s:.1f}%, {c} {s:.1f}%, '.format(
                    s=start, c=color
                )
            css += '{c} {e:.1f}%, transparent {e:.1f}%)'.format(
                e=min(end, width), c=color,
            )
        return css

    def css(x):
        if pd.isna(x):
            return ''

        # avoid deprecated indexing `colors[x > zero]`
        color = colors[1] if x > zero else colors[0]

        if align == 'left':
            return css_bar(0, x, color)
        else:
            return css_bar(min(x, zero), max(x, zero), color)

    if s.ndim == 1:
        return [css(x) for x in normed]
    else:
        return pd.DataFrame(
            [[css(x) for x in row] for row in normed],
            index=s.index, columns=s.columns
        )
def bar(self, subset=None, axis=0, color='#d65f5f', width=100,
        align='left', vmin=None, vmax=None):
    """
    Draw bar chart in the cell backgrounds.

    Parameters
    ----------
    subset : IndexSlice, optional
        A valid slice for `data` to limit the style application to.
    axis : int, str or None, default 0
        Apply to each column (`axis=0` or `'index'`), to each row
        (`axis=1` or `'columns'`), or to the entire DataFrame at once
        with `axis=None`.
    color : str or 2-tuple/list
        If a str is passed, the color is the same for both negative and
        positive numbers. If 2-tuple/list is used, the first element is
        color_negative and the second color_positive
        (eg: ['#d65f5f', '#5fba7d']).
    width : float, default 100
        A number between 0 or 100. The largest value will cover `width`
        percent of the cell's width.
    align : {'left', 'zero',' mid'}, default 'left'
        How to align the bars with the cells.

        - 'left' : the min value starts at the left of the cell.
        - 'zero' : a value of zero is located at the center of the cell.
        - 'mid' : the center of the cell is at (max-min)/2, or if all
          values are negative (positive) the zero is aligned at the
          right (left) of the cell.

        .. versionadded:: 0.20.0

    vmin : float, optional
        Minimum bar value; lower values are clipped to `vmin`.
        When None (default): the minimum value of the data is used.

        .. versionadded:: 0.24.0

    vmax : float, optional
        Maximum bar value; higher values are clipped to `vmax`.
        When None (default): the maximum value of the data is used.

        .. versionadded:: 0.24.0

    Returns
    -------
    self : Styler
    """
    if align not in ('left', 'zero', 'mid'):
        raise ValueError("`align` must be one of {'left', 'zero',' mid'}")

    # Normalize `color` into a [negative, positive] pair.
    if not is_list_like(color):
        colors = [color, color]
    elif len(color) == 1:
        colors = [color[0], color[0]]
    elif len(color) == 2:
        colors = list(color)
    else:
        raise ValueError("`color` must be string or a list-like"
                         " of length 2: [`color_neg`, `color_pos`]"
                         " (eg: color=['#d65f5f', '#5fba7d'])")

    region = _non_reducing_slice(_maybe_numeric_slice(self.data, subset))
    self.apply(self._bar, subset=region, axis=axis,
               align=align, colors=colors, width=width,
               vmin=vmin, vmax=vmax)
    return self
def highlight_max(self, subset=None, color='yellow', axis=0):
    """
    Highlight the maximum by shading the background.

    Parameters
    ----------
    subset : IndexSlice, default None
        A valid slice for ``data`` to limit the style application to.
    color : str, default 'yellow'
    axis : int, str, or None; default 0
        0 or 'index' for columnwise (default), 1 or 'columns' for
        rowwise, or ``None`` for tablewise.

    Returns
    -------
    self : Styler
    """
    return self._highlight_handler(subset=subset, color=color,
                                   axis=axis, max_=True)
def highlight_min(self, subset=None, color='yellow', axis=0):
    """
    Highlight the minimum by shading the background.

    Parameters
    ----------
    subset : IndexSlice, default None
        A valid slice for ``data`` to limit the style application to.
    color : str, default 'yellow'
    axis : int, str, or None; default 0
        0 or 'index' for columnwise (default), 1 or 'columns' for
        rowwise, or ``None`` for tablewise.

    Returns
    -------
    self : Styler
    """
    return self._highlight_handler(subset=subset, color=color,
                                   axis=axis, max_=False)
def _highlight_handler(self, subset=None, color='yellow', axis=None,
                       max_=True):
    """Shared driver for ``highlight_max`` / ``highlight_min``."""
    region = _non_reducing_slice(_maybe_numeric_slice(self.data, subset))
    self.apply(self._highlight_extrema, color=color, axis=axis,
               subset=region, max_=max_)
    return self
@staticmethod
def _highlight_extrema(data, color='yellow', max_=True):
    """
    Highlight the min or max in a Series or DataFrame.
    """
    attr = 'background-color: {0}'.format(color)
    if data.ndim == 1:
        # Series from .apply with axis=0/1.
        target = data.max() if max_ else data.min()
        return [attr if matched else '' for matched in data == target]
    # DataFrame from .apply with axis=None: compare to the global extremum.
    target = data.max().max() if max_ else data.min().min()
    return pd.DataFrame(np.where(data == target, attr, ''),
                        index=data.index, columns=data.columns)
@classmethod
def from_custom_template(cls, searchpath, name):
    """
    Factory function creating a subclass of ``Styler`` wired to a custom
    Jinja template and environment.

    Parameters
    ----------
    searchpath : str or list
        Path or paths of directories containing the templates.
    name : str
        Name of your custom template to use for rendering.

    Returns
    -------
    MyStyler : subclass of Styler
        Has the correct ``env`` and ``template`` class attributes set.
    """
    # Fall back to the stock loader when the custom path lacks a template.
    loader = ChoiceLoader([FileSystemLoader(searchpath), cls.loader])

    class MyStyler(cls):
        env = Environment(loader=loader)
        template = env.get_template(name)

    return MyStyler
def pipe(self, func, *args, **kwargs):
    """
    Apply ``func(self, *args, **kwargs)``, and return the result.

    .. versionadded:: 0.24.0

    Parameters
    ----------
    func : function
        Function to apply to the Styler.  Alternatively, a
        ``(callable, keyword)`` tuple where ``keyword`` is a string
        indicating the keyword of ``callable`` that expects the Styler.
    *args, **kwargs :
        Arguments passed to `func`.

    Returns
    -------
    object :
        The value returned by ``func``.

    See Also
    --------
    DataFrame.pipe : Analogous method for DataFrame.
    Styler.apply : Apply a function row-wise, column-wise, or table-wise
        to modify the dataframe's styling.

    Notes
    -----
    Like :meth:`DataFrame.pipe`, this lets user-defined functions that
    take a styler (plus other parameters) and return a styler be chained
    fluently with the built-in Styler interface, instead of nesting
    calls:

    .. code-block:: python

        (df.style.set_precision(3)
           .pipe(g, arg1=a)
           .pipe(f, arg2=b, arg3=c))

    Examples
    --------
    >>> def format_conversion(styler):
    ...     return (styler.set_properties(**{'text-align': 'right'})
    ...                   .format({'conversion': '{:.1%}'}))
    >>> df = pd.DataFrame({'trial': list(range(5)),
    ...                    'conversion': [0.75, 0.85, np.nan, 0.7, 0.72]})
    >>> (df.style
    ...    .highlight_min(subset=['conversion'], color='yellow')
    ...    .pipe(format_conversion)
    ...    .set_caption("Results with minimum conversion highlighted."))
    """
    # Delegate to the shared helper so (callable, keyword) tuples work.
    return com._pipe(self, func, *args, **kwargs)
def _is_visible(idx_row, idx_col, lengths):
    """
    Index -> {(idx_row, idx_col): bool}).

    NOTE(review): the membership key is ``(idx_col, idx_row)`` —
    reversed relative to the parameter order.  Callers appear to rely
    on this ordering; confirm before "fixing".
    """
    return (idx_col, idx_row) in lengths
def _get_level_lengths(index, hidden_elements=None):
    """
    Given an index, find the level length for each element.

    Optional argument is a list of index positions which
    should not be visible.

    Result is a dictionary of (level, inital_position): span
    """
    # index.format with a non-string sentinel marks "sparsified" cells,
    # i.e. positions covered by the span of an earlier label.
    sentinel = object()
    levels = index.format(sparsify=sentinel, adjoin=False, names=False)

    if hidden_elements is None:
        hidden_elements = []

    lengths = {}
    if index.nlevels == 1:
        # Flat index: every non-hidden element spans exactly one cell.
        for i, value in enumerate(levels):
            if(i not in hidden_elements):
                lengths[(0, i)] = 1
        return lengths

    for i, lvl in enumerate(levels):
        for j, row in enumerate(lvl):
            if not get_option('display.multi_sparse'):
                # Sparsification disabled: each cell stands alone.
                lengths[(i, j)] = 1
            elif (row != sentinel) and (j not in hidden_elements):
                # Start of a new visible label span.
                last_label = j
                lengths[(i, last_label)] = 1
            elif (row != sentinel):
                # even if its hidden, keep track of it in case
                # length >1 and later elements are visible
                last_label = j
                lengths[(i, last_label)] = 0
            elif(j not in hidden_elements):
                # Continuation (sentinel) cell: extend the current span.
                lengths[(i, last_label)] += 1

    # Spans reduced to zero (i.e. fully hidden) are dropped entirely.
    non_zero_lengths = {
        element: length for element, length in lengths.items() if length >= 1}

    return non_zero_lengths
def _maybe_wrap_formatter(formatter):
    """Coerce ``formatter`` into a callable.

    Format strings are wrapped as ``formatter.format(x)``; callables
    pass through unchanged; anything else raises TypeError.
    """
    if is_string_like(formatter):
        return lambda value: formatter.format(value)
    if callable(formatter):
        return formatter
    raise TypeError("Expected a template string or callable, got {formatter} "
                    "instead".format(formatter=formatter))
| 34.005848 | 79 | 0.529923 | [
"BSD-3-Clause"
] | harunpehlivan/pandas | pandas/io/formats/style.py | 46,520 | Python |
import datetime
import json
import logging
import socket
LOGGER = logging.getLogger(__name__)
LOGGER.setLevel(logging.DEBUG)
class Base(dict):
    """Base metric class.

    A dict subclass carrying the common metric fields (name,
    environment, zone, timestamp); ``timestamp`` defaults to the current
    time in ISO-8601 format.
    """

    def __init__(
            self,
            name: str,
            environment: str,
            zone: str,
            timestamp: str = None
    ):
        super().__init__()
        self['name'] = name
        self['environment'] = environment
        self['zone'] = zone
        if timestamp:
            self['timestamp'] = timestamp
        else:
            self['timestamp'] = datetime.datetime.now().isoformat()

    def serialize(self) -> str:
        """Serialize data as a compact JSON string.

        On a non-serializable payload, returns the error message
        instead of raising.
        """
        try:
            return json.dumps(self, separators=(',', ':'))
        except (TypeError, ValueError) as err:
            # BUG FIX: json.dumps raises TypeError/ValueError, never
            # JSONDecodeError (a *decoding* error), so the previous
            # `except json.JSONDecodeError: return err.msg` could never
            # trigger — and `err.msg` would itself AttributeError.
            return str(err)

    def __bytes__(self) -> bytes:
        """Returns bytes interpretation of data (newline-terminated UTF-8)."""
        data = self.serialize()
        return ('%s\n' % data).encode('utf8')
class Metric(Base):
    """Base metric: a named value with a metric type (default 'ms')."""

    def __init__(
            self,
            name: str,
            value: int,
            environment: str = None,
            zone: str = None,
            **kwargs
    ):
        super().__init__(name=name, environment=environment, zone=zone)
        self['__type'] = 'metric'
        self['metric_type'] = kwargs.get('metric_type', 'ms')
        self['value'] = value
        # Fold any extra fields (tags, dimensions, ...) into the payload.
        self.update(**kwargs)
def get_message(msg):
    """Get metric instance from dictionary or string.

    Returns None when the input cannot be parsed or does not describe a
    known message type.
    """
    if not isinstance(msg, dict):
        try:
            # BUG FIX: the `encoding` kwarg was removed from json.loads
            # in Python 3.9 and raised TypeError there; plain loads
            # handles str/bytes input directly.
            msg = json.loads(msg)
        except (json.JSONDecodeError, TypeError):
            return None
    # Default of None avoids KeyError on payloads without a '__type'.
    typ = msg.pop('__type', None)
    if typ == 'metric':
        return Metric(**msg)
    return None
def push_metric(data: Metric, message_socket_address):
    """Serialize ``data`` and push it to the metrics unix socket.

    Returns 'success' on delivery; logs and re-raises on failure.
    """
    with socket.socket(socket.AF_UNIX, socket.SOCK_STREAM) as conn:
        try:
            conn.connect(message_socket_address)
            payload = '%s\n' % data.serialize()
            conn.sendall(payload.encode('utf8'))
        except socket.error:
            LOGGER.exception('Error establishing connection to socket')
            raise
        except Exception:
            LOGGER.exception('Error writing message to socket')
            raise
        else:
            return 'success'
| 26.021978 | 71 | 0.559966 | [
"Apache-2.0"
] | opentelekomcloud-infra/csm-test-utils | csm_test_utils/message.py | 2,368 | Python |
# -*- coding: utf-8 -*-
import os
import shutil
def make_empty_folder(folder_path: str):
    """Ensure ``folder_path`` exists as an empty directory.

    Any existing directory tree or plain file at the path is removed
    first, then a fresh directory is created.
    """
    if os.path.exists(folder_path):
        # Pick the right removal primitive for what occupies the path.
        remover = shutil.rmtree if os.path.isdir(folder_path) else os.remove
        remover(folder_path)
    os.mkdir(folder_path)
def copy_files(from_path: str, to_path: str, extension: str):
    """Copy every file in ``from_path`` whose name ends with
    ``extension`` into ``to_path``."""
    for entry in os.listdir(from_path):
        if entry.endswith(extension):
            shutil.copy(from_path + '/' + entry, to_path)
def append_src(to_file, from_file: str):
    """Append the full contents of ``from_file`` to the already-open
    ``to_file`` object."""
    with open(from_file, 'r') as source:
        # Stream line by line rather than slurping the whole file.
        for chunk in source:
            to_file.write(chunk)
if __name__ == '__main__':
    # Submission builder: collapse a multi-file Processing sketch into a
    # single folder holding one combined .pde file plus its image assets.
    project_name = 'viscomp_final'
    source_folder_name = 'JellyfishAquarium'
    src_folder = './' + source_folder_name
    out_folder = './' + project_name
    make_empty_folder(out_folder)
    # Copy the sketch's image assets alongside the combined source.
    for extension in ['png', 'jpg']:
        copy_files(src_folder, out_folder, extension)
    # combine all pde files into viscomp_final.pde
    with open(f'{out_folder}/{project_name}.pde', 'w') as f:
        # The main sketch file must come first in the combined output.
        append_src(f, f'{src_folder}/{source_folder_name}.pde')
        files = os.listdir(src_folder)
        for file in files:
            if file.endswith('.pde') and file != f'{source_folder_name}.pde':
                f.write('\n\n')
                append_src(f, src_folder + '/' + file)
| 28.375 | 77 | 0.618943 | [
"MIT"
] | TrpFrog/jellyfish-aquarium | submission_builder.py | 1,362 | Python |
Subsets and Splits
No saved queries yet
Save your SQL queries to embed, download, and access them later. Queries will appear here once saved.