ext (stringclasses, 9 values) | sha (stringlengths, 40) | content (stringlengths, 3 to 1.04M)
---|---|---
py | 1a43b975159e0f0928e2ada457b6816d0788329e | # coding: utf-8
import pprint
import re
import six
from huaweicloudsdkcore.sdk_response import SdkResponse
class CreateVpcResponse(SdkResponse):
"""
Attributes:
openapi_types (dict): The key is attribute name
and the value is attribute type.
attribute_map (dict): The key is attribute name
and the value is json key in definition.
"""
sensitive_list = []
openapi_types = {
'vpc': 'Vpc'
}
attribute_map = {
'vpc': 'vpc'
}
def __init__(self, vpc=None):
"""CreateVpcResponse - a model defined in huaweicloud sdk"""
super(CreateVpcResponse, self).__init__()
self._vpc = None
self.discriminator = None
if vpc is not None:
self.vpc = vpc
@property
def vpc(self):
"""Gets the vpc of this CreateVpcResponse.
:return: The vpc of this CreateVpcResponse.
:rtype: Vpc
"""
return self._vpc
@vpc.setter
def vpc(self, vpc):
"""Sets the vpc of this CreateVpcResponse.
:param vpc: The vpc of this CreateVpcResponse.
:type: Vpc
"""
self._vpc = vpc
def to_dict(self):
"""Returns the model properties as a dict"""
result = {}
for attr, _ in six.iteritems(self.openapi_types):
value = getattr(self, attr)
if isinstance(value, list):
result[attr] = list(map(
lambda x: x.to_dict() if hasattr(x, "to_dict") else x,
value
))
elif hasattr(value, "to_dict"):
result[attr] = value.to_dict()
elif isinstance(value, dict):
result[attr] = dict(map(
lambda item: (item[0], item[1].to_dict())
if hasattr(item[1], "to_dict") else item,
value.items()
))
else:
if attr in self.sensitive_list:
result[attr] = "****"
else:
result[attr] = value
return result
def to_str(self):
"""Returns the string representation of the model"""
return pprint.pformat(self.to_dict())
def __repr__(self):
"""For `print` and `pprint`"""
return self.to_str()
def __eq__(self, other):
"""Returns true if both objects are equal"""
if not isinstance(other, CreateVpcResponse):
return False
return self.__dict__ == other.__dict__
def __ne__(self, other):
"""Returns true if both objects are not equal"""
return not self == other
|
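A minimal usage sketch for the response model above, assuming the `huaweicloudsdkcore` package is importable and using a hypothetical stand-in for the SDK's `Vpc` class (which lives in a separate module):

```python
# FakeVpc is an illustrative stand-in, not part of the SDK.
class FakeVpc:
    def to_dict(self):
        return {"id": "vpc-123", "name": "demo"}

resp = CreateVpcResponse(vpc=FakeVpc())
print(resp.to_dict())   # {'vpc': {'id': 'vpc-123', 'name': 'demo'}}
print(resp.to_str())    # pretty-printed form of the same dict
```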
py | 1a43ba67475855fe716da039b50f784b45e30626 | import json
from django.shortcuts import get_object_or_404, render
from django.urls import reverse
from wagtail.admin.forms.search import SearchForm
from wagtail.admin.modal_workflow import render_modal_workflow
from wagtail.admin.utils import PermissionPolicyChecker
from wagtail.core import hooks
from wagtail.core.models import Collection
from wagtail.documents.forms import get_document_form
from wagtail.documents.models import get_document_model
from wagtail.documents.permissions import permission_policy
from wagtail.search import index as search_index
from wagtail.utils.pagination import paginate
from django.utils.translation import ugettext as _
# permission_checker = PermissionPolicyChecker(permission_policy)
# def get_document_json(document):
# """
# helper function: given a document, return the json to pass back to the
# chooser panel
# """
# return json.dumps({
# 'id': document.id,
# 'title': document.title,
# 'url': document.url,
# 'filename': document.filename,
# 'edit_link': reverse('wagtaildocs:edit', args=(document.id,)),
# })
# def chooser(request):
# Document = get_document_model()
# if permission_policy.user_has_permission(request.user, 'add'):
# DocumentForm = get_document_form(Document)
# uploadform = DocumentForm(user=request.user)
# else:
# uploadform = None
# documents = Document.objects.all()
# # allow hooks to modify the queryset
# for hook in hooks.get_hooks('construct_document_chooser_queryset'):
# documents = hook(documents, request)
# q = None
# if 'q' in request.GET or 'p' in request.GET or 'collection_id' in request.GET:
# collection_id = request.GET.get('collection_id')
# if collection_id:
# documents = documents.filter(collection=collection_id)
# searchform = SearchForm(request.GET)
# if searchform.is_valid():
# q = searchform.cleaned_data['q']
# documents = documents.search(q)
# is_searching = True
# else:
# documents = documents.order_by('-created_at')
# is_searching = False
# # Pagination
# paginator, documents = paginate(request, documents, per_page=10)
# return render(request, "wagtaildocs/chooser/results.html", {
# 'documents': documents,
# 'query_string': q,
# 'is_searching': is_searching,
# })
# else:
# searchform = SearchForm()
# collections = Collection.objects.all()
# if len(collections) < 2:
# collections = None
# documents = documents.order_by('-created_at')
# paginator, documents = paginate(request, documents, per_page=10)
# return render_modal_workflow(request, 'wagtaildocs/chooser/chooser.html', 'wagtaildocs/chooser/chooser.js', {
# 'documents': documents,
# 'uploadform': uploadform,
# 'searchform': searchform,
# 'collections': collections,
# 'is_searching': False,
# })
# def document_chosen(request, document_id):
# document = get_object_or_404(get_document_model(), id=document_id)
# return render_modal_workflow(
# request, None, 'wagtaildocs/chooser/document_chosen.js',
# {'document_json': get_document_json(document)}
# )
# @permission_checker.require('add')
# def chooser_upload(request):
# Document = get_document_model()
# DocumentForm = get_document_form(Document)
# if request.method == 'POST':
# document = Document(uploaded_by_user=request.user)
# form = DocumentForm(request.POST, request.FILES, instance=document, user=request.user)
# if form.is_valid():
# form.save()
# # Reindex the document to make sure all tags are indexed
# search_index.insert_or_update_object(document)
# return render_modal_workflow(
# request, None, 'wagtaildocs/chooser/document_chosen.js',
# {'document_json': get_document_json(document)}
# )
# else:
# form = DocumentForm(user=request.user)
# documents = Document.objects.order_by('title')
# return render_modal_workflow(
# request, 'wagtaildocs/chooser/chooser.html', 'wagtaildocs/chooser/chooser.js',
# {'documents': documents, 'uploadform': form}
# )
########
permission_checker = PermissionPolicyChecker(permission_policy)
def get_chooser_context():
"""construct context variables needed by the chooser JS"""
return {
'step': 'chooser',
'error_label': _("Server Error"),
'error_message': _("Report this error to your webmaster with the following information:"),
'tag_autocomplete_url': reverse('wagtailadmin_tag_autocomplete'),
}
def get_document_result_data(document):
"""
helper function: given a document, return the json data to pass back to the
chooser panel
"""
return {
'id': document.id,
'title': document.title,
'url': document.url,
'filename': document.filename,
'edit_link': reverse('wagtaildocs:edit', args=(document.id,)),
}
def chooser(request):
Document = get_document_model()
if permission_policy.user_has_permission(request.user, 'add'):
DocumentForm = get_document_form(Document)
uploadform = DocumentForm(user=request.user)
else:
uploadform = None
documents = Document.objects.all()
# allow hooks to modify the queryset
for hook in hooks.get_hooks('construct_document_chooser_queryset'):
documents = hook(documents, request)
q = None
if 'q' in request.GET or 'p' in request.GET or 'collection_id' in request.GET:
collection_id = request.GET.get('collection_id')
if collection_id:
documents = documents.filter(collection=collection_id)
documents_exist = documents.exists()
searchform = SearchForm(request.GET)
if searchform.is_valid():
q = searchform.cleaned_data['q']
documents = documents.search(q)
is_searching = True
else:
documents = documents.order_by('-created_at')
is_searching = False
# Pagination
paginator, documents = paginate(request, documents, per_page=10)
return render(request, "wagtaildocs/chooser/results.html", {
'documents': documents,
'documents_exist': documents_exist,
'uploadform': uploadform,
'query_string': q,
'is_searching': is_searching,
'collection_id': collection_id,
})
else:
searchform = SearchForm()
collections = Collection.objects.all()
if len(collections) < 2:
collections = None
else:
collections = Collection.order_for_display(collections)
documents = documents.order_by('-created_at')
documents_exist = documents.exists()
paginator, documents = paginate(request, documents, per_page=10)
return render_modal_workflow(request, 'wagtaildocs/chooser/chooser.html', None, {
'documents': documents,
'documents_exist': documents_exist,
'uploadform': uploadform,
'searchform': searchform,
'collections': collections,
'is_searching': False,
}, json_data=get_chooser_context())
def document_chosen(request, document_id):
document = get_object_or_404(get_document_model(), id=document_id)
return render_modal_workflow(
request, None, None,
None, json_data={'step': 'document_chosen', 'result': get_document_result_data(document)}
)
@permission_checker.require('add')
def chooser_upload(request):
Document = get_document_model()
DocumentForm = get_document_form(Document)
if request.method == 'POST':
document = Document(uploaded_by_user=request.user)
form = DocumentForm(request.POST, request.FILES, instance=document, user=request.user)
if form.is_valid():
document.file_size = document.file.size
# Set new document file hash
document.file.seek(0)
document._set_file_hash(document.file.read())
document.file.seek(0)
form.save()
# Reindex the document to make sure all tags are indexed
search_index.insert_or_update_object(document)
return render_modal_workflow(
request, None, None,
None, json_data={'step': 'document_chosen', 'result': get_document_result_data(document)}
)
else:
form = DocumentForm(user=request.user)
documents = Document.objects.order_by('title')
return render_modal_workflow(
request, 'wagtaildocs/chooser/chooser.html', None,
{'documents': documents, 'uploadform': form},
json_data=get_chooser_context()
)
|
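The chooser view above pipes its queryset through every `construct_document_chooser_queryset` hook, so a project can narrow what the chooser offers. A hedged sketch of registering such a hook (the filter condition is illustrative only):

```python
from wagtail.core import hooks

@hooks.register('construct_document_chooser_queryset')
def show_only_own_documents(documents, request):
    # Limit the document chooser to documents uploaded by the current user.
    return documents.filter(uploaded_by_user=request.user)
```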
py | 1a43bbc711ca027a82bb9384f6a745dbf10d4111 | # Main script for inducing LTL contrastive explanations from input set of traces
#
# ARGUMENTS:
# -d [input.json] : json file containing the required input (see README)
#
# OUTPUT:
# output.json : json output containing top-10 induced results
#
#
# Written by Joseph Kim
import argparse
from itertools import permutations
import json
from operator import itemgetter
import time
# Local imports
import ltlfunc
import interestingness
#############################################################
def run_ltl_inference(data, output_filepath=None):
# Vocabulary (lowercased, unique)
vocab = [s.lower() for s in data['vocab']]
vocab = list(set(vocab))
# Traces - organize both pos and neg clusters
cluster_A = []
for i, trace in enumerate(data['traces_pos']):
trace = [[v.lower() for v in s] for s in trace] # lowercase
temp = dict()
temp['name'] = 'a' + str(i) # Create a name id
temp['trace'] = tuple(trace) # Use tuple
cluster_A.append(temp)
cluster_B = []
for i, trace in enumerate(data['traces_neg']):
trace = [[v.lower() for v in s] for s in trace]
temp = dict()
temp['name'] = 'b' + str(i)
temp['trace'] = tuple(trace)
cluster_B.append(temp)
# X = (cluster_A, cluster_B) # Evidence
# Parameters
inference = data['params']['inference']
iterations = data['params']['iterations']
conjoin = data['params']['conjoin']
ltl_sample_cnt = data['params'].get('ltl_sample_cnt', 10)
run_reversed_inference = data['params'].get('reversed_inference', True)
verbose = data['params'].get('verbose', False)
# Default inference parameters
params = dict()
params['alpha'] = data['params'].get('alpha', 0.01)
params['beta'] = data['params'].get('beta', 0.01)
params['lambda'] = data['params'].get('lambda', 0.60)
params['epsilon'] = data['params'].get('epsilon', 0.2)
# Print statistics
print('\nSize of vocabulary = %s' % len(vocab))
# Get LTL templates
if 'probs_templates' in data:
probs_templates = data['probs_templates']
else:
probs_templates = None
templates = ltlfunc.getLTLtemplates(user_probs=probs_templates)
# Get permutation tables
perm_table = dict()
perm_table[1] = [list(i) for i in permutations(vocab, 1)]
perm_table[2] = [list(i) for i in permutations(vocab, 2)]
ltl_rundata = [
{'X': (cluster_A, cluster_B), 'reversed': False}
]
if run_reversed_inference:
ltl_rundata.append(
{'X': (cluster_B, cluster_A), 'reversed': True}
)
# Preparing json output
output_inference = list()
for data_X in ltl_rundata:
X = data_X['X']
reversed = data_X['reversed']
cluster_A_inf, cluster_B_inf = X
output = list()
print('*' * 50)
print('Running LTL inference with reversed mode = %s' % str(reversed))
print('Number of positive traces = %s' % len(cluster_A_inf))
print('Number of negative traces = %s' % len(cluster_B_inf))
#######################################################
# RUN INFERENCE
#
# 1) Metropolis-Hastings Sampling
if inference == 'mh':
# Initial guess
ltl_initial = ltlfunc.samplePrior(templates, vocab, perm_table, params['lambda'], conjoin)
print('\n-Running MH..')
print('-Initial guess = ' + ltl_initial['str_friendly'])
st = time.time()
# Preparation
burn_in_mh = 500
num_iter_mh = iterations + burn_in_mh
memory = dict()
cache = dict()
# Run MH Sampler
sampler = ltlfunc.MH_sampler(ltl_initial, X, vocab, templates, params, perm_table, memory, cache, conjoin)
sampler.runMH(num_iter_mh, burn_in_mh, verbose=verbose)
memory = sampler.memory
# Print stats
print('-MH runtime = ' + format(time.time() - st, '.3f'))
print(
'-MH number of accepted samples = %s / %s' % (sampler.num_accepts, len(sampler.accept_reject_history)))
print('-MH number of perfect valid samples = %s / %s' %
(int(sum([j for j in sampler.cscore_history if j == 1])), num_iter_mh - burn_in_mh))
# Rank posterior and print top-10 samples
print('\n-MH Top-{} Specs'.format(ltl_sample_cnt))
ranked = sorted(sampler.posterior_dict, key=sampler.posterior_dict.get, reverse=True)
i = 0
for r in ranked:
cscore = sampler.cscore_dict[r]
cscore1, cscore2 = memory[r]
cscore2 = 1 - cscore2
ltl_meaning = sampler.ltl_str_meanings[r]['meaning']
ltl = sampler.ltl_log[r]
ltl_name = ltl['name']
ltl_props = ltl['props_list'] if conjoin else [ltl['props']]
# Positive set support
positive_support = interestingness.compute_support(cluster_A_inf, ltl_name, ltl_props, vocab)
if positive_support == 0:
continue
i += 1
print('-' * 30)
print(r, end='')
print(' accuracy = %s' % cscore)
print(' (individual scores): cscore1: %f, cscore2: %f' % (cscore1, cscore2))
print(' Interestingness (support) : %f' % positive_support)
print(' Meaning: %s' % ltl_meaning)
if i >= ltl_sample_cnt:
break
# Adding to output
temp = dict()
temp['formula'] = r
temp['meaning'] = sampler.ltl_str_meanings[r]
temp['accuracy'] = cscore
temp['cscores_individual'] = (cscore1, cscore2)
temp['interestingness'] = positive_support
temp['reversed'] = reversed
output.append(temp)
# 2) Brute force search (delimited enumeration)
elif inference == 'brute':
print('\n-Running Brute Force Search')
st = time.time()
if conjoin:
# Brute force random sampler (b/c pre-enumerating everything is intractable)
print('-Collecting delimited set')
ltl_full = []
history = []
num_brute_force = iterations
# Collection loop
while len(history) < num_brute_force:
s = ltlfunc.samplePrior(templates, vocab, perm_table, conjoin=conjoin, doRandom=True)
ltl_str = s['str_friendly']
if ltl_str not in history:
ltl_full.append(s)
history.append(ltl_str)
print('-All delimited set collected. Time spent = ' + format(time.time() - st, '.3f'))
else:
# If not using conjunction, then obtain a full brute force list
ltl_full = []
for template in templates:
results = ltlfunc.instantiateLTLvariablePermutate(template, vocab)
ltl_full += results
print('-Number of total possible LTL specs (no conjunctions): %s' % len(ltl_full))
# Exact inference on collection
memory = dict()
cache = dict()
for ltl_instance in ltl_full:
log_posterior, cscore, memory = ltlfunc.computePosterior(ltl_instance, X, vocab, params, memory, cache,
conjoin)
ltl_instance['posterior'] = log_posterior
ltl_instance['cscore'] = cscore
ltl_instance['cscores_individual'] = memory[ltl_instance['str_friendly']]
print('-Brute force collection and posterior collection complete. Time spent = ' + format(time.time() - st,
'.3f'))
# Rank posterior and print top-10 samples
print('\n-Enumeration Top-{} Specs'.format(ltl_sample_cnt))
ranked = sorted(ltl_full, key=itemgetter('posterior'), reverse=True)
i = 0
for r in ranked:
cscore1, cscore2 = r['cscores_individual']
cscore2 = 1 - cscore2
ltl_name, ltl_props = r['name'], r['props_list']
# Positive set support
positive_support = interestingness.compute_support(cluster_A_inf, ltl_name, ltl_props, vocab)
if positive_support == 0:
continue
i += 1
print('-' * 30)
print(r['str_friendly'], end='')
print(' accuracy = %s' % r['cscore'])
print(' (individual scores): cscore1: %f, cscore2: %f' % (cscore1, cscore2))
print(' Interestingness (support) : %f' % positive_support)
print(' Meaning: %s' % r['str_meaning'])
if i >= ltl_sample_cnt:
break
# Adding to output
temp = dict()
temp['formula'] = r['str_friendly']
temp['meaning'] = r['str_meaning']
temp['accuracy'] = r['cscore']
temp['cscores_individual'] = (cscore1, cscore2)
temp['interestingness'] = positive_support
temp['reversed'] = reversed
output.append(temp)
else:
raise AttributeError("Wrong inference mode specified.")
#######################################################
# END OF INFERENCE
#######################################################
# Append local ltl order inference output to global output list
output_inference.extend(output)
output_inference = sorted(output_inference, key=lambda x: x['accuracy'], reverse=True)[:ltl_sample_cnt]
# Dump output
if output_filepath is not None:
with open(output_filepath, 'w') as f:
json.dump(output_inference, f, indent=4)
return output_inference
if __name__ == '__main__':
parser = argparse.ArgumentParser(description='Main script for LTL contrastive explanation induction')
parser.add_argument('-d', type=str, required=True, help='input JSON file')
parser.add_argument('-o', type=str, required=False, default='output.json', help='output JSON file')
args = parser.parse_args()
# Load input
with open(args.d) as f:
data = json.load(f)
run_ltl_inference(data, args.o)
|
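An illustrative sketch of the input dictionary `run_ltl_inference` expects, reconstructed from the fields it reads above; the exact trace encoding is defined by `ltlfunc` and may differ:

```python
example_data = {
    "vocab": ["a", "b", "c"],
    # Each trace is a sequence of timesteps; each timestep lists the
    # propositions that hold at that step.
    "traces_pos": [[["a"], ["b"], ["c"]]],
    "traces_neg": [[["c"], ["b"], ["a"]]],
    "params": {
        "inference": "mh",      # or "brute"
        "iterations": 1000,
        "conjoin": True,
        "ltl_sample_cnt": 10,
        "verbose": False,
    },
}
# results = run_ltl_inference(example_data, "output.json")
```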
py | 1a43bccbebb4e5b49bd682e5afdb990471aaa9db | # -*- coding: utf-8 -*-
# Copyright (c) 2019 - 2022 Geode-solutions
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in
# all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
# SOFTWARE.
import os, sys, platform
if sys.version_info >= (3,8,0) and platform.system() == "Windows":
for path in [x.strip() for x in os.environ['PATH'].split(';') if x]:
os.add_dll_directory(path)
import opengeode_inspector_py_inspector as inspector
print(dir(inspector))
import opengeode as geode
def check_non_colocation2D():
pointset = geode.PointSet2D.create()
builder = geode.PointSetBuilder2D.create( pointset )
builder.create_vertices( 4 )
builder.set_point( 0, geode.Point2D( [ 0., 2. ] ) )
builder.set_point( 1, geode.Point2D( [ 2., 0. ] ) )
builder.set_point( 2, geode.Point2D( [ 1., 4. ] ) )
builder.set_point( 3, geode.Point2D( [ 3., 3. ] ) )
colocation_inspector = inspector.PointSetColocation2D( pointset )
if colocation_inspector.mesh_has_colocated_points():
raise ValueError( "[Test] PointSet has colocated points when it should have none." )
if not colocation_inspector.nb_colocated_points() == 0:
raise ValueError( "[Test] PointSet has more colocated points than it should." )
if colocation_inspector.colocated_points_groups():
raise ValueError( "[Test] PointSet points are shown colocated whereas they are not." )
def check_colocation2D():
pointset = geode.PointSet2D.create()
builder = geode.PointSetBuilder2D.create( pointset )
builder.create_vertices( 7 )
builder.set_point( 0, geode.Point2D( [ 0., 2. ] ) )
builder.set_point( 1, geode.Point2D( [ 0., 2. ] ) )
builder.set_point( 2, geode.Point2D( [ 0., 0. ] ) )
builder.set_point( 3, geode.Point2D( [ 2., 0. ] ) )
builder.set_point( 4, geode.Point2D( [ 1., 4. ] ) )
builder.set_point( 5, geode.Point2D( [ 2., geode.global_epsilon / 2 ] ) )
builder.set_point( 6, geode.Point2D( [ geode.global_epsilon / 1.1, 2. ] ) )
colocation_inspector = inspector.PointSetColocation2D( pointset )
if not colocation_inspector.mesh_has_colocated_points():
raise ValueError( "[Test] PointSet doesn't have colocated points whereas it should have several." )
if not colocation_inspector.nb_colocated_points() == 3:
raise ValueError( "[Test] PointSet has wrong number of colocated points." )
first_colocated_points_group = [ 0, 1, 6 ]
if not colocation_inspector.colocated_points_groups()[0] == first_colocated_points_group:
raise ValueError( "[Test] PointSet has wrong first colocated points group." )
second_colocated_points_group = [ 3, 5 ]
if not colocation_inspector.colocated_points_groups()[1] == second_colocated_points_group:
raise ValueError( "[Test] PointSet has wrong second colocated points group." )
def check_non_colocation3D():
pointset = geode.PointSet3D.create()
builder = geode.PointSetBuilder3D.create( pointset )
builder.create_vertices( 4 )
builder.set_point( 0, geode.Point3D( [ 0., 2., 0. ] ) )
builder.set_point( 1, geode.Point3D( [ 2., 0., 0.5 ] ) )
builder.set_point( 2, geode.Point3D( [ 1., 4., 1. ] ) )
builder.set_point( 3, geode.Point3D( [ 3., 3., 2. ] ) )
colocation_inspector = inspector.PointSetColocation3D( pointset )
if colocation_inspector.mesh_has_colocated_points():
raise ValueError( "[Test] (3D) PointSet has colocated points when it should have none." )
if not colocation_inspector.nb_colocated_points() == 0:
raise ValueError( "[Test] (3D) PointSet has more colocated points than it should." )
if colocation_inspector.colocated_points_groups():
raise ValueError( "[Test] (3D) PointSet points are shown colocated whereas they are not." )
def check_colocation3D():
pointset = geode.PointSet3D.create()
builder = geode.PointSetBuilder3D.create( pointset )
builder.create_vertices( 7 )
builder.set_point( 0, geode.Point3D( [ 0., 2., 1. ] ) )
builder.set_point( 1, geode.Point3D( [ 0., 2., 1. ] ) )
builder.set_point( 2, geode.Point3D( [ 0., 0., 0. ] ) )
builder.set_point( 3, geode.Point3D( [ 2., 0., 0. ] ) )
builder.set_point( 4, geode.Point3D( [ 1., 4., 3. ] ) )
builder.set_point(
5, geode.Point3D( [ 2., geode.global_epsilon / 2, geode.global_epsilon / 2 ] ) )
builder.set_point( 6, geode.Point3D( [ geode.global_epsilon / 1.1, 2., 1. ] ) )
colocation_inspector = inspector.PointSetColocation3D( pointset )
if not colocation_inspector.mesh_has_colocated_points():
raise ValueError( "[Test] (3D) PointSet doesn't have colocated points whereas it should have several." )
if not colocation_inspector.nb_colocated_points() == 3:
raise ValueError( "[Test] (3D) PointSet has wrong number of colocated points." )
first_colocated_points_group = [ 0, 1, 6 ]
if not colocation_inspector.colocated_points_groups()[0] == first_colocated_points_group:
raise ValueError( "[Test] (3D) PointSet has wrong first colocated points group." )
second_colocated_points_group = [ 3, 5 ]
if not colocation_inspector.colocated_points_groups()[1] == second_colocated_points_group:
raise ValueError( "[Test] (3D) PointSet has wrong second colocated points group." )
if __name__ == '__main__':
check_non_colocation2D()
check_colocation2D()
check_non_colocation3D()
check_colocation3D()
|
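A minimal interactive sketch using only the bindings imported above, assuming the opengeode and opengeode-inspector Python packages are installed:

```python
pointset = geode.PointSet2D.create()
builder = geode.PointSetBuilder2D.create(pointset)
builder.create_vertices(3)
builder.set_point(0, geode.Point2D([0., 0.]))
builder.set_point(1, geode.Point2D([0., 0.]))  # duplicate of vertex 0
builder.set_point(2, geode.Point2D([1., 1.]))
colocation_inspector = inspector.PointSetColocation2D(pointset)
print(colocation_inspector.mesh_has_colocated_points())  # True
print(colocation_inspector.colocated_points_groups())    # a group containing vertices 0 and 1
```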
py | 1a43bcd8281e8d2dffdaf07e95c36994a9463b20 | #!/usr/bin/env python3
# -*- coding: utf-8 -*-
"""
Created on Tue Oct 26 11:08:40 2021
@author: leonl42
Unit test for the lowercase conversion step in the preprocessing pipeline
"""
import unittest
import pandas as pd
from scripts.preprocessing.lower import Lower
from scripts.util import COLUMN_TWEET
class LowerTest(unittest.TestCase):
""""Test the lowercase preprocessing step"""
def setUp(self):
self._df = pd.DataFrame()
# define a test string and its expected result, where every uppercase letter is replaced by its lowercase counterpart
_string_to_test = "I WHISH thath this ##2#E220md STRING becomes LoWerCase ÄÜÖÄ"
self._expected_result = "i whish thath this ##2#e220md string becomes lowercase äüöä"
self._df[COLUMN_TWEET] = [_string_to_test]
self._lower = Lower()
def test_lowercase(self):
"""Test lowercase conversion on a predefined string"""
lowercase_string = self._lower.fit_transform(self._df)
self.assertEqual(lowercase_string[COLUMN_TWEET][0], self._expected_result)
if __name__ == "__main__":
unittest.main() |
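A hedged sketch of using the same preprocessing step outside the test harness, assuming the project's `scripts` package is on the Python path:

```python
import pandas as pd
from scripts.preprocessing.lower import Lower
from scripts.util import COLUMN_TWEET

df = pd.DataFrame({COLUMN_TWEET: ["HeLLo WORLD"]})
print(Lower().fit_transform(df)[COLUMN_TWEET][0])  # -> "hello world"
```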
py | 1a43bd505571a6fa5c887f2028bc1441b282eb8d | import os
import numpy as np
from PIL import Image
from matplotlib import pyplot as plt
class PlotUtils(object):
@staticmethod
def plot_region(df, x0, x1, y0, y1, text=True):
"""
Plot the region of the mapping space bounded by the given x and y limits.
"""
FS = (10, 8)
fig, ax = plt.subplots(figsize=FS)
pts = df[
(df.x >= x0) & (df.x <= x1)
& (df.y >= y0) & (df.y <= y1)
]
ax.scatter(pts.x, pts.y, alpha=.6)
ax.set_xlim(x0, x1)
ax.set_ylim(y0, y1)
if text:
texts = []
for label, x, y in zip(pts.short_file.values, pts.x.values, pts.y.values):
t = ax.annotate(label, xy=(x, y))
texts.append(t)
return ax
@staticmethod
def plot_region_around(df, title, margin=5, **kwargs):
"""
Plot the region of the mapping space in the neighbourhood of the image with
the given name. The margin parameter controls the size of the neighbourhood around the image.
"""
xmargin = ymargin = margin
match = df[df.short_file == title]
assert len(match) == 1
row = match.iloc[0]
return PlotUtils.plot_region(df, row.x - xmargin, row.x + xmargin, row.y - ymargin, row.y + ymargin, **kwargs)
@staticmethod
def plot_images_cluster(df, embs, output_path, width = 4000, height = 3000, max_dim = 100):
"""
Plot the images cluster.
:param df:
:param embs: tsne embeddings, an array of unnormalized 2d points.
:return:
"""
# The variable tsne contains an array of unnormalized 2d points, corresponding to the embedding.
# We normalize the embedding so that lies entirely in the range (0,1).
tx, ty = embs[:, 0], embs[:, 1]
tx = (tx - np.min(tx)) / (np.max(tx) - np.min(tx))
ty = (ty - np.min(ty)) / (np.max(ty) - np.min(ty))
full_image = Image.new('RGBA', (width, height))
# Finally, we will compose a new RGB image where the set of images have been drawn according to the t-SNE
# results. Adjust width and height to set the size in pixels of the full image, and set max_dim to
# the pixel size (on the largest size) to scale images to.
for img, x, y in zip(df['file'].values, tx, ty):
tile = Image.open(img)
rs = max(1, tile.width / max_dim, tile.height / max_dim)
tile = tile.resize((int(tile.width / rs), int(tile.height / rs)), Image.ANTIALIAS)
full_image.paste(tile, (int((width - max_dim) * x), int((height - max_dim) * y)), mask=tile.convert('RGBA'))
full_image.show()
full_image.save(os.path.join(output_path,"cluster.png"),"PNG")
resized_image = full_image.resize((int(width / 5), int(height / 5)))
resized_image.save(os.path.join(output_path, "resized_cluster.png"), "PNG")
|
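An illustrative call sketch; the column names (`file`, `short_file`, `x`, `y`) are taken from the attribute accesses in the methods above, and the file paths are placeholders:

```python
import pandas as pd

df = pd.DataFrame({
    "file": ["imgs/cat.png", "imgs/dog.png"],
    "short_file": ["cat", "dog"],
    "x": [0.0, 3.0],
    "y": [0.0, 4.0],
})
ax = PlotUtils.plot_region(df, x0=-1, x1=5, y0=-1, y1=5)
ax = PlotUtils.plot_region_around(df, "cat", margin=2)
```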
py | 1a43bdacc58278cc72b7194aa2524ced3d5528f4 | import numpy as np
import math
from keras.initializers import VarianceScaling
from keras.models import model_from_json
from keras.models import Sequential, Model
#from keras.engine.training import collect_trainable_weights
from keras.layers import Dense, Flatten, Input, Add, merge, Lambda
from keras.optimizers import Adam
import tensorflow as tf
import keras.backend as K
HIDDEN1_UNITS = 300
HIDDEN2_UNITS = 600
class ActorNetwork(object):
def __init__(self, sess, state_size, action_size, BATCH_SIZE, TAU, LEARNING_RATE):
self.sess = sess
self.BATCH_SIZE = BATCH_SIZE
self.TAU = TAU
self.LEARNING_RATE = LEARNING_RATE
K.set_session(sess)
#Now create the model
self.model , self.weights, self.state = self.create_actor_network(state_size, action_size)
self.target_model, self.target_weights, self.target_state = self.create_actor_network(state_size, action_size)
self.action_gradient = tf.placeholder(tf.float32,[None, action_size])
self.params_grad = tf.gradients(self.model.output, self.weights, -self.action_gradient)
grads = zip(self.params_grad, self.weights)
self.optimize = tf.train.AdamOptimizer(LEARNING_RATE).apply_gradients(grads)
self.sess.run(tf.initialize_all_variables())
def train(self, states, action_grads):
self.sess.run(self.optimize, feed_dict={
self.state: states,
self.action_gradient: action_grads
})
def target_train(self):
actor_weights = self.model.get_weights()
actor_target_weights = self.target_model.get_weights()
for i in range(len(actor_weights)):
actor_target_weights[i] = self.TAU * actor_weights[i] + (1 - self.TAU)* actor_target_weights[i]
self.target_model.set_weights(actor_target_weights)
def create_actor_network(self, state_size,action_dim):
print("Now we build the model")
S = Input(shape=[state_size])
h0 = Dense(HIDDEN1_UNITS, activation='relu')(S)
h1 = Dense(HIDDEN2_UNITS, activation='relu')(h0)
action_list = []
for i in range(action_dim):
action = Dense(1,activation='sigmoid',init=lambda shape: VarianceScaling(scale=1e-4)(shape))(h1)
action_list.append(action)
#V = Add()(action_list) # mode='concat'
V = merge(action_list,mode='concat')
model = Model(output=V, input=S)
return model, model.trainable_weights, S
|
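A hypothetical construction sketch, assuming the legacy Keras/TensorFlow 1.x stack this file targets; the state and action sizes are illustrative:

```python
import numpy as np
import tensorflow as tf

sess = tf.Session()
actor = ActorNetwork(sess, state_size=29, action_size=3,
                     BATCH_SIZE=32, TAU=0.001, LEARNING_RATE=1e-4)
state_batch = np.zeros((32, 29), dtype=np.float32)
actions = actor.model.predict(state_batch)  # shape (32, 3)
actor.target_train()                        # soft-update the target network weights
```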
py | 1a43be9efe45186a4924240ff809153ac74dd20a | from __future__ import absolute_import
import socket
from pickle import loads, dumps
from celery import states
from celery.exceptions import ImproperlyConfigured
from celery.tests.case import (
AppCase, Mock, mock_module, depends_on_current_app,
)
class Object(object):
pass
def install_exceptions(mod):
# py3k: cannot catch exceptions not inheriting from BaseException.
class NotFoundException(Exception):
pass
class TException(Exception):
pass
class InvalidRequestException(Exception):
pass
class UnavailableException(Exception):
pass
class TimedOutException(Exception):
pass
class AllServersUnavailable(Exception):
pass
mod.NotFoundException = NotFoundException
mod.TException = TException
mod.InvalidRequestException = InvalidRequestException
mod.TimedOutException = TimedOutException
mod.UnavailableException = UnavailableException
mod.AllServersUnavailable = AllServersUnavailable
class test_CassandraBackend(AppCase):
def setup(self):
self.app.conf.update(
CASSANDRA_SERVERS=['example.com'],
CASSANDRA_KEYSPACE='keyspace',
CASSANDRA_COLUMN_FAMILY='columns',
)
def test_init_no_pycassa(self):
with mock_module('pycassa'):
from celery.backends import cassandra as mod
prev, mod.pycassa = mod.pycassa, None
try:
with self.assertRaises(ImproperlyConfigured):
mod.CassandraBackend(app=self.app)
finally:
mod.pycassa = prev
def test_init_with_and_without_LOCAL_QUORUM(self):
with mock_module('pycassa'):
from celery.backends import cassandra as mod
mod.pycassa = Mock()
install_exceptions(mod.pycassa)
cons = mod.pycassa.ConsistencyLevel = Object()
cons.LOCAL_QUORUM = 'foo'
self.app.conf.CASSANDRA_READ_CONSISTENCY = 'LOCAL_FOO'
self.app.conf.CASSANDRA_WRITE_CONSISTENCY = 'LOCAL_FOO'
mod.CassandraBackend(app=self.app)
cons.LOCAL_FOO = 'bar'
mod.CassandraBackend(app=self.app)
# no servers raises ImproperlyConfigured
with self.assertRaises(ImproperlyConfigured):
self.app.conf.CASSANDRA_SERVERS = None
mod.CassandraBackend(
app=self.app, keyspace='b', column_family='c',
)
@depends_on_current_app
def test_reduce(self):
with mock_module('pycassa'):
from celery.backends.cassandra import CassandraBackend
self.assertTrue(loads(dumps(CassandraBackend(app=self.app))))
def test_get_task_meta_for(self):
with mock_module('pycassa'):
from celery.backends import cassandra as mod
mod.pycassa = Mock()
install_exceptions(mod.pycassa)
mod.Thrift = Mock()
install_exceptions(mod.Thrift)
x = mod.CassandraBackend(app=self.app)
Get_Column = x._get_column_family = Mock()
get_column = Get_Column.return_value = Mock()
get = get_column.get
META = get.return_value = {
'task_id': 'task_id',
'status': states.SUCCESS,
'result': '1',
'date_done': 'date',
'traceback': '',
'children': None,
}
x.decode = Mock()
x.detailed_mode = False
meta = x._get_task_meta_for('task_id')
self.assertEqual(meta['status'], states.SUCCESS)
x.detailed_mode = True
row = get.return_value = Mock()
row.values.return_value = [Mock()]
x.decode.return_value = META
meta = x._get_task_meta_for('task_id')
self.assertEqual(meta['status'], states.SUCCESS)
x.decode.return_value = Mock()
x.detailed_mode = False
get.side_effect = KeyError()
meta = x._get_task_meta_for('task_id')
self.assertEqual(meta['status'], states.PENDING)
calls = [0]
end = [10]
def work_eventually(*arg):
try:
if calls[0] > end[0]:
return META
raise socket.error()
finally:
calls[0] += 1
get.side_effect = work_eventually
x._retry_timeout = 10
x._retry_wait = 0.01
meta = x._get_task_meta_for('task')
self.assertEqual(meta['status'], states.SUCCESS)
x._retry_timeout = 0.1
calls[0], end[0] = 0, 100
with self.assertRaises(socket.error):
x._get_task_meta_for('task')
def test_store_result(self):
with mock_module('pycassa'):
from celery.backends import cassandra as mod
mod.pycassa = Mock()
install_exceptions(mod.pycassa)
mod.Thrift = Mock()
install_exceptions(mod.Thrift)
x = mod.CassandraBackend(app=self.app)
Get_Column = x._get_column_family = Mock()
cf = Get_Column.return_value = Mock()
x.detailed_mode = False
x._store_result('task_id', 'result', states.SUCCESS)
self.assertTrue(cf.insert.called)
cf.insert.reset()
x.detailed_mode = True
x._store_result('task_id', 'result', states.SUCCESS)
self.assertTrue(cf.insert.called)
def test_process_cleanup(self):
with mock_module('pycassa'):
from celery.backends import cassandra as mod
x = mod.CassandraBackend(app=self.app)
x._column_family = None
x.process_cleanup()
x._column_family = True
x.process_cleanup()
self.assertIsNone(x._column_family)
def test_get_column_family(self):
with mock_module('pycassa'):
from celery.backends import cassandra as mod
mod.pycassa = Mock()
install_exceptions(mod.pycassa)
x = mod.CassandraBackend(app=self.app)
self.assertTrue(x._get_column_family())
self.assertIsNotNone(x._column_family)
self.assertIs(x._get_column_family(), x._column_family)
|
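A configuration sketch matching the settings these tests exercise (the legacy pycassa-based result backend from Celery 3.x; the host name is a placeholder):

```python
from celery import Celery

app = Celery('tasks')
app.conf.update(
    CELERY_RESULT_BACKEND='cassandra',
    CASSANDRA_SERVERS=['cassandra.example.com:9160'],
    CASSANDRA_KEYSPACE='celery',
    CASSANDRA_COLUMN_FAMILY='task_results',
    CASSANDRA_READ_CONSISTENCY='LOCAL_QUORUM',
    CASSANDRA_WRITE_CONSISTENCY='LOCAL_QUORUM',
)
```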
py | 1a43bf1a306e0ba3fd98b20fe16740b24ea4e02e | """Automatic Domain Randomization (ADR) algorithm
Introduced in:
Akkaya, Ilge, et al. "Solving rubik's cube with a robot hand."
arXiv preprint arXiv:1910.07113 (2019).
"""
import random
from inspect import signature
from typing import Any, AnyStr, Union, Sequence, Optional
import numpy as np
from collections import OrderedDict
from simmod.algorithms.udr import UniformDomainRandomization
from simmod.modification.base_modifier import BaseModifier
from simmod.common.parametrization import Parametrization
from simmod.common.parametrization import Execution
EXECUTION_POINTS = Union[Execution]
class AutomaticDomainRandomization(UniformDomainRandomization):
def __init__(self, *modifiers: BaseModifier,
random_state: Optional[np.random.Generator] = None,
buffer_threshold, performance_thresholds: Sequence,
step_size, **kwargs: Any) -> None:
if len(performance_thresholds) != 2 or performance_thresholds[0] > \
performance_thresholds[1]:
raise ValueError("'performance_thresholds' should be Tuple "
"containing two values whereas the first "
"corresponds to the lower threshold t_L and the "
"second to the upper threshold t_H (t_L < t_H)")
if random_state is None:
self.random_state = np.random.default_rng()
elif isinstance(random_state, int):
# random_state assumed to be an int
self.random_state = np.random.RandomState(random_state)
else:
self.random_state = random_state
super().__init__(*modifiers, random_state=random_state, **kwargs)
self.buffer = OrderedDict()
for modifier in self.modifiers:
self.buffer[modifier] = OrderedDict()
for instrumentation in modifier.instrumentation:
self.buffer[modifier][instrumentation] = ([], [])
def _bound_value(self, modifier: BaseModifier, instrumentation: Parametrization,
bound_low: bool):
a, b = instrumentation.parameter_values
object_name = instrumentation.object_name
setter_func = modifier.standard_setters[instrumentation.setter]
if setter_func.__defaults__ is not None: # in case there are no kwargs
n_kwargs = len(setter_func.__defaults__)
else:
n_kwargs = 0
sig = signature(setter_func)
n_params = len(
sig.parameters) - n_kwargs - 1 # Exclude name & non-positional arguments
# TODO: Randomize non-positional arguments
new_values = instrumentation.sample(n_params)
if bound_low:
new_values[0] = a
else:
new_values[1] = b
instrumentation.update(new_values)
return setter_func(object_name, *new_values)
def adapt_boundaries(self, instrumentation: Parametrization,
step_size: float, select_low: bool):
pass
def entropy(self):
n = 0
entropy = 0
for modifier in self.modifiers:
for instrumentation in modifier.instrumentation:
entropy += instrumentation.entropy
n += 1
assert n != 0
return entropy / n
def step(self, execution: EXECUTION_POINTS = 'RESET', **kwargs) -> None:
mod = random.choice(self.modifiers)
bounded_param = random.choice(mod.instrumentation)
x = self.random_state.uniform()
select_low = (x < 0.5)
for modifier in self.modifiers:
for instrumentation in modifier.instrumentation:
if instrumentation is bounded_param:
self._bound_value(modifier, instrumentation, select_low)
else:
self._randomize_object(modifier, instrumentation)
|
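A hypothetical wiring sketch; `some_modifier` stands in for a configured modifier instance from `simmod.modification`, and the threshold values are illustrative only:

```python
import numpy as np

adr = AutomaticDomainRandomization(
    some_modifier,                        # placeholder: a BaseModifier instance, not defined here
    random_state=np.random.default_rng(0),
    buffer_threshold=0.5,
    performance_thresholds=(2.0, 8.0),    # (t_L, t_H)
    step_size=0.05,
)
adr.step()            # randomize every parameter and boundary-sample one of them
print(adr.entropy())  # mean entropy over all randomized parameters
```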
py | 1a43bfd7f4983d05e300a8c9504a42028f3c4a42 | #!/usr/bin/env python
# -*- coding: UTF-8 -*-
# File: model_desc.py
# Author: Yuxin Wu <[email protected]>
from abc import ABCMeta, abstractmethod
import re
import tensorflow as tf
from collections import namedtuple
import inspect
import pickle
from ..utils import logger, INPUT_VARS_KEY
from ..tfutils.common import get_tensors_by_names
from ..tfutils.gradproc import CheckGradient
from ..tfutils.tower import get_current_tower_context
__all__ = ['ModelDesc', 'InputVar', 'ModelFromMetaGraph' ]
#_InputVar = namedtuple('InputVar', ['type', 'shape', 'name', 'sparse'])
class InputVar(object):
def __init__(self, type, shape, name, sparse=False):
self.type = type
self.shape = shape
self.name = name
self.sparse = sparse
def dumps(self):
return pickle.dumps(self)
@staticmethod
def loads(buf):
return pickle.loads(buf)
class ModelDesc(object):
""" Base class for a model description """
__metaclass__ = ABCMeta
def get_input_vars(self):
"""
Create or return (if already created) raw input TF placeholder vars in the graph.
:returns: the list of raw input vars in the graph
"""
if hasattr(self, 'reuse_input_vars'):
return self.reuse_input_vars
ret = self.get_placeholders()
self.reuse_input_vars = ret
return ret
def get_placeholders(self, prefix=''):
""" build placeholders with optional prefix, for each InputVar
"""
input_vars = self._get_input_vars()
for v in input_vars:
tf.add_to_collection(INPUT_VARS_KEY, v.dumps())
ret = []
for v in input_vars:
placehdr_f = tf.placeholder if not v.sparse else tf.sparse_placeholder
ret.append(placehdr_f(
v.type, shape=v.shape,
name=prefix + v.name))
return ret
def get_input_vars_desc(self):
""" return a list of `InputVar` instance"""
return self._get_input_vars()
@abstractmethod
def _get_input_vars(self):
""":returns: a list of InputVar """
def build_graph(self, model_inputs):
"""
Setup the whole graph.
:param model_inputs: a list of input variable in the graph.
:param is_training: a boolean
:returns: the cost to minimize. a scalar variable
"""
if len(inspect.getargspec(self._build_graph).args) == 3:
logger.warn("[DEPRECATED] _build_graph(self, input_vars, is_training) is deprecated! \
Use _build_graph(self, input_vars) and get_current_tower_context().is_training instead.")
self._build_graph(model_inputs, get_current_tower_context().is_training)
else:
self._build_graph(model_inputs)
@abstractmethod
def _build_graph(self, inputs):
pass
def get_cost(self):
return self._get_cost()
def _get_cost(self, *args):
return self.cost
def get_gradient_processor(self):
""" Return a list of GradientProcessor. They will be executed in order"""
return [#SummaryGradient(),
CheckGradient()
]
class ModelFromMetaGraph(ModelDesc):
"""
Load the whole exact TF graph from a saved meta_graph.
Only useful for inference.
"""
def __init__(self, filename):
tf.train.import_meta_graph(filename)
all_coll = tf.get_default_graph().get_all_collection_keys()
for k in [INPUT_VARS_KEY, tf.GraphKeys.TRAINABLE_VARIABLES,
tf.GraphKeys().VARIABLES]:
assert k in all_coll, \
"Collection {} not found in metagraph!".format(k)
def _get_input_vars(self):
col = tf.get_collection(INPUT_VARS_KEY)
col = [InputVar.loads(v) for v in col]
return col
def _build_graph(self, _, __):
""" Do nothing. Graph was imported already """
pass
|
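A minimal subclass sketch following the abstract interface above (TF1-era graph mode); the network itself is illustrative:

```python
class MnistModel(ModelDesc):
    def _get_input_vars(self):
        return [InputVar(tf.float32, (None, 28, 28), 'input'),
                InputVar(tf.int32, (None,), 'label')]

    def _build_graph(self, inputs):
        image, label = inputs
        flat = tf.reshape(image, [-1, 28 * 28])
        w = tf.Variable(tf.zeros([28 * 28, 10]))
        b = tf.Variable(tf.zeros([10]))
        logits = tf.matmul(flat, w) + b
        self.cost = tf.reduce_mean(
            tf.nn.sparse_softmax_cross_entropy_with_logits(
                logits=logits, labels=label))
```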
py | 1a43c1370a53e2b329c022d6022b8434d603518e | # Copyright 2020, The TensorFlow Federated Authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from absl.testing import absltest
from absl.testing import parameterized
import numpy as np
import tensorflow as tf
from tensorflow_federated.python.common_libs import test_utils
from tensorflow_federated.python.core.api import computation_types
from tensorflow_federated.python.core.api import computations
from tensorflow_federated.python.core.api import intrinsics
from tensorflow_federated.python.core.impl.executors import executor_base
from tensorflow_federated.python.core.impl.executors import executor_stacks
from tensorflow_federated.python.core.impl.executors import executor_test_utils
def _create_tff_parallel_clients_with_dataset_reduce():
@tf.function
def reduce_fn(x, y):
return x + y
@tf.function
def dataset_reduce_fn(ds, initial_val):
return ds.reduce(initial_val, reduce_fn)
@computations.tf_computation(computation_types.SequenceType(tf.int64))
def dataset_reduce_fn_wrapper(ds):
initial_val = tf.Variable(np.int64(1.0))
return dataset_reduce_fn(ds, initial_val)
@computations.federated_computation(
computation_types.at_clients(computation_types.SequenceType(tf.int64)))
def parallel_client_run(client_datasets):
return intrinsics.federated_map(dataset_reduce_fn_wrapper, client_datasets)
return parallel_client_run
def _create_tff_parallel_clients_with_iter_dataset():
@tf.function
def reduce_fn(x, y):
return x + y
@tf.function
def dataset_reduce_fn(ds, initial_val):
for batch in iter(ds):
initial_val = reduce_fn(initial_val, batch)
return initial_val
@computations.tf_computation(computation_types.SequenceType(tf.int64))
def dataset_reduce_fn_wrapper(ds):
initial_val = tf.Variable(np.int64(1.0))
return dataset_reduce_fn(ds, initial_val)
@computations.federated_computation(
computation_types.at_clients(computation_types.SequenceType(tf.int64)))
def parallel_client_run(client_datasets):
return intrinsics.federated_map(dataset_reduce_fn_wrapper, client_datasets)
return parallel_client_run
class MultiGPUTest(tf.test.TestCase, parameterized.TestCase):
def setUp(self):
super().setUp()
test_utils.create_logical_multi_gpus()
def _create_logical_multi_gpus(self):
# Multiple logical GPU devices will be created for tests in this module.
# Only call this function once, as logical devices have to be created before
# being listed in each individual test.
gpu_devices = tf.config.list_physical_devices('GPU')
if not gpu_devices:
# TODO(b/168138878): switch to raise and move out of MultiGPUTest
self.skipTest('Skip GPU tests when no GPU is provided')
if len(gpu_devices) == 1:
tf.config.set_logical_device_configuration(gpu_devices[0], [
tf.config.LogicalDeviceConfiguration(memory_limit=128),
tf.config.LogicalDeviceConfiguration(memory_limit=128)
])
@parameterized.named_parameters(
('server_on_cpu', 'CPU'),
('server_on_gpu', 'GPU'),
)
def test_create_executor_with_client_mgpu(self, tf_device):
tf_devices = tf.config.list_logical_devices(tf_device)
server_tf_device = None if not tf_devices else tf_devices[0]
gpu_devices = tf.config.list_logical_devices('GPU')
unplaced_factory = executor_stacks.UnplacedExecutorFactory(
use_caching=True,
server_device=server_tf_device,
client_devices=gpu_devices)
unplaced_executor = unplaced_factory.create_executor()
self.assertIsInstance(unplaced_executor, executor_base.Executor)
@parameterized.named_parameters(
('server_on_cpu', 'CPU'),
('server_on_gpu', 'GPU'),
)
def test_local_executor_multi_gpus_iter_dataset(self, tf_device):
tf_devices = tf.config.list_logical_devices(tf_device)
server_tf_device = None if not tf_devices else tf_devices[0]
gpu_devices = tf.config.list_logical_devices('GPU')
local_executor = executor_stacks.local_executor_factory(
server_tf_device=server_tf_device, client_tf_devices=gpu_devices)
with executor_test_utils.install_executor(local_executor):
parallel_client_run = _create_tff_parallel_clients_with_iter_dataset()
client_data = [
tf.data.Dataset.range(10),
tf.data.Dataset.range(10).map(lambda x: x + 1)
]
client_results = parallel_client_run(client_data)
self.assertEqual(client_results, [np.int64(46), np.int64(56)])
@parameterized.named_parameters(
('server_on_cpu', 'CPU'),
('server_on_gpu', 'GPU'),
)
def test_local_executor_multi_gpus_dataset_reduce(self, tf_device):
tf_devices = tf.config.list_logical_devices(tf_device)
server_tf_device = None if not tf_devices else tf_devices[0]
gpu_devices = tf.config.list_logical_devices('GPU')
local_executor = executor_stacks.local_executor_factory(
server_tf_device=server_tf_device, client_tf_devices=gpu_devices)
with executor_test_utils.install_executor(local_executor):
parallel_client_run = _create_tff_parallel_clients_with_dataset_reduce()
client_data = [
tf.data.Dataset.range(10),
tf.data.Dataset.range(10).map(lambda x: x + 1)
]
# TODO(b/159180073): merge this one into iter dataset test when the
# dataset reduce function can be correctly used for GPU device.
with self.assertRaisesRegex(
ValueError,
'Detected dataset reduce op in multi-GPU TFF simulation.*'):
parallel_client_run(client_data)
if __name__ == '__main__':
absltest.main()
|
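A hedged standalone sketch of the logical-GPU split these tests rely on: one physical GPU is partitioned into two small logical devices before any of them are used (mirroring the helper above):

```python
import tensorflow as tf

gpus = tf.config.list_physical_devices('GPU')
if len(gpus) == 1:
    tf.config.set_logical_device_configuration(gpus[0], [
        tf.config.LogicalDeviceConfiguration(memory_limit=128),
        tf.config.LogicalDeviceConfiguration(memory_limit=128),
    ])
print(tf.config.list_logical_devices('GPU'))  # two logical GPUs when one physical GPU exists
```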
py | 1a43c13fc7d540b5ea2b491c99e324d4abdebb40 | """ Disc Controller """
__docformat__ = "numpy"
import argparse
import os
from typing import List
from datetime import datetime
from matplotlib import pyplot as plt
from prompt_toolkit.completion import NestedCompleter
from gamestonk_terminal import feature_flags as gtff
from gamestonk_terminal.helper_funcs import get_flair
from gamestonk_terminal.menu import session
from gamestonk_terminal.helper_funcs import (
parse_known_args_and_warn,
check_non_negative,
check_positive,
valid_date,
check_int_range,
)
from gamestonk_terminal.stocks.discovery import (
ark_view,
fidelity_view,
seeking_alpha_view,
shortinterest_view,
yahoofinance_view,
finnhub_view,
geekofwallstreet_view,
)
class DiscoveryController:
"""Discovery Controller"""
# Command choices
CHOICES = [
"?",
"cls",
"help",
"q",
"quit",
]
CHOICES_COMMANDS = [
"pipo",
"fipo",
"gainers",
"losers",
"ugs",
"gtech",
"active",
"ulc",
"asc",
"ford",
"arkord",
"upcoming",
"latest",
"trending",
"lowfloat",
"hotpenny",
"rtearn",
]
CHOICES += CHOICES_COMMANDS
def __init__(self):
"""Constructor"""
self.disc_parser = argparse.ArgumentParser(add_help=False, prog="disc")
self.disc_parser.add_argument(
"cmd",
choices=self.CHOICES,
)
@staticmethod
def print_help():
"""Print help"""
help_text = """
Discovery:
cls clear screen
?/help show this menu again
q quit this menu, and shows back to main menu
quit quit to abandon program
Geek of Wall St:
rtearn realtime earnings data and expected moves
Finnhub:
pipo past IPOs dates
fipo future IPOs dates
Yahoo Finance:
gainers show latest top gainers
losers show latest top losers
ugs undervalued stocks with revenue and earnings growth in excess of 25%
gtech tech stocks with revenue and earnings growth more than 25%
active most active stocks by intraday trade volume
ulc potentially undervalued large cap stocks
asc small cap stocks with earnings growth rates better than 25%
Fidelity:
ford orders by Fidelity Customers
cathiesark.com:
arkord orders by ARK Investment Management LLC
Seeking Alpha:
upcoming upcoming earnings release dates
latest latest news
trending trending news
shortinterest.com:
lowfloat low float stocks under 10M shares float
pennystockflow.com:
hotpenny today's hot penny stocks
"""
print(help_text)
def switch(self, an_input: str):
"""Process and dispatch input
Returns
-------
True, False or None
False - quit the menu
True - quit the program
None - continue in the menu
"""
# Empty command
if not an_input:
print("")
return None
(known_args, other_args) = self.disc_parser.parse_known_args(an_input.split())
# Help menu again
if known_args.cmd == "?":
self.print_help()
return None
# Clear screen
if known_args.cmd == "cls":
os.system("cls||clear")
return None
return getattr(
self, "call_" + known_args.cmd, lambda: "Command not recognized!"
)(other_args)
def call_help(self, _):
"""Process Help command"""
self.print_help()
def call_q(self, _):
"""Process Q command - quit the menu"""
return False
def call_quit(self, _):
"""Process Quit command - quit the program"""
return True
def call_rtearn(self, other_args: List[str]):
"""Process rtearn command"""
parser = argparse.ArgumentParser(
add_help=False,
formatter_class=argparse.ArgumentDefaultsHelpFormatter,
prog="rtearn",
description="""
Realtime earnings data and expected moves. [Source: https://thegeekofwallstreet.com]
""",
)
parser.add_argument(
"--export",
choices=["csv", "json", "xlsx"],
default="",
type=str,
dest="export",
help="Export dataframe data to csv,json,xlsx file",
)
try:
ns_parser = parse_known_args_and_warn(parser, other_args)
if not ns_parser:
return
geekofwallstreet_view.display_realtime_earnings(ns_parser.export)
except Exception as e:
print(e, "\n")
def call_pipo(self, other_args: List[str]):
"""Process pipo command"""
parser = argparse.ArgumentParser(
add_help=False,
formatter_class=argparse.ArgumentDefaultsHelpFormatter,
prog="pipo",
description="""
Past IPOs dates. [Source: https://finnhub.io]
""",
)
parser.add_argument(
"-n",
"--num",
action="store",
dest="num",
type=check_non_negative,
default=5,
help="Number of past days to look for IPOs.",
)
parser.add_argument(
"--export",
choices=["csv", "json", "xlsx"],
default="",
type=str,
dest="export",
help="Export dataframe data to csv,json,xlsx file",
)
try:
if other_args:
if "-" not in other_args[0]:
other_args.insert(0, "-n")
ns_parser = parse_known_args_and_warn(parser, other_args)
if not ns_parser:
return
finnhub_view.past_ipo(
num_days_behind=ns_parser.num,
export=ns_parser.export,
)
except Exception as e:
print(e, "\n")
def call_fipo(self, other_args: List[str]):
"""Process fipo command"""
parser = argparse.ArgumentParser(
add_help=False,
formatter_class=argparse.ArgumentDefaultsHelpFormatter,
prog="fipo",
description="""
Future IPOs dates. [Source: https://finnhub.io]
""",
)
parser.add_argument(
"-n",
"--num",
action="store",
dest="num",
type=check_non_negative,
default=5,
help="Number of future days to look for IPOs.",
)
parser.add_argument(
"--export",
choices=["csv", "json", "xlsx"],
default="",
type=str,
dest="export",
help="Export dataframe data to csv,json,xlsx file",
)
try:
if other_args and "-" not in other_args[0]:
other_args.insert(0, "-n")
ns_parser = parse_known_args_and_warn(parser, other_args)
if not ns_parser:
return
finnhub_view.future_ipo(
num_days_ahead=ns_parser.num,
export=ns_parser.export,
)
except Exception as e:
print(e, "\n")
def call_gainers(self, other_args: List[str]):
"""Process gainers command"""
parser = argparse.ArgumentParser(
add_help=False,
formatter_class=argparse.ArgumentDefaultsHelpFormatter,
prog="gainers",
description="Print up to 25 top gainers. [Source: Yahoo Finance]",
)
parser.add_argument(
"-n",
"--num",
action="store",
dest="num",
type=check_int_range(1, 25),
default=5,
help="Number of stocks to display.",
)
parser.add_argument(
"--export",
choices=["csv", "json", "xlsx"],
default="",
type=str,
dest="export",
help="Export dataframe data to csv,json,xlsx file",
)
try:
if other_args:
if "-" not in other_args[0]:
other_args.insert(0, "-n")
ns_parser = parse_known_args_and_warn(parser, other_args)
if not ns_parser:
return
yahoofinance_view.display_gainers(
num_stocks=ns_parser.num,
export=ns_parser.export,
)
except Exception as e:
print(e, "\n")
def call_losers(self, other_args: List[str]):
"""Process losers command"""
parser = argparse.ArgumentParser(
add_help=False,
formatter_class=argparse.ArgumentDefaultsHelpFormatter,
prog="losers",
description="Print up to 25 top losers. [Source: Yahoo Finance]",
)
parser.add_argument(
"-n",
"--num",
action="store",
dest="num",
type=check_int_range(1, 25),
default=5,
help="Number of stocks to display.",
)
parser.add_argument(
"--export",
choices=["csv", "json", "xlsx"],
default="",
type=str,
dest="export",
help="Export dataframe data to csv,json,xlsx file",
)
try:
if other_args:
if "-" not in other_args[0]:
other_args.insert(0, "-n")
ns_parser = parse_known_args_and_warn(parser, other_args)
if not ns_parser:
return
yahoofinance_view.display_losers(
num_stocks=ns_parser.num,
export=ns_parser.export,
)
except Exception as e:
print(e, "\n")
def call_ugs(self, other_args: List[str]):
"""Process ugs command"""
parser = argparse.ArgumentParser(
add_help=False,
formatter_class=argparse.ArgumentDefaultsHelpFormatter,
prog="ugs",
description="""
Print up to 25 undervalued stocks with revenue and earnings growth in excess of 25%.
[Source: Yahoo Finance]
""",
)
parser.add_argument(
"-n",
"--num",
action="store",
dest="num",
type=check_int_range(1, 25),
default=5,
help="Number of stocks to display.",
)
parser.add_argument(
"--export",
choices=["csv", "json", "xlsx"],
default="",
type=str,
dest="export",
help="Export dataframe data to csv,json,xlsx file",
)
try:
if other_args:
if "-" not in other_args[0]:
other_args.insert(0, "-n")
ns_parser = parse_known_args_and_warn(parser, other_args)
if not ns_parser:
return
yahoofinance_view.display_ugs(
num_stocks=ns_parser.num,
export=ns_parser.export,
)
except Exception as e:
print(e, "\n")
def call_gtech(self, other_args: List[str]):
"""Process gtech command"""
parser = argparse.ArgumentParser(
add_help=False,
formatter_class=argparse.ArgumentDefaultsHelpFormatter,
prog="gtech",
description="""
Print up to 25 top tech stocks with revenue and earnings growth in excess of 25%. [Source: Yahoo Finance]
""",
)
parser.add_argument(
"-n",
"--num",
action="store",
dest="num",
type=check_int_range(1, 25),
default=5,
help="Number of stocks to display.",
)
parser.add_argument(
"--export",
choices=["csv", "json", "xlsx"],
default="",
type=str,
dest="export",
help="Export dataframe data to csv,json,xlsx file",
)
try:
if other_args:
if "-" not in other_args[0]:
other_args.insert(0, "-n")
ns_parser = parse_known_args_and_warn(parser, other_args)
if not ns_parser:
return
yahoofinance_view.display_gtech(
num_stocks=ns_parser.num,
export=ns_parser.export,
)
except Exception as e:
print(e, "\n")
def call_active(self, other_args: List[str]):
"""Process active command"""
parser = argparse.ArgumentParser(
add_help=False,
formatter_class=argparse.ArgumentDefaultsHelpFormatter,
prog="active",
description="""
Print up to 25 top most actively traded intraday tickers. [Source: Yahoo Finance]
""",
)
parser.add_argument(
"-n",
"--num",
action="store",
dest="num",
type=check_int_range(1, 25),
default=5,
help="Number of stocks to display.",
)
parser.add_argument(
"--export",
choices=["csv", "json", "xlsx"],
default="",
type=str,
dest="export",
help="Export dataframe data to csv,json,xlsx file",
)
try:
if other_args:
if "-" not in other_args[0]:
other_args.insert(0, "-n")
ns_parser = parse_known_args_and_warn(parser, other_args)
if not ns_parser:
return
yahoofinance_view.display_active(
num_stocks=ns_parser.num,
export=ns_parser.export,
)
except Exception as e:
print(e, "\n")
def call_ulc(self, other_args: List[str]):
"""Process ulc command"""
parser = argparse.ArgumentParser(
add_help=False,
formatter_class=argparse.ArgumentDefaultsHelpFormatter,
prog="ulc",
description="""
Print up to 25 potentially undervalued large cap stocks. [Source: Yahoo Finance]
""",
)
parser.add_argument(
"-n",
"--num",
action="store",
dest="num",
type=check_int_range(1, 25),
default=5,
help="Number of the stocks to display.",
)
parser.add_argument(
"--export",
choices=["csv", "json", "xlsx"],
default="",
type=str,
dest="export",
help="Export dataframe data to csv,json,xlsx file",
)
try:
if other_args:
if "-" not in other_args[0]:
other_args.insert(0, "-n")
ns_parser = parse_known_args_and_warn(parser, other_args)
if not ns_parser:
return
yahoofinance_view.display_ulc(
num_stocks=ns_parser.num,
export=ns_parser.export,
)
except Exception as e:
print(e, "\n")
def call_asc(self, other_args: List[str]):
"""Process asc command"""
parser = argparse.ArgumentParser(
add_help=False,
formatter_class=argparse.ArgumentDefaultsHelpFormatter,
prog="asc",
description="""
Print up to 25 small cap stocks with earnings growth rates better than 25%. [Source: Yahoo Finance]
""",
)
parser.add_argument(
"-n",
"--num",
action="store",
dest="num",
type=check_int_range(1, 25),
default=5,
help="Number of the stocks to display.",
)
parser.add_argument(
"--export",
choices=["csv", "json", "xlsx"],
default="",
type=str,
dest="export",
help="Export dataframe data to csv,json,xlsx file",
)
try:
if other_args:
if "-" not in other_args[0]:
other_args.insert(0, "-n")
ns_parser = parse_known_args_and_warn(parser, other_args)
if not ns_parser:
return
yahoofinance_view.display_asc(
num_stocks=ns_parser.num,
export=ns_parser.export,
)
except Exception as e:
print(e, "\n")
def call_ford(self, other_args: List[str]):
"""Process ford command"""
parser = argparse.ArgumentParser(
add_help=False,
formatter_class=argparse.ArgumentDefaultsHelpFormatter,
prog="ford",
description="""
Orders by Fidelity customers. Information shown in the table below
is based on the volume of orders entered on the "as of" date shown. Securities
identified are not recommended or endorsed by Fidelity and are displayed for
informational purposes only. [Source: Fidelity]
""",
)
parser.add_argument(
"-n",
"--num",
action="store",
dest="n_num",
type=check_positive,
default=10,
help="Number of top ordered stocks to be printed.",
)
parser.add_argument(
"--export",
choices=["csv", "json", "xlsx"],
default="",
type=str,
dest="export",
help="Export dataframe data to csv,json,xlsx file",
)
try:
if other_args:
if "-" not in other_args[0]:
other_args.insert(0, "-n")
ns_parser = parse_known_args_and_warn(parser, other_args)
if not ns_parser:
return
fidelity_view.orders_view(
num=ns_parser.n_num,
export=ns_parser.export,
)
except Exception as e:
print(e, "\n")
def call_arkord(self, other_args: List[str]):
"""Process arkord command"""
parser = argparse.ArgumentParser(
add_help=False,
formatter_class=argparse.ArgumentDefaultsHelpFormatter,
prog="arkord",
description="""
Orders by ARK Investment Management LLC - https://ark-funds.com/. [Source: https://cathiesark.com]
""",
)
parser.add_argument(
"-n",
"--num",
action="store",
dest="n_num",
type=check_positive,
default=10,
help="Last N ARK orders.",
)
parser.add_argument(
"--export",
choices=["csv", "json", "xlsx"],
default="",
type=str,
dest="export",
help="Export dataframe data to csv,json,xlsx file",
)
try:
if other_args:
if "-" not in other_args[0]:
other_args.insert(0, "-n")
ns_parser = parse_known_args_and_warn(parser, other_args)
if not ns_parser:
return
ark_view.ark_orders_view(
num=ns_parser.n_num,
export=ns_parser.export,
)
except Exception as e:
print(e, "\n")
def call_upcoming(self, other_args: List[str]):
# TODO: switch to nasdaq
"""Process upcoming command"""
parser = argparse.ArgumentParser(
add_help=False,
formatter_class=argparse.ArgumentDefaultsHelpFormatter,
prog="upcoming",
description="""Upcoming earnings release dates. [Source: Seeking Alpha]""",
)
parser.add_argument(
"-p",
"--pages",
action="store",
dest="n_pages",
type=check_positive,
default=10,
help="Number of pages to read upcoming earnings from in Seeking Alpha website.",
)
parser.add_argument(
"-n",
"--num",
action="store",
dest="n_num",
type=check_positive,
default=1,
help="Number of upcoming earnings release dates to display",
)
parser.add_argument(
"--export",
choices=["csv", "json", "xlsx"],
default="",
type=str,
dest="export",
help="Export dataframe data to csv,json,xlsx file",
)
try:
if other_args:
if "-" not in other_args[0]:
other_args.insert(0, "-n")
ns_parser = parse_known_args_and_warn(parser, other_args)
if not ns_parser:
return
seeking_alpha_view.upcoming_earning_release_dates(
num_pages=ns_parser.n_pages,
num_earnings=ns_parser.n_num,
export=ns_parser.export,
)
except Exception as e:
print(e, "\n")
def call_latest(self, other_args: List[str]):
"""Process latest command"""
parser = argparse.ArgumentParser(
add_help=False,
formatter_class=argparse.ArgumentDefaultsHelpFormatter,
prog="latest",
description="""Latest news articles. [Source: Seeking Alpha]""",
)
parser.add_argument(
"-i",
"--id",
action="store",
dest="n_id",
type=check_positive,
default=-1,
help="article ID",
)
parser.add_argument(
"-n",
"--num",
action="store",
dest="n_num",
type=check_positive,
default=5,
help="number of articles being printed",
)
parser.add_argument(
"-d",
"--date",
action="store",
dest="s_date",
type=valid_date,
default=datetime.now().strftime("%Y-%m-%d"),
help="starting date of articles",
)
parser.add_argument(
"--export",
choices=["csv", "json", "xlsx"],
default="",
type=str,
dest="export",
help="Export dataframe data to csv,json,xlsx file",
)
try:
if other_args:
if "-" not in other_args[0]:
other_args.insert(0, "-i")
ns_parser = parse_known_args_and_warn(parser, other_args)
if not ns_parser:
return
seeking_alpha_view.news(
news_type="latest",
article_id=ns_parser.n_id,
num=ns_parser.n_num,
start_date=ns_parser.s_date,
export=ns_parser.export,
)
except Exception as e:
print(e, "\n")
def call_trending(self, other_args: List[str]):
"""Process trending command"""
parser = argparse.ArgumentParser(
add_help=False,
formatter_class=argparse.ArgumentDefaultsHelpFormatter,
prog="trending",
description="""Trending news articles. [Source: Seeking Alpha]""",
)
parser.add_argument(
"-i",
"--id",
action="store",
dest="n_id",
type=check_positive,
default=-1,
help="article ID",
)
parser.add_argument(
"-n",
"--num",
action="store",
dest="n_num",
type=check_positive,
default=5,
help="number of articles being printed",
)
parser.add_argument(
"-d",
"--date",
action="store",
dest="s_date",
type=valid_date,
default=datetime.now().strftime("%Y-%m-%d"),
help="starting date of articles",
)
parser.add_argument(
"--export",
choices=["csv", "json", "xlsx"],
default="",
type=str,
dest="export",
help="Export dataframe data to csv,json,xlsx file",
)
try:
if other_args:
if "-" not in other_args[0]:
other_args.insert(0, "-i")
ns_parser = parse_known_args_and_warn(parser, other_args)
if not ns_parser:
return
seeking_alpha_view.news(
news_type="trending",
article_id=ns_parser.n_id,
num=ns_parser.n_num,
start_date=ns_parser.s_date,
export=ns_parser.export,
)
except Exception as e:
print(e, "\n")
def call_lowfloat(self, other_args: List[str]):
"""Process lowfloat command"""
parser = argparse.ArgumentParser(
add_help=False,
formatter_class=argparse.ArgumentDefaultsHelpFormatter,
prog="lowfloat",
description="""
Print top stocks with lowest float. LowFloat.com provides a convenient
sorted database of stocks which have a float of under 10 million shares. Additional key
data such as the number of outstanding shares, short interest, and company industry is
displayed. Data is presented for the Nasdaq Stock Market, the New York Stock Exchange,
the American Stock Exchange, and the Over the Counter Bulletin Board. [Source: www.lowfloat.com]
""",
)
parser.add_argument(
"-n",
"--num",
action="store",
dest="n_num",
type=check_positive,
default=10,
help="Number of top stocks to print.",
)
parser.add_argument(
"--export",
choices=["csv", "json", "xlsx"],
default="",
type=str,
dest="export",
help="Export dataframe data to csv,json,xlsx file",
)
try:
ns_parser = parse_known_args_and_warn(parser, other_args)
if not ns_parser:
return
shortinterest_view.low_float(
num=ns_parser.n_num,
export=ns_parser.export,
)
except Exception as e:
print(e, "\n")
def call_hotpenny(self, other_args: List[str]):
"""Process hotpenny command"""
parser = argparse.ArgumentParser(
add_help=False,
formatter_class=argparse.ArgumentDefaultsHelpFormatter,
prog="hotpenny",
description="""
This site provides a list of today's most active and hottest penny stocks. While not for everyone, penny
stocks can be exciting and rewarding investments in many ways. With penny stocks, you can get more bang
for the buck. You can turn a few hundred dollars into thousands, just by getting in on the right penny
stock at the right time. Penny stocks are increasing in popularity. More and more investors of all age
groups and skill levels are getting involved, and the dollar amounts they are putting into these
speculative investments represent a bigger portion of their portfolios.
[Source: www.pennystockflow.com]
""",
)
parser.add_argument(
"-n",
"--num",
action="store",
dest="n_num",
type=check_positive,
default=10,
help="Number of top stocks to print.",
)
parser.add_argument(
"--export",
choices=["csv", "json", "xlsx"],
default="",
type=str,
dest="export",
help="Export dataframe data to csv,json,xlsx file",
)
try:
ns_parser = parse_known_args_and_warn(parser, other_args)
if not ns_parser:
return
shortinterest_view.hot_penny_stocks(
num=ns_parser.n_num,
export=ns_parser.export,
)
except Exception as e:
print(e, "\n")
def menu():
"""Discovery Menu"""
disc_controller = DiscoveryController()
disc_controller.call_help(None)
# Loop forever and ever
while True:
# Get input command from user
if session and gtff.USE_PROMPT_TOOLKIT:
completer = NestedCompleter.from_nested_dict(
{c: None for c in disc_controller.CHOICES}
)
an_input = session.prompt(
f"{get_flair()} (stocks)>(disc)> ",
completer=completer,
)
else:
an_input = input(f"{get_flair()} (stocks)>(disc)> ")
try:
plt.close("all")
process_input = disc_controller.switch(an_input)
if process_input is not None:
return process_input
except SystemExit:
print("The command selected doesn't exist\n")
continue
|
py | 1a43c18b9af80bbf184f3ba3d817891986e218b8 | #!/usr/bin/env python3
import os
import sys
import json
from argparse import ArgumentParser
from source.fmp import ProfileFMP
from source.create_sheet import create_excel
data_dir = os.path.join(os.path.dirname(__file__), 'data')
if not os.path.isdir(data_dir):
os.makedirs(data_dir)
def fetch_data_by_symbol(symbol):
fmp_company = ProfileFMP(symbol)
return {
'symbol': symbol,
'profile': fmp_company.profile,
'rating': fmp_company.rating,
'income': fmp_company.income,
}
def load(symbol):
company = fetch_data_by_symbol(symbol)
filename = os.path.join(data_dir, symbol + '.json')
with open(filename, 'w') as file:
json.dump(company, file)
create_excel(data_dir, company, symbol)
parser = ArgumentParser()
subparsers = parser.add_subparsers(dest="action", title='Subcommands')
load_parser = subparsers.add_parser('load', help='load data')
load_parser.add_argument('symbols', type=str, nargs='*', help='Stock symbols')
args = sys.argv[1:]
args = parser.parse_args(args)
if args.action == 'load':
symbols = args.symbols
for symbol in symbols:
print("Loading data for {}.".format(symbol))
load(symbol)
sys.exit(0)
else:
parser.error('Unknown command: ' + repr(args.action))
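# Illustrative invocation (the entry-point filename here is an assumption):
#
#   python fetch_fmp.py load AAPL MSFT
#
# For each symbol this writes data/<SYMBOL>.json and builds an Excel sheet via create_excel().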
|
py | 1a43c21abff9067c649a714496986b157c6cb357 | import csv
from functools import lru_cache
from pathlib import Path
from typing import Dict
from typing import Iterable
from typing import Optional
from typing import Set
from typing import Tuple
from typing import Type
from typing import Union
import warnings
from django.apps import AppConfig
from django.apps import apps
from django.conf import settings
import django.contrib.auth
from django.core.exceptions import ImproperlyConfigured
from django.db.models import Model
from django.http import HttpRequest
from django.utils.module_loading import import_string
from .types import Evaluator
from .types import PermName
from .types import ResolveEvaluatorFunc
from .types import ResolvePermNameFunc
from .types import UnresolvedEvaluator
from .types import UserType
def default_resolve_perm_name(
app_config: AppConfig, model: Type[Model], action: str, is_global: bool
) -> str:
if model:
default_codename = django.contrib.auth.get_permission_codename(action, model._meta)
permission_name = f"{app_config.label}.{default_codename}"
else:
permission_name = f"{app_config.label}.{action}"
return permission_name
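# For example (illustrative names, not taken from any CSV): with app label "shop",
# model Product and action "view" this returns "shop.view_product" (the codename comes
# from django.contrib.auth.get_permission_codename); with no model and action "export"
# it returns "shop.export".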
def default_get_user_type(user: Model) -> Optional[str]:
# note that AnonymousUser won't have a user_type so we need to deal with that gracefully
return getattr(user, "user_type", None)
def _parse_csv(
file_path: Path,
resolve_permission_name_func: ResolvePermNameFunc,
) -> Tuple[
Dict[PermName, bool],
Dict[PermName, Dict[UserType, UnresolvedEvaluator]],
Iterable[str],
]:
"""
Parses the CSV of user_type permissions and returns data for further processing.
See README.md for the CSV file format
:return: A tuple of three elements:
- A dict mapping permission name to bool of whether that permission is global or not
- A dict mapping a permission to a dict of user_types to partially resolved permission details:
permission_name: {
user_type1: UnresolvedEvaluator,
...
user_typeN: UnresolvedEvaluator,
}
- A list of user types
"""
with open(file_path, "r") as csv_file:
reader = csv.reader(csv_file, skipinitialspace=True)
# get first row of headers
fieldnames = next(reader)
fieldnames = [x.strip() for x in fieldnames]
prelim_headers = ["Model", "App", "Action", "Is Global"]
prelim_header_count = len(prelim_headers)
if fieldnames[:prelim_header_count] != prelim_headers:
raise ValueError(f"Invalid csv_permissions CSV column headers found in {file_path}")
user_type_headers = fieldnames[prelim_header_count:]
nonempty_user_type_headers = [user_type for user_type in user_type_headers if user_type != ""]
if len(set(nonempty_user_type_headers)) != len(nonempty_user_type_headers):
duplicates = [x for x in nonempty_user_type_headers if nonempty_user_type_headers.count(x) >= 2]
raise ValueError(f"Duplicate csv_permissions CSV column header ({duplicates[0]}) found in {file_path}")
if len(nonempty_user_type_headers) == 0:
raise ValueError(f"Missing user_type headers in {file_path}")
perm_is_global = {}
perm_user_type_unresolved: Dict[PermName, Dict[UserType, UnresolvedEvaluator]] = {}
# We can't just count the number of permissions read because we don't consider
# a file with commented-out lines to be empty, so keep track with a flag
was_empty = True
for line_number, row in enumerate(reader):
row = [cell.strip() for cell in row]
was_empty = False
if all(x == "" for x in row):
# ignore completely empty rows
continue
if any(row[0].strip().startswith(comment_prefix) for comment_prefix in ("//", "#", ';')):
# Ignore lines beginning with comment chars
continue
if len(row) < prelim_header_count:
raise ValueError(f"Incomplete line {line_number} in {csv_file}")
# note that model capitalisation may differ to model._meta.model_name
model_name_orig, app_label, action, is_global = row[:prelim_header_count]
app_config = apps.get_app_config(app_label)
model = app_config.get_model(model_name_orig) if model_name_orig else None
if is_global == "yes":
is_global = True
elif is_global == "no":
is_global = False
else:
raise ValueError("Invalid value for Is Global: should be 'yes' or 'no'.")
permission = resolve_permission_name_func(app_config, model, action, is_global)
if permission not in perm_is_global:
perm_is_global[permission] = is_global
perm_user_type_unresolved[permission] = {}
for i, user_type in enumerate(user_type_headers):
try:
evaluator_name = row[prelim_header_count + i]
except IndexError:
continue
if user_type == "":
# if a column has an empty user type then that's allowed but only if the entire column is empty
if evaluator_name != "":
raise ValueError(f"Columns with an empty user_type must be completely empty")
else:
perm_user_type_unresolved[permission][user_type] = UnresolvedEvaluator(
app_config=app_config,
model=model,
is_global=is_global,
permission=permission,
action=action,
evaluator_name=evaluator_name,
source_csv=file_path,
)
if was_empty:
raise ValueError("Empty permissions file")
return perm_is_global, perm_user_type_unresolved, nonempty_user_type_headers
# should be at least as large as the number of CSV files we load. This gets called by every has_perm() so must be cached
@lru_cache(maxsize=32)
def _resolve_functions(
file_paths: Iterable[Path],
resolve_permission_name: Optional[str],
resolve_evaluators: Iterable[Union[str, ResolveEvaluatorFunc]],
) -> Tuple[
Dict[PermName, Dict[UserType, Evaluator]],
Dict[PermName, bool],
Set[str],
Set[str]
]:
"""
:param file_paths: Path to the CSV files to read.
:resolve_permission_name: the settings.CSV_PERMISSIONS_RESOLVE_PERM_NAME setting.
:resolve_evaluators: the settings.CSV_PERMISSIONS_RESOLVE_EVALUATORS setting.
:return: A tuple of:
- dictionary mapping the permissions for each UserType to a function determining if the user has access.
- dictionary mapping the permission to a boolean indicating whether the permission is object level or global level.
- set of user types
- set of permissions
"""
if resolve_permission_name is None:
resolve_permission_name = default_resolve_perm_name
else:
resolve_permission_name = import_string(resolve_permission_name)
resolve_evaluators = tuple(
import_string(resolve_evaluator) if isinstance(resolve_evaluator, str) else resolve_evaluator
for resolve_evaluator
in resolve_evaluators
)
permission_is_global: Dict[PermName, bool] = {}
permission_is_global_source_csv: Dict[PermName, Path] = {}
known_user_types: Set[UserType] = set()
known_perms: Set[PermName] = set()
permission_to_user_type_to_unresolved: Dict[PermName, Dict[UserType, UnresolvedEvaluator]] = {}
for file_path in file_paths:
file_permission_is_global, new_permission_to_user_type_to_unresolved, user_types = \
_parse_csv(file_path, resolve_permission_name)
# merge global list of known user types/permissions
known_user_types.update(set(user_types))
known_perms.update(set(file_permission_is_global.keys()))
# merge is_global settings
for permission, is_global in file_permission_is_global.items():
if permission in permission_is_global and permission_is_global[permission] != is_global:
# we don't specifically keep track of which previous file set the is_global;
# look back through all of the unresolved permissions to find where it came from
# (this is slowish but only happens in the failure case)
raise ValueError(
f"'Is Global' for {permission} in {file_path} is inconsistent "
f"with a previous CSV file ({permission_is_global_source_csv[permission]})"
)
permission_is_global.update(file_permission_is_global)
permission_is_global_source_csv.update({perm: file_path for perm in file_permission_is_global.keys()})
# merge unresolved permissions
for permission, new_user_type_to_unresolved in new_permission_to_user_type_to_unresolved.items():
if permission not in permission_to_user_type_to_unresolved:
permission_to_user_type_to_unresolved[permission] = {}
for user_type, new_unresolved in new_user_type_to_unresolved.items():
if user_type not in permission_to_user_type_to_unresolved[permission]:
permission_to_user_type_to_unresolved[permission][user_type] = new_unresolved
else:
# both the new and an older CSV file include this cell
existing_unresolved = permission_to_user_type_to_unresolved[permission][user_type]
if new_unresolved == existing_unresolved:
# they are the same so do nothing (leaves the old one in place)
pass
elif existing_unresolved.evaluator_name == "":
# old CSV cell was empty, use new one
permission_to_user_type_to_unresolved[permission][user_type] = new_unresolved
elif new_unresolved.evaluator_name == "":
# new CSV cell is empty, use old one
pass
else:
# they were not the same and neither was empty. This means they're inconsistent
raise ValueError(
f"Permission {permission} for user type {user_type} in "
f"{file_path} is inconsistent with a previous CSV file "
f"({existing_unresolved.source_csv})"
)
# now take the partially resolved functions and resolve them
permission_to_user_type_to_evaluator: Dict[PermName, Dict[UserType, Evaluator]] = {}
for permission, user_type_to_unresolved in permission_to_user_type_to_unresolved.items():
if permission not in permission_to_user_type_to_evaluator:
permission_to_user_type_to_evaluator[permission] = {}
for user_type, detail in user_type_to_unresolved.items():
try:
for resolve_evaluator in resolve_evaluators:
evaluator = resolve_evaluator(detail)
if evaluator is not None:
permission_to_user_type_to_evaluator[permission][user_type] = evaluator
break
else:
raise ValueError(f"Could not resolve {permission} for {user_type} to anything")
except Exception as e:
raise RuntimeError(f"Error resolving {permission} for {user_type}: {detail.evaluator_name} ({e})") from e
return permission_to_user_type_to_evaluator, permission_is_global, known_user_types, known_perms
# note that django creates a new instance of an auth backend for every permission check!
class CSVPermissionsBackend:
permission_lookup: Dict[PermName, Dict[UserType, Evaluator]]
permission_is_global: Dict[PermName, bool]
known_user_types: Set[UserType]
known_perms: Set[PermName]
def __init__(self):
try:
permissions_paths = settings.CSV_PERMISSIONS_PATHS
except AttributeError:
try:
settings.CSV_PERMISSIONS_PATHS = (settings.CSV_PERMISSIONS_PATH,)
except AttributeError:
raise ImproperlyConfigured("csv_permissions requires settings.CSV_PERMISSIONS_PATHS to be set")
else:
permissions_paths = settings.CSV_PERMISSIONS_PATHS
del settings.CSV_PERMISSIONS_PATH
# make sure it's immutable so that it's hashable and _resolve_functions() can have @lru_cache() applied
if not isinstance(permissions_paths, tuple):
if isinstance(permissions_paths, (str, Path)):
raise ImproperlyConfigured("settings.CSV_PERMISSIONS_PATHS should be an iterable of paths")
permissions_paths = tuple(permissions_paths)
settings.CSV_PERMISSIONS_PATHS = permissions_paths
try:
resolve_perm_name = settings.CSV_PERMISSIONS_RESOLVE_PERM_NAME
except AttributeError:
try:
settings.CSV_PERMISSIONS_RESOLVE_PERM_NAME = settings.CSV_PERMISSIONS_RESOLVE_RULE_NAME
except AttributeError:
resolve_perm_name = None
else:
warnings.warn(
"settings.CSV_PERMISSIONS_RESOLVE_RULE_NAME is deprecated in favor of settings.CSV_PERMISSIONS_RESOLVE_PERM_NAME",
DeprecationWarning
)
resolve_perm_name = settings.CSV_PERMISSIONS_RESOLVE_RULE_NAME
try:
resolve_evaluators = settings.CSV_PERMISSIONS_RESOLVE_EVALUATORS
except AttributeError:
raise ImproperlyConfigured(
'settings.CSV_PERMISSIONS_RESOLVE_EVALUATORS must be defined. '
'For legacy 0.1.0 compatibility use "csv_permissions.legacy.legacy_resolve_evaluator".'
)
else:
if isinstance(resolve_evaluators, str):
resolve_evaluators = import_string(resolve_evaluators)
resolve_evaluators = tuple(resolve_evaluators)
self.permission_lookup, self.permission_is_global, self.known_user_types, self.known_perms = _resolve_functions(
permissions_paths,
resolve_perm_name,
resolve_evaluators,
)
def authenticate(self, request: HttpRequest, username: Optional[str] = None, password: Optional[str] = None):
return None
def is_global_perm(self, perm: str) -> bool:
try:
return self.permission_is_global[perm]
except KeyError as ke:
raise ValueError(f"Permission {perm} is not known") from ke
def has_perm(self, user: Model, perm: str, obj: Model) -> bool:
if user is None:
return False
get_user_type = getattr(settings, 'CSV_PERMISSIONS_GET_USER_TYPE', default_get_user_type)
if isinstance(get_user_type, str):
settings.CSV_PERMISSIONS_GET_USER_TYPE = import_string(settings.CSV_PERMISSIONS_GET_USER_TYPE)
get_user_type = settings.CSV_PERMISSIONS_GET_USER_TYPE
user_type = get_user_type(user)
if user_type is None:
# if there is no user_type then it's probably an AnonymousUser, but might also be a
# user using a different permissions backend; either way they're not covered by csv_permissions
return False
if getattr(settings, "CSV_PERMISSIONS_STRICT", False):
if perm not in self.known_perms:
raise LookupError(f"Permission {repr(perm)} is not known")
if user_type not in self.known_user_types:
raise LookupError(f"User Type {repr(user_type)} is not known")
try:
func = self.permission_lookup[perm][user_type]
except KeyError:
# If we get here it means that
# - the permission/user type is not known at all and CSV_PERMISSIONS_STRICT is not set
# or
# - the permission & user types are known but because there are multiple CSV files that
# particular combination doesn't appear in any CSV file
#
# in either case we allow django to try other backends
return False
return func(user, obj)
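# Minimal wiring sketch (illustrative only; the backend's import path below is an
# assumption, everything else mirrors the settings read in __init__ above):
#
#   AUTHENTICATION_BACKENDS = [
#       "csv_permissions.permissions.CSVPermissionsBackend",
#       "django.contrib.auth.backends.ModelBackend",
#   ]
#   CSV_PERMISSIONS_PATHS = (BASE_DIR / "permissions.csv",)
#   CSV_PERMISSIONS_RESOLVE_EVALUATORS = "csv_permissions.legacy.legacy_resolve_evaluator"
#   # optional: CSV_PERMISSIONS_RESOLVE_PERM_NAME, CSV_PERMISSIONS_GET_USER_TYPE,
#   # CSV_PERMISSIONS_STRICT
#
# With that in place, user.has_perm("shop.view_product", obj) is answered by has_perm() above.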
|
py | 1a43c2dc2e65fa9d7e44439a311b481fc945c241 | # Assign the value 1 to the variable var_teste
var_teste = 1
# Print the variable's value
print(var_teste)
# Assign the value 2 to the variable var_teste
var_teste = 2
# Print the variable's value
print(var_teste)
# Show the variable's data type
type(var_teste)
# Assign the value 9.5 to the variable var_teste
var_teste = 9.5
# Show the variable's data type
type(var_teste)
# ## Multiple Declaration
pessoa1, pessoa2, pessoa3 = "Maria", "José", "Tobias"
fruta1 = fruta2 = fruta3 = "Laranja"
# ## You can use letters, numbers, and underscores (but names cannot start with a number)
x1 = 50
# ## Variables assigned to other variables and operator precedence
largura = 2
altura = 4
area = largura * altura
print(area)
perimetro = 2 * largura + 2 * altura
print(perimetro)
perimetro = 2 * (largura + 2) * altura
print(perimetro)
idade1 = 25
idade2 = 35
idade1 + idade2
idade2 - idade1
idade2 * idade1
idade2 / idade1
idade2 % idade1
# ## Variable Concatenation
nome = "Steve"
sobrenome = "Jobs"
fullName = nome + " " + sobrenome
print(fullName)
# script developed based on DSA's course material.
|
py | 1a43c37f8491cfff2b85541dba25d3b6399d6f8c | from . import register_plugin
@register_plugin
class Plugin:
def hello(self):
print("Hello from < Class >")
@register_plugin
def hello():
print("Hello from < Function >")
|
py | 1a43c44d39e87d19f3f432dbf0beb19a225a5388 | # Copyright 2015 Objectif Libre
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import logging
from django.utils.text import normalize_newlines
from django.utils.translation import ugettext_lazy as _
from horizon import forms
from cloudkittydashboard.api import cloudkitty as api
LOG = logging.getLogger(__name__)
class CreateScriptForm(forms.SelfHandlingForm):
help_text = _('Create a new rating script.')
name = forms.CharField(label=_("Name"))
source_choices = [('raw', _('Direct Input')),
('file', _('File'))]
script_source = forms.ChoiceField(
label=_('Rating Script Source'),
choices=source_choices,
widget=forms.Select(attrs={
'class': 'switchable',
'data-slug': 'scriptsource'}))
script_help = _("A script or set of python commands to modify rating "
"calculations.")
script_upload = forms.FileField(
label=_('Script File'),
help_text=script_help,
widget=forms.FileInput(attrs={
'class': 'switched',
'data-switch-on': 'scriptsource',
'data-scriptsource-file': _('Script File')}),
required=False)
script_data = forms.CharField(
label=_('Script Data'),
help_text=script_help,
widget=forms.widgets.Textarea(attrs={
'class': 'switched',
'data-switch-on': 'scriptsource',
'data-scriptsource-raw': _('Script Data')}),
required=False)
class Meta(object):
name = _('Create Script')
def clean(self):
cleaned = super(CreateScriptForm, self).clean()
files = self.request.FILES
script = self.clean_uploaded_files('script', files)
if script is not None:
cleaned['script_data'] = script
return cleaned
def clean_uploaded_files(self, prefix, files):
upload_str = prefix + "_upload"
has_upload = upload_str in files
if has_upload:
upload_file = files[upload_str]
log_script_name = upload_file.name
LOG.info('got upload %s' % log_script_name)
script = upload_file.read()
if script:
try:
normalize_newlines(script)
except Exception as e:
msg = _('There was a problem parsing the'
' %(prefix)s: %(error)s')
msg = msg % {'prefix': prefix, 'error': e}
raise forms.ValidationError(msg)
return script
else:
return None
def handle(self, request, data):
name = data['name']
LOG.info('Creating script with name %s' % (name))
ck_client = api.cloudkittyclient(request)
return ck_client.rating.pyscripts.create_script(
name=name,
data=data['script_data'])
class EditScriptForm(CreateScriptForm):
script_id = forms.CharField(label=_("Script ID"),
widget=forms.TextInput(
attrs={'readonly': 'readonly'}))
fields_order = ['script_id', 'name', 'script_source', 'script_upload',
'script_data']
class Meta(object):
name = _("Update Script")
def handle(self, request, data):
script_id = self.initial['script_id']
LOG.info('Updating script with id %s' % (script_id))
ck_client = api.cloudkittyclient(request)
return ck_client.rating.pyscripts.update_script(
script_id=script_id, name=data['name'], data=data['script_data'])
|
py | 1a43c5184e6e3e1952fc925a30748cac197afe5b | """
xterm terminal info
Since most of the Windows virtual processing schemes are based on xterm,
this file is intended to be sourced and includes the man page descriptions
Most of this information came from the terminfo man pages, part of ncurses
More information on ncurses can be found at:
https://www.gnu.org/software/ncurses/ncurses.html
The values are as reported by infocmp on Fedora 30 with ncurses 6.1
"""
# pylint: disable=wrong-spelling-in-comment,line-too-long
# flake8: noqa: E501
BOOL_CAPS = [
'am', # (auto_right_margin) terminal has automatic margins
'bce', # (back_color_erase) screen erased with background color
# 'bw', # (auto_left_margin) cub1 wraps from column 0 to last column
# 'ccc', # (can_change) terminal can re-define existing colors
# 'chts', # (hard_cursor) cursor is hard to see
# 'cpix', # (cpi_changes_res) changing character pitch changes resolution
# 'crxm', # (cr_cancels_micro_mode) using cr turns off micro mode
# 'daisy', # (has_print_wheel) printer needs operator to change character set
# 'da', # (memory_above) display may be retained above the screen
# 'db', # (memory_below) display may be retained below the screen
# 'eo', # (erase_overstrike) can erase overstrikes with a blank
# 'eslok', # (status_line_esc_ok) escape can be used on the status line
# 'gn', # (generic_type) generic line type
# 'hc', # (hard_copy) hardcopy terminal
# 'hls', # (hue_lightness_saturation) terminal uses only HLS color notation (Tektronix)
# 'hs', # (has_status_line) has extra status line
# 'hz', # (tilde_glitch) cannot print ~'s (Hazeltine)
# 'in', # (insert_null_glitch) insert mode distinguishes nulls
'km', # (has_meta_key) Has a meta key (i.e., sets 8th-bit)
# 'lpix', # (lpi_changes_res) changing line pitch changes resolution
'mc5i', # (prtr_silent) printer will not echo on screen
'mir', # (move_insert_mode) safe to move while in insert mode
'msgr', # (move_standout_mode) safe to move while in standout mode
# 'ndscr', # (non_dest_scroll_region) scrolling region is non-destructive
'npc', # (no_pad_char) pad character does not exist
# 'nrrmc', # (non_rev_rmcup) smcup does not reverse rmcup
# 'nxon', # (needs_xon_xoff) padding will not work, xon/xoff required
# 'os', # (over_strike) terminal can overstrike
# 'sam', # (semi_auto_right_margin) printing in last column causes cr
# 'ul', # (transparent_underline) underline character overstrikes
'xenl', # (eat_newline_glitch) newline ignored after 80 cols (concept)
# 'xhpa', # (col_addr_glitch) only positive motion for hpa/mhpa caps
# 'xhp', # (ceol_standout_glitch) standout not erased by overwriting (hp)
# 'xon', # (xon_xoff) terminal uses xon/xoff handshaking
# 'xsb', # (no_esc_ctlc) beehive (f1=escape, f2=ctrl C)
# 'xt', # (dest_tabs_magic_smso) tabs destructive, magic so char (t1061)
# 'xvpa', # (row_addr_glitch) only positive motion for vpa/mvpa caps
]
NUM_CAPS = {
# 'bitwin': 0, # (bit_image_entwining) number of passes for each bit-image row
# 'bitype': 0, # (bit_image_type) type of bit-image device
# 'btns': 0, # (buttons) number of buttons on mouse
# 'bufsz': 0, # (buffer_capacity) numbers of bytes buffered before printing
'colors': 8, # (max_colors) maximum number of colors on screen
'cols': 80, # (columns) number of columns in a line
# 'cps': 0, # (print_rate) print rate in characters per second
'it': 8, # (init_tabs) tabs initially every # spaces
# 'lh': 0, # (label_height) rows in each label
'lines': 24, # (lines) number of lines on screen or page
# 'lm': 0, # (lines_of_memory) lines of memory if > line. 0 means varies
# 'lw': 0, # (label_width) columns in each label
# 'ma': 0, # (max_attributes) maximum combined attributes terminal can handle
# 'maddr': 0, # (max_micro_address) maximum value in micro_..._address
# 'mcs': 0, # (micro_col_size) character step size when in micro mode
# 'mjump': 0, # (max_micro_jump) maximum value in parm_..._micro
# 'mls': 0, # (micro_line_size) line step size when in micro mode
# 'ncv': 0, # (no_color_video) video attributes that cannot be used with colors
# 'nlab': 0, # (num_labels) number of labels on screen
# 'npins': 0, # (number_of_pins) numbers of pins in print-head
# 'orc': 0, # (output_res_char) horizontal resolution in units per line
# 'orhi': 0, # (output_res_horz_inch) horizontal resolution in units per inch
# 'orl': 0, # (output_res_line) vertical resolution in units per line
# 'orvi': 0, # (output_res_vert_inch) vertical resolution in units per inch
'pairs': 64, # (max_pairs) maximum number of color-pairs on the screen
# 'pb': 0, # (padding_baud_rate) lowest baud rate where padding needed
# 'spinh': 0, # (dot_horz_spacing) spacing of dots horizontally in dots per inch
# 'spinv': 0, # (dot_vert_spacing) spacing of pins vertically in pins per inch
# 'vt': 0, # (virtual_terminal) virtual terminal number (CB/unix)
# 'widcs': 0, # (wide_char_size) character step size when in double wide mode
# 'wnum': 0, # (maximum_windows) maximum number of definable windows
# 'wsl': 0, # (width_status_line) number of columns in status line
# 'xmc': 0, # (magic_cookie_glitch) number of blank characters left by smso or rmso
}
STR_CAPS = {
'acsc': b'``aaffggiijjkkllmmnnooppqqrrssttuuvvwwxxyyzz{{||}}~~', # (acs_chars) graphics charset pairs, based on vt100
'bel': b'^G', # (bell) audible signal (bell) (P)
# 'bicr': b'', # (bit_image_carriage_return) Move to beginning of same row
# 'binel': b'', # (bit_image_newline) Move to next row of the bit image
# 'birep': b'', # (bit_image_repeat) Repeat bit image cell #1 #2 times
'blink': b'\x1b[5m', # (enter_blink_mode) turn on blinking
'bold': b'\x1b[1m', # (enter_bold_mode) turn on bold (extra bright) mode
'cbt': b'\x1b[Z', # (back_tab) back tab (P)
# 'chr': b'', # (change_res_horz) Change horizontal resolution to #1
'civis': b'\x1b[?25l', # (cursor_invisible) make cursor invisible
'clear': b'\x1b[H\x1b[2J', # (clear_screen) clear screen and home cursor (P*)
# 'cmdch': b'', # (command_character) terminal settable cmd character in prototype !?
'cnorm': b'\x1b[?12l\x1b[?25h', # (cursor_normal) make cursor appear normal (undo civis/cvvis)
# 'colornm': b'', # (color_names) Give name for color #1
# 'cpi': b'', # (change_char_pitch) Change number of characters per inch to #1
'cr': b'\r', # (carriage_return) carriage return (P*) (P*)
# 'csin': b'', # (code_set_init) Init sequence for multiple codesets
# 'csnm': b'', # (char_set_names) Produce #1'th item from list of character set names
'csr': b'\x1b[%i%p1%d;%p2%dr', # (change_scroll_region) change region to line #1 to line #2 (P)
'cub1': b'^H', # (cursor_left) move left one space
'cub': b'\x1b[%p1%dD', # (parm_left_cursor) move #1 characters to the left (P)
'cud1': b'\n', # (cursor_down) down one line
'cud': b'\x1b[%p1%dB', # (parm_down_cursor) down #1 lines (P*)
'cuf1': b'\x1b[C', # (cursor_right) non-destructive space (move right one space)
'cuf': b'\x1b[%p1%dC', # (parm_right_cursor) move #1 characters to the right (P*)
'cup': b'\x1b[%i%p1%d;%p2%dH', # (cursor_address) move to row #1 columns #2
'cuu1': b'\x1b[A', # (cursor_up) up one line
'cuu': b'\x1b[%p1%dA', # (parm_up_cursor) up #1 lines (P*)
# 'cvr': b'', # (change_res_vert) Change vertical resolution to #1
'cvvis': b'\x1b[?12;25h', # (cursor_visible) make cursor very visible
# 'cwin': b'', # (create_window) define a window #1 from #2,#3 to #4,#5
'dch1': b'\x1b[P', # (delete_character) delete character (P*)
'dch': b'\x1b[%p1%dP', # (parm_dch) delete #1 characters (P*)
# 'dclk': b'', # (display_clock) display clock
# 'defbi': b'', # (define_bit_image_region) Define rectangular bit image region
# 'defc': b'', # (define_char) Define a character #1, #2 dots wide, descender #3
# 'devt': b'', # (device_type) Indicate language/codeset support
# 'dial': b'', # (dial_phone) dial number #1
'dim': b'\x1b[2m', # (enter_dim_mode) turn on half-bright mode
# 'dispc': b'', # (display_pc_char) Display PC character #1
'dl1': b'\x1b[M', # (delete_line) delete line (P*)
'dl': b'\x1b[%p1%dM', # (parm_delete_line) delete #1 lines (P*)
# 'docr': b'', # (these_cause_cr) Printing any of these characters causes CR
# 'dsl': b'', # (dis_status_line) disable status line
'ech': b'\x1b[%p1%dX', # (erase_chars) erase #1 characters (P)
'ed': b'\x1b[J', # (clr_eos) clear to end of screen (P*)
'el1': b'\x1b[1K', # (clr_bol) Clear to beginning of line
'el': b'\x1b[K', # (clr_eol) clear to end of line (P)
# 'enacs': b'', # (ena_acs) enable alternate char set
# 'endbi': b'', # (end_bit_image_region) End a bit-image region
# 'ff': b'', # (form_feed) hardcopy terminal page eject (P*)
'flash': b'\x1b[?5h$<100/>\x1b[?5l', # (flash_screen) visible bell (may not move cursor)
# 'fln': b'', # (label_format) label format
# 'fsl': b'', # (from_status_line) return from status line
# 'getm': b'', # (get_mouse) Curses should get button events, parameter #1 not documented.
# 'hd': b'', # (down_half_line) half a line down
'home': b'\x1b[H', # (cursor_home) home cursor (if no cup)
# 'hook': b'', # (flash_hook) flash switch hook
'hpa': b'\x1b[%i%p1%dG', # (column_address) horizontal position #1, absolute (P)
'ht': b'^I', # (tab) tab to next 8-space hardware tab stop
'hts': b'\x1bH', # (set_tab) set a tab in every row, current columns
# 'hu': b'', # (up_half_line) half a line up
# 'hup': b'', # (hangup) hang-up phone
# 'ich1': b'', # (insert_character) insert character (P)
'ich': b'\x1b[%p1%d@', # (parm_ich) insert #1 characters (P*)
# 'if': b'', # (init_file) name of initialization file
'il1': b'\x1b[L', # (insert_line) insert line (P*)
'il': b'\x1b[%p1%dL', # (parm_insert_line) insert #1 lines (P*)
'ind': b'\n', # (scroll_forward) scroll text up (P)
'indn': b'\x1b[%p1%dS', # (parm_index) scroll forward #1 lines (P)
# 'initc': b'', # (initialize_color) initialize color #1 to (#2,#3,#4)
# 'initp': b'', # (initialize_pair) Initialize color pair #1 to fg=(#2,#3,#4), bg=(#5,#6,#7)
'invis': b'\x1b[8m', # (enter_secure_mode) turn on blank mode (characters invisible)
# 'ip': b'', # (insert_padding) insert padding after inserted character
# 'iprog': b'', # (init_prog) path name of program for initialization
# 'is1': b'', # (init_1string) initialization string
'is2': b'\x1b[!p\x1b[?3;4l\x1b[4l\x1b>', # (init_2string) initialization string
# 'is3': b'', # (init_3string) initialization string
# 'ka1': b'', # (key_a1) upper left of keypad
# 'ka3': b'', # (key_a3) upper right of keypad
'kb2': b'\x1bOE', # (key_b2) center of keypad
# 'kbeg': b'', # (key_beg) begin key
# 'kBEG': b'', # (key_sbeg) shifted begin key
'kbs': b'^?', # (key_backspace) backspace key
# 'kc1': b'', # (key_c1) lower left of keypad
# 'kc3': b'', # (key_c3) lower right of keypad
# 'kcan': b'', # (key_cancel) cancel key
# 'kCAN': b'', # (key_scancel) shifted cancel key
'kcbt': b'\x1b[Z', # (key_btab) back-tab key
# 'kclo': b'', # (key_close) close key
# 'kclr': b'', # (key_clear) clear-screen or erase key
# 'kcmd': b'', # (key_command) command key
# 'kCMD': b'', # (key_scommand) shifted command key
# 'kcpy': b'', # (key_copy) copy key
# 'kCPY': b'', # (key_scopy) shifted copy key
# 'kcrt': b'', # (key_create) create key
# 'kCRT': b'', # (key_screate) shifted create key
# 'kctab': b'', # (key_ctab) clear-tab key
'kcub1': b'\x1bOD', # (key_left) left-arrow key
'kcud1': b'\x1bOB', # (key_down) down-arrow key
'kcuf1': b'\x1bOC', # (key_right) right-arrow key
'kcuu1': b'\x1bOA', # (key_up) up-arrow key
'kDC': b'\x1b[3;2~', # (key_sdc) shifted delete- character key
'kdch1': b'\x1b[3~', # (key_dc) delete-character key
# 'kdl1': b'', # (key_dl) delete-line key
# 'kDL': b'', # (key_sdl) shifted delete-line key
# 'ked': b'', # (key_eos) clear-to-end-of- screen key
# 'kel': b'', # (key_eol) clear-to-end-of-line key
'kEND': b'\x1b[1;2F', # (key_send) shifted end key
'kend': b'\x1bOF', # (key_end) end key
'kent': b'\x1bOM', # (key_enter) enter/send key
# 'kEOL': b'', # (key_seol) shifted clear-to- end-of-line key
# 'kext': b'', # (key_exit) exit key
# 'kEXT': b'', # (key_sexit) shifted exit key
# 'kf0': b'', # (key_f0) F0 function key
'kf1': b'\x1bOP', # (key_f1) F1 function key
'kf2': b'\x1bOQ', # (key_f2) F2 function key
'kf3': b'\x1bOR', # (key_f3) F3 function key
'kf4': b'\x1bOS', # (key_f4) F4 function key
'kf5': b'\x1b[15~', # (key_f5) F5 function key
'kf6': b'\x1b[17~', # (key_f6) F6 function key
'kf7': b'\x1b[18~', # (key_f7) F7 function key
'kf8': b'\x1b[19~', # (key_f8) F8 function key
'kf9': b'\x1b[20~', # (key_f9) F9 function key
'kf10': b'\x1b[21~', # (key_f10) F10 function key
'kf11': b'\x1b[23~', # (key_f11) F11 function key
'kf12': b'\x1b[24~', # (key_f12) F12 function key
'kf13': b'\x1b[1;2P', # (key_f13) F13 function key
'kf14': b'\x1b[1;2Q', # (key_f14) F14 function key
'kf15': b'\x1b[1;2R', # (key_f15) F15 function key
'kf16': b'\x1b[1;2S', # (key_f16) F16 function key
'kf17': b'\x1b[15;2~', # (key_f17) F17 function key
'kf18': b'\x1b[17;2~', # (key_f18) F18 function key
'kf19': b'\x1b[18;2~', # (key_f19) F19 function key
'kf20': b'\x1b[19;2~', # (key_f20) F20 function key
'kf21': b'\x1b[20;2~', # (key_f21) F21 function key
'kf22': b'\x1b[21;2~', # (key_f22) F22 function key
'kf23': b'\x1b[23;2~', # (key_f23) F23 function key
'kf24': b'\x1b[24;2~', # (key_f24) F24 function key
'kf25': b'\x1b[1;5P', # (key_f25) F25 function key
'kf26': b'\x1b[1;5Q', # (key_f26) F26 function key
'kf27': b'\x1b[1;5R', # (key_f27) F27 function key
'kf28': b'\x1b[1;5S', # (key_f28) F28 function key
'kf29': b'\x1b[15;5~', # (key_f29) F29 function key
'kf30': b'\x1b[17;5~', # (key_f30) F30 function key
'kf31': b'\x1b[18;5~', # (key_f31) F31 function key
'kf32': b'\x1b[19;5~', # (key_f32) F32 function key
'kf33': b'\x1b[20;5~', # (key_f33) F33 function key
'kf34': b'\x1b[21;5~', # (key_f34) F34 function key
'kf35': b'\x1b[23;5~', # (key_f35) F35 function key
'kf36': b'\x1b[24;5~', # (key_f36) F36 function key
'kf37': b'\x1b[1;6P', # (key_f37) F37 function key
'kf38': b'\x1b[1;6Q', # (key_f38) F38 function key
'kf39': b'\x1b[1;6R', # (key_f39) F39 function key
'kf40': b'\x1b[1;6S', # (key_f40) F40 function key
'kf41': b'\x1b[15;6~', # (key_f41) F41 function key
'kf42': b'\x1b[17;6~', # (key_f42) F42 function key
'kf43': b'\x1b[18;6~', # (key_f43) F43 function key
'kf44': b'\x1b[19;6~', # (key_f44) F44 function key
'kf45': b'\x1b[20;6~', # (key_f45) F45 function key
'kf46': b'\x1b[21;6~', # (key_f46) F46 function key
'kf47': b'\x1b[23;6~', # (key_f47) F47 function key
'kf48': b'\x1b[24;6~', # (key_f48) F48 function key
'kf49': b'\x1b[1;3P', # (key_f49) F49 function key
'kf50': b'\x1b[1;3Q', # (key_f50) F50 function key
'kf51': b'\x1b[1;3R', # (key_f51) F51 function key
'kf52': b'\x1b[1;3S', # (key_f52) F52 function key
'kf53': b'\x1b[15;3~', # (key_f53) F53 function key
'kf54': b'\x1b[17;3~', # (key_f54) F54 function key
'kf55': b'\x1b[18;3~', # (key_f55) F55 function key
'kf56': b'\x1b[19;3~', # (key_f56) F56 function key
'kf57': b'\x1b[20;3~', # (key_f57) F57 function key
'kf58': b'\x1b[21;3~', # (key_f58) F58 function key
'kf59': b'\x1b[23;3~', # (key_f59) F59 function key
'kf60': b'\x1b[24;3~', # (key_f60) F60 function key
'kf61': b'\x1b[1;4P', # (key_f61) F61 function key
'kf62': b'\x1b[1;4Q', # (key_f62) F62 function key
'kf63': b'\x1b[1;4R', # (key_f63) F63 function key
# 'kfnd': b'', # (key_find) find key
# 'kFND': b'', # (key_sfind) shifted find key
# 'khlp': b'', # (key_help) help key
# 'kHLP': b'', # (key_shelp) shifted help key
'kHOM': b'\x1b[1;2H', # (key_shome) shifted home key
'khome': b'\x1bOH', # (key_home) home key
# 'khts': b'', # (key_stab) set-tab key
'kIC': b'\x1b[2;2~', # (key_sic) shifted insert- character key
'kich1': b'\x1b[2~', # (key_ic) insert-character key
# 'kil1': b'', # (key_il) insert-line key
'kind': b'\x1b[1;2B', # (key_sf) scroll-forward key
'kLFT': b'\x1b[1;2D', # (key_sleft) shifted left-arrow key
# 'kll': b'', # (key_ll) lower-left key (home down)
'kmous': b'\x1b[<', # (key_mouse) Mouse event has occurred
# 'kmov': b'', # (key_move) move key
# 'kMOV': b'', # (key_smove) shifted move key
# 'kmrk': b'', # (key_mark) mark key
# 'kmsg': b'', # (key_message) message key
# 'kMSG': b'', # (key_smessage) shifted message key
'knp': b'\x1b[6~', # (key_npage) next-page key
# 'knxt': b'', # (key_next) next key
'kNXT': b'\x1b[6;2~', # (key_snext) shifted next key
# 'kopn': b'', # (key_open) open key
# 'kopt': b'', # (key_options) options key
# 'kOPT': b'', # (key_soptions) shifted options key
'kpp': b'\x1b[5~', # (key_ppage) previous-page key
# 'kprt': b'', # (key_print) print key
# 'kPRT': b'', # (key_sprint) shifted print key
# 'kprv': b'', # (key_previous) previous key
'kPRV': b'\x1b[5;2~', # (key_sprevious) shifted previous key
# 'krdo': b'', # (key_redo) redo key
# 'kRDO': b'', # (key_sredo) shifted redo key
# 'kref': b'', # (key_reference) reference key
# 'kres': b'', # (key_resume) resume key
# 'kRES': b'', # (key_srsume) shifted resume key
# 'krfr': b'', # (key_refresh) refresh key
'kri': b'\x1b[1;2A', # (key_sr) scroll-backward key
'kRIT': b'\x1b[1;2C', # (key_sright) shifted right-arrow key
# 'krmir': b'', # (key_eic) sent by rmir or smir in insert mode
# 'krpl': b'', # (key_replace) replace key
# 'kRPL': b'', # (key_sreplace) shifted replace key
# 'krst': b'', # (key_restart) restart key
# 'ksav': b'', # (key_save) save key
# 'kSAV': b'', # (key_ssave) shifted save key
# 'kslt': b'', # (key_select) select key
# 'kSPD': b'', # (key_ssuspend) shifted suspend key
# 'kspd': b'', # (key_suspend) suspend key
# 'ktbc': b'', # (key_catab) clear-all-tabs key
# 'kUND': b'', # (key_sundo) shifted undo key
# 'kund': b'', # (key_undo) undo key
# 'lf0': b'', # (lab_f0) label on function key f0 if not f0
# 'lf10': b'', # (lab_f10) label on function key f10 if not f10
# 'lf1': b'', # (lab_f1) label on function key f1 if not f1
# 'lf2': b'', # (lab_f2) label on function key f2 if not f2
# 'lf3': b'', # (lab_f3) label on function key f3 if not f3
# 'lf4': b'', # (lab_f4) label on function key f4 if not f4
# 'lf5': b'', # (lab_f5) label on function key f5 if not f5
# 'lf6': b'', # (lab_f6) label on function key f6 if not f6
# 'lf7': b'', # (lab_f7) label on function key f7 if not f7
# 'lf8': b'', # (lab_f8) label on function key f8 if not f8
# 'lf9': b'', # (lab_f9) label on function key f9 if not f9
# 'll': b'', # (cursor_to_ll) last line, first column (if no cup)
# 'lpi': b'', # (change_line_pitch) Change number of lines per inch to #1
'meml': b'\x1bl', # lock memory above the cursor
'memu': b'\x1bm', # unlock memory above the cursor
'mc0': b'\x1b[i', # (print_screen) print contents of screen
'mc4': b'\x1b[4i', # (prtr_off) turn off printer
'mc5': b'\x1b[5i', # (prtr_on) turn on printer
# 'mc5p': b'', # (prtr_non) turn on printer for #1 bytes
# 'mcub1': b'', # (micro_left) Like cursor_left in micro mode
# 'mcub': b'', # (parm_left_micro) Like parm_left_cursor in micro mode
# 'mcud1': b'', # (micro_down) Like cursor_down in micro mode
# 'mcud': b'', # (parm_down_micro) Like parm_down_cursor in micro mode
# 'mcuf1': b'', # (micro_right) Like cursor_right in micro mode
# 'mcuf': b'', # (parm_right_micro) Like parm_right_cursor in micro mode
# 'mcuu1': b'', # (micro_up) Like cursor_up in micro mode
# 'mcuu': b'', # (parm_up_micro) Like parm_up_cursor in micro mode
# 'mgc': b'', # (clear_margins) clear right and left soft margins
# 'mhpa': b'', # (micro_column_address) Like column_address in micro mode
# 'minfo': b'', # (mouse_info) Mouse status information
# 'mrcup': b'', # (cursor_mem_address) memory relative cursor addressing, move to row #1 columns #2
# 'mvpa': b'', # (micro_row_address) Like row_address #1 in micro mode
# 'nel': b'', # (newline) newline (behave like cr followed by lf)
# 'oc': b'', # (orig_colors) Set all color pairs to the original ones
'op': b'\x1b[39;49m', # (orig_pair) Set default pair to its original value
# 'pad': b'', # (pad_char) padding char (instead of null)
# 'pause': b'', # (fixed_pause) pause for 2-3 seconds
# 'pctrm': b'', # (pc_term_options) PC terminal options
# 'pfkey': b'', # (pkey_key) program function key #1 to type string #2
# 'pfloc': b'', # (pkey_local) program function key #1 to execute string #2
# 'pfx': b'', # (pkey_xmit) program function key #1 to transmit string #2
# 'pfxl': b'', # (pkey_plab) Program function key #1 to type string #2 and show string #3
# 'pln': b'', # (plab_norm) program label #1 to show string #2
# 'porder': b'', # (order_of_pins) Match software bits to print-head pins
# 'prot': b'', # (enter_protected_mode) turn on protected mode
# 'pulse': b'', # (pulse) select pulse dialing
# 'qdial': b'', # (quick_dial) dial number #1 without checking
# 'rbim': b'', # (stop_bit_image) Stop printing bit image graphics
'rc': b'\x1b8', # (restore_cursor) restore cursor to position of last save_cursor
# 'rcsd': b'', # (stop_char_set_def) End definition of character set #1
'rep': b'%p1%c\x1b[%p2%{1}%-%db', # (repeat_char) repeat char #1 #2 times (P*)
# 'reqmp': b'', # (req_mouse_pos) Request mouse position
'rev': b'\x1b[7m', # (enter_reverse_mode) turn on reverse video mode
# 'rf': b'', # (reset_file) name of reset file
# 'rfi': b'', # (req_for_input) send next input char (for ptys)
'ri': b'\x1bM', # (scroll_reverse) scroll text down (P)
'rin': b'\x1b[%p1%dT', # (parm_rindex) scroll back #1 lines (P)
'ritm': b'\x1b[23m', # (exit_italics_mode) End italic mode
# 'rlm': b'', # (exit_leftward_mode) End left-motion mode
'rmacs': b'\x1b(B', # (exit_alt_charset_mode) end alternate character set (P)
'rmam': b'\x1b[?7l', # (exit_am_mode) turn off automatic margins
# 'rmclk': b'', # (remove_clock) remove clock
'rmcup': b'\x1b[?1049l\x1b[23;0;0t', # (exit_ca_mode) strings to end programs using cup
# 'rmdc': b'', # (exit_delete_mode) end delete mode
# 'rmicm': b'', # (exit_micro_mode) End micro-motion mode
'rmir': b'\x1b[4l', # (exit_insert_mode) exit insert mode
'rmkx': b'\x1b[?1l\x1b>', # (keypad_local) leave 'keyboard_transmit' mode
# 'rmln': b'', # (label_off) turn off soft labels
'rmm': b'\x1b[?1034l', # (meta_off) turn off meta mode
# 'rmp': b'', # (char_padding) like ip but when in insert mode
# 'rmpch': b'', # (exit_pc_charset_mode) Exit PC character display mode
# 'rmsc': b'', # (exit_scancode_mode) Exit PC scancode mode
'rmso': b'\x1b[27m', # (exit_standout_mode) exit standout mode
'rmul': b'\x1b[24m', # (exit_underline_mode) exit underline mode
# 'rmxon': b'', # (exit_xon_mode) turn off xon/xoff handshaking
'rs1': b'\x1bc', # (reset_1string) reset string
'rs2': b'\x1b[!p\x1b[?3;4l\x1b[4l\x1b>', # (reset_2string) reset string
# 'rs3': b'', # (reset_3string) reset string
# 'rshm': b'', # (exit_shadow_mode) End shadow-print mode
# 'rsubm': b'', # (exit_subscript_mode) End subscript mode
# 'rsupm': b'', # (exit_superscript_mode) End superscript mode
# 'rum': b'', # (exit_upward_mode) End reverse character motion
# 'rwidm': b'', # (exit_doublewide_mode) End double-wide mode
# 's0ds': b'', # (set0_des_seq) Shift to codeset 0 (EUC set 0, ASCII)
# 's1ds': b'', # (set1_des_seq) Shift to codeset 1
# 's2ds': b'', # (set2_des_seq) Shift to codeset 2
# 's3ds': b'', # (set3_des_seq) Shift to codeset 3
# 'sbim': b'', # (start_bit_image) Start printing bit image graphics
'sc': b'\x1b7', # (save_cursor) save current cursor position (P)
# 'scesa': b'', # (alt_scancode_esc) Alternate escape for scancode emulation
# 'scesc': b'', # (scancode_escape) Escape for scancode emulation
# 'sclk': b'', # (set_clock) set clock, #1 hrs #2 mins #3 secs
# 'scp': b'', # (set_color_pair) Set current color pair to #1
# 'scs': b'', # (select_char_set) Select character set, #1
# 'scsd': b'', # (start_char_set_def) Start character set definition #1, with #2 characters in the set
# 'sdrfq': b'', # (enter_draft_quality) Enter draft-quality mode
'setab': b'\x1b[4%p1%dm', # (set_a_background) Set background color to #1, using ANSI escape
'setaf': b'\x1b[3%p1%dm', # (set_a_foreground) Set foreground color to #1, using ANSI escape
'setb': b'\x1b[4%?%p1%{1}%=%t4%e%p1%{3}%=%t6%e%p1%{4}%=%t1%e%p1%{6}%=%t3%e%p1%d%;m', # (set_background) Set background color #1
# 'setcolor': b'', # (set_color_band) Change to ribbon color #1
'setf': b'\x1b[3%?%p1%{1}%=%t4%e%p1%{3}%=%t6%e%p1%{4}%=%t1%e%p1%{6}%=%t3%e%p1%d%;m', # (set_foreground) Set foreground color #1
'sgr0': b'\x1b(B\x1b[m', # (exit_attribute_mode) turn off all attributes
'sgr': b'%?%p9%t\x1b(0%e\x1b(B%;\x1b[0%?%p6%t;1%;%?%p5%t;2%;%?%p2%t;4%;%?%p1%p3%|%t;7%;%?%p4%t;5%;%?%p7%t;8%;m', # (set_attributes) define video attributes #1-#9 (PG9)
'sitm': b'\x1b[3m', # (enter_italics_mode) Enter italic mode
# 'slines': b'', # (set_page_length) Set page length to #1 lines
# 'slm': b'', # (enter_leftward_mode) Start leftward carriage motion
'smacs': b'\x1b(0', # (enter_alt_charset_mode) start alternate character set (P)
'smam': b'\x1b[?7h', # (enter_am_mode) turn on automatic margins
'smcup': b'\x1b[?1049h\x1b[22;0;0t', # (enter_ca_mode) string to start programs using cup
# 'smdc': b'', # (enter_delete_mode) enter delete mode
# 'smgb': b'', # (set_bottom_margin) Set bottom margin at current line
# 'smgbp': b'', # (set_bottom_margin_parm) Set bottom margin at line #1 or (if smgtp is not given) #2 lines from bottom
# 'smgl': b'', # (set_left_margin) set left soft margin at current column. See smgl. (ML is not in BSD termcap).
# 'smglp': b'', # (set_left_margin_parm) Set left (right) margin at column #1
# 'smglr': b'', # (set_lr_margin) Set both left and right margins to #1, #2. (ML is not in BSD termcap).
# 'smgr': b'', # (set_right_margin) set right soft margin at current column
# 'smgrp': b'', # (set_right_margin_parm) Set right margin at column #1
# 'smgtb': b'', # (set_tb_margin) Sets both top and bottom margins to #1, #2
# 'smgt': b'', # (set_top_margin) Set top margin at current line
# 'smgtp': b'', # (set_top_margin_parm) Set top (bottom) margin at row #1
# 'smicm': b'', # (enter_micro_mode) Start micro-motion mode
'smir': b'\x1b[4h', # (enter_insert_mode) enter insert mode
'smkx': b'\x1b[?1h\x1b=', # (keypad_xmit) enter 'keyboard_transmit' mode
# 'smln': b'', # (label_on) turn on soft labels
'smm': b'\x1b[?1034h', # (meta_on) turn on meta mode (8th-bit on)
# 'smpch': b'', # (enter_pc_charset_mode) Enter PC character display mode
# 'smsc': b'', # (enter_scancode_mode) Enter PC scancode mode
'smso': b'\x1b[7m', # (enter_standout_mode) begin standout mode
'smul': b'\x1b[4m', # (enter_underline_mode) begin underline mode
# 'smxon': b'', # (enter_xon_mode) turn on xon/xoff handshaking
# 'snlq': b'', # (enter_near_letter_quality) Enter NLQ mode
# 'snrmq': b'', # (enter_normal_quality) Enter normal-quality mode
# 'sshm': b'', # (enter_shadow_mode) Enter shadow-print mode
# 'ssubm': b'', # (enter_subscript_mode) Enter subscript mode
# 'ssupm': b'', # (enter_superscript_mode) Enter superscript mode
# 'subcs': b'', # (subscript_characters) List of subscriptable characters
# 'sum': b'', # (enter_upward_mode) Start upward carriage motion
# 'supcs': b'', # (superscript_characters) List of superscriptable characters
# 'swidm': b'', # (enter_doublewide_mode) Enter double-wide mode
'tbc': b'\x1b[3g', # (clear_all_tabs) clear all tab stops (P)
# 'tone': b'', # (tone) select touch tone dialing
# 'tsl': b'', # (to_status_line) move to status line, column #1
# 'u0': b'', # (user0) User string #0
# 'u1': b'', # (user1) User string #1
# 'u2': b'', # (user2) User string #2
# 'u3': b'', # (user3) User string #3
# 'u4': b'', # (user4) User string #4
# 'u5': b'', # (user5) User string #5
'u6': b'\x1b[%i%d;%dR', # (user6) User string #6 [cursor position report (equiv. to ANSI/ECMA-48 CPR)]
'u7': b'\x1b[6n', # (user7) User string #7 [cursor position request (equiv. to VT100/ANSI/ECMA-48 DSR 6)]
'u8': b'\x1b[?%[;0123456789]c', # (user8) User string #8 [terminal answerback description]
'u9': b'\x1b[c', # (user9) User string #9 [terminal enquire string (equiv. to ANSI/ECMA-48 DA)]
# 'uc': b'', # (underline_char) underline char and move past it
'vpa': b'\x1b[%i%p1%dd', # (row_address) vertical position #1 absolute (P)
# 'wait': b'', # (wait_tone) wait for dial-tone
# 'wind': b'', # (set_window) current window is lines #1-#2 cols #3-#4
# 'wingo': b'', # (goto_window) go to window #1
# 'xoffc': b'', # (xoff_character) XOFF character
# 'xonc': b'', # (xon_character) XON character
# 'zerom': b'', # (zero_motion) No motion for subsequent character
}
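# Usage sketch (not part of the capability table above): the 'u7'/'u6' pair
# describes the ANSI cursor-position handshake, where an application writes u7
# (DSR 6) and parses the CPR reply whose shape u6 describes (ESC [ row ; col R).
# A minimal stdlib-only sketch, assuming a POSIX tty on stdin/stdout:
def _query_cursor_position():
    import os
    import re
    import sys
    import termios
    import tty

    fd = sys.stdin.fileno()
    old_attrs = termios.tcgetattr(fd)
    try:
        tty.setraw(fd)  # raw mode so the reply is neither echoed nor line-buffered
        os.write(sys.stdout.fileno(), b'\x1b[6n')  # u7: request a cursor position report
        reply = b''
        while not reply.endswith(b'R'):  # the u6-style reply is terminated by 'R'
            reply += os.read(fd, 1)
    finally:
        termios.tcsetattr(fd, termios.TCSADRAIN, old_attrs)
    row, col = (int(x) for x in re.match(rb'\x1b\[(\d+);(\d+)R', reply).groups())
    return row, col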
|
py | 1a43c5ccf50abba6ca54d880484b1b9170e30f87 | # https://github.com/openai/gym/blob/master/gym/envs/classic_control/pendulum.py
# https://mspries.github.io/jimmy_pendulum.html
#!/usr/bin/env python3
import time
import torch
import torch.multiprocessing as mp
import os, sys
print("PyTorch Version", torch.__version__)
current_path = os.path.dirname(os.path.realpath(__file__))
PROJECT_HOME = os.path.abspath(os.path.join(current_path, os.pardir, os.pardir))
if PROJECT_HOME not in sys.path:
sys.path.append(PROJECT_HOME)
from common.logger import get_logger
from rl_main import rl_utils
from common.fast_rl.rl_agent import float32_preprocessor
from common.fast_rl import actions, rl_agent, experience_single
from common.fast_rl.common import statistics, utils
from config.parameters import PARAMETERS as params
MODEL_SAVE_DIR = os.path.join(PROJECT_HOME, "out", "model_save_files")
if not os.path.exists(MODEL_SAVE_DIR):
os.makedirs(MODEL_SAVE_DIR)
os.environ['KMP_DUPLICATE_LIB_OK'] = 'True'
if torch.cuda.is_available():
device = torch.device("cuda" if params.CUDA else "cpu")
else:
device = torch.device("cpu")
my_logger = get_logger("openai_pendulum_d4pg")
DELTA_Z = (params.V_MAX - params.V_MIN) / (params.N_ATOMS - 1)
def play_func(exp_queue, env, net):
print(env.action_space.low[0], env.action_space.high[0])
action_min = env.action_space.low[0]
action_max = env.action_space.high[0]
action_selector = actions.EpsilonGreedyD4PGActionSelector(epsilon=params.EPSILON_INIT)
epsilon_tracker = actions.EpsilonTracker(
action_selector=action_selector,
eps_start=params.EPSILON_INIT,
eps_final=params.EPSILON_MIN,
eps_frames=params.EPSILON_MIN_STEP
)
agent = rl_agent.AgentD4PG(
net, n_actions=1, action_selector=action_selector,
action_min=action_min, action_max=action_max, device=device, preprocessor=float32_preprocessor
)
experience_source = experience_single.ExperienceSourceSingleEnvFirstLast(
env, agent, gamma=params.GAMMA, steps_count=params.N_STEP, step_length=-1
)
exp_source_iter = iter(experience_source)
if params.DRAW_VIZ:
stat = statistics.StatisticsForPolicyBasedRL(method="policy_gradient")
else:
stat = None
step_idx = 0
best_mean_episode_reward = 0.0
with utils.RewardTracker(params=params, frame=False, stat=stat) as reward_tracker:
while step_idx < params.MAX_GLOBAL_STEP:
            # Advance one step and put the experience into exp_queue
step_idx += 1
exp = next(exp_source_iter)
exp_queue.put(exp)
            epsilon_tracker.update(step_idx)
episode_rewards = experience_source.pop_episode_reward_lst()
if episode_rewards:
current_episode_reward = episode_rewards[0]
solved, mean_episode_reward = reward_tracker.set_episode_reward(
current_episode_reward, step_idx, epsilon=action_selector.epsilon
)
model_save_condition = [
reward_tracker.mean_episode_reward > best_mean_episode_reward,
step_idx > params.EPSILON_MIN_STEP
]
if reward_tracker.mean_episode_reward > best_mean_episode_reward:
best_mean_episode_reward = reward_tracker.mean_episode_reward
if all(model_save_condition) or solved:
rl_agent.save_model(
MODEL_SAVE_DIR, params.ENVIRONMENT_ID.value, net.__name__, net, step_idx, mean_episode_reward
)
if solved:
break
exp_queue.put(None)
def main():
mp.set_start_method('spawn')
env = rl_utils.get_environment(owner="worker", params=params)
print("env:", params.ENVIRONMENT_ID)
print("observation_space:", env.observation_space)
print("action_space:", env.action_space)
rl_algorithm = rl_utils.get_rl_algorithm(env=env, worker_id=0, logger=my_logger, params=params)
exp_queue = mp.Queue(maxsize=params.TRAIN_STEP_FREQ * 2)
play_proc = mp.Process(target=play_func, args=(exp_queue, env, rl_algorithm.model))
play_proc.start()
time.sleep(0.5)
step_idx = 0
while play_proc.is_alive():
step_idx += params.TRAIN_STEP_FREQ
exp = None
for _ in range(params.TRAIN_STEP_FREQ):
exp = exp_queue.get()
if exp is None:
play_proc.join()
break
rl_algorithm.buffer._add(exp)
if len(rl_algorithm.buffer) < params.MIN_REPLAY_SIZE_FOR_TRAIN:
continue
if exp is not None and exp.last_state is None:
for _ in range(3):
rl_algorithm.train_net(step_idx=step_idx)
if __name__ == "__main__":
main()
|
py | 1a43c681bcc46171e97aedceed607ad16bddd9c3 | #!/usr/bin/env python
import os
import numpy as np
import gippy as gp
import unittest
import gippy.test as gpt
# from nose.tools import raises
"""
Included are some tests for doing processing in NumPy instead of Gippy,
for doing speed comparisons. To see the durations of each test use:
$ nosetests test --with-timer -v
"""
class GeoRasterTests(unittest.TestCase):
""" Speed tests vs NumPy """
def setUp(self):
""" Configure options """
gp.Options.set_verbose(1)
gp.Options.set_chunksize(256.0)
def test_size(self):
""" Retrieve size and dimension in pixels """
# note that xsize and ysize are redefined in GeoRaster from
# GeoResource, thus it is tested again
geoimg = gp.GeoImage.create(xsz=500, ysz=1500)
self.assertEqual(geoimg.xsize(), 500)
self.assertEqual(geoimg.ysize(), 1500)
self.assertEqual(geoimg.size(), 1500*500)
def test_type(self):
""" Set datatype on create and verify """
geoimg = gp.GeoImage.create(dtype='uint32')
self.assertEqual(geoimg.type().string(), 'uint32')
def test_naming(self):
""" Get basename and desription """
fout = 'test-image.tif'
bname = os.path.splitext(fout)[0]
bandnames = ['red', 'green', 'blue']
geoimg = gp.GeoImage.create(fout, nb=3)
geoimg.set_bandnames(bandnames)
for i in range(0, 3):
self.assertEqual(geoimg[i].description(), bandnames[i])
self.assertEqual(geoimg[i].basename(), '%s[%s]' % (bname, i))
os.remove(fout)
# TODO - test color
def test_gain_and_offset(self):
""" Set and retrieve gain and offset """
fout = 'test-gainoffset.tif'
gains = [2.0, 3.0]
offsets = [4.0, 5.0]
geoimg = gp.GeoImage.create(fout, nb=2)
geoimg[0].set_gain(gains[0])
geoimg[1].set_gain(gains[1])
geoimg[0].set_offset(offsets[0])
geoimg[1].set_offset(offsets[1])
        # check persistence
geoimg = None
geoimg = gp.GeoImage(fout)
for i in range(0, 2):
self.assertEqual(geoimg[i].gain(), gains[i])
self.assertEqual(geoimg[i].offset(), offsets[i])
os.remove(fout)
def test_nodata(self):
""" Set nodata and retrieve """
fout = 'test-nodata.tif'
geoimg = gp.GeoImage.create(fout, xsz=100, ysz=100)
geoimg.set_nodata(1)
self.assertEqual(geoimg[0].nodata(), 1)
geoimg = None
geoimg = gp.GeoImage(fout)
self.assertEqual(geoimg[0].nodata(), 1)
# check that entire array is nan
arr = np.where(geoimg.read() == np.nan)
self.assertEqual(len(arr[0]), 0)
self.assertEqual(len(arr[1]), 0)
os.remove(fout)
def test_bandmeta(self):
""" Set metadata on band and retrieve """
fout = 'test-meta.tif'
geoimg = gp.GeoImage.create(fout, xsz=100, ysz=100)
geoimg[0].add_bandmeta('TESTKEY', 'TESTVALUE')
geoimg = None
geoimg = gp.GeoImage(fout)
self.assertEqual(geoimg[0].bandmeta('TESTKEY'), 'TESTVALUE')
os.remove(fout)
# TODO - test masking
def test_stats(self):
""" Calculate statistics using gippy """
geoimg = gpt.get_test_image()
for band in geoimg:
stats = band.stats()
mask = band.data_mask() == 1
# check against numpy
arr = band.read()
self.assertAlmostEqual(arr[mask].min(), stats[0])
self.assertAlmostEqual(arr[mask].max(), stats[1])
self.assertAlmostEqual(arr[mask].mean(), stats[2], places=2)
def test_scale(self):
""" Scale image to byte range """
geoimg = gpt.get_test_image()
for band in geoimg:
band = band.autoscale(minout=1, maxout=255, percent=2.0)
self.assertTrue(band.min() == 1)
self.assertTrue(band.max() == 255)
def test_histogram(self):
""" Calculate histogram of blank data """
geoimg = gp.GeoImage.create(xsz=10, ysz=10, nb=2)
arr = np.arange(10).reshape(1, 10) + 1
for i in range(9):
arr = np.append(arr, arr, axis=0)
geoimg[0].write(arr.astype('uint8'))
hist = geoimg[0].histogram(bins=10, normalize=False)
self.assertEqual(hist[0], 10)
self.assertEqual(hist.sum(), geoimg.size())
hist = geoimg[0].histogram(bins=10)
self.assertAlmostEqual(hist.sum(), 1.0)
self.assertAlmostEqual(hist[0], 0.1)
hist = geoimg[0].histogram(bins=10, normalize=False, cumulative=True)
self.assertAlmostEqual(hist[-1], geoimg.size())
def test_real_histogram(self):
""" Calculate histogram of real data """
geoimg = gpt.get_test_image()
hist = geoimg[0].histogram(normalize=False)
self.assertEqual(len(hist), 100)
self.assertEqual(hist.sum(), geoimg.size())
def test_sqrt(self):
""" Calculate sqrt of image """
geoimg = gpt.get_test_image().select(['red', 'green', 'swir1', 'nir'])
for band in geoimg:
vals = band.sqrt().read()
mask = band.data_mask() == 1
# check against numpy
arr = band.read()
self.assertTrue((vals[mask] == np.sqrt(arr[mask])).any())
# TODO - test processing functions
# Test filters
def test_laplacian(self):
""" Test with laplacian filter """
geoimg = gp.GeoImage.create(xsz=10, ysz=10)
arr = geoimg.read()
arr[:, 0:6] = 1
geoimg[0].write(arr)
arrout = geoimg[0].laplacian().read()
self.assertEqual(arrout[0, 5], -1.)
self.assertEqual(arrout[0, 6], 1.)
def test_convolve(self):
""" Convolve an image with a 3x3 kernel """
geoimg = gp.GeoImage.create(xsz=10, ysz=10)
arr = geoimg.read() + 1
geoimg[0].write(arr)
kernel = np.array([[1, 1, 1], [1, 1, 1], [1, 1, 1]])
arrout = geoimg[0].convolve(kernel, boundary=False).read()
self.assertEqual(arrout[0, 0], 4)
self.assertEqual(arrout[5, 5], 9)
self.assertEqual(arrout[5, 0], 6)
def test_skeletonize(self):
""" Skeletonize a binary imager """
geoimg = gp.GeoImage.create(xsz=10, ysz=10)
arr = geoimg.read()
arr[3:8, :] = 1
geoimg[0].write(arr)
arrout = geoimg[0].skeletonize().read()
def test_write(self):
""" Write arrays of different datatype """
geoimg = gp.GeoImage.create(xsz=100, ysz=100, dtype='uint8')
arr = np.ones((100, 100)).astype('uint8')
geoimg[0].write(arr)
self.assertTrue(np.array_equal(arr, geoimg[0].read()))
arr = np.ones((100, 100)).astype('float32')
geoimg[0].write(arr)
self.assertTrue(np.array_equal(arr, geoimg[0].read()))
"""
def test_invalid_args(self):
# Check that invalid arguments throw error
geoimg = gippy.GeoImage.create(xsz=100, ysz=100, dtype='uint8')
try:
geoimg[0].write('invalid arg')
geoimg[0].write([1.0, 1.0])
self.assertTrue(False)
except:
pass
"""
|
py | 1a43c7c61095ef6b028fda932541ea966dd5518b | import functools as ft
import jax
import jax.numpy as jnp
import pytest
import equinox as eqx
def test_basic():
a = [jnp.array(3), jnp.array(2)]
b = [jnp.array(4), jnp.array(5)]
index = eqx.experimental.StateIndex()
eqx.experimental.set_state(index, a)
assert eqx.experimental.get_state(index, b) == a
def test_jit():
index = eqx.experimental.StateIndex()
@eqx.filter_jit
def set_state(x):
eqx.experimental.set_state(index, x)
@eqx.filter_jit
def get_state(x):
return eqx.experimental.get_state(index, x)
a = [jnp.array(3), jnp.array(2)]
b = [jnp.array(4), jnp.array(5)]
set_state(a)
assert get_state(b) == a
def test_no_nonjaxarray():
c = 0
index = eqx.experimental.StateIndex()
with pytest.raises(TypeError):
eqx.experimental.set_state(index, c)
d = object()
index = eqx.experimental.StateIndex()
with pytest.raises(TypeError):
eqx.experimental.set_state(index, d)
e = [jnp.array(2), 0]
index = eqx.experimental.StateIndex()
with pytest.raises(TypeError):
eqx.experimental.set_state(index, e)
def test_no_set():
index = eqx.experimental.StateIndex()
a = jnp.array(2)
with pytest.raises(KeyError):
eqx.experimental.get_state(index, a)
def test_no_change_shape():
index1 = eqx.experimental.StateIndex()
index2 = eqx.experimental.StateIndex()
@jax.jit
def set_state1():
eqx.experimental.set_state(index1, jnp.array(1))
eqx.experimental.set_state(index1, jnp.array([2]))
@jax.jit
def set_state2():
eqx.experimental.set_state(index2, jnp.array(1))
eqx.experimental.set_state(index2, [jnp.array(1)])
with pytest.raises(TypeError):
set_state1()
with pytest.raises(TypeError):
set_state2()
def test_index_jittable():
index1 = eqx.experimental.StateIndex()
index2 = eqx.experimental.StateIndex()
@eqx.filter_jit
def get_state(i, x):
return eqx.experimental.get_state(i, x)
a = [jnp.array(3), jnp.array(2)]
b = [jnp.array(4), jnp.array(5)]
c = [jnp.array(6), jnp.array(9)]
d = [jnp.array(7), jnp.array(8)]
eqx.experimental.set_state(index1, a)
eqx.experimental.set_state(index2, b)
assert get_state(index1, c) == a
assert get_state(index2, d) == b
def test_vmap():
index1 = eqx.experimental.StateIndex()
index2 = eqx.experimental.StateIndex()
@ft.partial(jax.vmap, in_axes=(None, 0))
def vmap_set_state(i, x):
eqx.experimental.set_state(i, x)
@ft.partial(jax.vmap, in_axes=(None, 0))
def vmap_get_state(i, x):
return eqx.experimental.get_state(i, x)
a = jnp.array([1, 2])
b = jnp.array([3, 4])
vmap_set_state(index1, a)
assert jnp.array_equal(vmap_get_state(index1, b), a)
with pytest.raises(TypeError):
# setting state without vmap, after setting state with vmap
eqx.experimental.set_state(index1, a)
with pytest.raises(TypeError):
# getting state without vmap, after setting state with vmap
eqx.experimental.get_state(index1, b)
eqx.experimental.set_state(index2, a)
with pytest.raises(TypeError):
# setting state with vmap, after setting state without vmap
vmap_set_state(index2, a)
with pytest.raises(TypeError):
# getting state with vmap, after setting state without vmap
vmap_get_state(index2, a)
def test_multi_vmap():
index = eqx.experimental.StateIndex()
@jax.vmap
@jax.vmap
def set_state(x):
eqx.experimental.set_state(index, x)
@jax.vmap
@jax.vmap
def get_state(y):
return eqx.experimental.get_state(index, y)
@ft.partial(jax.vmap, in_axes=(1,))
@ft.partial(jax.vmap, in_axes=(0,))
def get_state_bad(y):
return eqx.experimental.get_state(index, y)
a = jnp.array([[1, 2]])
b = jnp.array([[3, 4]])
set_state(a)
assert jnp.array_equal(get_state(b), a)
with pytest.raises(TypeError):
eqx.experimental.get_state(index, b)
with pytest.raises(TypeError):
get_state_bad(b)
|
py | 1a43c930853e68be05159c4de984aa95eec78da8 | # Copyright 2020 The SQLFlow Authors. All rights reserved.
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import numpy as np
import pandas as pd
import scipy
import six
import runtime.temp_file as temp_file
import runtime.xgboost as xgboost_extended
import shap
import xgboost as xgb
from runtime import db, explainer
from runtime.dbapi.paiio import PaiIOConnection
from runtime.feature.compile import compile_ir_feature_columns
from runtime.feature.derivation import get_ordered_field_descs
from runtime.model import EstimatorType
from runtime.model.model import Model
from runtime.pai.pai_distributed import define_tf_flags
FLAGS = define_tf_flags()
def explain(datasource,
select,
explainer,
model_params,
result_table,
model,
pai_table="",
oss_model_path="",
oss_dest=None,
oss_ak=None,
oss_sk=None,
oss_endpoint=None,
oss_bucket_name=None):
"""TBD
"""
if model_params is None:
model_params = {}
summary_params = dict()
for k in model_params:
if k.startswith("summary."):
summary_key = k.replace("summary.", "")
summary_params[summary_key] = model_params[k]
bst = xgb.Booster()
if isinstance(model, six.string_types):
with temp_file.TemporaryDirectory(as_cwd=True):
model = Model.load_from_db(datasource, model)
bst.load_model("my_model")
else:
assert isinstance(model,
Model), "not supported model type %s" % type(model)
bst.load_model("my_model")
fc_map_ir = model.get_meta("features")
label_meta = model.get_meta("label").get_field_desc()[0].to_dict(
dtype_to_string=True)
field_descs = get_ordered_field_descs(fc_map_ir)
feature_column_names = [fd.name for fd in field_descs]
feature_metas = dict([(fd.name, fd.to_dict(dtype_to_string=True))
for fd in field_descs])
is_pai = True if pai_table else False
# NOTE: in the current implementation, we are generating a transform_fn
# from the COLUMN clause. The transform_fn is executed during the process
# of dumping the original data into DMatrix SVM file.
compiled_fc = compile_ir_feature_columns(fc_map_ir, EstimatorType.XGBOOST)
transform_fn = xgboost_extended.feature_column.ComposedColumnTransformer(
feature_column_names, *compiled_fc["feature_columns"])
dataset = xgb_shap_dataset(datasource, select, feature_column_names,
label_meta, feature_metas, is_pai, pai_table,
transform_fn)
if explainer == "XGBoostExplainer":
xgb_native_explain(bst, datasource, result_table)
else:
# when explainer is "" or "TreeExplainer" use SHAP by default.
shap_explain(bst,
datasource,
dataset,
summary_params,
result_table,
is_pai=is_pai,
oss_dest=oss_dest,
oss_ak=oss_ak,
oss_sk=oss_sk,
oss_endpoint=oss_endpoint,
oss_bucket_name=oss_bucket_name)
def shap_explain(booster,
datasource,
dataset,
summary_params,
result_table="",
is_pai=False,
oss_dest=None,
oss_ak=None,
oss_sk=None,
oss_endpoint=None,
oss_bucket_name=None):
tree_explainer = shap.TreeExplainer(booster)
shap_values = tree_explainer.shap_values(dataset)
if result_table:
if is_pai:
conn = PaiIOConnection.from_table(result_table)
else:
conn = db.connect_with_data_source(datasource)
        # TODO(typhoonzero): shap_values may be a list of shape
        # [3, num_samples, num_features]; use the first dimension here,
        # and figure out later when to use the other two. When shap_values
        # is not a list it can be used directly.
if isinstance(shap_values, list):
to_write = shap_values[0]
else:
to_write = shap_values
columns = list(dataset.columns)
with db.buffered_db_writer(conn, result_table, columns) as w:
for row in to_write:
w.write(list(row))
conn.close()
if summary_params.get("plot_type") == "decision":
shap_interaction_values = tree_explainer.shap_interaction_values(
dataset)
expected_value = tree_explainer.expected_value
if isinstance(shap_interaction_values, list):
shap_interaction_values = shap_interaction_values[0]
if isinstance(expected_value, list):
expected_value = expected_value[0]
plot_func = lambda: shap.decision_plot( # noqa: E731
expected_value,
shap_interaction_values,
dataset,
show=False,
feature_display_range=slice(None, -40, -1),
alpha=1)
else:
plot_func = lambda: shap.summary_plot( # noqa: E731
shap_values, dataset, show=False, **summary_params)
explainer.plot_and_save(plot_func,
oss_dest=oss_dest,
oss_ak=oss_ak,
oss_sk=oss_sk,
oss_endpoint=oss_endpoint,
oss_bucket_name=oss_bucket_name,
filename='summary')
def xgb_native_explain(booster, datasource, result_table):
if not result_table:
raise ValueError(
"XGBoostExplainer must use with INTO to output result to a table.")
gain_map = booster.get_score(importance_type="gain")
fscore_map = booster.get_fscore()
conn = db.connect_with_data_source(datasource)
all_feature_keys = list(gain_map.keys())
all_feature_keys.sort()
columns = ["feature", "fscore", "gain"]
with db.buffered_db_writer(conn, result_table, columns) as w:
for fkey in all_feature_keys:
row = [fkey, fscore_map[fkey], gain_map[fkey]]
w.write(list(row))
conn.close()
def infer_data_type(feature):
if isinstance(feature, np.ndarray):
if feature.dtype == np.float32 or feature.dtype == np.float64:
return 'float32'
elif feature.dtype == np.int32 or feature.dtype == np.int64:
return 'int64'
else:
raise ValueError('Not supported data type {}'.format(
feature.dtype))
elif isinstance(feature, (np.float32, np.float64, float)):
return 'float32'
elif isinstance(feature, (np.int32, np.int64, six.integer_types)):
return 'int64'
else:
raise ValueError('Not supported data type {}'.format(type(feature)))
def xgb_shap_dataset(datasource,
select,
feature_column_names,
label_meta,
feature_metas,
is_pai,
pai_explain_table,
transform_fn=None):
if is_pai:
        # (TODO: lhw) we may specify pai_explain_table in datasource
# and discard the condition statement here
conn = PaiIOConnection.from_table(pai_explain_table)
stream = db.db_generator(conn, None, label_meta)
else:
conn = db.connect_with_data_source(datasource)
stream = db.db_generator(conn, select, label_meta)
selected_cols = db.selected_cols(conn, select)
if transform_fn:
feature_names = transform_fn.get_feature_column_names()
else:
feature_names = feature_column_names
xs = None
dtypes = []
sizes = []
offsets = []
i = 0
for row, label in stream():
features = db.read_features_from_row(row,
selected_cols,
feature_column_names,
feature_metas,
is_xgboost=True)
if transform_fn:
features = transform_fn(features)
flatten_features = []
for j, feature in enumerate(features):
if len(feature) == 3: # convert sparse to dense
col_indices, values, dense_shape = feature
size = int(np.prod(dense_shape))
row_indices = np.zeros(shape=[col_indices.size])
sparse_matrix = scipy.sparse.csr_matrix(
(values, (row_indices, col_indices)), shape=[1, size])
values = sparse_matrix.toarray()
else:
values = feature[0]
if isinstance(values, np.ndarray):
flatten_features.extend(values.flatten().tolist())
if i == 0:
sizes.append(values.size)
dtypes.append(infer_data_type(values))
else:
flatten_features.append(values)
if i == 0:
sizes.append(1)
dtypes.append(infer_data_type(values))
# Create the column name according to the feature number
# of each column.
#
# If the column "c" contains only 1 feature, the result
# column name would be "c" too.
#
# If the column "c" contains 3 features,
# the result column name would be "c_0", "c_1" and "c_2"
if i == 0:
offsets = np.cumsum([0] + sizes)
column_names = []
for j in six.moves.range(len(offsets) - 1):
start = offsets[j]
end = offsets[j + 1]
if end - start == 1:
column_names.append(feature_names[j])
else:
for k in six.moves.range(start, end):
column_names.append('{}_{}'.format(
feature_names[j], k))
xs = pd.DataFrame(columns=column_names)
xs.loc[i] = flatten_features
i += 1
columns = xs.columns
for i, dtype in enumerate(dtypes):
for j in six.moves.range(offsets[i], offsets[i + 1]):
xs[columns[j]] = xs[columns[j]].astype(dtype)
return xs
|
py | 1a43c9e6863d8d6b59b36f5704a8acde82d3921d | from typing import List
class Solution:
    def findPeakElement(self, nums: List[int]) -> int:
        # Binary search on the slope: a peak always exists within nums[l..r].
        l, r = 0, len(nums) - 1
        while l < r:
            mid = (l + r) // 2
            if nums[mid] < nums[mid + 1]:
                # Rising slope: some peak lies strictly to the right of mid.
                l = mid + 1
            else:
                # Falling slope: a peak lies at mid or to its left.
                r = mid
return l
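# Usage sketch (hypothetical inputs, not part of the original snippet): any
# peak index is a valid answer, so the second case may return either 1 or 5.
if __name__ == "__main__":
    assert Solution().findPeakElement([1, 2, 3, 1]) == 2
    assert Solution().findPeakElement([1, 2, 1, 3, 5, 6, 4]) in (1, 5)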
|
py | 1a43caa837f335451ff3b0dacd5af6718cb2bcc3 | """
MIT License
Copyright (c) 2021 Suffyx
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in all
copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
SOFTWARE.
"""
from core import Thalia
from plugins.rooms.events import EventHandler
from plugins.rooms.commands import RoomCommands
def setup(bot: Thalia):
"""Sets up the cogs from the core module.
Parameters:
bot: core.Thalia - The bot the cog is loaded onto. Passed by discord.py
"""
bot.add_cog(EventHandler(bot))
bot.add_cog(RoomCommands(bot))
|
py | 1a43caffd77904bdc46cfa4bfc21a99a89ac0705 | # ------------------------------------------------------------
# Copyright (c) 2017-present, SeetaTech, Co.,Ltd.
#
# Licensed under the BSD 2-Clause License.
# You should have received a copy of the BSD 2-Clause License
# along with the software. If not, See,
#
# <https://opensource.org/licenses/BSD-2-Clause>
#
# ------------------------------------------------------------
"""Backends module."""
from __future__ import absolute_import as _absolute_import
from __future__ import division as _division
from __future__ import print_function as _print_function
# Modules
from dragon.vm.torch.core.backends import cudnn
__all__ = [_s for _s in dir() if not _s.startswith('_')]
|
py | 1a43cb4989a09a3df3fee1088349abb50bc010db | """Support for Nanoleaf Lights."""
from __future__ import annotations
import math
from typing import Any
from aionanoleaf import Nanoleaf
from homeassistant.components.light import (
ATTR_BRIGHTNESS,
ATTR_COLOR_TEMP,
ATTR_EFFECT,
ATTR_HS_COLOR,
ATTR_TRANSITION,
ColorMode,
LightEntity,
LightEntityFeature,
)
from homeassistant.config_entries import ConfigEntry
from homeassistant.core import HomeAssistant
from homeassistant.helpers.entity_platform import AddEntitiesCallback
from homeassistant.helpers.update_coordinator import DataUpdateCoordinator
from homeassistant.util.color import (
color_temperature_kelvin_to_mired as kelvin_to_mired,
color_temperature_mired_to_kelvin as mired_to_kelvin,
)
from . import NanoleafEntryData
from .const import DOMAIN
from .entity import NanoleafEntity
RESERVED_EFFECTS = ("*Solid*", "*Static*", "*Dynamic*")
DEFAULT_NAME = "Nanoleaf"
async def async_setup_entry(
hass: HomeAssistant, entry: ConfigEntry, async_add_entities: AddEntitiesCallback
) -> None:
"""Set up the Nanoleaf light."""
entry_data: NanoleafEntryData = hass.data[DOMAIN][entry.entry_id]
async_add_entities([NanoleafLight(entry_data.device, entry_data.coordinator)])
class NanoleafLight(NanoleafEntity, LightEntity):
"""Representation of a Nanoleaf Light."""
_attr_supported_color_modes = {ColorMode.COLOR_TEMP, ColorMode.HS}
_attr_supported_features = LightEntityFeature.EFFECT | LightEntityFeature.TRANSITION
def __init__(self, nanoleaf: Nanoleaf, coordinator: DataUpdateCoordinator) -> None:
"""Initialize the Nanoleaf light."""
super().__init__(nanoleaf, coordinator)
self._attr_unique_id = nanoleaf.serial_no
self._attr_name = nanoleaf.name
self._attr_min_mireds = math.ceil(1000000 / nanoleaf.color_temperature_max)
self._attr_max_mireds = kelvin_to_mired(nanoleaf.color_temperature_min)
@property
def brightness(self) -> int:
"""Return the brightness of the light."""
return int(self._nanoleaf.brightness * 2.55)
@property
def color_temp(self) -> int:
"""Return the current color temperature."""
return kelvin_to_mired(self._nanoleaf.color_temperature)
@property
def effect(self) -> str | None:
"""Return the current effect."""
# The API returns the *Solid* effect if the Nanoleaf is in HS or CT mode.
# The effects *Static* and *Dynamic* are not supported by Home Assistant.
# These reserved effects are implicitly set and are not in the effect_list.
# https://forum.nanoleaf.me/docs/openapi#_byoot0bams8f
return (
None if self._nanoleaf.effect in RESERVED_EFFECTS else self._nanoleaf.effect
)
@property
def effect_list(self) -> list[str]:
"""Return the list of supported effects."""
return self._nanoleaf.effects_list
@property
def icon(self) -> str:
"""Return the icon to use in the frontend, if any."""
return "mdi:triangle-outline"
@property
def is_on(self) -> bool:
"""Return true if light is on."""
return self._nanoleaf.is_on
@property
def hs_color(self) -> tuple[int, int]:
"""Return the color in HS."""
return self._nanoleaf.hue, self._nanoleaf.saturation
@property
def color_mode(self) -> ColorMode | None:
"""Return the color mode of the light."""
# According to API docs, color mode is "ct", "effect" or "hs"
# https://forum.nanoleaf.me/docs/openapi#_4qgqrz96f44d
if self._nanoleaf.color_mode == "ct":
return ColorMode.COLOR_TEMP
# Home Assistant does not have an "effect" color mode, just report hs
return ColorMode.HS
async def async_turn_on(self, **kwargs: Any) -> None:
"""Instruct the light to turn on."""
brightness = kwargs.get(ATTR_BRIGHTNESS)
hs_color = kwargs.get(ATTR_HS_COLOR)
color_temp_mired = kwargs.get(ATTR_COLOR_TEMP)
effect = kwargs.get(ATTR_EFFECT)
transition = kwargs.get(ATTR_TRANSITION)
if effect:
if effect not in self.effect_list:
raise ValueError(
f"Attempting to apply effect not in the effect list: '{effect}'"
)
await self._nanoleaf.set_effect(effect)
elif hs_color:
hue, saturation = hs_color
await self._nanoleaf.set_hue(int(hue))
await self._nanoleaf.set_saturation(int(saturation))
elif color_temp_mired:
await self._nanoleaf.set_color_temperature(
mired_to_kelvin(color_temp_mired)
)
if transition:
if brightness: # tune to the required brightness in n seconds
await self._nanoleaf.set_brightness(
int(brightness / 2.55), transition=int(kwargs[ATTR_TRANSITION])
)
else: # If brightness is not specified, assume full brightness
await self._nanoleaf.set_brightness(100, transition=int(transition))
else: # If no transition is occurring, turn on the light
await self._nanoleaf.turn_on()
if brightness:
await self._nanoleaf.set_brightness(int(brightness / 2.55))
async def async_turn_off(self, **kwargs: Any) -> None:
"""Instruct the light to turn off."""
transition: float | None = kwargs.get(ATTR_TRANSITION)
await self._nanoleaf.turn_off(None if transition is None else int(transition))
|
py | 1a43cbd561bcdc8faf0066fde4ba1e57258c1539 | from datetime import datetime, timedelta, date
import logging
import traceback
from decimal import *
import json
import calendar
import geojson
import requests
import io
from django.conf import settings
from django.core.urlresolvers import reverse, reverse_lazy
from django.core.exceptions import ValidationError
from django.db import transaction
from django.db.models import Q
from django.http import HttpResponseRedirect, HttpResponse
from django.shortcuts import redirect
from django.utils import timezone
from dateutil.tz.tz import tzoffset
from pytz import timezone as pytimezone
from ledger.payments.models import Invoice,OracleInterface,CashTransaction
from ledger.payments.utils import oracle_parser_on_invoice,update_payments
from ledger.checkout.utils import create_basket_session, create_checkout_session, place_order_submission, get_cookie_basket
from mooring.models import (MooringArea, Mooringsite, MooringsiteRate, MooringsiteBooking, Booking, BookingInvoice, MooringsiteBookingRange, Rate, MooringAreaBookingRange,MooringAreaStayHistory, MooringsiteRate, MarinaEntryRate, BookingVehicleRego, AdmissionsBooking, AdmissionsOracleCode, AdmissionsRate, AdmissionsLine, ChangePricePeriod, CancelPricePeriod, GlobalSettings, MooringAreaGroup, AdmissionsLocation, ChangeGroup, CancelGroup, BookingPeriod, BookingPeriodOption, AdmissionsBookingInvoice, BookingAnnualAdmission)
from mooring import models
from mooring.serialisers import BookingRegoSerializer, MooringsiteRateSerializer, MarinaEntryRateSerializer, RateSerializer, MooringsiteRateReadonlySerializer, AdmissionsRateSerializer
from mooring.emails import send_booking_invoice,send_booking_confirmation
from mooring import emails
from oscar.apps.order.models import Order
from ledger.payments.invoice import utils
logger = logging.getLogger('booking_checkout')
def create_booking_by_class(campground_id, campsite_class_id, start_date, end_date, num_adult=0, num_concession=0, num_child=0, num_infant=0, num_mooring=0, vessel_size=0):
"""Create a new temporary booking in the system."""
# get campground
campground = MooringArea.objects.get(pk=campground_id)
# TODO: date range check business logic
# TODO: number of people check? this is modifiable later, don't bother
# the MooringsiteBooking table runs the risk of a race condition,
# wrap all this behaviour up in a transaction
with transaction.atomic():
# fetch all the campsites and applicable rates for the campground
sites_qs = Mooringsite.objects.filter(
mooringarea=campground,
campsite_class=campsite_class_id
)
if not sites_qs.exists():
raise ValidationError('No matching campsites found.')
# get availability for sites, filter out the non-clear runs
availability = get_campsite_availability(sites_qs, start_date, end_date)
excluded_site_ids = set()
for site_id, dates in availability.items():
if not all([v[0] == 'open' for k, v in dates.items()]):
excluded_site_ids.add(site_id)
# create a list of campsites without bookings for that period
sites = [x for x in sites_qs if x.pk not in excluded_site_ids]
if not sites:
raise ValidationError('Mooringsite class unavailable for specified time period.')
# TODO: add campsite sorting logic based on business requirements
# for now, pick the first campsite in the list
site = sites[0]
# Prevent booking if max people passed
total_people = num_adult + num_concession + num_child + num_infant + num_mooring
if total_people > site.max_people:
raise ValidationError('Maximum number of people exceeded for the selected campsite')
# Prevent booking if less than min people
if total_people < site.min_people:
raise ValidationError('Number of people is less than the minimum allowed for the selected campsite')
# Create a new temporary booking with an expiry timestamp (default 20mins)
booking = Booking.objects.create(
booking_type=3,
arrival=start_date,
departure=end_date,
details={
'num_adult': num_adult,
'num_concession': num_concession,
'num_child': num_child,
'num_infant': num_infant,
'num_mooring' : num_mooring,
'vessel_size' : vessel_size
},
expiry_time=timezone.now()+timedelta(seconds=settings.BOOKING_TIMEOUT),
mooringarea=campground
)
for i in range((end_date-start_date).days):
cb = MooringsiteBooking.objects.create(
campsite=site,
booking_type=3,
date=start_date+timedelta(days=i),
booking=booking
)
# On success, return the temporary booking
return booking
def create_booking_by_site(sites_qs, start_date, end_date, num_adult=0, num_concession=0, num_child=0, num_infant=0, num_mooring=0, vessel_size=0, cost_total=0, override_price=None, override_reason=None, override_reason_info=None, send_invoice=False, overridden_by=None, customer=None, updating_booking=False, override_checks=False):
"""Create a new temporary booking in the system for a set of specific campsites."""
# the CampsiteBooking table runs the risk of a race condition,
# wrap all this behaviour up in a transaction
campsite_qs = Mooringsite.objects.filter(pk__in=sites_qs)
with transaction.atomic():
# get availability for campsite, error out if booked/closed
availability = get_campsite_availability(campsite_qs, start_date, end_date, False)
for site_id, dates in availability.items():
if not override_checks:
if updating_booking:
if not all([v[0] in ['open','tooearly'] for k, v in dates.items()]):
raise ValidationError('Mooring unavailable for specified time period.')
else:
if not all([v[0] == 'open' for k, v in dates.items()]):
raise ValidationError('Mooring unavailable for specified time period.')
else:
if not all([v[0] in ['open','tooearly','closed'] for k, v in dates.items()]):
raise ValidationError('Mooring unavailable for specified time period.')
if not override_checks:
# Prevent booking if max people passed
total_people = num_adult + num_concession + num_child + num_infant
min_people = sum([cs.min_people for cs in campsite_qs])
max_people = sum([cs.max_people for cs in campsite_qs])
if total_people > max_people:
raise ValidationError('Maximum number of people exceeded')
# Prevent booking if less than min people
#if total_people < min_people:
# raise ValidationError('Number of people is less than the minimum allowed for the selected campsite(s)')
# Create a new temporary booking with an expiry timestamp (default 20mins)
booking = Booking.objects.create(
booking_type=3,
arrival=start_date,
departure=end_date,
details={
'num_adult': num_adult,
'num_concession': num_concession,
'num_child': num_child,
'num_infant': num_infant,
'num_mooring': num_mooring,
'vessel_size': vessel_size
},
cost_total = cost_total,
override_price = Decimal(override_price) if (override_price is not None) else None,
override_reason = override_reason,
override_reason_info = override_reason_info,
send_invoice = send_invoice,
overridden_by = overridden_by,
expiry_time=timezone.now()+timedelta(seconds=settings.BOOKING_TIMEOUT),
mooringarea=campsite_qs[0].mooringarea,
customer = customer
)
for cs in campsite_qs:
for i in range((end_date-start_date).days):
cb = MooringsiteBooking.objects.create(
campsite=cs,
booking_type=3,
date=start_date+timedelta(days=i),
booking=booking
)
# On success, return the temporary booking
return booking
def ooolldcreate_booking_by_site(campsite_id, start_date, end_date, num_adult=0, num_concession=0, num_child=0, num_infant=0,num_mooring=0,vessel_size=0,cost_total=0,customer=None,updating_booking=False):
"""Create a new temporary booking in the system for a specific campsite."""
# get campsite
sites_qs = Mooringsite.objects.filter(pk=campsite_id)
campsite = sites_qs.first()
# TODO: date range check business logic
# TODO: number of people check? this is modifiable later, don't bother
# the MooringsiteBooking table runs the risk of a race condition,
# wrap all this behaviour up in a transaction
with transaction.atomic():
# get availability for campsite, error out if booked/closed
availability = get_campsite_availability(sites_qs, start_date, end_date)
for site_id, dates in availability.items():
if updating_booking:
if not all([v[0] in ['open','tooearly'] for k, v in dates.items()]):
raise ValidationError('Mooringsite unavailable for specified time period.')
else:
if not all([v[0] == 'open' for k, v in dates.items()]):
raise ValidationError('Mooringsite unavailable for specified time period.')
# Prevent booking if max people passed
total_people = num_adult + num_concession + num_child + num_infant + num_mooring
if total_people > campsite.max_people:
raise ValidationError('Maximum number of people exceeded for the selected campsite')
# Prevent booking if less than min people
if total_people < campsite.min_people:
raise ValidationError('Number of people is less than the minimum allowed for the selected campsite')
# Create a new temporary booking with an expiry timestamp (default 20mins)
booking = Booking.objects.create(
booking_type=3,
arrival=start_date,
departure=end_date,
details={
'num_adult': num_adult,
'num_concession': num_concession,
'num_child': num_child,
'num_infant': num_infant,
'num_mooring': num_mooring,
'vessel_size': vessel_size
},
cost_total= Decimal(cost_total),
expiry_time=timezone.now()+timedelta(seconds=settings.BOOKING_TIMEOUT),
mooringarea=campsite.mooringarea,
customer = customer
)
for i in range((end_date-start_date).days):
cb = MooringsiteBooking.objects.create(
campsite=campsite,
booking_type=3,
date=start_date+timedelta(days=i),
booking=booking
)
# On success, return the temporary booking
return booking
def check_mooring_available_by_time(campsite_id, start_date_time, end_date_time):
# Confirmed Bookings
start_time_count = MooringsiteBooking.objects.filter(
Q(campsite_id=campsite_id) &
( Q(from_dt__lte=start_date_time) & Q(to_dt__gte=start_date_time))
).exclude(booking_type__in=[3,4]).count()
end_time_count = MooringsiteBooking.objects.filter(
Q(campsite_id=campsite_id) &
( Q(from_dt__lte=end_date_time) & Q(to_dt__gte=end_date_time))
).exclude(booking_type__in=[3,4]).count()
# Temp bookings
start_time_temp_count = MooringsiteBooking.objects.filter(
Q(campsite_id=campsite_id) & Q(booking_type__in=[3]) & Q(booking__expiry_time__gte=datetime.today()) &
( Q(from_dt__lte=start_date_time) & Q(to_dt__gte=start_date_time))
).count()
end_time_temp_count = MooringsiteBooking.objects.filter(
Q(campsite_id=campsite_id) & Q(booking_type__in=[3]) & Q(booking__expiry_time__gte=datetime.today()) &
( Q(from_dt__lte=end_date_time) & Q(to_dt__gte=end_date_time))
).count()
if start_time_count > 0 or end_time_count > 0 or start_time_temp_count > 0 or end_time_temp_count >0:
return True
return False
def check_mooring_availablity(campsites_qs, start_date, end_date):
if start_date != end_date:
end_date =end_date-timedelta(days=1)
avail_results = get_campsite_availability(campsites_qs, start_date, end_date,None, None)
cs_array = {}
for av in avail_results:
open_periods = 0
closed_periods = 0
for date_rotate in avail_results[av]:
bp = avail_results[av][date_rotate][1]
for i in bp:
if avail_results[av][date_rotate][1][i] == 'open':
open_periods = open_periods + 1
else:
closed_periods = closed_periods + 1
cs_array[av] = { 'open_periods': open_periods, 'closed_periods': closed_periods}
return cs_array
def get_open_marinas(campsites_qs, start_date, end_date):
"""Fetch the set of Marine Parks (from a set of Mooring Sites) with spaces open over a range of visit dates."""
# short circuit: if start date is before today, return nothing
exclude_moorings = []
today = date.today()
#if start_date < today:
# return set()
campsites_qs = check_mooring_availablity(campsites_qs,start_date, end_date)
# remove from the campsite list any entries with bookings
# campsites_qs = campsites_qs.exclude(
# id__in=exclude_moorings
# mooringsitebooking__date__range=(start_date, end_date-timedelta(days=1))
# and also campgrounds where the book window is outside of the max advance range
# ).exclude(
# campground__max_advance_booking__lte=(start_date-today).days - 1
# mooringarea__max_advance_booking__lt=(start_date-today).days
# )
# get closures at campsite and campground level
# cgbr_qs = MooringAreaBookingRange.objects.filter(
# Q(campground__in=[x[0] for x in campsites_qs.distinct('mooringarea').values_list('mooringarea')]),
# Q(status=1),
# Q(range_start__lt=end_date) & (Q(range_end__gte=start_date)|Q(range_end__isnull=True))
# )
# cgbr = set([x[0] for x in cgbr_qs.values_list('campground')])
## cgbr = set([x[0] for x in cgbr_qs.values_list('campground')])
# csbr_qs = MooringsiteBookingRange.objects.filter(
# Q(campsite__in=campsites_qs),
# Q(status=1),
# Q(range_start__lt=end_date) & (Q(range_end__gte=start_date)|Q(range_end__isnull=True))
# )
# print csbr_qs
# csbr = set([x[0] for x in csbr_qs.values_list('campsite')])
# generate a campground-to-campsite-list map with closures removed
mooring_map = {}
for cs in campsites_qs:
# if cs == 5:
# pass
# else:
#mooring_map = {}
mooring_map[cs] = campsites_qs[cs]
#mooring_map.append(row)
# for cs in campsites_qs:
# if (cs.pk in csbr) or (cs.mooringarea.pk in cgbr):
# continue
# if cs.mooringarea.pk not in mooring_map:
# mooring_map[cs.mooringarea.pk] = []
# mooring_map[cs.mooringarea.pk].append(cs.pk)
return mooring_map
def generate_mooring_rate(mooringsites_qs,start_date, end_date, duration):
mooring_rate = {}
mooring_site_ids = []
search_filter = Q()
for ms in mooringsites_qs:
mooring_site_ids.append(ms.id)
search_filter |= Q(campsite_id=ms.id)
#print (mooring_site_ids)
mooring_rate_search_filter = Q()
mooring_rate_search_filter &= Q(search_filter)# & Q(date_start__lte=start_date) & Q(Q(date_end__gte=start_date) | Q(date_end=None))
#& Q(date_end__gte=end_date)
#& Q(date_end__lte=end_date)
mr_resp = MooringsiteRate.objects.filter(mooring_rate_search_filter).order_by('date_start')
#print (mr_resp)
for i in range(duration):
date_rotate_forward = start_date+timedelta(days=i)
mooring_rate[date_rotate_forward] = {}
for mrr in mr_resp:
# this is to account for None end dates..
if mrr.date_end is None:
mrr.date_end = datetime.today().date()+timedelta(days=90)
#+timedelta(days=90)
if mrr.date_start <= date_rotate_forward and mrr.date_end >= date_rotate_forward:
#print (str(mrr.id)+' '+str(mrr.date_start)+' '+str(mrr.date_end)+' '+str(mrr.campsite.id) )
#mooring_rate[date_rotate_forward] = {}
mooring_rate[date_rotate_forward][mrr.campsite_id] = mrr
#print (mooring_rate)
return mooring_rate
#for i in range(duration):
# date_rotate_forward = start_date+timedelta(days=i)
# print (date_rotate_forward)
# mooring_rate_search_filter = Q()
# mooring_rate_search_filter &= Q(search_filter) & Q(date_start__lte=date_rotate_forward) & Q(date_end__gte=date_rotate_forward)
# #print MooringsiteRate.objects.filter(campsite_id__in=[mooring_site_ids])
# #campsite_id__in=mooring_site_ids
# print (MooringsiteRate.objects.filter(mooring_rate_search_filter).query)
# mr = MooringsiteRate.objects.filter(mooring_rate_search_filter).order_by('date_start')
# #mr = MooringsiteRate.objects.filter(campsite_id__in=[1,2,3,4,5,6],date_start__lte=date_rotate_forward, date_end__gte=date_rotate_forward).order_by('date_start')
# for msr in mr:
# mooring_rate[date_rotate_forward] = {}
# mooring_rate[date_rotate_forward][msr.campsite.id] = msr
# # mooring_rate[date_rotate_forward][mr.campsite_id] = msr
# print (mr)
# print ("MOOO RATE")
# print (mooring_rate)
# if MooringsiteRate.objects.filter(campsite_id=site.pk,date_start__lte=date_rotate_forward, date_end__gte=date_rotate_forward).count() > 0:
# mooring_rate = MooringsiteRate.objects.filter(campsite_id=site.pk,date_start__lte=date_rotate_forward, date_end__gte=date_rotate_forward).order_by('-date_start')[0]
# else:
## if MooringsiteRate.objects.filter(campsite_id=site.pk,date_start__lte=date_rotate_forward, date_end=None).count() > 0:
# mooring_rate = MooringsiteRate.objects.filter(campsite_id=site.pk,date_start__lte=date_rotate_forward, date_end=None).order_by('-date_start')[0]
def get_campsite_availability(campsites_qs, start_date, end_date, ongoing_booking, request=None):
"""Fetch the availability of each mooring in a queryset over a range of visit dates."""
# fetch all of the single-day MooringsiteBooking objects within the date range for the sites
end_date =end_date+timedelta(days=1)
start_date_time = datetime.strptime(str(start_date)+str(' 00:00'), '%Y-%m-%d %H:%M')
end_date_time = datetime.strptime(str(end_date)+str(' 23:59'), '%Y-%m-%d %H:%M')
booking_id = None
booking_period_option = None
today = date.today()
nowtime = datetime.today()
mooring_date_selected = {}
if ongoing_booking:
booking_id = ongoing_booking.id
#booking_period_option = ongoing_booking.booking_period_option
booking_old_id=None
if request is not None:
#if request.get('session', None):
if request:
if 'ps_booking_old' in request.session:
booking_old_id = request.session['ps_booking_old']
bookings_qs = MooringsiteBooking.objects.filter(
campsite__in=campsites_qs,
#date__gte=start_date,
#date__lt=end_date
from_dt__gte=start_date_time,
to_dt__lt=end_date_time,
#booking__expiry_time__gte=datetime.now()
).exclude(booking__id=booking_old_id).order_by('date', 'campsite__name')
# booking__expiry_time__gte=datetime.now()
booking_qs = None
# prefill all slots as 'open'
duration = (end_date-start_date).days
#results = {site.pk: {start_date+timedelta(days=i): ['open', ] for i in range(duration)} for site in campsites_qs}
# Build Hash of open periods
mooring_rate_hash = generate_mooring_rate(campsites_qs,start_date, end_date, duration)
results = {}
# return results
for site in campsites_qs:
results[site.pk] = {}
cgbr_qs = MooringAreaBookingRange.objects.filter(
Q(campground=site.mooringarea),
Q(status=1),
Q(range_start__lt=end_date_time+timedelta(days=1)) & (Q(range_end__gte=start_date_time-timedelta(days=3))|Q(range_end__isnull=True))
)
for i in range(duration):
date_rotate_forward = start_date+timedelta(days=i)
mooring_date_selected[date_rotate_forward] = 'notselected'
mooring_rate = None
if date_rotate_forward in mooring_rate_hash:
if site.pk in mooring_rate_hash[date_rotate_forward]:
mooring_rate = mooring_rate_hash[date_rotate_forward][site.pk]
#print mooring_rate
#print ("BOOKING PERIOD")
#print (mooring_rate.booking_period.booking_period.all())
#print ("MOORING RATE")
#print (mooring_rate)
#if MooringsiteRate.objects.filter(campsite_id=site.pk,date_start__lte=date_rotate_forward, date_end__gte=date_rotate_forward).count() > 0:
# mooring_rate = MooringsiteRate.objects.filter(campsite_id=site.pk,date_start__lte=date_rotate_forward, date_end__gte=date_rotate_forward).order_by('-date_start')[0]
#else:
# if MooringsiteRate.objects.filter(campsite_id=site.pk,date_start__lte=date_rotate_forward, date_end=None).count() > 0:
# mooring_rate = MooringsiteRate.objects.filter(campsite_id=site.pk,date_start__lte=date_rotate_forward, date_end=None).order_by('-date_start')[0]
#print (mooring_rate)
#print ("GET CMA 9")
#print (datetime.utcnow())
booking_period = {}
selection_period = {}
bp_result = []
if mooring_rate:
if mooring_rate.booking_period is None:
continue
bp_result = mooring_rate.booking_period.booking_period.all()
if bp_result is None:
continue
for bp in bp_result:
booking_period[bp.pk] = 'open'
selection_period[bp.pk] = 0
if bp.start_time is None or bp.finish_time is None:
booking_period[bp.pk] = 'closed'
continue
nowtimewa = nowtime+timedelta(hours=8)
start_dt = datetime.strptime(str(date_rotate_forward)+' '+str(bp.start_time), '%Y-%m-%d %H:%M:%S')
finish_dt = datetime.strptime(str(date_rotate_forward)+' '+str(bp.finish_time), '%Y-%m-%d %H:%M:%S')
if start_dt > finish_dt:
finish_dt = finish_dt+timedelta(days=1)
if date_rotate_forward < today:
booking_period[bp.pk] = 'closed'
if today == date_rotate_forward:
if ongoing_booking:
if ongoing_booking.old_booking is None:
pass
else:
if nowtime > start_dt:
pass
#booking_period[bp.pk] = 'closed'
else:
pass
#if nowtime > start_dt:
# booking_period[bp.pk] = 'closed'
for closure in cgbr_qs:
# CLOSURE INFORMATION
if closure.range_end:
c_range_end = closure.range_end
else:
c_range_end = closure.range_start
start = max(start_date, (closure.range_start+ timedelta(hours=8)).date() -timedelta(days=2))
end = min(end_date, (c_range_end + timedelta(hours=8)).date()) if c_range_end.date() else end_date
closure_range = (end-start).days + 1
closure_start = closure.range_start+ timedelta(hours=8)
closure_finish = c_range_end+timedelta(hours=8)
# BOOKING PERIOD
if closure_start.strftime('%Y-%m-%d %H:%M:%S') >= start_dt.strftime('%Y-%m-%d %H:%M:%S'):
if closure_start.strftime('%Y-%m-%d %H:%M:%S') <= finish_dt.strftime('%Y-%m-%d %H:%M:%S'):
booking_period[bp.pk] = 'closed'
if closure_finish.strftime('%Y-%m-%d %H:%M:%S') >= start_dt.strftime('%Y-%m-%d %H:%M:%S'):
if closure_finish.strftime('%Y-%m-%d %H:%M:%S') <= finish_dt.strftime('%Y-%m-%d %H:%M:%S'):
booking_period[bp.pk] = 'closed'
if closure_start.strftime('%Y-%m-%d %H:%M:%S') <= start_dt.strftime('%Y-%m-%d %H:%M:%S') and closure_finish.strftime('%Y-%m-%d %H:%M:%S') >= finish_dt.strftime('%Y-%m-%d %H:%M:%S'):
booking_period[bp.pk] = 'closed'
results[site.pk][date_rotate_forward] = ['closed',booking_period,selection_period, bp_result]
#results[site.pk][start_date+timedelta(days=i)] = ['closed',booking_period,selection_period, bp_result]
# Determine availablity
for b in bookings_qs:
if b.booking.id == booking_old_id:
continue
if b.booking.booking_type == 4:
print ("CANCELLED BOOKING")
continue
        # Release booking availability on expired bookings
if b.booking.booking_type == 3 or b.booking.booking_type == 5:
if b.booking.expiry_time is not None:
if b.booking.expiry_time < datetime.now(tz=timezone.utc):
continue
for i in range(duration):
date_rotate_forward = start_date+timedelta(days=i)
mooring_rate = None
if date_rotate_forward in mooring_rate_hash:
if b.campsite.id in mooring_rate_hash[date_rotate_forward]:
mooring_rate = mooring_rate_hash[date_rotate_forward][b.campsite.id]
if mooring_rate:
for bp in mooring_rate.booking_period.booking_period.all():
start_dt = datetime.strptime(str(date_rotate_forward)+' '+str(bp.start_time), '%Y-%m-%d %H:%M:%S')
finish_dt = datetime.strptime(str(date_rotate_forward)+' '+str(bp.finish_time), '%Y-%m-%d %H:%M:%S')
if start_dt > finish_dt:
finish_dt = finish_dt+timedelta(days=1)
from_dt = b.from_dt + timedelta(hours=8)
to_dt = b.to_dt + timedelta(hours=8)
if from_dt.strftime('%Y-%m-%d %H:%M:%S') >= start_dt.strftime('%Y-%m-%d %H:%M:%S'):
if from_dt.strftime('%Y-%m-%d %H:%M:%S') <= finish_dt.strftime('%Y-%m-%d %H:%M:%S'):
if date_rotate_forward in results[b.campsite.id]:
if results[b.campsite.id][date_rotate_forward][1][bp.id] != 'selected':
results[b.campsite.id][date_rotate_forward][1][bp.id] = 'closed'
if b.booking.id == booking_id:
if bp.id == b.booking_period_option.id:
results[b.campsite.id][date_rotate_forward][1][bp.id] = 'selected'
results[b.campsite.id][date_rotate_forward][2][bp.id] = b.id
mooring_date_selected[date_rotate_forward] = 'selected'
pass
if to_dt.strftime('%Y-%m-%d %H:%M:%S') >= start_dt.strftime('%Y-%m-%d %H:%M:%S'):
if to_dt.strftime('%Y-%m-%d %H:%M:%S') <= finish_dt.strftime('%Y-%m-%d %H:%M:%S'):
if date_rotate_forward in results[b.campsite.id]:
if bp.id in results[b.campsite.id][date_rotate_forward][1]:
if results[b.campsite.id][date_rotate_forward][1][bp.id] != 'selected':
results[b.campsite.id][date_rotate_forward][1][bp.id] = 'closed'
if b.booking.id == booking_id:
if bp.id == b.booking_period_option.id:
results[b.campsite.id][date_rotate_forward][1][bp.id] = 'selected'
results[b.campsite.id][date_rotate_forward][2][bp.id] = b.id
mooring_date_selected[date_rotate_forward] = 'selected'
pass
if from_dt.strftime('%Y-%m-%d %H:%M:%S') <= start_dt.strftime('%Y-%m-%d %H:%M:%S') and to_dt.strftime('%Y-%m-%d %H:%M:%S') >= finish_dt.strftime('%Y-%m-%d %H:%M:%S'):
if date_rotate_forward in results[b.campsite.id]:
if bp.id in results[b.campsite.id][date_rotate_forward][1]:
if results[b.campsite.id][date_rotate_forward][1][bp.id] != 'selected':
results[b.campsite.id][date_rotate_forward][1][bp.id] = 'closed'
if b.booking.id == booking_id:
if bp.id == b.booking_period_option.id:
results[b.campsite.id][date_rotate_forward][1][bp.id] = 'selected'
results[b.campsite.id][date_rotate_forward][2][bp.id] = b.id
mooring_date_selected[date_rotate_forward] = 'selected'
pass
    # prevent other moorings from being selected for the same day, preventing mooring lockouts
for ma in results:
for ma_dt in results[ma]:
if mooring_date_selected[ma_dt] == 'selected':
for bp in results[ma][ma_dt][1]:
if results[ma][ma_dt][1][bp] == 'open':
pass
results[ma][ma_dt][1][bp] = 'perday'
mooring_map = {cg[0]: [cs.pk for cs in campsites_qs if cs.mooringarea.pk == cg[0]] for cg in campsites_qs.distinct('mooringarea').values_list('mooringarea')}
today = date.today()
print ("GLOBA 1")
# strike out days after the max_advance_booking
for site in campsites_qs:
max_advance = None
max_advance_open_time = '00:00'
try:
group = MooringAreaGroup.objects.get(moorings__in=[site.mooringarea])
except:
group = None
if group:
globalsettings = GlobalSettings.objects.filter(mooring_group__in=[group,])
for gs in globalsettings:
if gs.key == 2:
#max_advance = int(GlobalSettings.objects.get(key=2, mooring_group__in=[group,]).value)
max_advance = int(gs.value)
if gs.key == 18:
max_advance_open_time = gs.value
#max_advance = int(GlobalSettings.objects.get(key=2, mooring_group__in=[group,]).value)
#if GlobalSettings.objects.filter(key=18, mooring_group__in=[group,]).count():
# max_advance_open_time = GlobalSettings.objects.get(key=18, mooring_group__in=[group,]).value
else:
qs = GlobalSettings.objects.filter(key=2)
highest_val = 0
for q in qs:
if int(q.value) > highest_val:
highest_val = int(q.value)
max_advance = highest_val
max_advance_open_time_dt = datetime.strptime(str(today)+' '+str(max_advance_open_time), '%Y-%m-%d %H:%M')
if nowtime > max_advance_open_time_dt:
pass
else:
max_advance = max_advance - 1
stop = today + timedelta(days=max_advance)
stop_mark = min(max(stop, start_date), end_date)
#if start_date > stop:
for i in range((end_date-stop_mark).days):
if stop_mark+timedelta(days=i) > stop:
results[site.pk][stop_mark+timedelta(days=i)][0] = 'toofar'
for b in results[site.pk][stop_mark+timedelta(days=i)][1]:
results[site.pk][stop_mark+timedelta(days=i)][1][b] = 'toofar'
# Get the current stay history
stay_history = None
if campsites_qs.count() > 0:
stay_history = MooringAreaStayHistory.objects.filter(
Q(range_start__lte=start_date,range_end__gte=start_date)|# filter start date is within period
Q(range_start__lte=end_date,range_end__gte=end_date)|# filter end date is within period
Q(Q(range_start__gt=start_date,range_end__lt=end_date)&Q(range_end__gt=today)) #filter start date is before and end date after period
,mooringarea=campsites_qs.first().mooringarea
)
if stay_history:
max_days = min([x.max_days for x in stay_history])
else:
max_days = settings.PS_MAX_BOOKING_LENGTH
# strike out days after the max_stay period
for site in campsites_qs:
stay_history = MooringAreaStayHistory.objects.filter(
Q(range_start__lte=start_date,range_end__gte=start_date)|# filter start date is within period
Q(range_start__lte=end_date,range_end__gte=end_date)|# filter end date is within period
Q(Q(range_start__gt=start_date,range_end__lt=end_date)&Q(range_end__gt=today)) #filter start date is before and end date after period
,mooringarea=site.mooringarea
)
if stay_history:
max_days = min([x.max_days for x in stay_history])
else:
max_days = settings.PS_MAX_BOOKING_LENGTH
stop = start_date + timedelta(days=max_days)
stop_mark = min(max(stop, start_date), end_date)
for i in range((end_date-stop_mark).days):
date_key = stop_mark+timedelta(days=i)
if date_key in results[site.pk]:
results[site.pk][stop_mark+timedelta(days=i)][0] = 'toofar'
for b in results[site.pk][stop_mark+timedelta(days=i)][1]:
if results[site.pk][stop_mark+timedelta(days=i)][1][b] == 'open':
results[site.pk][stop_mark+timedelta(days=i)][1][b] = 'maxstay'
return results
def get_visit_rates(campsites_qs, start_date, end_date):
"""Fetch the per-day pricing for each visitor type over a range of visit dates."""
# fetch the applicable rates for the campsites
rates_qs = MooringsiteRate.objects.filter(
Q(campsite__in=campsites_qs),
Q(date_start__lt=end_date) & (Q(date_end__gte=start_date)|Q(date_end__isnull=True))
).prefetch_related('rate')
# prefill all slots
duration = (end_date-start_date).days+1
results = {
site.pk: {
start_date+timedelta(days=i): {
'mooring': '0.00',
'adult': '0.00',
'child': '0.00',
'concession': '0.00',
'infant': '0.00',
'booking_period' : []
} for i in range(duration)
} for site in campsites_qs
}
# make a record of the earliest MooringsiteRate for each site
early_rates = {}
for rate in rates_qs:
        if rate.campsite.pk not in early_rates:
            early_rates[rate.campsite.pk] = rate
        elif early_rates[rate.campsite.pk].date_start > rate.date_start:
            early_rates[rate.campsite.pk] = rate
# for the period of the visit overlapped by the rate, set the amounts
start = max(start_date, rate.date_start)
end = min(end_date, rate.date_end) if rate.date_end else end_date
for i in range((end-start).days+1):
if rate.booking_period is None:
continue
booking_period = rate.booking_period.booking_period.all()
results[rate.campsite.pk][start+timedelta(days=i)]['mooring'] = str(rate.rate.mooring)
results[rate.campsite.pk][start+timedelta(days=i)]['adult'] = str(rate.rate.adult)
results[rate.campsite.pk][start+timedelta(days=i)]['concession'] = str(rate.rate.concession)
results[rate.campsite.pk][start+timedelta(days=i)]['child'] = str(rate.rate.child)
results[rate.campsite.pk][start+timedelta(days=i)]['infant'] = str(rate.rate.infant)
for b in booking_period:
if b.caption is None:
b.caption = ''
booking_period_row = {'id':b.id, 'period_name' : b.period_name, 'small_price': format(b.small_price,'.2f'), 'medium_price': format(b.medium_price,'.2f'), 'large_price' : format(b.large_price,'.2f'), 'start_time' : b.start_time, 'finish_time' : b.finish_time,'all_day' : b.all_day, 'caption': b.caption, 'created' : b.created }
# booking_period_row = {}
# booking_period_row['id'] = b.id
# booking_period_row['period_name'] = b.period_name
# , 'period_name' : b.period_name, 'small_price': str(b.small_price), 'medium_price': str(b.medium_price), 'large_price' : str(b.large_price), 'start_time' : str(b.start_time), 'finish_time' : str(b.finish_time),'all_day' : str(b.all_day), 'created' : str(b.created) )
results[rate.campsite.pk][start+timedelta(days=i)]['booking_period'].append(booking_period_row)
# complain if there's a Mooringsite without a MooringsiteRate
if len(early_rates) < rates_qs.count():
print('Missing Mooring Site Rate coverage!')
# for ease of testing against the old datasets, if the visit dates are before the first
# MooringsiteRate date, use that MooringsiteRate as the pricing model.
for site_pk, rate in early_rates.items():
if start_date < rate.date_start:
start = start_date
end = rate.date_start
for i in range((end-start).days):
results[site_pk][start+timedelta(days=i)]['mooring'] = str(rate.rate.mooring)
results[site_pk][start+timedelta(days=i)]['adult'] = str(rate.rate.adult)
results[site_pk][start+timedelta(days=i)]['concession'] = str(rate.rate.concession)
results[site_pk][start+timedelta(days=i)]['child'] = str(rate.rate.child)
results[site_pk][start+timedelta(days=i)]['infant'] = str(rate.rate.infant)
if rate.booking_period is None:
continue
for b in rate.booking_period.booking_period.all():
booking_period_row = {'id':b.id, 'period_name' : b.period_name, 'small_price': format(b.small_price,'.2f'), 'medium_price': format(b.medium_price,'.2f'), 'large_price' : format(b.large_price,'.2f'), 'start_time' : b.start_time, 'finish_time' : b.finish_time,'all_day' : b.all_day, 'created' : b.created }
results[site_pk][start+timedelta(days=i)]['booking_period'].append(booking_period_row)
return results
def get_available_campsitetypes(campground_id,start_date,end_date,_list=True):
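    """Return the campsite classes of a mooring area that have at least one site fully open
    between start_date and end_date: a list of class ids when _list is True, otherwise a
    dict mapping each class to its available Mooringsite objects."""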
try:
cg = MooringArea.objects.get(id=campground_id)
if _list:
available_campsiteclasses = []
else:
available_campsiteclasses = {}
for _class in cg.campsite_classes:
sites_qs = Mooringsite.objects.all()
# sites_qs = Mooringsite.objects.filter(
# campground=campground_id,
# mooringsite_class=_class
# )
if sites_qs.exists():
# get availability for sites, filter out the non-clear runs
availability = get_campsite_availability(sites_qs, start_date, end_date)
excluded_site_ids = set()
for site_id, dates in availability.items():
if not all([v[0] == 'open' for k, v in dates.items()]):
excluded_site_ids.add(site_id)
# create a list of campsites without bookings for that period
sites = [x for x in sites_qs if x.pk not in excluded_site_ids]
if sites:
if not _list:
available_campsiteclasses[_class] = sites
else:
available_campsiteclasses.append(_class)
return available_campsiteclasses
except MooringArea.DoesNotExist:
        raise Exception('The campsite you are searching for does not exist')
except:
raise
def get_available_campsites_list(campsite_qs,request, start_date, end_date):
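    """Return serialised Mooringsite data for every campsite in campsite_qs that is neither
    booked nor closed at any point in the requested period."""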
from mooring.serialisers import MooringsiteSerialiser
campsites = get_campsite_availability(campsite_qs, start_date, end_date)
available = []
for camp in campsites:
av = [item for sublist in campsites[camp].values() for item in sublist]
if ('booked' not in av):
if ('closed' not in av):
available.append(MooringsiteSerialiser(Mooringsite.objects.filter(id = camp),many=True,context={'request':request}).data[0])
return available
def get_available_campsites_list_booking(campsite_qs,request, start_date, end_date,booking):
'''
Used to get the available campsites in the selected period
and the ones currently attached to a booking
'''
from mooring.serialisers import MooringsiteSerialiser
campsites = get_campsite_availability(campsite_qs, start_date, end_date)
available = []
for camp in campsites:
av = [item for sublist in campsites[camp].values() for item in sublist]
if ('booked' not in av or camp in booking.campsite_id_list):
if ('closed' not in av):
available.append(MooringsiteSerialiser(Mooringsite.objects.filter(id = camp),many=True,context={'request':request}).data[0])
#complete = [MooringsiteSerialiser(Mooringsite.objects.filter(id = camp),many=True,context={'request':request}).data[0]]
return available
def get_campsite_current_rate(request,campsite_id,start_date,end_date):
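    """Return, for each date in the range, the serialised rate taken from the most recent
    MooringsiteRate that started on or before that date."""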
res = []
if start_date and end_date:
start_date = datetime.strptime(start_date,"%Y-%m-%d").date()
end_date = datetime.strptime(end_date,"%Y-%m-%d").date()
for single_date in daterange(start_date, end_date):
price_history = MooringsiteRate.objects.filter(campsite=campsite_id,date_start__lte=single_date).order_by('-date_start')
if price_history:
rate = RateSerializer(price_history[0].rate,context={'request':request}).data
rate['campsite'] = campsite_id
res.append({
"date" : single_date.strftime("%Y-%m-%d"),
"rate" : rate
})
return res
def get_park_entry_rate(request,start_date):
res = []
if start_date:
start_date = datetime.strptime(start_date,"%Y-%m-%d").date()
price_history = MarinaEntryRate.objects.filter(period_start__lte = start_date).order_by('-period_start')
if price_history:
serializer = MarinaEntryRateSerializer(price_history,many=True,context={'request':request})
res = serializer.data[0]
return res
def override_lineitems(override_price, override_reason, total_price, oracle_code, override_reason_info=""):
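    """Build a single negative (credit) ledger line recording a price override and its reason;
    the line amount is the negated override_price."""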
invoice_line = []
if oracle_code:
#if override_reason:
        discount = -Decimal(override_price)  # negative amount so the override is applied as a credit line
invoice_line.append({"ledger_description": '{} - {}'.format(override_reason.text, override_reason_info), "quantity": 1, 'price_incl_tax': discount, 'oracle_code': oracle_code, 'line_status': 1})
return invoice_line
def nononline_booking_lineitems(oracle_code, request):
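    """Return a 'Phone Booking Fee' ledger line for non-online bookings, using the fee amount
    configured in GlobalSettings (key 0) for the request user's mooring group."""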
invoice_line = []
if oracle_code:
group = MooringAreaGroup.objects.filter(members__in=[request.user])
value = GlobalSettings.objects.get(mooring_group=group, key=0).value
if Decimal(value) > 0:
invoice_line.append({'ledger_description': 'Phone Booking Fee', 'quantity': 1, 'price_incl_tax': Decimal(value), 'oracle_code': oracle_code, 'line_status': 1})
# invoice_line.append({'ledger_description': 'Phone Booking Fee', 'quantity': 1, 'price_incl_tax': Decimal(value), 'oracle_code': oracle_code})
return invoice_line
def admission_lineitems(lines):
invoice_lines = []
if lines:
for line in lines:
if line['guests'] > 0:
invoice_lines.append({'ledger_description': 'Admissions {} - {} ({} guests)'.format(line['from'], line['to'], line['guests']), "quantity": 1, 'price_incl_tax': line['admissionFee'], "oracle_code": line['oracle_code'], 'line_status': 1})
# invoice_lines.append({'ledger_description': 'Admissions {} - {} ({} guests)'.format(line['from'], line['to'], line['guests']), "quantity": 1, 'price_incl_tax': line['admissionFee'], "oracle_code": line['oracle_code']})
return invoice_lines
def calculate_price_booking_cancellation(booking, overide_cancel_fees=False):
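    """Work out the cancellation fee and refund ledger lines for every MooringsiteBooking attached
    to a booking, based on the cancellation policy (CancelGroup/CancelPricePeriod) matching the
    number of days until arrival. When overide_cancel_fees is True only full refund lines are produced."""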
current_date_time = datetime.strptime(datetime.now().strftime('%Y-%m-%d %H:%M:%S'), '%Y-%m-%d %H:%M:%S')
nowtime = datetime.today()
nowtimec = datetime.strptime(nowtime.strftime('%Y-%m-%d'),'%Y-%m-%d')
mg = MooringAreaGroup.objects.all()
booking = MooringsiteBooking.objects.filter(booking=booking)
cancellation_fees = []
adjustment_fee = Decimal('0.00')
#{'additional_fees': 'true', 'description': 'Booking Change Fee','amount': Decimal('0.00')}
for ob in booking:
changed = True
#for bc in booking_changes:
# if bc.campsite == ob.campsite and ob.from_dt == bc.from_dt and ob.to_dt == bc.to_dt and ob.booking_period_option == bc.booking_period_option:
# changed = False
from_dt = datetime.strptime(ob.from_dt.strftime('%Y-%m-%d'),'%Y-%m-%d')
daystillbooking = (from_dt-nowtimec).days
cancel_policy = None
cancel_fee_amount = '0.00'
#change_price_period = CancelPricePeriod.objects.filter(id=ob.booking_period_option.cancel_group_id).order_by('days')
cancel_group = CancelGroup.objects.get(id=ob.booking_period_option.cancel_group_id)
cancel_price_period = cancel_group.cancel_period.all().order_by('days')
mooring_group =None
for i in mg:
if i.moorings.count() > 0:
mooring_group = i.moorings.all()[0].id
for cpp in cancel_price_period:
if daystillbooking < 0:
daystillbooking = 0
if daystillbooking >= cpp.days:
cancel_policy =cpp
if cancel_policy:
if cancel_policy.calulation_type == 0:
# Percentage
cancel_fee_amount = float(ob.amount) * (cancel_policy.percentage / 100)
elif cancel_policy.calulation_type == 1:
cancel_fee_amount = cancel_policy.amount
# Fixed Pricing
description = 'Mooring {} ({} - {})'.format(ob.campsite.mooringarea.name,ob.from_dt.astimezone(pytimezone('Australia/Perth')).strftime('%d/%m/%Y %H:%M %p'),ob.to_dt.astimezone(pytimezone('Australia/Perth')).strftime('%d/%m/%Y %H:%M %p'))
if overide_cancel_fees is True:
cancellation_fees.append({'additional_fees': 'true', 'description': 'Refund - '+description,'amount': str(ob.amount - ob.amount - ob.amount), 'mooring_group': mooring_group, 'oracle_code': str(ob.campsite.mooringarea.oracle_code)})
else:
if datetime.strptime(ob.from_dt.astimezone(pytimezone('Australia/Perth')).strftime('%Y-%m-%d %H:%M:%S'),'%Y-%m-%d %H:%M:%S') < current_date_time:
#cancellation_fees.append({'additional_fees': 'true', 'description': 'Past Booking - '+description,'amount': Decimal('0.00'), 'mooring_group': mooring_group})
cancellation_fees.append({'additional_fees': 'true', 'description': 'Past Booking - '+description,'amount': Decimal('0.00'), 'mooring_group': mooring_group, 'oracle_code': str(ob.campsite.mooringarea.oracle_code)})
else:
#change_fees['amount'] = str(refund_amount)
cancellation_fees.append({'additional_fees': 'true', 'description': 'Cancel Fee - '+description,'amount': cancel_fee_amount, 'mooring_group': mooring_group, 'oracle_code': str(ob.campsite.mooringarea.oracle_code)})
cancellation_fees.append({'additional_fees': 'true', 'description': 'Refund - '+description,'amount': str(ob.amount - ob.amount - ob.amount), 'mooring_group': mooring_group, 'oracle_code': str(ob.campsite.mooringarea.oracle_code)})
#cancellation_fees.append({'additional_fees': 'true', 'description': 'Cancel Fee - '+description,'amount': cancel_fee_amount, 'mooring_group': mooring_group})
#cancellation_fees.append({'additional_fees': 'true', 'description': 'Refund - '+description,'amount': str(ob.amount - ob.amount - ob.amount), 'mooring_group': mooring_group})
else:
print ("NO CANCELATION POLICY")
#else:
# adjustment_fee = ob.amount + adjustment_fee
#change_fees.append({'additional_fees': 'true', 'description': 'Mooring Adjustment Credit' ,'amount': str(adjustment_fee - adjustment_fee - adjustment_fee)})
return cancellation_fees
def calculate_price_booking_change(old_booking, new_booking,overide_change_fees=False):
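    """Work out the change-fee, refund and adjustment ledger lines when a booking is amended:
    unchanged MooringsiteBookings become adjustment credits, while changed ones attract a change
    fee (ChangeGroup/ChangePricePeriod) plus a refund line."""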
nowtime = datetime.today()
nowtimec = datetime.strptime(nowtime.strftime('%Y-%m-%d'),'%Y-%m-%d')
old_booking_mooring = MooringsiteBooking.objects.filter(booking=old_booking)
booking_changes = MooringsiteBooking.objects.filter(booking=new_booking)
change_fees = []
adjustment_fee = Decimal('0.00')
mg = MooringAreaGroup.objects.all()
#{'additional_fees': 'true', 'description': 'Booking Change Fee','amount': Decimal('0.00')}
for ob in old_booking_mooring:
changed = True
for bc in booking_changes:
if bc.campsite == ob.campsite and ob.from_dt == bc.from_dt and ob.to_dt == bc.to_dt and ob.booking_period_option == bc.booking_period_option:
changed = False
from_dt = datetime.strptime(ob.from_dt.strftime('%Y-%m-%d'),'%Y-%m-%d')
daystillbooking = (from_dt-nowtimec).days
refund_policy = None
for i in mg:
if i.moorings.count() > 0:
mooring_group = i.moorings.all()[0].id
if changed is True:
change_fee_amount = '0.00'
# change_price_period = ChangePricePeriod.objects.filter(id=ob.booking_period_option.change_group_id).order_by('-days')
change_group = ChangeGroup.objects.get(id=ob.booking_period_option.change_group_id)
change_price_period = change_group.change_period.all().order_by('days')
for cpp in change_price_period:
if daystillbooking < 0:
daystillbooking = 0
# if cpp.days >= daystillbooking:
if daystillbooking >= cpp.days:
refund_policy =cpp
if refund_policy:
if refund_policy.calulation_type == 0:
# Percentage
change_fee_amount = float(ob.amount) * (refund_policy.percentage / 100)
elif refund_policy.calulation_type == 1:
change_fee_amount = refund_policy.amount
# Fixed Pricing
description = 'Mooring {} ({} - {})'.format(ob.campsite.mooringarea.name,ob.from_dt.astimezone(pytimezone('Australia/Perth')).strftime('%d/%m/%Y %H:%M %p'),ob.to_dt.astimezone(pytimezone('Australia/Perth')).strftime('%d/%m/%Y %H:%M %p'))
if overide_change_fees is True:
change_fees.append({'additional_fees': 'true', 'description': 'Refund - '+description,'amount': str(format(ob.amount - ob.amount - ob.amount, '.2f')), 'oracle_code': str(ob.campsite.mooringarea.oracle_code), 'mooring_group': mooring_group, 'line_status': 3})
else:
#change_fees['amount'] = str(refund_amount)
#change_fees.append({'additional_fees': 'true', 'description': 'Change Fee - '+description,'amount': float(change_fee_amount), 'oracle_code': str(ob.campsite.mooringarea.oracle_code), 'mooring_group': mooring_group})
#change_fees.append({'additional_fees': 'true', 'description': 'Refund - '+description,'amount': str(ob.amount - ob.amount - ob.amount), 'oracle_code': str(ob.campsite.mooringarea.oracle_code), 'mooring_group': mooring_group})
change_fees.append({'additional_fees': 'true', 'description': 'Change Fee - '+description,'amount': str(format(change_fee_amount, '.2f')), 'oracle_code': str(ob.campsite.mooringarea.oracle_code), 'mooring_group': mooring_group, 'line_status': 2})
change_fees.append({'additional_fees': 'true', 'description': 'Refund - '+description,'amount': str(format(ob.amount - ob.amount - ob.amount, '.2f')), 'oracle_code': str(ob.campsite.mooringarea.oracle_code), 'mooring_group': mooring_group, 'line_status': 3})
else:
print ("NO REFUND POLICY")
else:
#description = 'Mooring {} ({} - {})'.format(ob.campsite.mooringarea.name,ob.from_dt.astimezone(pytimezone('Australia/Perth')).strftime('%d/%m/%Y %H:%M %p'),ob.to_dt.astimezone(pytimezone('Australia/Perth')).strftime('%d/%m/%Y %H:%M %p'))
adjustment_fee = float('0.00')
adjustment_fee = float(ob.amount) + adjustment_fee
description = 'Mooring {} ({} - {})'.format(ob.campsite.mooringarea.name,ob.from_dt.astimezone(pytimezone('Australia/Perth')).strftime('%d/%m/%Y %H:%M %p'),ob.to_dt.astimezone(pytimezone('Australia/Perth')).strftime('%d/%m/%Y %H:%M %p'))
# change_fees.append({'additional_fees': 'true', 'description': 'Adjustment - '+description ,'amount': str(adjustment_fee - adjustment_fee - adjustment_fee), 'oracle_code': str(ob.campsite.mooringarea.oracle_code), 'mooring_group': mooring_group})
change_fees.append({'additional_fees': 'true', 'description': 'Adjustment - '+description ,'amount': str(format(adjustment_fee - adjustment_fee - adjustment_fee, '.2f')), 'oracle_code': str(ob.campsite.mooringarea.oracle_code), 'mooring_group': mooring_group, 'line_status': 3})
return change_fees
def calculate_price_admissions_cancel(adBooking, change_fees, overide_cancel_fees=False):
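    """Append refund ledger lines for each future (or, when overriding, every) AdmissionsLine of
    an admissions booking to the supplied change_fees list."""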
ad_lines = AdmissionsLine.objects.filter(admissionsBooking=adBooking)
for line in ad_lines:
if line.arrivalDate > date.today() or overide_cancel_fees is True:
description = "Admission ({}) for {} guest(s)".format(datetime.strftime(line.arrivalDate, '%d/%m/%Y'), adBooking.total_admissions)
oracle_code = AdmissionsOracleCode.objects.filter(mooring_group=line.location.mooring_group)[0]
change_fees.append({'additional_fees': 'true', 'description': 'Refund - ' + description,'amount': str(line.cost - line.cost - line.cost), 'oracle_code': str(oracle_code.oracle_code), 'mooring_group': line.location.mooring_group.id, 'line_status': 3})
return change_fees
def calculate_price_admissions_change(adBooking, change_fees):
ad_lines = AdmissionsLine.objects.filter(admissionsBooking=adBooking)
for line in ad_lines:
description = "Admission ({}) for {} guest(s)".format(datetime.strftime(line.arrivalDate, '%d/%m/%Y'), adBooking.total_admissions)
oracle_code = AdmissionsOracleCode.objects.filter(mooring_group=line.location.mooring_group)[0]
# Fees
change_fees.append({'additional_fees': 'true', 'description': 'Adjustment - ' + description,'amount': str(line.cost - line.cost - line.cost), 'oracle_code': str(oracle_code.oracle_code), 'mooring_group': line.location.mooring_group.id, 'line_status': 3 })
return change_fees
def price_or_lineitems(request,booking,campsite_list,lines=True,old_booking=None):
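    """Build the invoice ledger lines for each MooringsiteBooking attached to a booking, applying
    any per-line price overrides and flagging lines unchanged from booking.old_booking with
    line_status 2."""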
total_price = Decimal(0)
booking_mooring = MooringsiteBooking.objects.filter(booking=booking)
booking_mooring_old = []
if booking.old_booking:
booking_mooring_old = MooringsiteBooking.objects.filter(booking=booking.old_booking)
invoice_lines = []
if lines:
for bm in booking_mooring:
line_status = 1
amount = bm.amount
if str(bm.id) in booking.override_lines:
amount = Decimal(booking.override_lines[str(bm.id)])
for ob in booking_mooring_old:
if bm.campsite == ob.campsite and ob.from_dt == bm.from_dt and ob.to_dt == bm.to_dt and ob.booking_period_option == bm.booking_period_option:
line_status = 2
invoice_lines.append({'ledger_description':'Mooring {} ({} - {})'.format(bm.campsite.mooringarea.name,bm.from_dt.astimezone(pytimezone('Australia/Perth')).strftime('%d/%m/%Y %H:%M %p'),bm.to_dt.astimezone(pytimezone('Australia/Perth')).strftime('%d/%m/%Y %H:%M %p')),"quantity":1,"price_incl_tax":amount,"oracle_code":bm.campsite.mooringarea.oracle_code, 'line_status': line_status})
# invoice_lines.append({'ledger_description':'Mooring {} ({} - {})'.format(bm.campsite.mooringarea.name,bm.from_dt.astimezone(pytimezone('Australia/Perth')).strftime('%d/%m/%Y %H:%M %p'),bm.to_dt.astimezone(pytimezone('Australia/Perth')).strftime('%d/%m/%Y %H:%M %p')),"quantity":1,"price_incl_tax":bm.amount,"oracle_code":bm.campsite.mooringarea.oracle_code})
return invoice_lines
else:
return total_price
def price_or_lineitems_extras(request,booking,change_fees,invoice_lines=[]):
total_price = Decimal(0)
booking_mooring = MooringsiteBooking.objects.filter(booking=booking)
for cf in change_fees:
# invoice_lines.append({'ledger_description':cf['description'],"quantity":1,"price_incl_tax":cf['amount'],"oracle_code":cf['oracle_code']})
invoice_lines.append({'ledger_description':cf['description'],"quantity":1,"price_incl_tax":cf['amount'],"oracle_code":cf['oracle_code'], 'line_status': cf['line_status']})
return invoice_lines
def old_price_or_lineitems(request,booking,campsite_list,lines=True,old_booking=None):
total_price = Decimal(0)
rate_list = {}
invoice_lines = []
if not lines and not old_booking:
raise Exception('An old booking is required if lines is set to false')
# Create line items for customers
daily_rates = [get_campsite_current_rate(request,c,booking.arrival.strftime('%Y-%m-%d'),booking.departure.strftime('%Y-%m-%d')) for c in campsite_list]
if not daily_rates:
raise Exception('There was an error while trying to get the daily rates.')
for rates in daily_rates:
for c in rates:
if c['rate']['campsite'] not in rate_list.keys():
rate_list[c['rate']['campsite']] = {c['rate']['id']:{'start':c['date'],'end':c['date'],'mooring': c['rate']['mooring'] ,'adult':c['rate']['adult'],'concession':c['rate']['concession'],'child':c['rate']['child'],'infant':c['rate']['infant']}}
else:
if c['rate']['id'] not in rate_list[c['rate']['campsite']].keys():
rate_list[c['rate']['campsite']] = {c['rate']['id']:{'start':c['date'],'end':c['date'],'mooring': c['rate']['mooring'], 'adult':c['rate']['adult'],'concession':c['rate']['concession'],'child':c['rate']['child'],'infant':c['rate']['infant']}}
else:
rate_list[c['rate']['campsite']][c['rate']['id']]['end'] = c['date']
# Get Guest Details
#guests = {}
#for k,v in booking.details.items():
# if 'num_' in k:
# guests[k.split('num_')[1]] = v
    ##### Above is for people quantity (moorings are not based on people, but on vessels)
    # guests is used as the quantity of items for the checkout basket.
guests = {}
guests['mooring'] = 1
for k,v in guests.items():
if int(v) > 0:
for c,p in rate_list.items():
for i,r in p.items():
price = Decimal(0)
end = datetime.strptime(r['end'],"%Y-%m-%d").date()
start = datetime.strptime(r['start'],"%Y-%m-%d").date()
num_days = int ((end - start).days) + 1
campsite = Mooringsite.objects.get(id=c)
if lines:
price = str((num_days * Decimal(r[k])))
#if not booking.mooringarea.oracle_code:
# raise Exception('The mooringarea selected does not have an Oracle code attached to it.')
end_date = end + timedelta(days=1)
                        invoice_lines.append({'ledger_description':'Mooring fee {} ({} - {})'.format(k,start.strftime('%d-%m-%Y'),end_date.strftime('%d-%m-%Y')),"quantity":v,"price_incl_tax":price,"oracle_code":booking.mooringarea.oracle_code})
else:
price = (num_days * Decimal(r[k])) * v
total_price += price
# Create line items for vehicles
if lines:
vehicles = booking.regos.all()
else:
vehicles = old_booking.regos.all()
if vehicles:
if booking.mooringarea.park.entry_fee_required:
# Update the booking vehicle regos with the park entry requirement
vehicles.update(park_entry_fee=True)
if not booking.mooringarea.park.oracle_code:
raise Exception('A marine park entry Oracle code has not been set for the park that the mooringarea belongs to.')
park_entry_rate = get_park_entry_rate(request,booking.arrival.strftime('%Y-%m-%d'))
vehicle_dict = {
'vessel' : vehicles.filter(entry_fee=True, type='vessel'),
#'vehicle': vehicles.filter(entry_fee=True, type='vehicle'),
'motorbike': vehicles.filter(entry_fee=True, type='motorbike'),
'concession': vehicles.filter(entry_fee=True, type='concession')
}
for k,v in vehicle_dict.items():
if v.count() > 0:
if lines:
price = park_entry_rate[k]
regos = ', '.join([x[0] for x in v.values_list('rego')])
invoice_lines.append({
'ledger_description': 'Mooring fee - {}'.format(k),
'quantity': v.count(),
'price_incl_tax': price,
'oracle_code': booking.mooringarea.park.oracle_code
})
else:
price = Decimal(park_entry_rate[k]) * v.count()
total_price += price
if lines:
return invoice_lines
else:
return total_price
def get_admissions_entry_rate(request,start_date, location):
res = []
if start_date:
start_date = datetime.strptime(start_date,"%Y-%m-%d").date()
group = location.mooring_group
price_history = AdmissionsRate.objects.filter(mooring_group__in=[group,], period_start__lte = start_date).order_by('-period_start')
if price_history:
serializer = AdmissionsRateSerializer(price_history,many=True,context={'request':request})
res = serializer.data[0]
return res
def admissions_price_or_lineitems(request, admissionsBooking,lines=True):
total_price = Decimal(0)
rate_list = {}
invoice_lines = []
line = lines
daily_rates = []
# Create line items for customers
admissionsLines = AdmissionsLine.objects.filter(admissionsBooking=admissionsBooking)
for adLine in admissionsLines:
rate = get_admissions_entry_rate(request,adLine.arrivalDate.strftime('%Y-%m-%d'), adLine.location)
daily_rate = {'date' : adLine.arrivalDate.strftime('%d/%m/%Y'), 'rate' : rate}
daily_rates.append(daily_rate)
oracle_codes = AdmissionsOracleCode.objects.filter(mooring_group__in=[adLine.location.mooring_group,])
if not oracle_codes.count() > 0:
if request.user.is_staff:
raise Exception('Admissions Oracle Code missing, please set up in administration tool.')
else:
                raise Exception('Please alert {} of the following error message:\nAdmissions Oracle Code missing.'.format(adLine.location.mooring_group))
if not daily_rates or daily_rates == []:
raise Exception('There was an error while trying to get the daily rates.')
family = 0
adults = admissionsBooking.noOfAdults
children = admissionsBooking.noOfChildren
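    # Bundle guests into family tickets where possible: each family ticket covers two adults
    # and two children; any remainder is charged at the individual adult/child rates.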
if adults > 1 and children > 1:
if adults == children:
if adults % 2 == 0:
family = adults//2
adults = 0
children = 0
else:
adults -= 1
family = adults//2
adults = 1
children = 1
elif adults > children: #Adults greater - tickets based on children
if children % 2 == 0:
family = children//2
adults -= children
children = 0
else:
children -= 1
family = children//2
adults -= children
children = 1
else: #Children greater - tickets based on adults
if adults % 2 == 0:
family = adults//2
children -= adults
adults = 0
else:
adults -= 1
family = adults//2
children -= adults
adults = 1
people = {'Adults': adults,'Concessions': admissionsBooking.noOfConcessions,'Children': children,'Infants': admissionsBooking.noOfInfants, 'Family': family}
for adLine in admissionsLines:
for group, amount in people.items():
if line:
if (amount > 0):
if group == 'Adults':
gr = 'adult'
elif group == 'Children':
gr = group
elif group == 'Infants':
gr = 'infant'
elif group == 'Family':
gr = 'family'
if adLine.overnightStay:
costfield = gr.lower() + "_overnight_cost"
overnightStay = "Overnight Included"
else:
costfield = gr.lower() + "_cost"
overnightStay = "Day Visit Only"
daily_rate = next(item for item in daily_rates if item['date'] == adLine.arrivalDate.strftime('%d/%m/%Y'))['rate']
price = daily_rate.get(costfield)
oracle_codes = AdmissionsOracleCode.objects.filter(mooring_group=adLine.location.mooring_group)
if oracle_codes.count() > 0:
oracle_code = oracle_codes[0].oracle_code
invoice_lines.append({'ledger_description':'Admission fee on {} ({}) {}'.format(adLine.arrivalDate, group, overnightStay),"quantity":amount,"price_incl_tax":price, "oracle_code":oracle_code, 'line_status': 1})
else:
daily_rate = daily_rates[adLine.arrivalDate.strftime('%d/%m/%Y')]
price = Decimal(daily_rate)
                total_price += price
if line:
return invoice_lines
else:
return total_price
def check_date_diff(old_booking,new_booking):
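    """Classify how the booking dates changed: 1 = additional days, 2 = reduced days,
    3 = different days, 4 = same days."""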
if old_booking.arrival == new_booking.arrival and old_booking.departure == new_booking.departure:
return 4 # same days
elif old_booking.arrival == new_booking.arrival:
old_booking_days = int((old_booking.departure - old_booking.arrival).days)
new_days = int((new_booking.departure - new_booking.arrival).days)
if new_days > old_booking_days:
return 1 #additional days
else:
return 2 #reduced days
elif old_booking.departure == new_booking.departure:
old_booking_days = int((old_booking.departure - old_booking.arrival).days)
new_days = int((new_booking.departure - new_booking.arrival).days)
if new_days > old_booking_days:
return 1 #additional days
else:
return 2 #reduced days
else:
return 3 # different days
def get_diff_days(old_booking,new_booking,additional=True):
if additional:
return int((new_booking.departure - old_booking.departure).days)
return int((old_booking.departure - new_booking.departure).days)
def create_temp_bookingupdate(request,arrival,departure,booking_details,old_booking,total_price):
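    """Create a temporary replacement booking while updating an existing one: moves campsites and
    vehicle regos across, raises a new invoice via checkout, transfers funds from the old invoices
    into it and re-attaches the new invoice to the old booking."""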
    # delete all the campsites in the old booking so as to transfer them to the new booking
old_booking.campsites.all().delete()
booking = create_booking_by_site(booking_details['campsites'],
start_date = arrival,
end_date = departure,
num_adult = booking_details['num_adult'],
num_concession= booking_details['num_concession'],
num_child= booking_details['num_child'],
num_infant= booking_details['num_infant'],
num_mooring = booking_details['num_mooring'],
cost_total = total_price,
customer = old_booking.customer,
override_price=old_booking.override_price,
updating_booking = True,
override_checks=True
)
# Move all the vehicles to the new booking
for r in old_booking.regos.all():
r.booking = booking
r.save()
lines = price_or_lineitems(request,booking,booking.campsite_id_list)
booking_arrival = booking.arrival.strftime('%d-%m-%Y')
booking_departure = booking.departure.strftime('%d-%m-%Y')
reservation = u'Reservation for {} confirmation PS{}'.format(
u'{} {}'.format(booking.customer.first_name, booking.customer.last_name), booking.id)
# Proceed to generate invoice
checkout_response = checkout(request,booking,lines,invoice_text=reservation,internal=True)
# FIXME: replace with session check
invoice = None
if 'invoice=' in checkout_response.url:
invoice = checkout_response.url.split('invoice=', 1)[1]
else:
for h in reversed(checkout_response.history):
if 'invoice=' in h.url:
invoice = h.url.split('invoice=', 1)[1]
break
# create the new invoice
new_invoice = internal_create_booking_invoice(booking, invoice)
# Check if the booking is a legacy booking and doesn't have an invoice
if old_booking.legacy_id and old_booking.invoices.count() < 1:
        # Create a cash transaction in order to fix the outstanding invoice payment
CashTransaction.objects.create(
invoice = Invoice.objects.get(reference=new_invoice.invoice_reference),
amount = old_booking.cost_total,
type = 'move_in',
source = 'cash',
details = 'Transfer of funds from migrated booking',
movement_reference='Migrated Booking Funds'
)
# Update payment details for the new invoice
update_payments(new_invoice.invoice_reference)
# Attach new invoices to old booking
for i in old_booking.invoices.all():
inv = Invoice.objects.get(reference=i.invoice_reference)
inv.voided = True
#transfer to the new invoice
inv.move_funds(inv.transferable_amount,Invoice.objects.get(reference=new_invoice.invoice_reference),'Transfer of funds from {}'.format(inv.reference))
inv.save()
# Change the booking for the selected invoice
new_invoice.booking = old_booking
new_invoice.save()
return booking
def get_annual_admissions_pricing_info(annual_booking_period_id,vessel_size):
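    """Look up annual admission pricing for a vessel size within an annual booking period group,
    returning the group, the currently active period option and the matching vessel-category
    price; 'response' stays 'error' when the booking period group does not exist."""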
nowdt = datetime.now()
price = '0.00'
annual_admissions = {'response': 'error', 'abpg': {}, 'abpo': {}, 'abpovc': {}}
if models.AnnualBookingPeriodGroup.objects.filter(id=int(annual_booking_period_id)).count() > 0:
abpg = models.AnnualBookingPeriodGroup.objects.get(id=int(annual_booking_period_id))
vsc = models.VesselSizeCategory.objects.filter(start_size__lte=Decimal(vessel_size),end_size__gte=Decimal(vessel_size))
abpo= models.AnnualBookingPeriodOption.objects.filter(start_time__lte=nowdt,finish_time__gte=nowdt,annual_booking_period_group=abpg)
if abpo.count() > 0 and vsc.count() > 0:
abpovc = models.AnnualBookingPeriodOptionVesselCategoryPrice.objects.filter(annual_booking_period_option=abpo[0],vessel_category=vsc[0])
price = abpovc[0].price
annual_admissions['abpg'] = abpg
if abpo.count() > 0:
annual_admissions['abpo'] = abpo[0]
if abpovc.count() > 0:
annual_admissions['abpovc'] = abpovc[0]
annual_admissions['response'] = 'success'
return annual_admissions
def iiiicreate_temp_bookingupdate(request,arrival,departure,booking_details,old_booking,total_price):
    # delete all the campsites in the old booking so as to transfer them to the new booking
old_booking.campsites.all().delete()
booking = create_booking_by_site(booking_details['campsites'][0],
start_date = arrival,
end_date = departure,
num_adult = booking_details['num_adult'],
num_concession= booking_details['num_concession'],
num_child= booking_details['num_child'],
num_infant= booking_details['num_infant'],
num_mooring = booking_details['num_mooring'],
cost_total = total_price,
customer = old_booking.customer,
updating_booking = True
)
# Move all the vehicles to the new booking
for r in old_booking.regos.all():
r.booking = booking
r.save()
lines = price_or_lineitems(request,booking,booking.campsite_id_list)
booking_arrival = booking.arrival.strftime('%d-%m-%Y')
booking_departure = booking.departure.strftime('%d-%m-%Y')
reservation = "Reservation for {} from {} to {} at {}".format('{} {}'.format(booking.customer.first_name,booking.customer.last_name),booking_arrival,booking_departure,booking.mooringarea.name)
# Proceed to generate invoice
checkout_response = checkout(request,booking,lines,invoice_text=reservation,internal=True)
internal_create_booking_invoice(booking, checkout_response)
# Get the new invoice
new_invoice = booking.invoices.first()
# Check if the booking is a legacy booking and doesn't have an invoice
if old_booking.legacy_id and old_booking.invoices.count() < 1:
        # Create a cash transaction in order to fix the outstanding invoice payment
CashTransaction.objects.create(
invoice = Invoice.objects.get(reference=new_invoice.invoice_reference),
amount = old_booking.cost_total,
type = 'move_in',
source = 'cash',
details = 'Transfer of funds from migrated booking',
movement_reference='Migrated Booking Funds'
)
# Update payment details for the new invoice
update_payments(new_invoice.invoice_reference)
# Attach new invoices to old booking
for i in old_booking.invoices.all():
inv = Invoice.objects.get(reference=i.invoice_reference)
inv.voided = True
#transfer to the new invoice
inv.move_funds(inv.transferable_amount,Invoice.objects.get(reference=new_invoice.invoice_reference),'Transfer of funds from {}'.format(inv.reference))
inv.save()
# Change the booking for the selected invoice
new_invoice.booking = old_booking
new_invoice.save()
return booking
def update_booking(request,old_booking,booking_details):
same_dates = False
same_campsites = False
same_campground = False
same_details = False
same_vehicles = True
with transaction.atomic():
try:
set_session_booking(request.session, old_booking)
new_details = {}
new_details.update(old_booking.details)
# Update the guests
new_details['num_adult'] = booking_details['num_adult']
new_details['num_concession'] = booking_details['num_concession']
new_details['num_child'] = booking_details['num_child']
new_details['num_infant'] = booking_details['num_infant']
booking = Booking(
arrival = booking_details['start_date'],
departure =booking_details['end_date'],
details = new_details,
customer=old_booking.customer,
mooringarea = MooringArea.objects.get(id=booking_details['mooringarea']))
# Check that the departure is not less than the arrival
if booking.departure < booking.arrival:
raise Exception('The departure date cannot be before the arrival date')
today = datetime.now().date()
if today > old_booking.departure:
raise ValidationError('You cannot change a booking past the departure date.')
# Check if it is the same campground
if old_booking.mooringarea.id == booking.mooringarea.id:
same_campground = True
# Check if dates are the same
if (old_booking.arrival == booking.arrival) and (old_booking.departure == booking.departure):
same_dates = True
# Check if the campsite is the same
if sorted(old_booking.campsite_id_list) == sorted(booking_details['campsites']):
same_campsites = True
# Check if the details have changed
if new_details == old_booking.details:
same_details = True
# Check if the vehicles have changed
current_regos = old_booking.regos.all()
current_vehicle_regos= sorted([r.rego for r in current_regos])
# Add history
new_history = old_booking._generate_history(user=request.user)
if request.data.get('entryFees').get('regos'):
new_regos = request.data['entryFees'].pop('regos')
sent_regos = [r['rego'] for r in new_regos]
regos_serializers = []
update_regos_serializers = []
for n in new_regos:
if n['rego'] not in current_vehicle_regos:
n['booking'] = old_booking.id
regos_serializers.append(BookingRegoSerializer(data=n))
same_vehicles = False
else:
booking_rego = BookingVehicleRego.objects.get(booking=old_booking,rego=n['rego'])
n['booking'] = old_booking.id
if booking_rego.type != n['type'] or booking_rego.entry_fee != n['entry_fee']:
update_regos_serializers.append(BookingRegoSerializer(booking_rego,data=n))
# Create the new regos if they are there
if regos_serializers:
for r in regos_serializers:
r.is_valid(raise_exception=True)
r.save()
# Update the new regos if they are there
if update_regos_serializers:
for r in update_regos_serializers:
r.is_valid(raise_exception=True)
r.save()
same_vehicles = False
# Check if there are regos in place that need to be removed
stale_regos = []
for r in current_regos:
if r.rego not in sent_regos:
stale_regos.append(r.id)
# delete stale regos
if stale_regos:
same_vehicles = False
BookingVehicleRego.objects.filter(id__in=stale_regos).delete()
else:
same_vehicles = False
if current_regos:
current_regos.delete()
if same_campsites and same_dates and same_vehicles and same_details:
if new_history is not None:
new_history.delete()
return old_booking
# Check difference of dates in booking
old_booking_days = int((old_booking.departure - old_booking.arrival).days)
new_days = int((booking_details['end_date'] - booking_details['start_date']).days)
date_diff = check_date_diff(old_booking,booking)
total_price = price_or_lineitems(request,booking,booking_details['campsites'],lines=False,old_booking=old_booking)
price_diff = True
if old_booking.cost_total != total_price:
price_diff = True
if price_diff:
booking = create_temp_bookingupdate(request,booking.arrival,booking.departure,booking_details,old_booking,total_price)
# Attach campsite booking objects to old booking
for c in booking.campsites.all():
c.booking = old_booking
c.save()
                # Move all the vehicles in the new booking to the old booking
for r in booking.regos.all():
r.booking = old_booking
r.save()
old_booking.cost_total = booking.cost_total
old_booking.departure = booking.departure
old_booking.arrival = booking.arrival
old_booking.details.update(booking.details)
if not same_campground:
old_booking.campground = booking.campground
old_booking.save()
booking.delete()
delete_session_booking(request.session)
send_booking_invoice(old_booking)
# send out the confirmation email if the booking is paid or over paid
if old_booking.status == 'Paid' or old_booking.status == 'Over Paid':
send_booking_confirmation(old_booking,request)
return old_booking
except:
delete_session_booking(request.session)
            traceback.print_exc()
raise
def create_or_update_booking(request,booking_details,updating=False,override_checks=False):
booking = None
if not updating:
booking = create_booking_by_site(booking_details['campsites'],
start_date = booking_details['start_date'],
end_date=booking_details['end_date'],
num_adult=booking_details['num_adult'],
num_concession=booking_details['num_concession'],
num_child=booking_details['num_child'],
num_infant=booking_details['num_infant'],
num_mooring=booking_details['num_mooring'],
vessel_size=booking_details['vessel_size'],
cost_total=booking_details['cost_total'],
override_price=booking_details['override_price'],
override_reason=booking_details['override_reason'],
override_reason_info=booking_details['override_reason_info'],
overridden_by=booking_details['overridden_by'],
customer=booking_details['customer'],
override_checks=override_checks
)
booking.details['first_name'] = booking_details['first_name']
booking.details['last_name'] = booking_details['last_name']
booking.details['phone'] = booking_details['phone']
booking.details['country'] = booking_details['country']
booking.details['postcode'] = booking_details['postcode']
# Add booking regos
if 'regos' in booking_details:
regos = booking_details['regos']
for r in regos:
r['booking'] = booking.id
regos_serializers = [BookingRegoSerializer(data=r) for r in regos]
for r in regos_serializers:
r.is_valid(raise_exception=True)
r.save()
booking.save()
return booking
def old_create_or_update_booking(request,booking_details,updating=False):
booking = None
if not updating:
booking = create_booking_by_site(campsite_id= booking_details['campsite_id'],
start_date = booking_details['start_date'],
end_date=booking_details['end_date'],
num_adult=booking_details['num_adult'],
num_concession=booking_details['num_concession'],
num_child=booking_details['num_child'],
num_infant=booking_details['num_infant'],
num_mooring=booking_details['num_mooring'],
vessel_size=booking_details['vessel_size'],
cost_total=booking_details['cost_total'],
customer=booking_details['customer'])
booking.details['first_name'] = booking_details['first_name']
booking.details['last_name'] = booking_details['last_name']
booking.details['phone'] = booking_details['phone']
booking.details['country'] = booking_details['country']
booking.details['postcode'] = booking_details['postcode']
# Add booking regos
if request.data.get('parkEntry').get('regos'):
regos = request.data['parkEntry'].pop('regos')
for r in regos:
r[u'booking'] = booking.id
regos_serializers = [BookingRegoSerializer(data=r) for r in regos]
for r in regos_serializers:
r.is_valid(raise_exception=True)
r.save()
booking.save()
return booking
def admissionsCheckout(request, admissionsBooking, lines, invoice_text=None, vouchers=[], internal=False):
basket_params = {
'products': lines,
'vouchers': vouchers,
'system': settings.PS_PAYMENT_SYSTEM_ID,
'custom_basket': True,
'booking_reference': 'AD-'+str(admissionsBooking.id)
}
basket, basket_hash = create_basket_session(request, basket_params)
checkout_params = {
'system': settings.PS_PAYMENT_SYSTEM_ID,
'fallback_url': request.build_absolute_uri('/'),
'return_url': request.build_absolute_uri(reverse('public_admissions_success')),
'return_preload_url': request.build_absolute_uri(reverse('public_admissions_success')),
'force_redirect': True,
'proxy': True if internal else False,
'invoice_text': invoice_text,
}
if internal or request.user.is_anonymous():
checkout_params['basket_owner'] = admissionsBooking.customer.id
create_checkout_session(request, checkout_params)
if internal:
responseJson = place_order_submission(request)
else:
print(reverse('checkout:index'))
responseJson = HttpResponse(geojson.dumps({'status': 'success','redirect': reverse('checkout:index'),}), content_type='application/json')
# response = HttpResponseRedirect(reverse('checkout:index'))
# inject the current basket into the redirect response cookies
# or else, anonymous users will be directionless
responseJson.set_cookie(
settings.OSCAR_BASKET_COOKIE_OPEN, basket_hash,
max_age=settings.OSCAR_BASKET_COOKIE_LIFETIME,
secure=settings.OSCAR_BASKET_COOKIE_SECURE, httponly=True
)
return responseJson
def get_basket(request):
return get_cookie_basket(settings.OSCAR_BASKET_COOKIE_OPEN,request)
def annual_admission_checkout(request, booking, lines, invoice_text=None, vouchers=[], internal=False):
basket_params = {
'products': lines,
'vouchers': vouchers,
'system': settings.PS_PAYMENT_SYSTEM_ID,
'custom_basket': True,
'booking_reference': 'AA-'+str(booking.id)
}
basket, basket_hash = create_basket_session(request, basket_params)
checkout_params = {
'system': settings.PS_PAYMENT_SYSTEM_ID,
'fallback_url': request.build_absolute_uri('/'),
'return_url': request.build_absolute_uri(reverse('public_booking_annual_admission_success')),
'return_preload_url': request.build_absolute_uri(reverse('public_booking_annual_admission_success')),
'force_redirect': True,
'proxy': True if internal else False,
'invoice_text': invoice_text,
}
if internal or request.user.is_anonymous():
checkout_params['basket_owner'] = booking.customer.id
create_checkout_session(request, checkout_params)
# if internal:
# response = place_order_submission(request)
# else:
response = HttpResponseRedirect(reverse('checkout:index'))
# inject the current basket into the redirect response cookies
# or else, anonymous users will be directionless
response.set_cookie(
settings.OSCAR_BASKET_COOKIE_OPEN, basket_hash,
max_age=settings.OSCAR_BASKET_COOKIE_LIFETIME,
secure=settings.OSCAR_BASKET_COOKIE_SECURE, httponly=True
)
#if booking.cost_total < 0:
# response = HttpResponseRedirect('/refund-payment')
# response.set_cookie(
# settings.OSCAR_BASKET_COOKIE_OPEN, basket_hash,
# max_age=settings.OSCAR_BASKET_COOKIE_LIFETIME,
# secure=settings.OSCAR_BASKET_COOKIE_SECURE, httponly=True
# )
## Zero booking costs
#if booking.cost_total < 1 and booking.cost_total > -1:
# response = HttpResponseRedirect('/no-payment')
# response.set_cookie(
# settings.OSCAR_BASKET_COOKIE_OPEN, basket_hash,
# max_age=settings.OSCAR_BASKET_COOKIE_LIFETIME,
# secure=settings.OSCAR_BASKET_COOKIE_SECURE, httponly=True
# )
return response
def checkout(request, booking, lines, invoice_text=None, vouchers=[], internal=False):
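    """Create the ledger basket and checkout session for a mooring booking and return a response
    that sends the client to the checkout (or to /refund-payment for negative totals and
    /no-payment for zero totals), with the basket hash set as a cookie."""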
basket_params = {
'products': lines,
'vouchers': vouchers,
'system': settings.PS_PAYMENT_SYSTEM_ID,
'custom_basket': True,
'booking_reference': 'PS-'+str(booking.id)
}
basket, basket_hash = create_basket_session(request, basket_params)
checkout_params = {
'system': settings.PS_PAYMENT_SYSTEM_ID,
'fallback_url': request.build_absolute_uri('/'),
'return_url': request.build_absolute_uri(reverse('public_booking_success')),
'return_preload_url': request.build_absolute_uri(reverse('public_booking_success')),
'force_redirect': True,
'proxy': True if internal else False,
'invoice_text': invoice_text,
}
# if not internal:
# checkout_params['check_url'] = request.build_absolute_uri('/api/booking/{}/booking_checkout_status.json'.format(booking.id))
if internal or request.user.is_anonymous():
checkout_params['basket_owner'] = booking.customer.id
print ("BOOKING ID 3")
print (request.session['ps_booking'])
create_checkout_session(request, checkout_params)
print ("BOOKING ID 4")
print (request.session['ps_booking'])
# if internal:
# response = place_order_submission(request)
# else:
#response = HttpResponseRedirect(reverse('checkout:index'))
response = HttpResponse("<script> window.location='"+reverse('checkout:index')+"';</script> <a href='"+reverse('checkout:index')+"'> Redirecting please wait: "+reverse('checkout:index')+"</a>")
# inject the current basket into the redirect response cookies
# or else, anonymous users will be directionless
response.set_cookie(
settings.OSCAR_BASKET_COOKIE_OPEN, basket_hash,
max_age=settings.OSCAR_BASKET_COOKIE_LIFETIME,
secure=settings.OSCAR_BASKET_COOKIE_SECURE, httponly=True
)
if booking.cost_total < 0:
response = HttpResponseRedirect('/refund-payment')
response.set_cookie(
settings.OSCAR_BASKET_COOKIE_OPEN, basket_hash,
max_age=settings.OSCAR_BASKET_COOKIE_LIFETIME,
secure=settings.OSCAR_BASKET_COOKIE_SECURE, httponly=True
)
# Zero booking costs
if booking.cost_total < 1 and booking.cost_total > -1:
response = HttpResponseRedirect('/no-payment')
response.set_cookie(
settings.OSCAR_BASKET_COOKIE_OPEN, basket_hash,
max_age=settings.OSCAR_BASKET_COOKIE_LIFETIME,
secure=settings.OSCAR_BASKET_COOKIE_SECURE, httponly=True
)
return response
def allocate_failedrefund_to_unallocated(request, booking, lines, invoice_text=None, internal=False, order_total='0.00',user=None):
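    """Create a 'Refund Allocation Pool' invoice/order from the given lines and attach it as a
    system invoice to the booking (mooring, admissions or annual admission)."""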
booking_reference = None
if booking.__class__.__name__ == 'AdmissionsBooking':
booking_reference = 'AD-'+str(booking.id)
elif booking.__class__.__name__ == 'BookingAnnualAdmission':
booking_reference = 'AA-'+str(booking.id)
else:
booking_reference = 'PS-'+str(booking.id)
basket_params = {
'products': lines,
'vouchers': [],
'system': settings.PS_PAYMENT_SYSTEM_ID,
'custom_basket': True,
'booking_reference': booking_reference
}
basket, basket_hash = create_basket_session(request, basket_params)
ci = utils.CreateInvoiceBasket()
order = ci.create_invoice_and_order(basket, total=None, shipping_method='No shipping required',shipping_charge=False, user=user, status='Submitted', invoice_text='Refund Allocation Pool', )
#basket.status = 'Submitted'
#basket.save()
#new_order = Order.objects.get(basket=basket)
new_invoice = Invoice.objects.get(order_number=order.number)
update_payments(new_invoice.reference)
if booking.__class__.__name__ == 'AdmissionsBooking':
print ("AdmissionsBooking")
book_inv, created = AdmissionsBookingInvoice.objects.get_or_create(admissions_booking=booking, invoice_reference=new_invoice.reference, system_invoice=True)
elif booking.__class__.__name__ == 'BookingAnnualAdmission':
print ("BookingAnnualAdmission")
book_inv, created = models.BookingAnnualInvoice.objects.get_or_create(booking_annual_admission=booking, invoice_reference=new_invoice.reference, system_invoice=True)
else:
book_inv, created = BookingInvoice.objects.get_or_create(booking=booking, invoice_reference=new_invoice.reference, system_invoice=True)
return order
def allocate_refund_to_invoice(request, booking, lines, invoice_text=None, internal=False, order_total='0.00',user=None):
booking_reference = None
if booking.__class__.__name__ == 'AdmissionsBooking':
booking_reference = 'AD-'+str(booking.id)
elif booking.__class__.__name__ == 'BookingAnnualAdmission':
booking_reference = 'AA-'+str(booking.id)
else:
booking_reference = 'PS-'+str(booking.id)
basket_params = {
'products': lines,
'vouchers': [],
'system': settings.PS_PAYMENT_SYSTEM_ID,
'custom_basket': True,
'booking_reference': booking_reference
}
basket, basket_hash = create_basket_session(request, basket_params)
ci = utils.CreateInvoiceBasket()
order = ci.create_invoice_and_order(basket, total=None, shipping_method='No shipping required',shipping_charge=False, user=user, status='Submitted', invoice_text='Oracle Allocation Pools', )
#basket.status = 'Submitted'
#basket.save()
#new_order = Order.objects.get(basket=basket)
new_invoice = Invoice.objects.get(order_number=order.number)
update_payments(new_invoice.reference)
if booking.__class__.__name__ == 'AdmissionsBooking':
print ("AdmissionsBooking")
book_inv, created = AdmissionsBookingInvoice.objects.get_or_create(admissions_booking=booking, invoice_reference=new_invoice.reference, system_invoice=True)
elif booking.__class__.__name__ == 'BookingAnnualAdmission':
print ("BookingAnnualAdmission")
book_inv, created = models.BookingAnnualInvoice.objects.get_or_create(booking_annual_admission=booking, invoice_reference=new_invoice.reference, system_invoice=True)
else:
book_inv, created = BookingInvoice.objects.get_or_create(booking=booking, invoice_reference=new_invoice.reference, system_invoice=True)
return order
def old_internal_create_booking_invoice(booking, checkout_response):
if not checkout_response.history:
raise Exception('There was a problem retrieving the invoice for this booking')
last_redirect = checkout_response.history[-2]
reference = last_redirect.url.split('=')[1]
try:
Invoice.objects.get(reference=reference)
except Invoice.DoesNotExist:
raise Exception("There was a problem attaching an invoice for this booking")
book_inv = BookingInvoice.objects.get_or_create(booking=booking,invoice_reference=reference)
return book_inv
def internal_create_booking_invoice(booking, reference):
try:
Invoice.objects.get(reference=reference)
except Invoice.DoesNotExist:
raise Exception("There was a problem attaching an invoice for this booking")
book_inv = BookingInvoice.objects.get_or_create(booking=booking,invoice_reference=reference)
return book_inv
def internal_booking(request,booking_details,internal=True,updating=False):
json_booking = request.data
booking = None
try:
booking = create_or_update_booking(request, booking_details, updating, override_checks=internal)
with transaction.atomic():
set_session_booking(request.session,booking)
# Get line items
booking_arrival = booking.arrival.strftime('%d-%m-%Y')
booking_departure = booking.departure.strftime('%d-%m-%Y')
reservation = u"Reservation for {} confirmation PS{}".format(u'{} {}'.format(booking.customer.first_name,booking.customer.last_name), booking.id)
lines = price_or_lineitems(request,booking,booking.campsite_id_list)
# Proceed to generate invoice
checkout_response = checkout(request,booking,lines,invoice_text=reservation,internal=True)
# Change the type of booking
booking.booking_type = 0
booking.save()
# FIXME: replace with session check
invoice = None
if 'invoice=' in checkout_response.url:
invoice = checkout_response.url.split('invoice=', 1)[1]
else:
for h in reversed(checkout_response.history):
if 'invoice=' in h.url:
invoice = h.url.split('invoice=', 1)[1]
break
print ("-== internal_booking ==-")
internal_create_booking_invoice(booking, invoice)
delete_session_booking(request.session)
send_booking_invoice(booking)
return booking
except:
if booking:
booking.delete()
raise
def old_internal_booking(request,booking_details,internal=True,updating=False):
json_booking = request.data
booking = None
try:
booking = create_or_update_booking(request,booking_details,updating)
with transaction.atomic():
set_session_booking(request.session,booking)
# Get line items
booking_arrival = booking.arrival.strftime('%d-%m-%Y')
booking_departure = booking.departure.strftime('%d-%m-%Y')
reservation = u"Reservation for {} from {} to {} at {}".format(u'{} {}'.format(booking.customer.first_name,booking.customer.last_name),booking_arrival,booking_departure,booking.mooringarea.name)
lines = price_or_lineitems(request,booking,booking.campsite_id_list)
# Proceed to generate invoice
checkout_response = checkout(request,booking,lines,invoice_text=reservation,internal=True)
# Change the type of booking
booking.booking_type = 0
booking.save()
internal_create_booking_invoice(booking, checkout_response)
delete_session_booking(request.session)
send_booking_invoice(booking)
return booking
except:
if booking:
booking.delete()
raise
def set_session_booking(session, booking):
session['ps_booking'] = booking.id
session.modified = True
def get_session_admissions_booking(session):
if 'ad_booking' in session:
booking_id = session['ad_booking']
else:
raise Exception('Admissions booking not in Session')
try:
return AdmissionsBooking.objects.get(id=booking_id)
except AdmissionsBooking.DoesNotExist:
raise Exception('Admissions booking not found for booking_id {}'.format(booking_id))
def get_annual_admission_session_booking(session):
if 'annual_admission_booking' in session:
booking_id = session['annual_admission_booking']
else:
raise Exception('Annual Admission Booking not in Session')
try:
return BookingAnnualAdmission.objects.get(id=booking_id)
except BookingAnnualAdmission.DoesNotExist:
raise Exception('Annual Admission Booking not found for booking_id {}'.format(booking_id))
def delete_annual_admission_session_booking(session):
if 'annual_admission_booking' in session:
del session['annual_admission_booking']
session.modified = True
def delete_session_admissions_booking(session):
if 'ad_booking' in session:
del session['ad_booking']
session.modified = True
def get_session_booking(session):
if 'ps_booking' in session:
booking_id = session['ps_booking']
else:
raise Exception('Booking not in Session')
try:
return Booking.objects.get(id=booking_id)
except Booking.DoesNotExist:
raise Exception('Booking not found for booking_id {}'.format(booking_id))
def delete_session_booking(session):
if 'ps_booking' in session:
del session['ps_booking']
session.modified = True
def daterange(start_date, end_date):
for n in range(int ((end_date - start_date).days)):
yield start_date + timedelta(n)
def oracle_integration(date,override):
system = '0516'
oracle_codes = oracle_parser_on_invoice(date,system,'Mooring Booking',override=override)
def admissions_lines(booking_mooring):
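    """Build admission-fee periods for a set of mooring site bookings: converts each booking's
    from/to datetimes to local dates (only where the park requires an entry fee), sorts them,
    and merges overlapping stays into distinct admission date ranges per mooring group."""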
lines = []
for bm in booking_mooring:
# Convert the from and to dates of this booking to just plain dates in local time.
# Append them to a list.
if bm.campsite.mooringarea.park.entry_fee_required:
from_dt = bm.from_dt
timestamp = calendar.timegm(from_dt.timetuple())
local_dt = datetime.fromtimestamp(timestamp)
from_dt = local_dt.replace(microsecond=from_dt.microsecond)
to_dt = bm.to_dt
timestamp = calendar.timegm(to_dt.timetuple())
local_dt = datetime.fromtimestamp(timestamp)
to_dt = local_dt.replace(microsecond=to_dt.microsecond)
group = MooringAreaGroup.objects.filter(moorings__in=[bm.campsite.mooringarea,])[0].id
lines.append({'from': from_dt, 'to': to_dt, 'group':group})
# Sort the list by date from.
new_lines = sorted(lines, key=lambda line: line['from'])
i = 0
lines = []
latest_from = None
latest_to = None
    # Loop through the sorted list. On the first pass this line's FROM date starts the first
    # admission fee. Compare each TO value with the next FROM value; when the ranges do not
    # touch or overlap, close off the current fee line using the latest FROM and this TO value.
while i < len(new_lines):
if i == 0:
latest_from = new_lines[i]['from'].date()
if i < len(new_lines)-1:
if new_lines[i]['to'].date() < new_lines[i+1]['from'].date():
latest_to = new_lines[i]['to'].date()
else:
# if new_lines[i]['from'].date() > new_lines[i-1]['to'].date():
latest_to = new_lines[i]['to'].date()
if latest_to:
lines.append({"rowid":'admission_fee_id'+str(i), 'id': i,'from':datetime.strftime(latest_from, '%d %b %Y'), 'to': datetime.strftime(latest_to, '%d %b %Y'), 'admissionFee': 0, 'group': new_lines[i]['group']})
if i < len(new_lines)-1:
latest_from = new_lines[i+1]['from'].date()
latest_to = None
i+= 1
return lines
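# Illustrative sketch (not part of the original code): admissions_lines() merges
# touching or overlapping booking date ranges into a single admission-fee line.
# With two hypothetical bookings, 10-12 Jan and 11-14 Jan on the same mooring
# group, the function would return one line roughly of the form:
#   [{'rowid': 'admission_fee_id1', 'id': 1, 'from': '10 Jan 2020',
#     'to': '14 Jan 2020', 'admissionFee': 0, 'group': <group id>}]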
# Access Level check for Group
def mooring_group_access_level_change(pk,request):
mooring_groups = MooringAreaGroup.objects.filter(members__in=[request.user,])
if request.user.is_superuser is True:
return True
else:
if ChangeGroup.objects.filter(pk=pk,mooring_group__in=mooring_groups).count() > 0:
return True
return False
def mooring_group_access_level_cancel(pk,request):
mooring_groups = MooringAreaGroup.objects.filter(members__in=[request.user,])
if request.user.is_superuser is True:
return True
else:
if CancelGroup.objects.filter(pk=pk,mooring_group__in=mooring_groups).count() > 0:
return True
return False
def mooring_group_access_level_change_options(cg,pk,request):
mooring_groups = MooringAreaGroup.objects.filter(members__in=[request.user,])
if request.user.is_superuser is True:
return True
else:
cpp = ChangePricePeriod.objects.get(id=pk)
if ChangeGroup.objects.filter(id=cg,change_period__in=[cpp],mooring_group__in=mooring_groups).count() > 0:
return True
return False
def mooring_group_access_level_cancel_options(cg,pk,request):
mooring_groups = MooringAreaGroup.objects.filter(members__in=[request.user,])
if request.user.is_superuser is True:
return True
else:
cpp = CancelPricePeriod.objects.get(id=pk)
if CancelGroup.objects.filter(id=cg,cancel_period__in=[cpp],mooring_group__in=mooring_groups).count() > 0:
return True
return False
def mooring_group_access_level_booking_period(pk,request):
mooring_groups = MooringAreaGroup.objects.filter(members__in=[request.user,])
if request.user.is_superuser is True:
return True
else:
if BookingPeriod.objects.filter(pk=pk,mooring_group__in=mooring_groups).count() > 0:
return True
return False
def mooring_group_access_level_annual_booking_period(pk,request):
mooring_groups = MooringAreaGroup.objects.filter(members__in=[request.user,])
if request.user.is_superuser is True:
return True
else:
if models.AnnualBookingPeriodGroup.objects.filter(pk=pk,mooring_group__in=mooring_groups).count() > 0:
return True
return False
def mooring_group_access_level_booking_period_option(pk,bp_group_id,request):
mooring_groups = MooringAreaGroup.objects.filter(members__in=[request.user,])
if request.user.is_superuser is True:
return True
else:
bpo = BookingPeriodOption.objects.get(id=pk)
if BookingPeriod.objects.filter(pk=bp_group_id,booking_period__in=[bpo],mooring_group__in=mooring_groups).count() > 0:
return True
return False
def check_mooring_admin_access(request):
if request.user.is_superuser is True:
return True
else:
if request.user.groups.filter(name__in=['Mooring Admin']).exists():
return True
return False
def get_provinces(country_code):
provinces = []
read_data = ""
json_response = []
with io.open(settings.BASE_DIR+'/mooring/data/provinces.json', "r", encoding="utf-8") as my_file:
read_data = my_file.read()
provinces = json.loads(read_data)
for p in provinces:
if p['country'] == country_code:
if 'short' in p:
json_response.append(p)
return json_response
def booking_success(basket, booking, context_processor):
print("MLINE 1.01", datetime.now().strftime("%d/%m/%Y %H:%M:%S"))
order = Order.objects.get(basket=basket[0])
invoice = Invoice.objects.get(order_number=order.number)
print("MLINE 1.02", datetime.now().strftime("%d/%m/%Y %H:%M:%S"))
invoice_ref = invoice.reference
book_inv, created = BookingInvoice.objects.get_or_create(booking=booking, invoice_reference=invoice_ref)
print("MLINE 1.03", datetime.now().strftime("%d/%m/%Y %H:%M:%S"))
#invoice_ref = request.GET.get('invoice')
if booking.booking_type == 3:
print("MLINE 2.01", datetime.now().strftime("%d/%m/%Y %H:%M:%S"))
try:
inv = Invoice.objects.get(reference=invoice_ref)
order = Order.objects.get(number=inv.order_number)
order.user = booking.customer
order.save()
except Invoice.DoesNotExist:
print ("INVOICE ERROR")
logger.error('{} tried making a booking with an incorrect invoice'.format('User {} with id {}'.format(booking.customer.get_full_name(),booking.customer.id) if booking.customer else 'An anonymous user'))
return redirect('public_make_booking')
if inv.system not in ['0516']:
print ("SYSTEM ERROR")
logger.error('{} tried making a booking with an invoice from another system with reference number {}'.format('User {} with id {}'.format(booking.customer.get_full_name(),booking.customer.id) if booking.customer else 'An anonymous user',inv.reference))
return redirect('public_make_booking')
print("MLINE 3.01", datetime.now().strftime("%d/%m/%Y %H:%M:%S"))
if book_inv:
print("MLINE 4.01", datetime.now().strftime("%d/%m/%Y %H:%M:%S"))
if booking.old_booking:
old_booking = Booking.objects.get(id=booking.old_booking.id)
old_booking.booking_type = 4
old_booking.cancelation_time = datetime.now()
old_booking.canceled_by = booking.created_by #request.user
old_booking.save()
booking_items = MooringsiteBooking.objects.filter(booking=old_booking)
# Find admissions booking for old booking
if old_booking.admission_payment:
old_booking.admission_payment.booking_type = 4
old_booking.admission_payment.cancelation_time = datetime.now()
old_booking.admission_payment.canceled_by = booking.created_by #request.user
old_booking.admission_payment.save()
for bi in booking_items:
bi.booking_type = 4
bi.save()
print("MLINE 5.01", datetime.now().strftime("%d/%m/%Y %H:%M:%S"))
booking_items_current = MooringsiteBooking.objects.filter(booking=booking)
for bi in booking_items_current:
if str(bi.id) in booking.override_lines:
bi.amount = Decimal(booking.override_lines[str(bi.id)])
bi.save()
print("MLINE 6.01", datetime.now().strftime("%d/%m/%Y %H:%M:%S"))
msb = MooringsiteBooking.objects.filter(booking=booking).order_by('from_dt')
from_date = msb[0].from_dt
to_date = msb[msb.count()-1].to_dt
timestamp = calendar.timegm(from_date.timetuple())
local_dt = datetime.fromtimestamp(timestamp)
from_dt = local_dt.replace(microsecond=from_date.microsecond)
from_date_converted = from_dt.date()
timestamp = calendar.timegm(to_date.timetuple())
local_dt = datetime.fromtimestamp(timestamp)
to_dt = local_dt.replace(microsecond=to_date.microsecond)
to_date_converted = to_dt.date()
booking.arrival = from_date_converted
booking.departure = to_date_converted
# set booking to be permanent fixture
booking.booking_type = 1 # internet booking
booking.expiry_time = None
print("MLINE 7.01", datetime.now().strftime("%d/%m/%Y %H:%M:%S"))
update_payments(invoice_ref)
print("MLINE 8.01", datetime.now().strftime("%d/%m/%Y %H:%M:%S"))
#Calculate Admissions and create object
if booking.admission_payment:
ad_booking = AdmissionsBooking.objects.get(pk=booking.admission_payment.pk)
#if request.user.__class__.__name__ == 'EmailUser':
ad_booking.created_by = booking.created_by
ad_booking.booking_type=1
print("MLINE 8.02", datetime.now().strftime("%d/%m/%Y %H:%M:%S"))
ad_booking.save()
print("MLINE 8.03", datetime.now().strftime("%d/%m/%Y %H:%M:%S"))
ad_invoice = AdmissionsBookingInvoice.objects.get_or_create(admissions_booking=ad_booking, invoice_reference=invoice_ref)
print("MLINE 8.04", datetime.now().strftime("%d/%m/%Y %H:%M:%S"))
for al in ad_booking.override_lines.keys():
ad_line = AdmissionsLine.objects.get(id=int(al))
ad_line.cost = ad_booking.override_lines[str(al)]
ad_line.save()
print("MLINE 8.05", datetime.now().strftime("%d/%m/%Y %H:%M:%S"))
# booking.admission_payment = ad_booking
booking.save()
print("MLINE 9.01", datetime.now().strftime("%d/%m/%Y %H:%M:%S"))
#if not request.user.is_staff:
# print "USER IS NOT STAFF."
#request.session['ps_last_booking'] = booking.id
#utils.delete_session_booking(request.session)
# send out the invoice before the confirmation is sent if total is greater than zero
#if booking.cost_total > 0:
print("MLINE 10.01", datetime.now().strftime("%d/%m/%Y %H:%M:%S"))
try:
emails.send_booking_invoice(booking,context_processor)
except Exception as e:
print ("Error Sending Invoice ("+str(booking.id)+") :"+str(e))
            # for fully paid bookings, fire off confirmation email
#if booking.invoice_status == 'paid':
print("MLINE 11.01", datetime.now().strftime("%d/%m/%Y %H:%M:%S"))
try:
emails.send_booking_confirmation(booking,context_processor)
except Exception as e:
print ("Error Sending Booking Confirmation ("+str(booking.id)+") :"+str(e))
print("MLINE 12.01", datetime.now().strftime("%d/%m/%Y %H:%M:%S"))
refund_failed = None
if models.RefundFailed.objects.filter(booking=booking).count() > 0:
refund_failed = models.RefundFailed.objects.filter(booking=booking)
# Create/Update Vessel in VesselDetails Table
print("MLINE 13.01", datetime.now().strftime("%d/%m/%Y %H:%M:%S"))
try:
if models.VesselDetail.objects.filter(rego_no=booking.details['vessel_rego']).count() > 0:
vd = models.VesselDetail.objects.filter(rego_no=booking.details['vessel_rego'])
p = vd[0]
p.vessel_size=booking.details['vessel_size']
p.vessel_draft=booking.details['vessel_draft']
p.vessel_beam=booking.details['vessel_beam']
p.vessel_weight=booking.details['vessel_weight']
p.save()
else:
models.VesselDetail.objects.create(rego_no=booking.details['vessel_rego'],
vessel_size=booking.details['vessel_size'],
vessel_draft=booking.details['vessel_draft'],
vessel_beam=booking.details['vessel_beam'],
vessel_weight=booking.details['vessel_weight']
)
print("MLINE 14.01", datetime.now().strftime("%d/%m/%Y %H:%M:%S"))
except:
print ("ERROR: create vesseldetails on booking success")
context = {
'booking': booking,
'book_inv': [book_inv],
'refund_failed' : refund_failed
}
print("MLINE 15.01", datetime.now().strftime("%d/%m/%Y %H:%M:%S"))
return context
def booking_annual_admission_success(basket, booking, context_processor):
order = Order.objects.get(basket=basket[0])
invoice = Invoice.objects.get(order_number=order.number)
invoice_ref = invoice.reference
book_inv, created = models.BookingAnnualInvoice.objects.get_or_create(booking_annual_admission=booking, invoice_reference=invoice_ref)
#invoice_ref = request.GET.get('invoice')
if booking.booking_type == 3:
try:
inv = Invoice.objects.get(reference=invoice_ref)
order = Order.objects.get(number=inv.order_number)
order.user = booking.customer
order.save()
except Invoice.DoesNotExist:
print ("INVOICE ERROR")
logger.error('{} tried making a booking with an incorrect invoice'.format('User {} with id {}'.format(booking.customer.get_full_name(),booking.customer.id) if booking.customer else 'An anonymous user'))
return redirect('public_make_booking')
if inv.system not in ['0516']:
print ("SYSTEM ERROR")
logger.error('{} tried making a booking with an invoice from another system with reference number {}'.format('User {} with id {}'.format(booking.customer.get_full_name(),booking.customer.id) if booking.customer else 'An anonymous user',inv.reference))
return redirect('public_make_booking')
if book_inv:
# set booking to be permanent fixture
booking.booking_type = 1 # internet booking
booking.expiry_time = None
update_payments(invoice_ref)
#Calculate Admissions and create object
booking.save()
#if not request.user.is_staff:
# print "USER IS NOT STAFF."
print ("SEND EMAIL")
try:
emails.send_annual_admission_booking_invoice(booking,context_processor)
except Exception as e:
print ("Error Sending Invoice ("+str(booking.id)+") :"+str(e))
try:
emails.send_new_annual_admission_booking_internal(booking,context_processor)
except Exception as e:
print ("Error Sending Booking Confirmation ("+str(booking.id)+") :"+str(e))
        # for fully paid bookings, fire off confirmation email
#if booking.invoice_status == 'paid':
context = {
'booking': booking,
'book_inv': [book_inv],
}
try:
if models.VesselDetail.objects.filter(rego_no=booking.details['vessel_rego']).count() > 0:
vd = models.VesselDetail.objects.filter(rego_no=booking.details['vessel_rego'])
p = vd[0]
p.vessel_name=booking.details['vessel_name']
p.save()
except:
print ("ERROR: create vesseldetails on booking success")
print ("COMPLETED SUCCESS")
return context
def booking_admission_success(basket, booking, context_processor):
arrival = models.AdmissionsLine.objects.filter(admissionsBooking=booking)[0].arrivalDate
overnight = models.AdmissionsLine.objects.filter(admissionsBooking=booking)[0].overnightStay
order = Order.objects.get(basket=basket[0])
invoice = Invoice.objects.get(order_number=order.number)
invoice_ref = invoice.reference
#invoice_ref = request.GET.get('invoice')
if booking.booking_type == 3:
try:
inv = Invoice.objects.get(reference=invoice_ref)
order = Order.objects.get(number=inv.order_number)
order.user = booking.customer
order.save()
except Invoice.DoesNotExist:
logger.error('{} tried making a booking with an incorrect invoice'.format('User {} with id {}'.format(booking.customer.get_full_name(),booking.customer.id) if booking.customer else 'An anonymous user'))
return redirect('admissions', args=(booking.location.key,))
if inv.system not in ['0516']:
logger.error('{} tried making a booking with an invoice from another system with reference number {}'.format('User {} with id {}'.format(booking.customer.get_full_name(),booking.customer.id) if booking.customer else 'An anonymous user',inv.reference))
return redirect('admissions', args=(booking.location.key,))
try:
b = AdmissionsBookingInvoice.objects.get(invoice_reference=invoice_ref)
logger.error('{} tried making an admission booking with an already used invoice with reference number {}'.format('User {} with id {}'.format(booking.customer.get_full_name(),booking.customer.id) if booking.customer else 'An anonymous user',inv.reference))
return redirect('admissions', args=(booking.location.key,))
except AdmissionsBookingInvoice.DoesNotExist:
logger.info('{} finished temporary booking {}, creating new AdmissionBookingInvoice with reference {}'.format('User {} with id {}'.format(booking.customer.get_full_name(),booking.customer.id) if booking.customer else 'An anonymous user',booking.id, invoice_ref))
# FIXME: replace with server side notify_url callback
admissionsInvoice = AdmissionsBookingInvoice.objects.get_or_create(admissions_booking=booking, invoice_reference=invoice_ref)
#if request.user.__class__.__name__ == 'EmailUser':
# booking.created_by = request.user
# set booking to be permanent fixture
booking.booking_type = 1 # internet booking
booking.save()
#request.session['ad_last_booking'] = booking.id
#utils.delete_session_admissions_booking(request.session)
try:
# send out the invoice before the confirmation is sent
emails.send_admissions_booking_invoice(booking,context_processor)
except Exception as e:
print ("Error Sending Invoice ("+str(booking.id)+") :"+str(e))
try:
# for fully paid bookings, fire off confirmation email
emails.send_admissions_booking_confirmation(booking,context_processor)
except Exception as e:
print ("Error Sending Booking Confirmation ("+str(booking.id)+") :"+str(e))
context = {
'admissionsBooking': booking,
'arrival' : arrival,
'overnight': overnight,
'admissionsInvoice': [invoice_ref]
}
|
py | 1a43cdc03f2e0b9629625da92b3976e79f5686fc | #!/usr/bin/env python3
import sys
import os
import struct
import select
import time
import getopt
import tqdm
import socket
try:
optlist, args = getopt.getopt(sys.argv[1:], 's')
timeout = 0.01
n = 1024
slow = False
for o, a in optlist:
if o == "-s":
slow = True
n = 16
print('Running in slow mode')
out = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
host, port = args[1].split(':')
out.connect((host, int(port)))
size = os.path.getsize(args[0])
out.send(struct.pack('>I', size))
with open(args[0], 'rb') as f:
data = f.read()
for i in tqdm.tqdm(range(0, len(data), n)):
out.send(data[i:i+n])
if slow:
time.sleep(timeout)
out.close()
os.execlp('telnet', 'telnet', host, port.strip())
except getopt.GetoptError as err:
print(str(err))
print('Usage: send.py [-s] file host:port')
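# Receiver-side sketch (illustrative only, not part of this script): the peer at
# host:port is expected to read a 4-byte big-endian length prefix and then
# exactly that many bytes of file data; `conn` below is a hypothetical accepted
# socket:
#   size = struct.unpack('>I', conn.recv(4))[0]
#   data = b''
#   while len(data) < size:
#       data += conn.recv(min(4096, size - len(data)))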
|
py | 1a43ceb30bdc9e648c4781b2c3b10176bd1d1ceb | """Sensor classes represent modbus registers for an inverter."""
from __future__ import annotations
import logging
from math import modf
from typing import Any, Dict, List, Sequence, Tuple, Union
import attr
_LOGGER = logging.getLogger(__name__)
def ensure_tuple(val: Any) -> Tuple[int]:
"""Return a tuple."""
if isinstance(val, tuple):
return val # type: ignore
if isinstance(val, int):
return (val,)
return tuple(val) # type: ignore
@attr.define(slots=True)
class Sensor:
"""Sunsynk sensor."""
reg_address: Tuple[int, ...] = attr.field(converter=ensure_tuple)
name: str = attr.field()
unit: str = attr.field(default="")
factor: float = attr.field(default=1)
value: Union[float, int, str, None] = None
# func: Union[
# None, Callable[[Tuple[int, ...]], str], Callable[[float], Any]
# ] = attr.field(default=None)
reg_value: Tuple[int, ...] = attr.field(init=False, factory=tuple)
def append_to(self, arr: List[Sensor]) -> Sensor:
"""Append to a list of sensors."""
arr.append(self)
return self
def reg_to_value(self, value: Tuple[int, ...]) -> Union[float, int, str, None]:
"""Update the reg_value and update."""
if isinstance(value, tuple):
self.reg_value = value
else:
self.reg_value = (value,)
self.update_value()
return self.value
@property
def id(self) -> str: # pylint: disable=invalid-name
"""Get the sensor ID."""
return slug(self.name)
def update_value(self) -> None:
"""Update the value from the reg_value."""
hval = self.reg_value[1] if len(self.reg_value) > 1 else 0
lval = self.reg_value[0]
_LOGGER.debug(
"%s low=%d high=%d value=%s%s",
self.name,
lval,
hval,
self.value,
self.unit,
)
self.value = (lval + (hval << 16)) * self.factor
if self.factor < 0: # Indicate this register is signed
self.value = -self.value
# Value might be negative.
if self.value > 0x7FFF:
self.value -= 0xFFFF
# if self.func:
# self.value = self.func(self.value) # type: ignore
# make integer/round?
if isinstance(self.value, float):
if modf(self.value)[0] == 0:
self.value = int(self.value)
else:
self.value = round(self.value, 2)
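# Worked example (illustrative): a reg_value of (100,) with factor 0.1 yields 10,
# while a two-word reg_value such as (0x5678, 0x0001) combines low and high words
# as 0x5678 + (0x0001 << 16) = 0x15678 before the factor is applied.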
class HSensor(Sensor):
"""Hybrid sensor."""
class RWSensor(Sensor):
"""Read & write sensor."""
def group_sensors(
sensors: Sequence[Sensor], allow_gap: int = 3
) -> Sequence[Sequence[int]]:
"""Group sensor registers into blocks for reading."""
if not sensors:
return []
regs = set()
for sen in sensors:
regs |= set(sen.reg_address)
adr = sorted(regs)
cgroup = [adr[0]]
groups = [cgroup]
for idx in range(1, len(adr)):
gap = adr[idx] - adr[idx - 1]
if gap > allow_gap or len(cgroup) >= 60:
cgroup = []
groups.append(cgroup)
cgroup.append(adr[idx])
return groups
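# Example (illustrative only): registers 1, 2, 5 and 30 are grouped into
# contiguous read blocks, tolerating gaps of at most `allow_gap` registers:
#   group_sensors([Sensor(1, "a"), Sensor((2, 5), "b"), Sensor(30, "c")])
#   -> [[1, 2, 5], [30]]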
def update_sensors(sensors: Sequence[Sensor], registers: Dict[int, int]) -> None:
"""Update sensors."""
for sen in sensors:
try:
sen.reg_value = tuple(registers[i] for i in sen.reg_address)
except KeyError:
continue
sen.update_value()
def slug(name: str) -> str:
"""Create a slug."""
return name.lower().replace(" ", "_")
class TemperatureSensor(Sensor):
"""Offset by 100 for temperature."""
def update_value(self) -> None:
"""Offset by 100 for temperature."""
super().update_value()
try:
self.value = round(float(self.value) - 100, 2) # type: ignore
except (TypeError, ValueError) as err:
self.value = 0
_LOGGER.error("Could not decode temperature: %s", err)
class TimeRWSensor(RWSensor):
"""Extract the time."""
def update_value(self) -> None:
"""Extract the time."""
sval = str(self.reg_value[0])
self.value = f"{sval[:-2]}:{sval[-2:]}"
class SDStatusSensor(Sensor):
"""SD card status."""
def update_value(self) -> None:
"""SD card status."""
self.value = {
1000: "fault",
2000: "ok",
}.get(self.reg_value[0]) or f"unknown {self.reg_value[0]}"
class InverterStateSensor(Sensor):
"""Inverter status."""
def update_value(self) -> None:
"""Inverter status."""
if self.reg_value[0] == 2:
self.value = "ok"
else:
self.value = f"unknown {self.reg_value[0]}"
class SerialSensor(Sensor):
"""Decode Inverter serial number."""
def update_value(self) -> None:
"""Decode Inverter serial number."""
res = ""
for b16 in self.reg_value:
res += chr(b16 >> 8)
res += chr(b16 & 0xFF)
self.value = res
class FaultSensor(Sensor):
"""Decode Inverter faults."""
def update_value(self) -> None:
"""Decode Inverter faults."""
faults = {
13: "Working mode change",
18: "AC over current",
20: "DC over current",
23: "F23 AC leak current or transient over current",
24: "F24 DC insulation impedance",
26: "F26 DC busbar imbalanced",
29: "Parallel comms cable",
35: "No AC grid",
42: "AC line low voltage",
47: "AC freq high/low",
56: "DC busbar voltage low",
63: "ARC fault",
64: "Heat sink tempfailure",
}
err = []
off = 0
for b16 in self.reg_value:
for bit in range(16):
msk = 1 << bit
if msk & b16:
msg = f"F{bit+off+1:02} " + faults.get(off + msk, "")
err.append(msg.strip())
off += 16
self.value = ", ".join(err)
|
py | 1a43cecc724ff0634ea76df0b2723deb4268900e | from hazelcast.protocol.client_message import OutboundMessage, REQUEST_HEADER_SIZE, create_initial_buffer
from hazelcast.protocol.builtin import StringCodec
# hex: 0x0D0900
_REQUEST_MESSAGE_TYPE = 854272
# hex: 0x0D0901
_RESPONSE_MESSAGE_TYPE = 854273
_REQUEST_INITIAL_FRAME_SIZE = REQUEST_HEADER_SIZE
def encode_request(name):
buf = create_initial_buffer(_REQUEST_INITIAL_FRAME_SIZE, _REQUEST_MESSAGE_TYPE)
StringCodec.encode(buf, name, True)
return OutboundMessage(buf, False)
|
py | 1a43cf84ec7dba35d77ea132a3dd0c75c5adf280 | import gevent
import pytest
import requests
import responses
from eth_keys.exceptions import BadSignature, ValidationError
from eth_utils import decode_hex, keccak, to_canonical_address
from raiden.api.v1.encoding import CapabilitiesSchema
from raiden.exceptions import InvalidSignature
from raiden.network.utils import get_average_http_response_time
from raiden.settings import CapabilitiesConfig
from raiden.transfer.utils.secret import decrypt_secret, encrypt_secret
from raiden.utils.capabilities import capconfig_to_dict, capdict_to_config
from raiden.utils.keys import privatekey_to_publickey
from raiden.utils.signer import LocalSigner, Signer, recover
from raiden.utils.typing import UserID
def test_privatekey_to_publickey():
privkey = keccak(b"secret")
pubkey = (
"c283b0507c4ec6903a49fac84a5aead951f3c38b2c72b69da8a70a5bac91e9c"
"705f70c7554b26e82b90d2d1bbbaf711b10c6c8b807077f4070200a8fb4c6b771"
)
assert pubkey == privatekey_to_publickey(privkey).hex()
def test_signer_sign():
privkey = keccak(b"secret") # 0x38e959391dD8598aE80d5d6D114a7822A09d313A
message = b"message"
# generated with Metamask's web3.personal.sign
signature = decode_hex(
"0x1eff8317c59ab169037f5063a5129bb1bab0299fef0b5621d866b07be59e2c0a"
"6a404e88d3360fb58bd13daf577807c2cf9b6b26d80fc929c52e952769a460981c"
)
signer: Signer = LocalSigner(privkey)
assert signer.sign(message) == signature
def test_encrypt_secret():
privkey = keccak(b"secret")
message = b"message"
signer: Signer = LocalSigner(privkey)
signature = signer.sign(message)
encrypted_secret = encrypt_secret(
message, {"user_id": UserID(message.decode()), "displayname": signature.hex()}, 0, 0
)
decrypted_message, amount, payment_id = decrypt_secret(encrypted_secret, privkey)
assert decrypted_message == message
assert amount == 0
assert payment_id == 0
def test_recover():
account = to_canonical_address("0x38e959391dD8598aE80d5d6D114a7822A09d313A")
message = b"message"
# generated with Metamask's web3.personal.sign
signature = decode_hex(
"0x1eff8317c59ab169037f5063a5129bb1bab0299fef0b5621d866b07be59e2c0a"
"6a404e88d3360fb58bd13daf577807c2cf9b6b26d80fc929c52e952769a460981c"
)
assert recover(data=message, signature=signature) == account
@pytest.mark.parametrize(
("signature", "nested_exception"),
[
pytest.param(b"\x00" * 65, BadSignature, id="BadSignature"),
pytest.param(b"bla", ValidationError, id="ValidationError"),
],
)
def test_recover_exception(signature, nested_exception):
with pytest.raises(InvalidSignature) as exc_info:
recover(b"bla", signature)
assert isinstance(exc_info.value.__context__, nested_exception)
def test_get_http_rtt_happy(requests_responses):
"""Ensure get_http_rtt returns the average RTT over the number of samples."""
delay = iter([0.05, 0.05, 0.2])
def response(_):
gevent.sleep(next(delay))
return 200, {}, ""
requests_responses.add_callback(responses.GET, "http://url", callback=response)
result = get_average_http_response_time(url="http://url", method="get", samples=3)
assert 0.1 <= result[1] < 0.11 # exact answer is 0.1, but we have some overhead
def test_get_http_rtt_ignore_failing(requests_responses):
"""Ensure get_http_rtt ignores failing servers."""
# RequestException (e.g. DNS not resolvable, server not reachable)
requests_responses.add(responses.GET, "http://url1", body=requests.RequestException())
assert get_average_http_response_time(url="http://url1", method="get") is None
# Server misconfigured
requests_responses.add(responses.GET, "http://url2", status=404)
assert get_average_http_response_time(url="http://url2", method="get") is None
# Internal server error
requests_responses.add(responses.GET, "http://url3", status=500)
assert get_average_http_response_time(url="http://url3", method="get") is None
def test_deserialize_capabilities():
capabilities_schema = CapabilitiesSchema()
base_url = "mxc://raiden.network/cap"
capstring = f"{base_url}?foo=1&toad=1&bar=max&form=1&agar=1¬true=0&l=one&l=2"
parsed = capabilities_schema.load({"capabilities": capstring})["capabilities"]
assert parsed.get("foo") is True
assert parsed.get("toad") is True
assert parsed.get("bar") == "max"
assert parsed.get("agar") is True
assert parsed.get("nottrue") is False
assert parsed.get("l") == ["one", "2"]
assert not parsed.get("nothing")
assert capabilities_schema.dump({"capabilities": parsed})["capabilities"] == f"{capstring}"
parsed["false"] = False
# Explicit new value changes the serialization format
assert (
capabilities_schema.dump({"capabilities": parsed})["capabilities"] != f"mxc://{capstring}"
)
assert capabilities_schema.load({"capabilities": ""})["capabilities"] == {}
assert capabilities_schema.load({})["capabilities"] == "mxc://"
def test_capconfig_to_dict():
# test method supports adding unknown keys
config = CapabilitiesConfig()
config.foo = True
as_dict = capconfig_to_dict(config)
assert as_dict.get("foo") is True
assert as_dict.get("bar") is None
assert capdict_to_config(as_dict) == config
as_dict["bar"] = True
assert capdict_to_config(as_dict).bar is True # pylint: disable=no-member
|
py | 1a43d151cf7b8fde14038e577d35bb75638ac739 | #!/usr/bin/env python
# -*- coding: utf-8 -*-
# statistics.py: collect statistic data
#
# Copyright (C) 2014 Politecnico di Torino, Italy
# TORSEC group -- http://security.polito.it
#
# Author: Roberto Sassu <[email protected]>
#
# This library is free software; you can redistribute it and/or
# modify it under the terms of the GNU Lesser General Public
# License as published by the Free Software Foundation; either
# version 2.1 of the License, or (at your option) any later version.
#
# This library is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public
# License along with this library. If not, see
# <http://www.gnu.org/licenses/>.
import time
class Timer(object):
start_time = 0
last_get_time = 0
current_time = 0
@classmethod
def start(self):
Timer.start_time = time.time()
Timer.current_time = Timer.start_time
@classmethod
def get_current(self):
return str(round(time.time() - Timer.start_time, 5))
@classmethod
def get_elapsed(self):
Timer.last_get_time = Timer.current_time
Timer.current_time = time.time()
return str(round(Timer.current_time - Timer.last_get_time, 5))
def __del__(cls):
print('Delete Timer object in statistics.py')
cls.start_time = 0
cls.last_get_time = 0
cls.current_time = 0
class Statistics(object):
global_stat = dict(time_parse_ima_list=0, time_exec_query=0,
time_build_graph=0, time_load_time_analysis=0,
time_run_time_analysis=0, time_total=0,
n_meas_code=0, n_meas_code_known=0,
n_meas_struct_data=0, n_meas_struct_data_known=0,
n_meas_unstruct_data=0, n_meas_violation=0,
n_tot_meas=0)
@classmethod
def inc_arch_stat(self, arch=None):
Statistics.arch_stat[arch] += 1
current_arch = Statistics.global_stat['distro_arch']
if (arch != current_arch and
Statistics.arch_stat[arch] >
Statistics.arch_stat[current_arch]):
Statistics.global_stat['distro_arch'] = arch
@classmethod
def inc_stat(self, stat_key=None, stat_value=None):
Statistics.global_stat[stat_key] += 1
@classmethod
def dec_stat(self, stat_key=None, stat_value=None):
Statistics.global_stat[stat_key] -= 1
@classmethod
def set_stat(self, stat_key=None, stat_value=None):
Statistics.global_stat[stat_key] = stat_value
@classmethod
def get_stat(self, stat_key=None):
return Statistics.global_stat[stat_key]
@classmethod
def start_timer(self):
Timer.start()
@classmethod
def set_elapsed_time(self, stat_key=None):
Statistics.global_stat[stat_key] = Timer.get_elapsed()
@classmethod
def set_current_time(self, stat_key=None):
Statistics.global_stat[stat_key] = Timer.get_current()
def __init__(self):
return
def __del__(cls):
print('Delete Statistics object in statistics.py')
cls.global_stat = dict(time_parse_ima_list=0, time_exec_query=0,
time_build_graph=0, time_load_time_analysis=0,
time_run_time_analysis=0, time_total=0,
n_meas_code=0, n_meas_code_known=0,
n_meas_struct_data=0,
n_meas_struct_data_known=0,
n_meas_unstruct_data=0, n_meas_violation=0,
n_tot_meas=0)
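# Usage sketch (illustrative, not part of the original module): a typical
# analysis run times each phase against the predefined keys, e.g.
#   Statistics.start_timer()
#   # ... parse the IMA measurement list ...
#   Statistics.set_elapsed_time('time_parse_ima_list')
#   Statistics.inc_stat('n_tot_meas')
#   Statistics.set_current_time('time_total')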
|
py | 1a43d18dbe9f39617a105a7d2664613f0cf37e82 | from typing import Any, Optional, Union
from chia.types.blockchain_format.sized_bytes import bytes32
import click
async def show_async(
rpc_port: Optional[int],
state: bool,
show_connections: bool,
exit_node: bool,
add_connection: str,
remove_connection: str,
block_header_hash_by_height: str,
block_by_header_hash: str,
) -> None:
import aiohttp
import time
import traceback
from time import localtime, struct_time
from typing import List, Optional
from chia.consensus.block_record import BlockRecord
from chia.rpc.full_node_rpc_client import FullNodeRpcClient
from chia.server.outbound_message import NodeType
from chia.types.full_block import FullBlock
from chia.util.bech32m import encode_puzzle_hash
from chia.util.byte_types import hexstr_to_bytes
from chia.util.config import load_config
from chia.util.default_root import DEFAULT_ROOT_PATH
from chia.util.ints import uint16
from chia.util.misc import format_bytes
try:
config = load_config(DEFAULT_ROOT_PATH, "config.yaml")
self_hostname = config["self_hostname"]
if rpc_port is None:
rpc_port = config["full_node"]["rpc_port"]
client = await FullNodeRpcClient.create(self_hostname, uint16(rpc_port), DEFAULT_ROOT_PATH, config)
if state:
blockchain_state = await client.get_blockchain_state()
if blockchain_state is None:
print("There is no blockchain found yet. Try again shortly")
return None
peak: Optional[BlockRecord] = blockchain_state["peak"]
difficulty = blockchain_state["difficulty"]
sub_slot_iters = blockchain_state["sub_slot_iters"]
synced = blockchain_state["sync"]["synced"]
sync_mode = blockchain_state["sync"]["sync_mode"]
total_iters = peak.total_iters if peak is not None else 0
num_blocks: int = 10
network_name = config["selected_network"]
genesis_challenge = config["farmer"]["network_overrides"]["constants"][network_name]["GENESIS_CHALLENGE"]
full_node_port = config["full_node"]["port"]
full_node_rpc_port = config["full_node"]["rpc_port"]
print(f"Network: {network_name} Port: {full_node_port} Rpc Port: {full_node_rpc_port}")
print(f"Genesis Challenge: {genesis_challenge}")
if synced:
print("Current Blockchain Status: Full Node Synced")
print("\nPeak: Hash:", peak.header_hash if peak is not None else "")
elif peak is not None and sync_mode:
sync_max_block = blockchain_state["sync"]["sync_tip_height"]
sync_current_block = blockchain_state["sync"]["sync_progress_height"]
print(f"Current Blockchain Status: Syncing {sync_current_block}/{sync_max_block}.")
print("Peak: Hash:", peak.header_hash if peak is not None else "")
elif peak is not None:
print(f"Current Blockchain Status: Not Synced. Peak height: {peak.height}")
else:
print("\nSearching for an initial chain\n")
print("You may be able to expedite with 'chia show -a host:port' using a known node.\n")
if peak is not None:
if peak.is_transaction_block:
peak_time = peak.timestamp
else:
peak_hash = peak.header_hash
curr = await client.get_block_record(peak_hash)
while curr is not None and not curr.is_transaction_block:
curr = await client.get_block_record(curr.prev_hash)
peak_time = curr.timestamp
peak_time_struct = struct_time(localtime(peak_time))
print(
" Time:",
f"{time.strftime('%a %b %d %Y %T %Z', peak_time_struct)}",
f" Height: {peak.height:>10}\n",
)
print("Estimated network space: ", end="")
print(format_bytes(blockchain_state["space"]))
print(f"Current difficulty: {difficulty}")
print(f"Current VDF sub_slot_iters: {sub_slot_iters}")
print("Total iterations since the start of the blockchain:", total_iters)
print("")
print(" Height: | Hash:")
added_blocks: List[BlockRecord] = []
curr = await client.get_block_record(peak.header_hash)
while curr is not None and len(added_blocks) < num_blocks and curr.height > 0:
added_blocks.append(curr)
curr = await client.get_block_record(curr.prev_hash)
for b in added_blocks:
print(f"{b.height:>9} | {b.header_hash}")
else:
print("Blockchain has no blocks yet")
# if called together with show_connections, leave a blank line
if show_connections:
print("")
if show_connections:
connections = await client.get_connections()
print("Connections:")
print(
"Type IP Ports NodeID Last Connect"
+ " MiB Up|Dwn"
)
for con in connections:
last_connect_tuple = struct_time(localtime(con["last_message_time"]))
last_connect = time.strftime("%b %d %T", last_connect_tuple)
mb_down = con["bytes_read"] / (1024 * 1024)
mb_up = con["bytes_written"] / (1024 * 1024)
host = con["peer_host"]
# Strip IPv6 brackets
host = host.strip("[]")
# Nodetype length is 9 because INTRODUCER will be deprecated
if NodeType(con["type"]) is NodeType.FULL_NODE:
peak_height = con["peak_height"]
connection_peak_hash = con["peak_hash"]
if connection_peak_hash is None:
connection_peak_hash = "No Info"
else:
if connection_peak_hash.startswith(("0x", "0X")):
connection_peak_hash = connection_peak_hash[2:]
connection_peak_hash = f"{connection_peak_hash[:8]}..."
if peak_height is None:
peak_height = 0
con_str = (
f"{NodeType(con['type']).name:9} {host:38} "
f"{con['peer_port']:5}/{con['peer_server_port']:<5}"
f" {con['node_id'].hex()[:8]}... "
f"{last_connect} "
f"{mb_up:7.1f}|{mb_down:<7.1f}"
f"\n "
f"-SB Height: {peak_height:8.0f} -Hash: {connection_peak_hash}"
)
else:
con_str = (
f"{NodeType(con['type']).name:9} {host:38} "
f"{con['peer_port']:5}/{con['peer_server_port']:<5}"
f" {con['node_id'].hex()[:8]}... "
f"{last_connect} "
f"{mb_up:7.1f}|{mb_down:<7.1f}"
)
print(con_str)
# if called together with state, leave a blank line
if state:
print("")
if exit_node:
node_stop = await client.stop_node()
print(node_stop, "Node stopped")
if add_connection:
if ":" not in add_connection:
print("Enter a valid IP and port in the following format: 10.5.4.3:8000")
else:
ip, port = (
":".join(add_connection.split(":")[:-1]),
add_connection.split(":")[-1],
)
print(f"Connecting to {ip}, {port}")
try:
await client.open_connection(ip, int(port))
except Exception:
print(f"Failed to connect to {ip}:{port}")
if remove_connection:
result_txt = ""
if len(remove_connection) != 8:
result_txt = "Invalid NodeID. Do not include '.'"
else:
connections = await client.get_connections()
for con in connections:
if remove_connection == con["node_id"].hex()[:8]:
print("Attempting to disconnect", "NodeID", remove_connection)
try:
await client.close_connection(con["node_id"])
except Exception:
result_txt = f"Failed to disconnect NodeID {remove_connection}"
else:
result_txt = f"NodeID {remove_connection}... {NodeType(con['type']).name} "
f"{con['peer_host']} disconnected"
elif result_txt == "":
result_txt = f"NodeID {remove_connection}... not found"
print(result_txt)
if block_header_hash_by_height != "":
block_header = await client.get_block_record_by_height(block_header_hash_by_height)
if block_header is not None:
print(f"Header hash of block {block_header_hash_by_height}: " f"{block_header.header_hash.hex()}")
else:
print("Block height", block_header_hash_by_height, "not found")
if block_by_header_hash != "":
block: Optional[BlockRecord] = await client.get_block_record(hexstr_to_bytes(block_by_header_hash))
full_block: Optional[FullBlock] = await client.get_block(hexstr_to_bytes(block_by_header_hash))
# Would like to have a verbose flag for this
if block is not None:
assert full_block is not None
prev_b = await client.get_block_record(block.prev_hash)
if prev_b is not None:
difficulty = block.weight - prev_b.weight
else:
difficulty = block.weight
if block.is_transaction_block:
assert full_block.transactions_info is not None
block_time = struct_time(
localtime(
full_block.foliage_transaction_block.timestamp
if full_block.foliage_transaction_block
else None
)
)
block_time_string = time.strftime("%a %b %d %Y %T %Z", block_time)
cost = str(full_block.transactions_info.cost)
tx_filter_hash: Union[str, bytes32] = "Not a transaction block"
if full_block.foliage_transaction_block:
tx_filter_hash = full_block.foliage_transaction_block.filter_hash
fees: Any = block.fees
else:
block_time_string = "Not a transaction block"
cost = "Not a transaction block"
tx_filter_hash = "Not a transaction block"
fees = "Not a transaction block"
address_prefix = config["network_overrides"]["config"][config["selected_network"]]["address_prefix"]
farmer_address = encode_puzzle_hash(block.farmer_puzzle_hash, address_prefix)
pool_address = encode_puzzle_hash(block.pool_puzzle_hash, address_prefix)
pool_pk = (
full_block.reward_chain_block.proof_of_space.pool_public_key
if full_block.reward_chain_block.proof_of_space.pool_public_key is not None
else "Pay to pool puzzle hash"
)
print(
f"Block Height {block.height}\n"
f"Header Hash 0x{block.header_hash.hex()}\n"
f"Timestamp {block_time_string}\n"
f"Weight {block.weight}\n"
f"Previous Block 0x{block.prev_hash.hex()}\n"
f"Difficulty {difficulty}\n"
f"Sub-slot iters {block.sub_slot_iters}\n"
f"Cost {cost}\n"
f"Total VDF Iterations {block.total_iters}\n"
f"Is a Transaction Block?{block.is_transaction_block}\n"
f"Deficit {block.deficit}\n"
f"PoSpace 'k' Size {full_block.reward_chain_block.proof_of_space.size}\n"
f"Plot Public Key 0x{full_block.reward_chain_block.proof_of_space.plot_public_key}\n"
f"Pool Public Key {pool_pk}\n"
f"Tx Filter Hash {tx_filter_hash}\n"
f"Farmer Address {farmer_address}\n"
f"Pool Address {pool_address}\n"
f"Fees Amount {fees}\n"
)
else:
print("Block with header hash", block_header_hash_by_height, "not found")
except Exception as e:
if isinstance(e, aiohttp.ClientConnectorError):
print(f"Connection error. Check if full node rpc is running at {rpc_port}")
print("This is normal if full node is still starting up")
else:
tb = traceback.format_exc()
print(f"Exception from 'show' {tb}")
client.close()
await client.await_closed()
@click.command("show", short_help="Show node information")
@click.option(
"-p",
"--rpc-port",
help=(
"Set the port where the Full Node is hosting the RPC interface. "
"See the rpc_port under full_node in config.yaml"
),
type=int,
default=None,
)
@click.option(
"-wp",
"--wallet-rpc-port",
help="Set the port where the Wallet is hosting the RPC interface. See the rpc_port under wallet in config.yaml",
type=int,
default=None,
)
@click.option("-s", "--state", help="Show the current state of the blockchain", is_flag=True, type=bool, default=False)
@click.option(
"-c", "--connections", help="List nodes connected to this Full Node", is_flag=True, type=bool, default=False
)
@click.option("-e", "--exit-node", help="Shut down the running Full Node", is_flag=True, default=False)
@click.option("-a", "--add-connection", help="Connect to another Full Node by ip:port", type=str, default="")
@click.option(
"-r", "--remove-connection", help="Remove a Node by the first 8 characters of NodeID", type=str, default=""
)
@click.option(
"-bh", "--block-header-hash-by-height", help="Look up a block header hash by block height", type=str, default=""
)
@click.option("-b", "--block-by-header-hash", help="Look up a block by block header hash", type=str, default="")
def show_cmd(
rpc_port: Optional[int],
wallet_rpc_port: Optional[int],
state: bool,
connections: bool,
exit_node: bool,
add_connection: str,
remove_connection: str,
block_header_hash_by_height: str,
block_by_header_hash: str,
) -> None:
import asyncio
asyncio.run(
show_async(
rpc_port,
state,
connections,
exit_node,
add_connection,
remove_connection,
block_header_hash_by_height,
block_by_header_hash,
)
)
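# Example invocations (illustrative; the address shown is hypothetical):
#   chia show -s                 # summary of blockchain state
#   chia show -c                 # list peers connected to this full node
#   chia show -a 10.0.0.5:8444   # connect to a known full node by ip:port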
|
py | 1a43d19689775a5251c6a3546b90eef44536808a | # Copyright 2016 Canonical Ltd
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# Note that the unit_tests/__init__.py has the following lines to stop
# side effects from the imports from charm helpers.
# sys.path.append('./lib')
# mock out some charmhelpers libraries as they have apt install side effects
# sys.modules['charmhelpers.contrib.openstack.utils'] = mock.MagicMock()
# sys.modules['charmhelpers.contrib.network.ip'] = mock.MagicMock()
from __future__ import absolute_import
import unit_tests.utils as utils
import charms_openstack.sdn.ovs as ovs
class TestCharmOpenStackSDNOVS(utils.BaseTestCase):
def test_set_manager(self):
self.patch_object(ovs, 'subprocess')
ovs.set_manager('myurl')
self.subprocess.check_call.assert_called_once_with(
['ovs-vsctl', 'set-manager', 'myurl'])
def test__get_ovstbl(self):
self.patch_object(ovs, 'subprocess')
self.subprocess.check_output.return_value = 'ovstbl'
self.assertEqual(ovs._get_ovstbl(), 'ovstbl')
self.subprocess.check_output.assert_called_once_with(
['ovs-vsctl', 'get', 'Open_vSwitch', '.', '_uuid'])
def test_set_config(self):
self.patch_object(ovs, 'subprocess')
self.patch_object(ovs, '_get_ovstbl')
self._get_ovstbl.return_value = 'a_uuid'
ovs.set_config('mykey', 'myvalue', 'mytable')
self.subprocess.check_call.assert_called_once_with(
['ovs-vsctl', 'set', 'Open_vSwitch', 'a_uuid',
'mytable:mykey=myvalue'])
|
py | 1a43d1d64a35b244c8cf4b8f4c8a4f86b7aeec00 | #
# PySNMP MIB module CAMBIUM-PTP600-MIB (http://snmplabs.com/pysmi)
# ASN.1 source file:///Users/davwang4/Dev/mibs.snmplabs.com/asn1/CAMBIUM-PTP600-MIB
# Produced by pysmi-0.3.4 at Wed May 1 11:46:48 2019
# On host DAVWANG4-M-1475 platform Darwin version 18.5.0 by user davwang4
# Using Python version 3.7.3 (default, Mar 27 2019, 09:23:15)
#
Integer, ObjectIdentifier, OctetString = mibBuilder.importSymbols("ASN1", "Integer", "ObjectIdentifier", "OctetString")
NamedValues, = mibBuilder.importSymbols("ASN1-ENUMERATION", "NamedValues")
ConstraintsUnion, SingleValueConstraint, ValueSizeConstraint, ConstraintsIntersection, ValueRangeConstraint = mibBuilder.importSymbols("ASN1-REFINEMENT", "ConstraintsUnion", "SingleValueConstraint", "ValueSizeConstraint", "ConstraintsIntersection", "ValueRangeConstraint")
NotificationGroup, ObjectGroup, ModuleCompliance = mibBuilder.importSymbols("SNMPv2-CONF", "NotificationGroup", "ObjectGroup", "ModuleCompliance")
MibIdentifier, Counter64, iso, Gauge32, ModuleIdentity, enterprises, Counter32, ObjectIdentity, Integer32, Bits, Unsigned32, NotificationType, TimeTicks, MibScalar, MibTable, MibTableRow, MibTableColumn, IpAddress = mibBuilder.importSymbols("SNMPv2-SMI", "MibIdentifier", "Counter64", "iso", "Gauge32", "ModuleIdentity", "enterprises", "Counter32", "ObjectIdentity", "Integer32", "Bits", "Unsigned32", "NotificationType", "TimeTicks", "MibScalar", "MibTable", "MibTableRow", "MibTableColumn", "IpAddress")
TextualConvention, DisplayString = mibBuilder.importSymbols("SNMPv2-TC", "TextualConvention", "DisplayString")
cambium = ModuleIdentity((1, 3, 6, 1, 4, 1, 17713))
cambium.setRevisions(('2012-12-17 13:30', '2012-05-01 11:58', '2012-02-13 12:38', '2011-11-08 13:47', '2010-12-07 13:28', '2010-11-02 13:33', '2010-07-16 15:41', '2010-04-23 10:07', '2010-02-19 18:55', '2009-07-10 15:20', '2009-02-27 17:11', '2008-12-15 12:19', '2008-06-30 10:16', '2008-02-29 09:42', '2007-07-31 18:39', '2006-11-23 14:36', '2006-07-24 10:08',))
if getattr(mibBuilder, 'version', (0, 0, 0)) > (4, 4, 0):
if mibBuilder.loadTexts: cambium.setRevisionsDescriptions(('PTP600-10-04: Added ethernetBridgingStatus, ethernetBridgingAvailability. Added ethernetBridgingStatusAlarm, ethernetBridgingStatusAlarmTrap.', 'PTP600-10-03: make linkName & groupID read-write.', 'PTP600-10-01: Updated ORGANIZATION and CONTACT-INFO.', 'PTP600-10-00: Added new SNTP attributes, replace syslogLocalLog with syslogState. Add L2/L3 QoS attributes. Added securityLevel. Renamed vLANPriorityQueueMapping with ethernetPriorityQueueMapping. Added syslogClient and secureModeAlarm traps.', 'PTP600-09-10: Added searchState, accessMethod and groupID.', 'PTP600-09-02: Added latitude, longitude, altitude.', 'PTP600-09-01: Changed root OID. Renamed ethernet & fiber attributes & traps. Replaced vLANManagementVIDValidation with managementMode. Added linkName, siteName, hTTPSAccessEnabled, tDDSynchronizationMode. Added syslog attributes & traps. Added SNMPTrapTable for dual trap receivers.', 'PTP600-08-50: Security enhancements.', 'PTP600-08-04: Added SNMPv3 support, DFS Table, Automatic Tx Power Control. BIT STRING types now OCTET STRING.', 'PTP600-08-03: Added VLAN Priority Table, more read-only attributes, plus alarm tidy-up.', 'PTP600-08-01: Improve TDD Synchronization status and alarms.', 'PTP600-08-00: Support 4.8 & 4.9 GHz variant & 20 MHz bandwidth, Link Symmetry, TDD Synchronization.', 'PTP600-07-00: Added IEEE 802.1q & 802.1ad VLAN support.', 'PTP600-06-00: Added OOB, HTTP & telnet access controls.', 'PTP600-05-00: Added groups, attributes and notifications for diagnostic alarms.', 'PTP600-04-00: Converted to SMIv2.', 'PTP600-03-01',))
if mibBuilder.loadTexts: cambium.setLastUpdated('201212171330Z')
if mibBuilder.loadTexts: cambium.setOrganization('Cambium Networks Ltd.')
if mibBuilder.loadTexts: cambium.setContactInfo('Post: Simon Whitaker Cambium Networks Ltd. Unit B2, Linhay Business Park, Eastern Road, Ashburton, Devon. TQ13 7UP United Kingdom Phone: +44 (0) 1364 655 500 Email: [email protected]')
if mibBuilder.loadTexts: cambium.setDescription('MIB for 300Mbps non-line-of-sight (NLOS) wireless ethernet bridge.')
ptp = MibIdentifier((1, 3, 6, 1, 4, 1, 17713, 1))
ptmp = MibIdentifier((1, 3, 6, 1, 4, 1, 17713, 2))
ptp600 = MibIdentifier((1, 3, 6, 1, 4, 1, 17713, 6))
dfs = MibIdentifier((1, 3, 6, 1, 4, 1, 17713, 6, 3))
bridge = MibIdentifier((1, 3, 6, 1, 4, 1, 17713, 6, 4))
configuration = MibIdentifier((1, 3, 6, 1, 4, 1, 17713, 6, 5))
ethernet = MibIdentifier((1, 3, 6, 1, 4, 1, 17713, 6, 6))
telecom = MibIdentifier((1, 3, 6, 1, 4, 1, 17713, 6, 7))
licence = MibIdentifier((1, 3, 6, 1, 4, 1, 17713, 6, 8))
management = MibIdentifier((1, 3, 6, 1, 4, 1, 17713, 6, 9))
phyControl = MibIdentifier((1, 3, 6, 1, 4, 1, 17713, 6, 10))
phyStatus = MibIdentifier((1, 3, 6, 1, 4, 1, 17713, 6, 12))
alarms = MibIdentifier((1, 3, 6, 1, 4, 1, 17713, 6, 13))
smtp = MibIdentifier((1, 3, 6, 1, 4, 1, 17713, 6, 15))
snmpControl = MibIdentifier((1, 3, 6, 1, 4, 1, 17713, 6, 16))
sntp = MibIdentifier((1, 3, 6, 1, 4, 1, 17713, 6, 17))
reset = MibIdentifier((1, 3, 6, 1, 4, 1, 17713, 6, 18))
versions = MibIdentifier((1, 3, 6, 1, 4, 1, 17713, 6, 19))
pubStats = MibIdentifier((1, 3, 6, 1, 4, 1, 17713, 6, 20))
encryption = MibIdentifier((1, 3, 6, 1, 4, 1, 17713, 6, 22))
tDDControl = MibIdentifier((1, 3, 6, 1, 4, 1, 17713, 6, 23))
syslogControl = MibIdentifier((1, 3, 6, 1, 4, 1, 17713, 6, 24))
supplementary = MibIdentifier((1, 3, 6, 1, 4, 1, 17713, 6, 96))
ptpGroups = MibIdentifier((1, 3, 6, 1, 4, 1, 17713, 6, 98))
ptpTraps = MibIdentifier((1, 3, 6, 1, 4, 1, 17713, 6, 99))
ptpTrapPrefix = MibIdentifier((1, 3, 6, 1, 4, 1, 17713, 6, 99, 0))
ptpCompliance = ModuleCompliance((1, 3, 6, 1, 4, 1, 17713, 6, 97)).setObjects(("CAMBIUM-PTP600-MIB", "dfsGroup"), ("CAMBIUM-PTP600-MIB", "bridgeGroup"), ("CAMBIUM-PTP600-MIB", "configurationGroup"), ("CAMBIUM-PTP600-MIB", "ethernetGroup"), ("CAMBIUM-PTP600-MIB", "telecomGroup"), ("CAMBIUM-PTP600-MIB", "licenceGroup"), ("CAMBIUM-PTP600-MIB", "managementGroup"), ("CAMBIUM-PTP600-MIB", "phyControlGroup"), ("CAMBIUM-PTP600-MIB", "phyStatusGroup"), ("CAMBIUM-PTP600-MIB", "alarmsGroup"), ("CAMBIUM-PTP600-MIB", "smtpGroup"), ("CAMBIUM-PTP600-MIB", "snmpControlGroup"), ("CAMBIUM-PTP600-MIB", "sntpGroup"), ("CAMBIUM-PTP600-MIB", "resetGroup"), ("CAMBIUM-PTP600-MIB", "versionsGroup"), ("CAMBIUM-PTP600-MIB", "pubStatsGroup"), ("CAMBIUM-PTP600-MIB", "encryptionGroup"), ("CAMBIUM-PTP600-MIB", "tDDControlGroup"), ("CAMBIUM-PTP600-MIB", "syslogControlGroup"), ("CAMBIUM-PTP600-MIB", "supplementaryGroup"), ("CAMBIUM-PTP600-MIB", "notificationsGroup"))
if getattr(mibBuilder, 'version', (0, 0, 0)) > (4, 4, 0):
ptpCompliance = ptpCompliance.setStatus('current')
if mibBuilder.loadTexts: ptpCompliance.setDescription('The compliance statement for the Cambium PTP MIB')
dfsGroup = ObjectGroup((1, 3, 6, 1, 4, 1, 17713, 6, 98, 3)).setObjects(("CAMBIUM-PTP600-MIB", "dfsTableNumber"), ("CAMBIUM-PTP600-MIB", "dfsMeans"), ("CAMBIUM-PTP600-MIB", "dfsNineNinePointNinePercentiles"), ("CAMBIUM-PTP600-MIB", "dfsPeaks"))
if getattr(mibBuilder, 'version', (0, 0, 0)) > (4, 4, 0):
dfsGroup = dfsGroup.setStatus('current')
if mibBuilder.loadTexts: dfsGroup.setDescription('The dfs object group.')
bridgeGroup = ObjectGroup((1, 3, 6, 1, 4, 1, 17713, 6, 98, 4)).setObjects(("CAMBIUM-PTP600-MIB", "localPacketFiltering"))
if getattr(mibBuilder, 'version', (0, 0, 0)) > (4, 4, 0):
bridgeGroup = bridgeGroup.setStatus('current')
if mibBuilder.loadTexts: bridgeGroup.setDescription('The bridge object group.')
configurationGroup = ObjectGroup((1, 3, 6, 1, 4, 1, 17713, 6, 98, 5)).setObjects(("CAMBIUM-PTP600-MIB", "iPAddress"), ("CAMBIUM-PTP600-MIB", "subnetMask"), ("CAMBIUM-PTP600-MIB", "gatewayIPAddress"), ("CAMBIUM-PTP600-MIB", "targetMACAddress"), ("CAMBIUM-PTP600-MIB", "masterSlaveMode"), ("CAMBIUM-PTP600-MIB", "maximumTransmitPower"), ("CAMBIUM-PTP600-MIB", "antennaGain"), ("CAMBIUM-PTP600-MIB", "cableLoss"), ("CAMBIUM-PTP600-MIB", "eIRP"), ("CAMBIUM-PTP600-MIB", "ethernetCappedMaxWirelessSpeed"), ("CAMBIUM-PTP600-MIB", "channelBandwidth"), ("CAMBIUM-PTP600-MIB", "remoteIPAddress"), ("CAMBIUM-PTP600-MIB", "linkName"), ("CAMBIUM-PTP600-MIB", "siteName"), ("CAMBIUM-PTP600-MIB", "accessMethod"), ("CAMBIUM-PTP600-MIB", "groupID"))
if getattr(mibBuilder, 'version', (0, 0, 0)) > (4, 4, 0):
configurationGroup = configurationGroup.setStatus('current')
if mibBuilder.loadTexts: configurationGroup.setDescription('The configuration object group.')
ethernetGroup = ObjectGroup((1, 3, 6, 1, 4, 1, 17713, 6, 98, 6)).setObjects(("CAMBIUM-PTP600-MIB", "dataPortAutoNegotiation"), ("CAMBIUM-PTP600-MIB", "dataPortAutoNegAdvertisement"), ("CAMBIUM-PTP600-MIB", "dataPortAutoMdix"), ("CAMBIUM-PTP600-MIB", "dataPortStatus"), ("CAMBIUM-PTP600-MIB", "dataPortSpeedAndDuplex"), ("CAMBIUM-PTP600-MIB", "dataPortWirelessDownAlert"), ("CAMBIUM-PTP600-MIB", "useVLANForManagementInterfaces"), ("CAMBIUM-PTP600-MIB", "vLANManagementPriority"), ("CAMBIUM-PTP600-MIB", "vLANManagementVID"), ("CAMBIUM-PTP600-MIB", "managementPortStatus"), ("CAMBIUM-PTP600-MIB", "managementPortSpeedAndDuplex"), ("CAMBIUM-PTP600-MIB", "ethernetPriorityTableNumber"), ("CAMBIUM-PTP600-MIB", "l2CPPriorityTableNumber"), ("CAMBIUM-PTP600-MIB", "iPDSCPPriorityTableNumber"), ("CAMBIUM-PTP600-MIB", "mPLSTCPriorityTableNumber"), ("CAMBIUM-PTP600-MIB", "managementMode"), ("CAMBIUM-PTP600-MIB", "managementPortWirelessDownAlert"), ("CAMBIUM-PTP600-MIB", "qOSPriorityScheme"), ("CAMBIUM-PTP600-MIB", "unknownNetworkPriorityQueueMapping"), ("CAMBIUM-PTP600-MIB", "dSCPManagementPriority"), ("CAMBIUM-PTP600-MIB", "ethernetBridgingStatus"), ("CAMBIUM-PTP600-MIB", "ethernetPriorityQueueMapping"), ("CAMBIUM-PTP600-MIB", "l2CPPriorityQueueMapping"), ("CAMBIUM-PTP600-MIB", "iPDSCPPriorityQueueMapping"), ("CAMBIUM-PTP600-MIB", "mPLSTCPriorityQueueMapping"))
if getattr(mibBuilder, 'version', (0, 0, 0)) > (4, 4, 0):
ethernetGroup = ethernetGroup.setStatus('current')
if mibBuilder.loadTexts: ethernetGroup.setDescription('The ethernet object group.')
telecomGroup = ObjectGroup((1, 3, 6, 1, 4, 1, 17713, 6, 98, 7)).setObjects(("CAMBIUM-PTP600-MIB", "telecomsInterface"), ("CAMBIUM-PTP600-MIB", "telecomsChannelAStatus"), ("CAMBIUM-PTP600-MIB", "telecomsChannelBStatus"), ("CAMBIUM-PTP600-MIB", "channelALineCode"), ("CAMBIUM-PTP600-MIB", "channelBLineCode"), ("CAMBIUM-PTP600-MIB", "channelACableLength"), ("CAMBIUM-PTP600-MIB", "channelBCableLength"), ("CAMBIUM-PTP600-MIB", "channelALoopback"), ("CAMBIUM-PTP600-MIB", "channelBLoopback"), ("CAMBIUM-PTP600-MIB", "telecomsChannelSelection"))
if getattr(mibBuilder, 'version', (0, 0, 0)) > (4, 4, 0):
telecomGroup = telecomGroup.setStatus('current')
if mibBuilder.loadTexts: telecomGroup.setDescription('The telecom object group.')
licenceGroup = ObjectGroup((1, 3, 6, 1, 4, 1, 17713, 6, 98, 8)).setObjects(("CAMBIUM-PTP600-MIB", "regionCode"), ("CAMBIUM-PTP600-MIB", "productVariant"), ("CAMBIUM-PTP600-MIB", "productName"), ("CAMBIUM-PTP600-MIB", "ethernetFiberSupport"), ("CAMBIUM-PTP600-MIB", "frequencyVariant"), ("CAMBIUM-PTP600-MIB", "bandwidthVariant"), ("CAMBIUM-PTP600-MIB", "constantPowerSpectralDensity"), ("CAMBIUM-PTP600-MIB", "sNMPv3Enable"), ("CAMBIUM-PTP600-MIB", "hAZLOCConfiguration"), ("CAMBIUM-PTP600-MIB", "securityLevel"))
if getattr(mibBuilder, 'version', (0, 0, 0)) > (4, 4, 0):
licenceGroup = licenceGroup.setStatus('current')
if mibBuilder.loadTexts: licenceGroup.setDescription('The licence object group.')
managementGroup = ObjectGroup((1, 3, 6, 1, 4, 1, 17713, 6, 98, 9)).setObjects(("CAMBIUM-PTP600-MIB", "targetRange"), ("CAMBIUM-PTP600-MIB", "rangingMode"), ("CAMBIUM-PTP600-MIB", "installStatus"), ("CAMBIUM-PTP600-MIB", "installArmState"), ("CAMBIUM-PTP600-MIB", "tFTPServerIPAddress"), ("CAMBIUM-PTP600-MIB", "tFTPServerPortNumber"), ("CAMBIUM-PTP600-MIB", "tFTPSoftwareUpgradeFileName"), ("CAMBIUM-PTP600-MIB", "tFTPStartSoftwareUpgrade"), ("CAMBIUM-PTP600-MIB", "tFTPSoftwareUpgradeStatus"), ("CAMBIUM-PTP600-MIB", "tFTPSoftwareUpgradeStatusText"), ("CAMBIUM-PTP600-MIB", "tFTPSoftwareUpgradeStatusAdditionalText"), ("CAMBIUM-PTP600-MIB", "hTTPAccessEnabled"), ("CAMBIUM-PTP600-MIB", "telnetAccessEnabled"), ("CAMBIUM-PTP600-MIB", "hTTPPortNumber"), ("CAMBIUM-PTP600-MIB", "hTTPSPortNumber"), ("CAMBIUM-PTP600-MIB", "telnetPortNumber"), ("CAMBIUM-PTP600-MIB", "hTTPSAccessEnabled"))
if getattr(mibBuilder, 'version', (0, 0, 0)) > (4, 4, 0):
managementGroup = managementGroup.setStatus('current')
if mibBuilder.loadTexts: managementGroup.setDescription('The management object group.')
phyControlGroup = ObjectGroup((1, 3, 6, 1, 4, 1, 17713, 6, 98, 10)).setObjects(("CAMBIUM-PTP600-MIB", "linkSymmetry"), ("CAMBIUM-PTP600-MIB", "linkModeOptimisation"), ("CAMBIUM-PTP600-MIB", "userConfiguredMaxModulationMode"), ("CAMBIUM-PTP600-MIB", "remoteMaximumTransmitPower"), ("CAMBIUM-PTP600-MIB", "txColorCode"), ("CAMBIUM-PTP600-MIB", "rxColorCode"), ("CAMBIUM-PTP600-MIB", "automaticTxPowerControl"), ("CAMBIUM-PTP600-MIB", "remoteRxTargetPower"))
if getattr(mibBuilder, 'version', (0, 0, 0)) > (4, 4, 0):
phyControlGroup = phyControlGroup.setStatus('current')
if mibBuilder.loadTexts: phyControlGroup.setDescription('The phyControl object group.')
phyStatusGroup = ObjectGroup((1, 3, 6, 1, 4, 1, 17713, 6, 98, 12)).setObjects(("CAMBIUM-PTP600-MIB", "receivePower"), ("CAMBIUM-PTP600-MIB", "vectorError"), ("CAMBIUM-PTP600-MIB", "transmitPower"), ("CAMBIUM-PTP600-MIB", "range"), ("CAMBIUM-PTP600-MIB", "linkLoss"), ("CAMBIUM-PTP600-MIB", "receiveChannel"), ("CAMBIUM-PTP600-MIB", "transmitChannel"), ("CAMBIUM-PTP600-MIB", "receiveModulationMode"), ("CAMBIUM-PTP600-MIB", "transmitModulationMode"), ("CAMBIUM-PTP600-MIB", "receiveFreqMHz"), ("CAMBIUM-PTP600-MIB", "transmitFreqMHz"), ("CAMBIUM-PTP600-MIB", "signalStrengthRatio"), ("CAMBIUM-PTP600-MIB", "receiveFreqKHz"), ("CAMBIUM-PTP600-MIB", "transmitFreqKHz"), ("CAMBIUM-PTP600-MIB", "searchState"), ("CAMBIUM-PTP600-MIB", "rawReceivePower"))
if getattr(mibBuilder, 'version', (0, 0, 0)) > (4, 4, 0):
phyStatusGroup = phyStatusGroup.setStatus('current')
if mibBuilder.loadTexts: phyStatusGroup.setDescription('The phyStatus object group.')
alarmsGroup = ObjectGroup((1, 3, 6, 1, 4, 1, 17713, 6, 98, 13)).setObjects(("CAMBIUM-PTP600-MIB", "unitOutOfCalibration"), ("CAMBIUM-PTP600-MIB", "encryptionEnabledMismatch"), ("CAMBIUM-PTP600-MIB", "incompatibleRegionCodes"), ("CAMBIUM-PTP600-MIB", "noWirelessChannelAvailable"), ("CAMBIUM-PTP600-MIB", "wirelessLinkDisabledWarning"), ("CAMBIUM-PTP600-MIB", "dataPortDisabledWarning"), ("CAMBIUM-PTP600-MIB", "dataPortFiberStatus"), ("CAMBIUM-PTP600-MIB", "dataPortConfigurationMismatch"), ("CAMBIUM-PTP600-MIB", "incompatibleMasterAndSlave"), ("CAMBIUM-PTP600-MIB", "tDDSynchronizationStatus"), ("CAMBIUM-PTP600-MIB", "managementPortDisabledWarning"), ("CAMBIUM-PTP600-MIB", "tDDSynchronizationAlarm"), ("CAMBIUM-PTP600-MIB", "linkModeOptimizationMismatch"), ("CAMBIUM-PTP600-MIB", "managementPortConfigurationMismatch"), ("CAMBIUM-PTP600-MIB", "secureModeAlarm"), ("CAMBIUM-PTP600-MIB", "ethernetBridgingStatusAlarm"))
if getattr(mibBuilder, 'version', (0, 0, 0)) > (4, 4, 0):
alarmsGroup = alarmsGroup.setStatus('current')
if mibBuilder.loadTexts: alarmsGroup.setDescription('The alarms object group.')
smtpGroup = ObjectGroup((1, 3, 6, 1, 4, 1, 17713, 6, 98, 15)).setObjects(("CAMBIUM-PTP600-MIB", "sMTPEmailAlert"), ("CAMBIUM-PTP600-MIB", "sMTPServerIPAddress"), ("CAMBIUM-PTP600-MIB", "sMTPServerPortNumber"), ("CAMBIUM-PTP600-MIB", "sMTPSourceEmailAddress"), ("CAMBIUM-PTP600-MIB", "sMTPDestinationEmailAddress"), ("CAMBIUM-PTP600-MIB", "sMTPEnabledMessages"))
if getattr(mibBuilder, 'version', (0, 0, 0)) > (4, 4, 0):
smtpGroup = smtpGroup.setStatus('current')
if mibBuilder.loadTexts: smtpGroup.setDescription('The smtp object group.')
snmpControlGroup = ObjectGroup((1, 3, 6, 1, 4, 1, 17713, 6, 98, 16)).setObjects(("CAMBIUM-PTP600-MIB", "sNMPPortNumber"), ("CAMBIUM-PTP600-MIB", "sNMPCommunityString"), ("CAMBIUM-PTP600-MIB", "sNMPTrapTableNumber"), ("CAMBIUM-PTP600-MIB", "sNMPTrapVersion"), ("CAMBIUM-PTP600-MIB", "sNMPEnabledTraps"), ("CAMBIUM-PTP600-MIB", "enabledDiagnosticAlarms"), ("CAMBIUM-PTP600-MIB", "sNMPTrapIPAddress"), ("CAMBIUM-PTP600-MIB", "sNMPTrapPortNumber"))
if getattr(mibBuilder, 'version', (0, 0, 0)) > (4, 4, 0):
snmpControlGroup = snmpControlGroup.setStatus('current')
if mibBuilder.loadTexts: snmpControlGroup.setDescription('The snmpControl object group.')
sntpGroup = ObjectGroup((1, 3, 6, 1, 4, 1, 17713, 6, 98, 17)).setObjects(("CAMBIUM-PTP600-MIB", "sNTPState"), ("CAMBIUM-PTP600-MIB", "sNTPPollInterval"), ("CAMBIUM-PTP600-MIB", "sNTPSync"), ("CAMBIUM-PTP600-MIB", "sNTPLastSync"), ("CAMBIUM-PTP600-MIB", "systemClock"), ("CAMBIUM-PTP600-MIB", "timeZone"), ("CAMBIUM-PTP600-MIB", "daylightSaving"), ("CAMBIUM-PTP600-MIB", "sNTPPrimaryServer"), ("CAMBIUM-PTP600-MIB", "sNTPPrimaryServerDeadTime"), ("CAMBIUM-PTP600-MIB", "sNTPServerRetries"), ("CAMBIUM-PTP600-MIB", "sNTPServerTimeout"), ("CAMBIUM-PTP600-MIB", "sNTPServerTableNumber"), ("CAMBIUM-PTP600-MIB", "sNTPServerIPAddress"), ("CAMBIUM-PTP600-MIB", "sNTPServerPortNumber"), ("CAMBIUM-PTP600-MIB", "sNTPServerStatus"))
if getattr(mibBuilder, 'version', (0, 0, 0)) > (4, 4, 0):
sntpGroup = sntpGroup.setStatus('current')
if mibBuilder.loadTexts: sntpGroup.setDescription('The sntp object group.')
resetGroup = ObjectGroup((1, 3, 6, 1, 4, 1, 17713, 6, 98, 18)).setObjects(("CAMBIUM-PTP600-MIB", "systemReset"))
if getattr(mibBuilder, 'version', (0, 0, 0)) > (4, 4, 0):
resetGroup = resetGroup.setStatus('current')
if mibBuilder.loadTexts: resetGroup.setDescription('The reset object group.')
versionsGroup = ObjectGroup((1, 3, 6, 1, 4, 1, 17713, 6, 98, 19)).setObjects(("CAMBIUM-PTP600-MIB", "softwareVersion"), ("CAMBIUM-PTP600-MIB", "hardwareVersion"), ("CAMBIUM-PTP600-MIB", "secondarySoftwareVersion"), ("CAMBIUM-PTP600-MIB", "bootVersion"))
if getattr(mibBuilder, 'version', (0, 0, 0)) > (4, 4, 0):
versionsGroup = versionsGroup.setStatus('current')
if mibBuilder.loadTexts: versionsGroup.setDescription('The versions object group.')
pubStatsGroup = ObjectGroup((1, 3, 6, 1, 4, 1, 17713, 6, 98, 20)).setObjects(("CAMBIUM-PTP600-MIB", "receiveDataRate"), ("CAMBIUM-PTP600-MIB", "transmitDataRate"), ("CAMBIUM-PTP600-MIB", "aggregateDataRate"), ("CAMBIUM-PTP600-MIB", "wirelessLinkAvailability"), ("CAMBIUM-PTP600-MIB", "wirelessLinkStatus"), ("CAMBIUM-PTP600-MIB", "byteErrorRatio"), ("CAMBIUM-PTP600-MIB", "receiveModulationModeDetail"), ("CAMBIUM-PTP600-MIB", "ethernetBridgingAvailability"))
if getattr(mibBuilder, 'version', (0, 0, 0)) > (4, 4, 0):
pubStatsGroup = pubStatsGroup.setStatus('current')
if mibBuilder.loadTexts: pubStatsGroup.setDescription('The pubStats object group.')
encryptionGroup = ObjectGroup((1, 3, 6, 1, 4, 1, 17713, 6, 98, 22)).setObjects(("CAMBIUM-PTP600-MIB", "encryptionAlgorithm"))
if getattr(mibBuilder, 'version', (0, 0, 0)) > (4, 4, 0):
encryptionGroup = encryptionGroup.setStatus('current')
if mibBuilder.loadTexts: encryptionGroup.setDescription('The encryption object group.')
tDDControlGroup = ObjectGroup((1, 3, 6, 1, 4, 1, 17713, 6, 98, 23)).setObjects(("CAMBIUM-PTP600-MIB", "tDDSynchronizationMode"))
if getattr(mibBuilder, 'version', (0, 0, 0)) > (4, 4, 0):
tDDControlGroup = tDDControlGroup.setStatus('current')
if mibBuilder.loadTexts: tDDControlGroup.setDescription('The tDDControl object group.')
syslogControlGroup = ObjectGroup((1, 3, 6, 1, 4, 1, 17713, 6, 98, 24)).setObjects(("CAMBIUM-PTP600-MIB", "syslogClient"), ("CAMBIUM-PTP600-MIB", "syslogState"))
if getattr(mibBuilder, 'version', (0, 0, 0)) > (4, 4, 0):
syslogControlGroup = syslogControlGroup.setStatus('current')
if mibBuilder.loadTexts: syslogControlGroup.setDescription('The syslogControl object group.')
supplementaryGroup = ObjectGroup((1, 3, 6, 1, 4, 1, 17713, 6, 98, 96)).setObjects(("CAMBIUM-PTP600-MIB", "longitude"), ("CAMBIUM-PTP600-MIB", "latitude"), ("CAMBIUM-PTP600-MIB", "altitude"))
if getattr(mibBuilder, 'version', (0, 0, 0)) > (4, 4, 0):
supplementaryGroup = supplementaryGroup.setStatus('current')
if mibBuilder.loadTexts: supplementaryGroup.setDescription('The supplementary object group.')
notificationsGroup = NotificationGroup((1, 3, 6, 1, 4, 1, 17713, 6, 98, 99)).setObjects(("CAMBIUM-PTP600-MIB", "dfsChannelChangeTrap"), ("CAMBIUM-PTP600-MIB", "dfsImpulsiveInterferenceTrap"), ("CAMBIUM-PTP600-MIB", "dataPortStatusTrap"), ("CAMBIUM-PTP600-MIB", "telecomsChannelAStatusTrap"), ("CAMBIUM-PTP600-MIB", "telecomsChannelBStatusTrap"), ("CAMBIUM-PTP600-MIB", "channelALoopbackTrap"), ("CAMBIUM-PTP600-MIB", "channelBLoopbackTrap"), ("CAMBIUM-PTP600-MIB", "regionCodeTrap"), ("CAMBIUM-PTP600-MIB", "installStatusTrap"), ("CAMBIUM-PTP600-MIB", "installArmStateTrap"), ("CAMBIUM-PTP600-MIB", "unitOutOfCalibrationTrap"), ("CAMBIUM-PTP600-MIB", "encryptionEnabledMismatchTrap"), ("CAMBIUM-PTP600-MIB", "incompatibleRegionCodesTrap"), ("CAMBIUM-PTP600-MIB", "noWirelessChannelAvailableTrap"), ("CAMBIUM-PTP600-MIB", "wirelessLinkDisabledWarningTrap"), ("CAMBIUM-PTP600-MIB", "dataPortDisabledWarningTrap"), ("CAMBIUM-PTP600-MIB", "dataPortFiberStatusTrap"), ("CAMBIUM-PTP600-MIB", "dataPortConfigurationMismatchTrap"), ("CAMBIUM-PTP600-MIB", "incompatibleMasterAndSlaveTrap"), ("CAMBIUM-PTP600-MIB", "sNTPSyncTrap"), ("CAMBIUM-PTP600-MIB", "tDDSynchronizationAlarmTrap"), ("CAMBIUM-PTP600-MIB", "managementPortStatusTrap"), ("CAMBIUM-PTP600-MIB", "managementPortDisabledWarningTrap"), ("CAMBIUM-PTP600-MIB", "linkModeOptimizationMismatchTrap"), ("CAMBIUM-PTP600-MIB", "managementPortConfigurationMismatchTrap"), ("CAMBIUM-PTP600-MIB", "syslogStateTrap"), ("CAMBIUM-PTP600-MIB", "syslogLocalNearlyFullTrap"), ("CAMBIUM-PTP600-MIB", "syslogLocalWrappedTrap"), ("CAMBIUM-PTP600-MIB", "syslogClientTrap"), ("CAMBIUM-PTP600-MIB", "secureModeAlarmTrap"), ("CAMBIUM-PTP600-MIB", "ethernetBridgingStatusAlarmTrap"))
if getattr(mibBuilder, 'version', (0, 0, 0)) > (4, 4, 0):
notificationsGroup = notificationsGroup.setStatus('current')
if mibBuilder.loadTexts: notificationsGroup.setDescription('The notifications group.')
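# Table definitions follow: DFS channel measurements, QoS priority-queue mapping
# tables, SNMP trap destinations and SNTP servers.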
dfsTable = MibTable((1, 3, 6, 1, 4, 1, 17713, 6, 3, 2), )
if mibBuilder.loadTexts: dfsTable.setStatus('current')
if mibBuilder.loadTexts: dfsTable.setDescription(' A table storing a snapshot of various DFS measurements for each channel')
dfsTableEntry = MibTableRow((1, 3, 6, 1, 4, 1, 17713, 6, 3, 2, 1), ).setIndexNames((0, "CAMBIUM-PTP600-MIB", "dfsTableIndex"))
if mibBuilder.loadTexts: dfsTableEntry.setStatus('current')
if mibBuilder.loadTexts: dfsTableEntry.setDescription('Table DfsTable')
ethernetPriorityTable = MibTable((1, 3, 6, 1, 4, 1, 17713, 6, 6, 15), )
if mibBuilder.loadTexts: ethernetPriorityTable.setStatus('current')
if mibBuilder.loadTexts: ethernetPriorityTable.setDescription(' A priority queue mapping table. This is a list of packet queues indexed by Ethernet priority.')
ethernetPriorityTableEntry = MibTableRow((1, 3, 6, 1, 4, 1, 17713, 6, 6, 15, 1), ).setIndexNames((0, "CAMBIUM-PTP600-MIB", "ethernetPriorityTableIndex"))
if mibBuilder.loadTexts: ethernetPriorityTableEntry.setStatus('current')
if mibBuilder.loadTexts: ethernetPriorityTableEntry.setDescription('Table EthernetPriorityTable')
l2CPPriorityTable = MibTable((1, 3, 6, 1, 4, 1, 17713, 6, 6, 17), )
if mibBuilder.loadTexts: l2CPPriorityTable.setStatus('current')
if mibBuilder.loadTexts: l2CPPriorityTable.setDescription(' A priority queue mapping table. This is a list of packet queues indexed by L2CP priority.')
l2CPPriorityTableEntry = MibTableRow((1, 3, 6, 1, 4, 1, 17713, 6, 6, 17, 1), ).setIndexNames((0, "CAMBIUM-PTP600-MIB", "l2CPPriorityTableIndex"))
if mibBuilder.loadTexts: l2CPPriorityTableEntry.setStatus('current')
if mibBuilder.loadTexts: l2CPPriorityTableEntry.setDescription('Table L2CPPriorityTable')
iPDSCPPriorityTable = MibTable((1, 3, 6, 1, 4, 1, 17713, 6, 6, 19), )
if mibBuilder.loadTexts: iPDSCPPriorityTable.setStatus('current')
if mibBuilder.loadTexts: iPDSCPPriorityTable.setDescription(' A priority queue mapping table. This is a list of packet queues indexed by L3 IP DSCP priority.')
iPDSCPPriorityTableEntry = MibTableRow((1, 3, 6, 1, 4, 1, 17713, 6, 6, 19, 1), ).setIndexNames((0, "CAMBIUM-PTP600-MIB", "iPDSCPPriorityTableIndex"))
if mibBuilder.loadTexts: iPDSCPPriorityTableEntry.setStatus('current')
if mibBuilder.loadTexts: iPDSCPPriorityTableEntry.setDescription('Table IPDSCPPriorityTable')
mPLSTCPriorityTable = MibTable((1, 3, 6, 1, 4, 1, 17713, 6, 6, 21), )
if mibBuilder.loadTexts: mPLSTCPriorityTable.setStatus('current')
if mibBuilder.loadTexts: mPLSTCPriorityTable.setDescription(' A priority queue mapping table. This is a list of priority queues indexed by MPLS TC priority.')
mPLSTCPriorityTableEntry = MibTableRow((1, 3, 6, 1, 4, 1, 17713, 6, 6, 21, 1), ).setIndexNames((0, "CAMBIUM-PTP600-MIB", "mPLSTCPriorityTableIndex"))
if mibBuilder.loadTexts: mPLSTCPriorityTableEntry.setStatus('current')
if mibBuilder.loadTexts: mPLSTCPriorityTableEntry.setDescription('Table MPLSTCPriorityTable')
sNMPTrapTable = MibTable((1, 3, 6, 1, 4, 1, 17713, 6, 16, 4), )
if mibBuilder.loadTexts: sNMPTrapTable.setStatus('current')
if mibBuilder.loadTexts: sNMPTrapTable.setDescription(' SNMP trap configuration table.')
sNMPTrapTableEntry = MibTableRow((1, 3, 6, 1, 4, 1, 17713, 6, 16, 4, 1), ).setIndexNames((0, "CAMBIUM-PTP600-MIB", "sNMPTrapTableIndex"))
if mibBuilder.loadTexts: sNMPTrapTableEntry.setStatus('current')
if mibBuilder.loadTexts: sNMPTrapTableEntry.setDescription('Table SNMPTrapTable')
sNTPServerTable = MibTable((1, 3, 6, 1, 4, 1, 17713, 6, 17, 15), )
if mibBuilder.loadTexts: sNTPServerTable.setStatus('current')
if mibBuilder.loadTexts: sNTPServerTable.setDescription('')
sNTPServerTableEntry = MibTableRow((1, 3, 6, 1, 4, 1, 17713, 6, 17, 15, 1), ).setIndexNames((0, "CAMBIUM-PTP600-MIB", "sNTPServerTableIndex"))
if mibBuilder.loadTexts: sNTPServerTableEntry.setStatus('current')
if mibBuilder.loadTexts: sNTPServerTableEntry.setDescription('Table SNTPServerTable')
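# Scalar and table-column object definitions, grouped by MIB subtree
# (DFS, bridge, LAN configuration, Ethernet, telecoms, licence, management, phyControl).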
dfsTableNumber = MibScalar((1, 3, 6, 1, 4, 1, 17713, 6, 3, 1), Integer32().subtype(subtypeSpec=ValueRangeConstraint(1, 63))).setMaxAccess("readonly")
if mibBuilder.loadTexts: dfsTableNumber.setStatus('current')
if mibBuilder.loadTexts: dfsTableNumber.setDescription('Number of entries in the dfsTable')
dfsTableIndex = MibTableColumn((1, 3, 6, 1, 4, 1, 17713, 6, 3, 2, 1, 1), Integer32().subtype(subtypeSpec=ValueRangeConstraint(1, 63)))
if mibBuilder.loadTexts: dfsTableIndex.setStatus('current')
if mibBuilder.loadTexts: dfsTableIndex.setDescription('used to index values in the dfsTable.')
dfsMeans = MibTableColumn((1, 3, 6, 1, 4, 1, 17713, 6, 3, 2, 1, 2), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: dfsMeans.setStatus('current')
if mibBuilder.loadTexts: dfsMeans.setDescription('Snapshot of DFS means')
dfsNineNinePointNinePercentiles = MibTableColumn((1, 3, 6, 1, 4, 1, 17713, 6, 3, 2, 1, 3), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: dfsNineNinePointNinePercentiles.setStatus('current')
if mibBuilder.loadTexts: dfsNineNinePointNinePercentiles.setDescription('Snapshot of DFS 99.9 percentiles')
dfsPeaks = MibTableColumn((1, 3, 6, 1, 4, 1, 17713, 6, 3, 2, 1, 4), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: dfsPeaks.setStatus('current')
if mibBuilder.loadTexts: dfsPeaks.setDescription('Snapshot of DFS peaks')
localPacketFiltering = MibScalar((1, 3, 6, 1, 4, 1, 17713, 6, 4, 1), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(0, 1))).clone(namedValues=NamedValues(("disabled", 0), ("enabled", 1)))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: localPacketFiltering.setStatus('current')
if mibBuilder.loadTexts: localPacketFiltering.setDescription("When Local Packet Filtering is Enabled, the bridge learns the source MAC addresses of devices transmitting Ethernet packets on the local Ethernet network, and only bridges packets to the remote unit if the destination MAC address has not been learnt as a 'local' device. When Local Packet Filtering is Disabled then the bridge does not learn the source MAC addresses of devices transmitting Ethernet packets on the local Ethernet network, and bridges ALL Ethernet packets received to the remote unit. Local Packet Filtering should be enabled if no external routing hardware is present")
iPAddress = MibScalar((1, 3, 6, 1, 4, 1, 17713, 6, 5, 1), IpAddress()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: iPAddress.setStatus('current')
if mibBuilder.loadTexts: iPAddress.setDescription('Internet protocol (IP) address. This address is used by the family of Internet protocols to uniquely identify the unit on a network. NB: A system reboot is required to activate changes to this attribute.')
subnetMask = MibScalar((1, 3, 6, 1, 4, 1, 17713, 6, 5, 2), IpAddress()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: subnetMask.setStatus('current')
if mibBuilder.loadTexts: subnetMask.setDescription('A subnet allows the flow of network traffic between hosts to be segregated based on a network configuration. By organizing hosts into logical groups, subnetting can improve network security and performance. NB: A system reboot is required to activate changes to this attribute')
gatewayIPAddress = MibScalar((1, 3, 6, 1, 4, 1, 17713, 6, 5, 3), IpAddress()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: gatewayIPAddress.setStatus('current')
if mibBuilder.loadTexts: gatewayIPAddress.setDescription('The IP address of a computer on the current network that is currently acting as a network gateway. A gateway acts as an entrance / exit to packets from / to other networks. NB: A system reboot is required to activate changes to this attribute')
targetMACAddress = MibScalar((1, 3, 6, 1, 4, 1, 17713, 6, 5, 4), OctetString().subtype(subtypeSpec=ValueSizeConstraint(6, 6)).setFixedLength(6)).setMaxAccess("readonly")
if mibBuilder.loadTexts: targetMACAddress.setStatus('current')
if mibBuilder.loadTexts: targetMACAddress.setDescription('MAC address of the PTP wireless unit forming the other end of the PTP link')
masterSlaveMode = MibScalar((1, 3, 6, 1, 4, 1, 17713, 6, 5, 5), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(0, 1))).clone(namedValues=NamedValues(("master", 0), ("slave", 1)))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: masterSlaveMode.setStatus('current')
if mibBuilder.loadTexts: masterSlaveMode.setDescription('The PTP wireless link operates using a master and slave relationship')
maximumTransmitPower = MibScalar((1, 3, 6, 1, 4, 1, 17713, 6, 5, 6), Integer32().subtype(subtypeSpec=ValueRangeConstraint(-15, 27))).setMaxAccess("readonly")
if mibBuilder.loadTexts: maximumTransmitPower.setStatus('current')
if mibBuilder.loadTexts: maximumTransmitPower.setDescription('Maximum transmit power the wireless unit is permitted to use when establishing and maintaining the wireless link')
antennaGain = MibScalar((1, 3, 6, 1, 4, 1, 17713, 6, 5, 7), Integer32().subtype(subtypeSpec=ValueRangeConstraint(0, 610))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: antennaGain.setStatus('current')
if mibBuilder.loadTexts: antennaGain.setDescription('Antenna Gain. Expressed in 10ths of dBi. NB: This attribute is ignored for variants with an integral antenna.')
cableLoss = MibScalar((1, 3, 6, 1, 4, 1, 17713, 6, 5, 8), Integer32().subtype(subtypeSpec=ValueRangeConstraint(0, 255))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: cableLoss.setStatus('current')
if mibBuilder.loadTexts: cableLoss.setDescription('Loss in the cable between the ODU and the antenna. Expressed in 10ths of dB. NB: This attribute is ignored for variants with an integral antenna.')
eIRP = MibScalar((1, 3, 6, 1, 4, 1, 17713, 6, 5, 9), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: eIRP.setStatus('current')
if mibBuilder.loadTexts: eIRP.setDescription('Effective Isotropic Radiated Power (EIRP) describes the strength of the radio signal leaving the wireless unit. Expressed in 10ths of dBm')
ethernetCappedMaxWirelessSpeed = MibScalar((1, 3, 6, 1, 4, 1, 17713, 6, 5, 10), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(0, 1))).clone(namedValues=NamedValues(("disabled", 0), ("enabled", 1)))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: ethernetCappedMaxWirelessSpeed.setStatus('current')
if mibBuilder.loadTexts: ethernetCappedMaxWirelessSpeed.setDescription('Ethernet frames will be discarded in the ODU if the data rate over the wireless link exceeds the Ethernet link speed at the local or remote Data port. The Ethernet Capped Max Wireless Speed control limits the wireless data rate based on the local and remote Ethernet link speed. Set this control to Enabled if either ODU is connected to an Ethernet link operating at less than 1000 Mbps. NB: This attribute is automatically disabled when the installation agent is armed. To disarm the installation agent use the installation wizard.')
channelBandwidth = MibScalar((1, 3, 6, 1, 4, 1, 17713, 6, 5, 11), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(0, 1, 2, 3, 4))).clone(namedValues=NamedValues(("bw30MHz", 0), ("bw15MHz", 1), ("bw10MHz", 2), ("bw5MHz", 3), ("bw20MHz", 4)))).setMaxAccess("readonly")
if mibBuilder.loadTexts: channelBandwidth.setStatus('current')
if mibBuilder.loadTexts: channelBandwidth.setDescription('This control sets the bandwidth of the transmit and receive radio channels')
remoteIPAddress = MibScalar((1, 3, 6, 1, 4, 1, 17713, 6, 5, 12), IpAddress()).setMaxAccess("readonly")
if mibBuilder.loadTexts: remoteIPAddress.setStatus('current')
if mibBuilder.loadTexts: remoteIPAddress.setDescription('IP Address of the peer wireless unit')
linkName = MibScalar((1, 3, 6, 1, 4, 1, 17713, 6, 5, 14), DisplayString().subtype(subtypeSpec=ValueSizeConstraint(0, 63))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: linkName.setStatus('current')
if mibBuilder.loadTexts: linkName.setDescription('Name of the PTP link allocated by the System Administrator. Used to establish a connection with the correct PTP wireless unit at the other end of the link. NOTE: The link name MUST be the same at both ends of the PTP link.')
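# Illustrative only, not part of the generated MIB: a minimal manager-side sketch,
# assuming pysnmp's high-level API (pysnmp.hlapi) is installed and this MIB module is
# available to the manager for name resolution, showing how a scalar such as linkName
# could be read. The host 'ptp600.example.net' and community 'public' are placeholders.
def _exampleReadLinkName(host='ptp600.example.net', community='public'):
    from pysnmp.hlapi import (SnmpEngine, CommunityData, UdpTransportTarget,
                              ContextData, ObjectType, ObjectIdentity, getCmd)
    # Issue a single SNMPv2c GET for linkName.0, resolved by name through this MIB
    errorIndication, errorStatus, errorIndex, varBinds = next(getCmd(
        SnmpEngine(),
        CommunityData(community),
        UdpTransportTarget((host, 161)),
        ContextData(),
        ObjectType(ObjectIdentity('CAMBIUM-PTP600-MIB', 'linkName', 0))))
    if errorIndication or errorStatus:
        return None
    # varBinds[0] is the resolved (name, value) pair; return the value as text
    return varBinds[0][1].prettyPrint()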
siteName = MibScalar((1, 3, 6, 1, 4, 1, 17713, 6, 5, 15), DisplayString().subtype(subtypeSpec=ValueSizeConstraint(0, 63))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: siteName.setStatus('current')
if mibBuilder.loadTexts: siteName.setDescription('This field contains a user-provided description of the site name together with any additional notes')
accessMethod = MibScalar((1, 3, 6, 1, 4, 1, 17713, 6, 5, 16), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(0, 1, 2))).clone(namedValues=NamedValues(("linkAccess", 0), ("linkNameAccess", 1), ("groupAccess", 2)))).setMaxAccess("readonly")
if mibBuilder.loadTexts: accessMethod.setStatus('current')
if mibBuilder.loadTexts: accessMethod.setDescription('ODUs must be configured in pairs before a link can be established. The Access Method attribute determines how the paired ODUs will recognise each other. For the Link Access method, each ODU must be configured with Target MAC Address equal to the MAC Address of the other unit. For the Link Name Access method, both units must be configured with the same Link Name. For the Group Access method, both units must be configured with the same Group ID.')
groupID = MibScalar((1, 3, 6, 1, 4, 1, 17713, 6, 5, 17), Integer32().subtype(subtypeSpec=ValueRangeConstraint(0, 255))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: groupID.setStatus('current')
if mibBuilder.loadTexts: groupID.setDescription('When Access Method is set to Group Access, the Group ID identifies all the units belonging to the same group')
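# Ethernet subtree: data/management port configuration, VLAN management and
# QoS priority-queue mappings.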
dataPortAutoNegotiation = MibScalar((1, 3, 6, 1, 4, 1, 17713, 6, 6, 1), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(0, 1))).clone(namedValues=NamedValues(("disabled", 0), ("enabled", 1)))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: dataPortAutoNegotiation.setStatus('current')
if mibBuilder.loadTexts: dataPortAutoNegotiation.setDescription('This controls whether the local Ethernet interface configuration is automatically negotiated or forced. Do not use a forced setting together with auto negotiation at the Ethernet link partner. If you want to run your Ethernet link at a fixed speed and duplex then you would normally leave auto negotiation enabled and only enable the single Auto Neg Advertisement option that you want the link to run in - FORCING ETHERNET CONFIGURATION IS A LAST RESORT WHICH SHOULD ONLY BE PERFORMED IF YOU ARE HAVING PROBLEMS WITH AUTO NEGOTIATION - AND YOU MUST ENSURE THAT YOU CONFIGURE BOTH THIS UNIT AND THE ETHERNET LINK PARTNER TO WHICH IT IS CONNECTED IDENTICALLY (THE LINK PARTNER MUST NOT HAVE AUTO NEGOTIATION ENABLED EITHER, OTHERWISE A DUPLEX MISMATCH WILL OCCUR). NB: The new setting will only take effect after a system reboot.')
dataPortAutoNegAdvertisement = MibScalar((1, 3, 6, 1, 4, 1, 17713, 6, 6, 2), Bits().clone(namedValues=NamedValues(("negInvalid", 2), ("neg10MbpsHalfDuplex", 3), ("neg10MbpsFullDuplex", 4), ("neg100MbpsHalfDuplex", 5), ("neg100MbpsFullDuplex", 6), ("neg1000MbpsFullDuplex", 7)))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: dataPortAutoNegAdvertisement.setStatus('current')
if mibBuilder.loadTexts: dataPortAutoNegAdvertisement.setDescription('This sets the different Ethernet configurations that will be advertised during auto negotiation. Note the new setting will only take effect after a system reboot. WARNING: Duplex mismatch may result if both ends of the Ethernet link do not have the same settings. Do not use auto negotiation together with a fixed setting at the Ethernet link partner. WARNING: Make sure that you select option(s) that you know your connected equipment can cater for!')
dataPortAutoMdix = MibScalar((1, 3, 6, 1, 4, 1, 17713, 6, 6, 3), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(0, 1))).clone(namedValues=NamedValues(("disabled", 0), ("enabled", 1)))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: dataPortAutoMdix.setStatus('current')
if mibBuilder.loadTexts: dataPortAutoMdix.setDescription('This enables/disables the Auto MDI/MDIX capability. NB: The new setting will only take effect after a system reboot')
dataPortStatus = MibScalar((1, 3, 6, 1, 4, 1, 17713, 6, 6, 4), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(0, 1, 2))).clone(namedValues=NamedValues(("down", 0), ("copperLinkUp", 1), ("fiberLinkUp", 2)))).setMaxAccess("readonly")
if mibBuilder.loadTexts: dataPortStatus.setStatus('current')
if mibBuilder.loadTexts: dataPortStatus.setDescription('Current status of the Ethernet link. NB: a change of state may generate an SNMP trap and/or SMTP email alert')
dataPortSpeedAndDuplex = MibScalar((1, 3, 6, 1, 4, 1, 17713, 6, 6, 5), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(0, 1, 2, 3, 4, 5))).clone(namedValues=NamedValues(("speed1000MbpsFullDuplex", 0), ("speed100MbpsFullDuplex", 1), ("speed100MbpsHalfDuplex", 2), ("speed10MbpsFullDuplex", 3), ("speed10MbpsHalfDuplex", 4), ("speedUnknown6", 5)))).setMaxAccess("readonly")
if mibBuilder.loadTexts: dataPortSpeedAndDuplex.setStatus('current')
if mibBuilder.loadTexts: dataPortSpeedAndDuplex.setDescription('The current speed and duplex of the Ethernet link')
dataPortWirelessDownAlert = MibScalar((1, 3, 6, 1, 4, 1, 17713, 6, 6, 6), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(0, 1))).clone(namedValues=NamedValues(("disabled", 0), ("enabled", 1)))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: dataPortWirelessDownAlert.setStatus('current')
if mibBuilder.loadTexts: dataPortWirelessDownAlert.setDescription('When enabled, this causes the Ethernet link to be dropped briefly when the wireless link drops. This is so that spanning tree algorithms can quickly detect that there is a problem.')
useVLANForManagementInterfaces = MibScalar((1, 3, 6, 1, 4, 1, 17713, 6, 6, 7), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(0, 1, 2))).clone(namedValues=NamedValues(("noVLANTagging", 0), ("iEEE8021QTaggedCTagType8100", 1), ("iEEE8021adTaggedSTagorBTagType88a8", 2)))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: useVLANForManagementInterfaces.setStatus('current')
if mibBuilder.loadTexts: useVLANForManagementInterfaces.setDescription('This controls whether the management interfaces (WWW/SNMP/SMTP/SNTP) use VLAN tags or not. NB: The new setting will only take effect after a system reboot')
vLANManagementPriority = MibScalar((1, 3, 6, 1, 4, 1, 17713, 6, 6, 8), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(0, 1, 2, 3, 4, 5, 6, 7))).clone(namedValues=NamedValues(("p0", 0), ("p1", 1), ("p2", 2), ("p3", 3), ("p4", 4), ("p5", 5), ("p6", 6), ("p7", 7)))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: vLANManagementPriority.setStatus('current')
if mibBuilder.loadTexts: vLANManagementPriority.setDescription('This VLAN Priority (0-7) will be included in packets generated by the management interfaces. NB: The new setting will only take effect after a system reboot')
vLANManagementVID = MibScalar((1, 3, 6, 1, 4, 1, 17713, 6, 6, 11), Integer32().subtype(subtypeSpec=ValueRangeConstraint(0, 4094))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: vLANManagementVID.setStatus('current')
if mibBuilder.loadTexts: vLANManagementVID.setDescription('This VLAN VID (0-4094) will be included in packets generated by the management interfaces. Note the new setting will only take effect after a system reboot')
managementPortStatus = MibScalar((1, 3, 6, 1, 4, 1, 17713, 6, 6, 12), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(0, 1))).clone(namedValues=NamedValues(("down", 0), ("copperLinkUp", 1)))).setMaxAccess("readonly")
if mibBuilder.loadTexts: managementPortStatus.setStatus('current')
if mibBuilder.loadTexts: managementPortStatus.setDescription('Current status of the out-of-band management link. NB: a change of state may generate an SNMP trap and/or SMTP email alert')
managementPortSpeedAndDuplex = MibScalar((1, 3, 6, 1, 4, 1, 17713, 6, 6, 13), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3, 4, 5))).clone(namedValues=NamedValues(("speed100MbpsFullDuplex", 1), ("speed100MbpsHalfDuplex", 2), ("speed10MbpsFullDuplex", 3), ("speed10MbpsHalfDuplex", 4), ("speedUnknown6", 5)))).setMaxAccess("readonly")
if mibBuilder.loadTexts: managementPortSpeedAndDuplex.setStatus('current')
if mibBuilder.loadTexts: managementPortSpeedAndDuplex.setDescription('The current speed and duplex of the out-of-band management link')
ethernetPriorityTableNumber = MibScalar((1, 3, 6, 1, 4, 1, 17713, 6, 6, 14), Integer32().subtype(subtypeSpec=ValueRangeConstraint(9, 9))).setMaxAccess("readonly")
if mibBuilder.loadTexts: ethernetPriorityTableNumber.setStatus('current')
if mibBuilder.loadTexts: ethernetPriorityTableNumber.setDescription('Number of entries in the ethernetPriorityTable.')
l2CPPriorityTableNumber = MibScalar((1, 3, 6, 1, 4, 1, 17713, 6, 6, 16), Integer32().subtype(subtypeSpec=ValueRangeConstraint(5, 5))).setMaxAccess("readonly")
if mibBuilder.loadTexts: l2CPPriorityTableNumber.setStatus('current')
if mibBuilder.loadTexts: l2CPPriorityTableNumber.setDescription('Number of entries in the l2CPPriorityTable.')
iPDSCPPriorityTableNumber = MibScalar((1, 3, 6, 1, 4, 1, 17713, 6, 6, 18), Integer32().subtype(subtypeSpec=ValueRangeConstraint(64, 64))).setMaxAccess("readonly")
if mibBuilder.loadTexts: iPDSCPPriorityTableNumber.setStatus('current')
if mibBuilder.loadTexts: iPDSCPPriorityTableNumber.setDescription('Number of entries in the iPDSCPPriorityTable.')
mPLSTCPriorityTableNumber = MibScalar((1, 3, 6, 1, 4, 1, 17713, 6, 6, 20), Integer32().subtype(subtypeSpec=ValueRangeConstraint(8, 8))).setMaxAccess("readonly")
if mibBuilder.loadTexts: mPLSTCPriorityTableNumber.setStatus('current')
if mibBuilder.loadTexts: mPLSTCPriorityTableNumber.setDescription('Number of entries in the mPLSTCPriorityTable.')
managementMode = MibScalar((1, 3, 6, 1, 4, 1, 17713, 6, 6, 22), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(0, 1, 2, 3))).clone(namedValues=NamedValues(("outofBandLocal", 0), ("outofBand", 1), ("inBand", 2), ("inBandLocal", 3)))).setMaxAccess("readonly")
if mibBuilder.loadTexts: managementMode.setStatus('current')
if mibBuilder.loadTexts: managementMode.setDescription("Out-of-Band Management (only available on certain hardware models): End-to-end out-of-band mode in which the management agent can be reached from the management port at the local ODU and (assuming that the wireless link is established) the management port at the remote ODU. Out-of-Band Local Management (only available on certain hardware models): Similar to the standard Out-of-Band mode, except that management frames are not forwarded over the wireless link. In-Band Management (default): The management agent can be reached from the data port at the local ODU and (assuming that the wireless link is established) the data port at the remote ODU. In this mode the management port is disabled. In-Band Local Management (only available when 'Use VLAN For Management Interfaces' configures a VLAN tag): Similar to the standard In-Band mode, except that management frames are not forwarded over the wireless link.")
managementPortWirelessDownAlert = MibScalar((1, 3, 6, 1, 4, 1, 17713, 6, 6, 23), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(0, 1))).clone(namedValues=NamedValues(("disabled", 0), ("enabled", 1)))).setMaxAccess("readonly")
if mibBuilder.loadTexts: managementPortWirelessDownAlert.setStatus('current')
if mibBuilder.loadTexts: managementPortWirelessDownAlert.setDescription('When enabled, this causes the management link to be dropped briefly when the wireless link drops. This is so that spanning tree algorithms can quickly detect that there is a problem.')
qOSPriorityScheme = MibScalar((1, 3, 6, 1, 4, 1, 17713, 6, 6, 24), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(0, 1))).clone(namedValues=NamedValues(("ethernet", 0), ("iPMPLS", 1)))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: qOSPriorityScheme.setStatus('current')
if mibBuilder.loadTexts: qOSPriorityScheme.setDescription('Selects classification based on fields in the Ethernet header (Layer 2) or in the Network header (Layer 3). The unit recognises two network layer protocols: IP and MPLS')
unknownNetworkPriorityQueueMapping = MibScalar((1, 3, 6, 1, 4, 1, 17713, 6, 6, 25), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(0, 1, 2, 3, 4, 5, 6, 7))).clone(namedValues=NamedValues(("q0", 0), ("q1", 1), ("q2", 2), ("q3", 3), ("q4", 4), ("q5", 5), ("q6", 6), ("q7", 7)))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: unknownNetworkPriorityQueueMapping.setStatus('current')
if mibBuilder.loadTexts: unknownNetworkPriorityQueueMapping.setDescription('Configures the classification of unknown network protocols to an egress queue at the wireless port. Unknown means something other than IP and MPLS')
dSCPManagementPriority = MibScalar((1, 3, 6, 1, 4, 1, 17713, 6, 6, 26), Integer32().subtype(subtypeSpec=ValueRangeConstraint(0, 63))).setMaxAccess("readonly")
if mibBuilder.loadTexts: dSCPManagementPriority.setStatus('current')
if mibBuilder.loadTexts: dSCPManagementPriority.setDescription('This Differentiated Services Code Point value will be inserted in the IP Header of all IP Datagrams transmitted by the management interfaces')
ethernetBridgingStatus = MibScalar((1, 3, 6, 1, 4, 1, 17713, 6, 6, 27), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(0, 1))).clone(namedValues=NamedValues(("disabled", 0), ("enabled", 1)))).setMaxAccess("readonly")
if mibBuilder.loadTexts: ethernetBridgingStatus.setStatus('current')
if mibBuilder.loadTexts: ethernetBridgingStatus.setDescription('Current status of Ethernet packet bridging over the wireless link')
ethernetPriorityTableIndex = MibTableColumn((1, 3, 6, 1, 4, 1, 17713, 6, 6, 15, 1, 1), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3, 4, 5, 6, 7, 8, 9))).clone(namedValues=NamedValues(("p0", 1), ("p1", 2), ("p2", 3), ("p3", 4), ("p4", 5), ("p5", 6), ("p6", 7), ("p7", 8), ("untagged", 9))))
if mibBuilder.loadTexts: ethernetPriorityTableIndex.setStatus('current')
if mibBuilder.loadTexts: ethernetPriorityTableIndex.setDescription('Ethernet priority flag, used to index values in the ethernetPriorityTable')
ethernetPriorityQueueMapping = MibTableColumn((1, 3, 6, 1, 4, 1, 17713, 6, 6, 15, 1, 2), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(0, 1, 2, 3, 4, 5, 6, 7))).clone(namedValues=NamedValues(("q0", 0), ("q1", 1), ("q2", 2), ("q3", 3), ("q4", 4), ("q5", 5), ("q6", 6), ("q7", 7)))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: ethernetPriorityQueueMapping.setStatus('current')
if mibBuilder.loadTexts: ethernetPriorityQueueMapping.setDescription('Configures the classification of this Ethernet priority (also known as IEEE 802.1p value) to an egress queue at the wireless port')
l2CPPriorityTableIndex = MibTableColumn((1, 3, 6, 1, 4, 1, 17713, 6, 6, 17, 1, 1), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3, 4, 5))).clone(namedValues=NamedValues(("bridge", 1), ("mRP", 2), ("cFM", 3), ("rAPS", 4), ("eAPS", 5))))
if mibBuilder.loadTexts: l2CPPriorityTableIndex.setStatus('current')
if mibBuilder.loadTexts: l2CPPriorityTableIndex.setDescription('L2CP priority flag, used to index values in the l2CPPriorityTable')
l2CPPriorityQueueMapping = MibTableColumn((1, 3, 6, 1, 4, 1, 17713, 6, 6, 17, 1, 2), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(0, 1, 2, 3, 4, 5, 6, 7))).clone(namedValues=NamedValues(("q0", 0), ("q1", 1), ("q2", 2), ("q3", 3), ("q4", 4), ("q5", 5), ("q6", 6), ("q7", 7)))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: l2CPPriorityQueueMapping.setStatus('current')
if mibBuilder.loadTexts: l2CPPriorityQueueMapping.setDescription('Configures the classification of this layer two control protocol (L2CP) to an egress queue at the wireless port')
iPDSCPPriorityTableIndex = MibTableColumn((1, 3, 6, 1, 4, 1, 17713, 6, 6, 19, 1, 1), Integer32().subtype(subtypeSpec=ValueRangeConstraint(1, 64)))
if mibBuilder.loadTexts: iPDSCPPriorityTableIndex.setStatus('current')
if mibBuilder.loadTexts: iPDSCPPriorityTableIndex.setDescription('IP DSCP priority flag, used to index values in the iPDSCPPriorityTable')
iPDSCPPriorityQueueMapping = MibTableColumn((1, 3, 6, 1, 4, 1, 17713, 6, 6, 19, 1, 2), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(0, 1, 2, 3, 4, 5, 6, 7))).clone(namedValues=NamedValues(("q0", 0), ("q1", 1), ("q2", 2), ("q3", 3), ("q4", 4), ("q5", 5), ("q6", 6), ("q7", 7)))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: iPDSCPPriorityQueueMapping.setStatus('current')
if mibBuilder.loadTexts: iPDSCPPriorityQueueMapping.setDescription('Configures the classification of this IP differentiated services code point (DSCP) value to an egress queue at the wireless port')
mPLSTCPriorityTableIndex = MibTableColumn((1, 3, 6, 1, 4, 1, 17713, 6, 6, 21, 1, 1), Integer32().subtype(subtypeSpec=ValueRangeConstraint(1, 8)))
if mibBuilder.loadTexts: mPLSTCPriorityTableIndex.setStatus('current')
if mibBuilder.loadTexts: mPLSTCPriorityTableIndex.setDescription('MPLS TC priority flag, used to index values in the mPLSTCPriorityTable')
mPLSTCPriorityQueueMapping = MibTableColumn((1, 3, 6, 1, 4, 1, 17713, 6, 6, 21, 1, 2), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(0, 1, 2, 3, 4, 5, 6, 7))).clone(namedValues=NamedValues(("q0", 0), ("q1", 1), ("q2", 2), ("q3", 3), ("q4", 4), ("q5", 5), ("q6", 6), ("q7", 7)))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: mPLSTCPriorityQueueMapping.setStatus('current')
if mibBuilder.loadTexts: mPLSTCPriorityQueueMapping.setDescription('Configures the classification of this MPLS traffic class (the field formerly known as EXP) to an egress queue at the wireless port')
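# Telecoms subtree: E1/T1 interface selection, channel A/B status, line code,
# cable length and loopback controls.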
telecomsInterface = MibScalar((1, 3, 6, 1, 4, 1, 17713, 6, 7, 1), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(0, 1, 2))).clone(namedValues=NamedValues(("none", 0), ("e1", 1), ("t1", 2)))).setMaxAccess("readonly")
if mibBuilder.loadTexts: telecomsInterface.setStatus('current')
if mibBuilder.loadTexts: telecomsInterface.setDescription("The line interface type. If this is set to 'None' then the telecoms interfaces are disabled, and the telecom group attributes will be ignored.")
telecomsChannelAStatus = MibScalar((1, 3, 6, 1, 4, 1, 17713, 6, 7, 2), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(0, 1, 2, 3, 4, 5, 6))).clone(namedValues=NamedValues(("disabled", 0), ("noSignalLocal", 1), ("noSignalRemote", 2), ("noSignalLocalandRemote", 3), ("up", 4), ("remoteTiming", 5), ("noSignalLocalandRemoteTiming", 6)))).setMaxAccess("readonly")
if mibBuilder.loadTexts: telecomsChannelAStatus.setStatus('current')
if mibBuilder.loadTexts: telecomsChannelAStatus.setDescription('Current status of telecoms channel A. NB: a change of state may generate an SNMP trap and/or SMTP email alert')
telecomsChannelBStatus = MibScalar((1, 3, 6, 1, 4, 1, 17713, 6, 7, 3), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(0, 1, 2, 3, 4, 5, 6))).clone(namedValues=NamedValues(("disabled", 0), ("noSignalLocal", 1), ("noSignalRemote", 2), ("noSignalLocalandRemote", 3), ("up", 4), ("remoteTiming", 5), ("noSignalLocalandRemoteTiming", 6)))).setMaxAccess("readonly")
if mibBuilder.loadTexts: telecomsChannelBStatus.setStatus('current')
if mibBuilder.loadTexts: telecomsChannelBStatus.setDescription('Current status of telecoms channel B. NB: a change of state may generate an SNMP trap and/or SMTP email alert')
channelALineCode = MibScalar((1, 3, 6, 1, 4, 1, 17713, 6, 7, 4), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(0, 1))).clone(namedValues=NamedValues(("aMI", 0), ("b8ZSHDB3", 1)))).setMaxAccess("readonly")
if mibBuilder.loadTexts: channelALineCode.setStatus('current')
if mibBuilder.loadTexts: channelALineCode.setDescription('The line code setting of the telecoms interface. This must match the setting of the device connected to this interface')
channelBLineCode = MibScalar((1, 3, 6, 1, 4, 1, 17713, 6, 7, 5), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(0, 1))).clone(namedValues=NamedValues(("aMI", 0), ("b8ZSHDB3", 1)))).setMaxAccess("readonly")
if mibBuilder.loadTexts: channelBLineCode.setStatus('current')
if mibBuilder.loadTexts: channelBLineCode.setDescription('The line code setting of the telecoms interface. This must match the setting of the device connected to this interface')
channelACableLength = MibScalar((1, 3, 6, 1, 4, 1, 17713, 6, 7, 6), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(0, 1, 2, 3, 4))).clone(namedValues=NamedValues(("feet133", 0), ("feet266", 1), ("feet399", 2), ("feet533", 3), ("feet655", 4)))).setMaxAccess("readonly")
if mibBuilder.loadTexts: channelACableLength.setStatus('current')
if mibBuilder.loadTexts: channelACableLength.setDescription('The length of the cable connected to the telecoms interface, measured in feet')
channelBCableLength = MibScalar((1, 3, 6, 1, 4, 1, 17713, 6, 7, 7), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(0, 1, 2, 3, 4))).clone(namedValues=NamedValues(("feet133", 0), ("feet266", 1), ("feet399", 2), ("feet533", 3), ("feet655", 4)))).setMaxAccess("readonly")
if mibBuilder.loadTexts: channelBCableLength.setStatus('current')
if mibBuilder.loadTexts: channelBCableLength.setDescription('The length of the cable connected to the telecoms interface, measured in feet')
channelALoopback = MibScalar((1, 3, 6, 1, 4, 1, 17713, 6, 7, 8), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(0, 1, 2))).clone(namedValues=NamedValues(("none", 0), ("copper", 1), ("wireless", 2)))).setMaxAccess("readonly")
if mibBuilder.loadTexts: channelALoopback.setStatus('current')
if mibBuilder.loadTexts: channelALoopback.setDescription("The loopback status of telecoms channel A. This is intended for installation testing and should be set to 'None' for normal operation. The wire connections to a unit can be tested by applying a 'Copper' loopback to the local unit. The wireless connection to the remote unit can be tested by applying a 'Wireless' loopback to the remote unit with no loopback on the local unit. NB: a change of state may generate an SNMP trap and/or SMTP email alert")
channelBLoopback = MibScalar((1, 3, 6, 1, 4, 1, 17713, 6, 7, 9), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(0, 1, 2))).clone(namedValues=NamedValues(("none", 0), ("copper", 1), ("wireless", 2)))).setMaxAccess("readonly")
if mibBuilder.loadTexts: channelBLoopback.setStatus('current')
if mibBuilder.loadTexts: channelBLoopback.setDescription("The loopback status of telecoms channel B. This is intended for installation testing and should be set to 'None' for normal operation. The wire connections to a unit can be tested by applying a 'Copper' loopback to the local unit. The wireless connection to the remote unit can be tested by applying a 'Wireless' loopback to the remote unit with no loopback on the local unit. NB: a change of state may generate an SNMP trap and/or SMTP email alert")
telecomsChannelSelection = MibScalar((1, 3, 6, 1, 4, 1, 17713, 6, 7, 10), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(0, 1))).clone(namedValues=NamedValues(("channelAOnly", 0), ("channelsAandB", 1)))).setMaxAccess("readonly")
if mibBuilder.loadTexts: telecomsChannelSelection.setStatus('current')
if mibBuilder.loadTexts: telecomsChannelSelection.setDescription('Indicates which telecoms channels have been enabled. If only Channel A has been enabled, then the Channel B attributes will be ignored.')
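# Licence subtree: region code, product/frequency/bandwidth variants, HAZLOC
# configuration and security level.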
regionCode = MibScalar((1, 3, 6, 1, 4, 1, 17713, 6, 8, 1), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 20, 21, 22, 23, 24, 25, 26, 27, 28, 29, 30, 31))).clone(namedValues=NamedValues(("regionCodeInvalid", 0), ("regionCode1", 1), ("regionCode2", 2), ("regionCode3", 3), ("regionCode4", 4), ("regionCode5", 5), ("regionCode6", 6), ("regionCode7", 7), ("regionCode8", 8), ("regionCode9", 9), ("regionCode10", 10), ("regionCode11", 11), ("regionCode12", 12), ("regionCode13", 13), ("regionCode14", 14), ("regionCode15", 15), ("regionCode16", 16), ("regionCode17", 17), ("regionCode18", 18), ("regionCode19", 19), ("regionCode20", 20), ("regionCode21", 21), ("regionCode22", 22), ("regionCode23", 23), ("regionCode24", 24), ("regionCode25", 25), ("regionCode26", 26), ("regionCode27", 27), ("regionCode28", 28), ("regionCode29", 29), ("regionCode30", 30), ("regionCode31", 31)))).setMaxAccess("readonly")
if mibBuilder.loadTexts: regionCode.setStatus('current')
if mibBuilder.loadTexts: regionCode.setDescription('The region code prohibits the wireless unit from operating outside the regulated limits. An invalid region code indicates a corrupted license key. NB: a change of state may generate an SNMP trap and/or SMTP email alert')
productVariant = MibScalar((1, 3, 6, 1, 4, 1, 17713, 6, 8, 2), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15))).clone(namedValues=NamedValues(("pTPxx400Full", 0), ("pTPxx400Deprecated1", 1), ("pTPxx400Deprecated2", 2), ("pTPxx400Lite", 3), ("spare1", 4), ("pTPxx300", 5), ("spare2", 6), ("spare3", 7), ("pTPxx500FullDeprecated", 8), ("pTPxx500LiteDeprecated", 9), ("pTPxx500", 10), ("pTPxx600Lite", 11), ("pTPxx600Full", 12), ("spare5", 13), ("spare6", 14), ("pTP800", 15)))).setMaxAccess("readonly")
if mibBuilder.loadTexts: productVariant.setStatus('current')
if mibBuilder.loadTexts: productVariant.setDescription('The product variant')
productName = MibScalar((1, 3, 6, 1, 4, 1, 17713, 6, 8, 3), DisplayString().subtype(subtypeSpec=ValueSizeConstraint(0, 63))).setMaxAccess("readonly")
if mibBuilder.loadTexts: productName.setStatus('current')
if mibBuilder.loadTexts: productName.setDescription('Name of the product variant')
ethernetFiberSupport = MibScalar((1, 3, 6, 1, 4, 1, 17713, 6, 8, 4), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(0, 1))).clone(namedValues=NamedValues(("disabled", 0), ("enabled", 1)))).setMaxAccess("readonly")
if mibBuilder.loadTexts: ethernetFiberSupport.setStatus('current')
if mibBuilder.loadTexts: ethernetFiberSupport.setDescription('Ethernet fiber support availability on this wireless unit')
frequencyVariant = MibScalar((1, 3, 6, 1, 4, 1, 17713, 6, 8, 5), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10))).clone(namedValues=NamedValues(("freq5800MHz", 0), ("freq5400MHz", 1), ("freq4900MHz", 2), ("freq2500MHz", 3), ("freq5800MHz2", 4), ("freq5400MHz2", 5), ("freq4500MHz", 6), ("freq5900MHz", 7), ("freq5200MHz", 8), ("freq5100MHz", 9), ("freq4800MHz", 10)))).setMaxAccess("readonly")
if mibBuilder.loadTexts: frequencyVariant.setStatus('current')
if mibBuilder.loadTexts: frequencyVariant.setDescription('Frequency variant of the wireless unit')
bandwidthVariant = MibScalar((1, 3, 6, 1, 4, 1, 17713, 6, 8, 6), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(0, 1, 2, 3, 4))).clone(namedValues=NamedValues(("bw30MHz", 0), ("bw15MHz", 1), ("bw10MHz", 2), ("bw5MHz", 3), ("bw20MHz", 4)))).setMaxAccess("readonly")
if mibBuilder.loadTexts: bandwidthVariant.setStatus('current')
if mibBuilder.loadTexts: bandwidthVariant.setDescription('Bandwidth variant of the wireless unit')
constantPowerSpectralDensity = MibScalar((1, 3, 6, 1, 4, 1, 17713, 6, 8, 7), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(0, 1))).clone(namedValues=NamedValues(("constant", 0), ("fullPower", 1)))).setMaxAccess("readonly")
if mibBuilder.loadTexts: constantPowerSpectralDensity.setStatus('current')
if mibBuilder.loadTexts: constantPowerSpectralDensity.setDescription('Constant power spectral density mode control')
sNMPv3Enable = MibScalar((1, 3, 6, 1, 4, 1, 17713, 6, 8, 9), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(0, 1))).clone(namedValues=NamedValues(("disabled", 0), ("enabled", 1)))).setMaxAccess("readonly")
if mibBuilder.loadTexts: sNMPv3Enable.setStatus('current')
if mibBuilder.loadTexts: sNMPv3Enable.setDescription('SNMPv3 availability control')
hAZLOCConfiguration = MibScalar((1, 3, 6, 1, 4, 1, 17713, 6, 8, 10), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(0, 1, 2, 3, 4))).clone(namedValues=NamedValues(("nonHAZLOCUnit", 0), ("gasGroupA", 1), ("gasGroupB", 2), ("gasGroupC", 3), ("gasGroupD", 4)))).setMaxAccess("readonly")
if mibBuilder.loadTexts: hAZLOCConfiguration.setStatus('current')
if mibBuilder.loadTexts: hAZLOCConfiguration.setDescription('The HAZLOC/ATEX Configuration of this unit. The Gas Group is configured by the license key.')
securityLevel = MibScalar((1, 3, 6, 1, 4, 1, 17713, 6, 8, 11), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(0, 1, 2))).clone(namedValues=NamedValues(("normal", 0), ("fIPS", 1), ("uCAPL", 2)))).setMaxAccess("readonly")
if mibBuilder.loadTexts: securityLevel.setStatus('current')
if mibBuilder.loadTexts: securityLevel.setDescription('The maximum configurable security level')
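# Management subtree: installation (ranging and arm state), TFTP software
# upgrade and HTTP/Telnet/HTTPS access controls.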
targetRange = MibScalar((1, 3, 6, 1, 4, 1, 17713, 6, 9, 1), Integer32().subtype(subtypeSpec=ValueRangeConstraint(0, 2000))).setMaxAccess("readonly")
if mibBuilder.loadTexts: targetRange.setStatus('current')
if mibBuilder.loadTexts: targetRange.setDescription('Initial target range used in installation +/-1, expressed in tenths of a kilometer. When Auto-Ranging this must be set to 0')
rangingMode = MibScalar((1, 3, 6, 1, 4, 1, 17713, 6, 9, 2), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(0, 1, 2, 3))).clone(namedValues=NamedValues(("auto0to40km", 0), ("auto0to100km", 1), ("auto0to200km", 2), ("targetRange", 3)))).setMaxAccess("readonly")
if mibBuilder.loadTexts: rangingMode.setStatus('current')
if mibBuilder.loadTexts: rangingMode.setDescription('Type of ranging to use during installation, either Auto-Ranging or use a selectable Target Range')
installStatus = MibScalar((1, 3, 6, 1, 4, 1, 17713, 6, 9, 3), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(0, 1))).clone(namedValues=NamedValues(("ok", 0), ("wrongPeer", 1)))).setMaxAccess("readonly")
if mibBuilder.loadTexts: installStatus.setStatus('current')
if mibBuilder.loadTexts: installStatus.setDescription('A non-zero value indicates that signalling was received with the wrong MAC address or a mismatched link name. NB: It is very unusual to detect this, because units with mis-configured Target MAC Address will normally fail to establish a wireless link. However, in rare circumstances a partial wireless link may be established and this situation detected. NB: A non-zero value on start-up, or a change of value during operation, may generate an SNMP trap and/or SMTP email alert')
installArmState = MibScalar((1, 3, 6, 1, 4, 1, 17713, 6, 9, 4), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(0, 1))).clone(namedValues=NamedValues(("disarmed", 0), ("armed", 1)))).setMaxAccess("readonly")
if mibBuilder.loadTexts: installArmState.setStatus('current')
if mibBuilder.loadTexts: installArmState.setDescription('Indicates if the unit is being installed. NB: a change of state may generate an SNMP trap and/or SMTP email alert')
tFTPServerIPAddress = MibScalar((1, 3, 6, 1, 4, 1, 17713, 6, 9, 5), IpAddress()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: tFTPServerIPAddress.setStatus('current')
if mibBuilder.loadTexts: tFTPServerIPAddress.setDescription('IP address of the TFTP Server from which the TFTP Software Upgrade File Name will be retrieved')
tFTPServerPortNumber = MibScalar((1, 3, 6, 1, 4, 1, 17713, 6, 9, 6), Integer32().subtype(subtypeSpec=ValueRangeConstraint(0, 65535))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: tFTPServerPortNumber.setStatus('current')
if mibBuilder.loadTexts: tFTPServerPortNumber.setDescription('The port number of the TFTP Server from which the TFTP Software Upgrade File Name will be retrieved')
tFTPSoftwareUpgradeFileName = MibScalar((1, 3, 6, 1, 4, 1, 17713, 6, 9, 7), DisplayString().subtype(subtypeSpec=ValueSizeConstraint(0, 127))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: tFTPSoftwareUpgradeFileName.setStatus('current')
if mibBuilder.loadTexts: tFTPSoftwareUpgradeFileName.setDescription('Filename of the Software Upgrade to be loaded from the TFTP Server')
tFTPStartSoftwareUpgrade = MibScalar((1, 3, 6, 1, 4, 1, 17713, 6, 9, 8), Integer32()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: tFTPStartSoftwareUpgrade.setStatus('current')
if mibBuilder.loadTexts: tFTPStartSoftwareUpgrade.setDescription("Write '1' to this attribute to start the TFTP software upgrade process. The attribute will be reset to 0 when the upgrade process has finished. Continuing with the software upgrade will cause spectrum management (iDFS) channel metrics collection to stop. The wireless unit will no longer search for the optimal operational radio channel. The performance of this wireless link may deteriorate over time. If operating in a region where RADAR detection is mandatory you must complete the software upgrade and reboot the unit. Channel metrics collection will only be restarted after the wireless unit has been rebooted")
tFTPSoftwareUpgradeStatus = MibScalar((1, 3, 6, 1, 4, 1, 17713, 6, 9, 9), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(0, 1, 2, 3, 4))).clone(namedValues=NamedValues(("idle", 0), ("uploadinprogress", 1), ("uploadsuccessfulprogrammingFLASH", 2), ("upgradesuccessfulreboottorunthenewsoftwareimage", 3), ("upgradefailed", 4)))).setMaxAccess("readonly")
if mibBuilder.loadTexts: tFTPSoftwareUpgradeStatus.setStatus('current')
if mibBuilder.loadTexts: tFTPSoftwareUpgradeStatus.setDescription('The current status of the TFTP Software upgrade process')
tFTPSoftwareUpgradeStatusText = MibScalar((1, 3, 6, 1, 4, 1, 17713, 6, 9, 10), DisplayString().subtype(subtypeSpec=ValueSizeConstraint(0, 127))).setMaxAccess("readonly")
if mibBuilder.loadTexts: tFTPSoftwareUpgradeStatusText.setStatus('current')
if mibBuilder.loadTexts: tFTPSoftwareUpgradeStatusText.setDescription('Descriptive text describing the status of the TFTP Software upgrade process, including any error details')
tFTPSoftwareUpgradeStatusAdditionalText = MibScalar((1, 3, 6, 1, 4, 1, 17713, 6, 9, 11), DisplayString().subtype(subtypeSpec=ValueSizeConstraint(0, 127))).setMaxAccess("readonly")
if mibBuilder.loadTexts: tFTPSoftwareUpgradeStatusAdditionalText.setStatus('current')
if mibBuilder.loadTexts: tFTPSoftwareUpgradeStatusAdditionalText.setDescription('Any additional text describing the status of the TFTP Software upgrade process, including any error details')
hTTPAccessEnabled = MibScalar((1, 3, 6, 1, 4, 1, 17713, 6, 9, 12), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(0, 1))).clone(namedValues=NamedValues(("no", 0), ("yes", 1)))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: hTTPAccessEnabled.setStatus('current')
if mibBuilder.loadTexts: hTTPAccessEnabled.setDescription('This controls whether or not HTTP access is enabled, i.e. if this is disabled (0) then the unit will not respond to any requests on the HTTP port. Remote management via HTTPS is not affected by this setting. Any change in this setting will not take effect until the unit has been rebooted. (Factory default = Enabled)')
telnetAccessEnabled = MibScalar((1, 3, 6, 1, 4, 1, 17713, 6, 9, 13), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(0, 1))).clone(namedValues=NamedValues(("no", 0), ("yes", 1)))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: telnetAccessEnabled.setStatus('current')
if mibBuilder.loadTexts: telnetAccessEnabled.setDescription('This controls whether or not Telnet access is enabled, i.e. if this is disabled (0) then the unit will not respond to any requests on the TELNET port. Any change in this setting will not take effect until the unit has been rebooted. (Factory default = Disabled)')
hTTPPortNumber = MibScalar((1, 3, 6, 1, 4, 1, 17713, 6, 9, 14), Integer32().subtype(subtypeSpec=ValueRangeConstraint(0, 65535))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: hTTPPortNumber.setStatus('current')
if mibBuilder.loadTexts: hTTPPortNumber.setDescription('This controls the port number for HTTP access. A value of zero will use the default port number. Any change in this setting will not take effect until the unit has been rebooted. (Factory default = 80)')
hTTPSPortNumber = MibScalar((1, 3, 6, 1, 4, 1, 17713, 6, 9, 15), Integer32().subtype(subtypeSpec=ValueRangeConstraint(0, 65535))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: hTTPSPortNumber.setStatus('current')
if mibBuilder.loadTexts: hTTPSPortNumber.setDescription('This controls the port number for HTTPS access. A value of zero will use the default port number. Availability of HTTPS is controlled via the License Key. Any change in this setting will not take effect until the unit has been rebooted. (Factory default = 443)')
telnetPortNumber = MibScalar((1, 3, 6, 1, 4, 1, 17713, 6, 9, 16), Integer32().subtype(subtypeSpec=ValueRangeConstraint(0, 65535))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: telnetPortNumber.setStatus('current')
if mibBuilder.loadTexts: telnetPortNumber.setDescription('This controls the port number for TELNET access. A value of zero will use the default port number. Any change in this setting will not take effect until the unit has been rebooted. (Factory default = 23)')
hTTPSAccessEnabled = MibScalar((1, 3, 6, 1, 4, 1, 17713, 6, 9, 17), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(0, 1))).clone(namedValues=NamedValues(("no", 0), ("yes", 1)))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: hTTPSAccessEnabled.setStatus('current')
if mibBuilder.loadTexts: hTTPSAccessEnabled.setDescription('This controls whether or not HTTPS access is enabled, i.e. if this is disabled (0) then the unit will not respond to any requests on the HTTPS port. Remote management via HTTP is not affected by this setting. Any change in this setting will not take effect until the unit has been rebooted. (Factory default = Enabled)')
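# Scalars under 1.3.6.1.4.1.17713.6.10: wireless link configuration
# (link symmetry, traffic optimisation, maximum modulation mode,
# colour codes and automatic transmit power control).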
linkSymmetry = MibScalar((1, 3, 6, 1, 4, 1, 17713, 6, 10, 1), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(0, 1, 2, 3))).clone(namedValues=NamedValues(("symmetryAdaptive", 0), ("symmetry2to1", 1), ("symmetry1to1", 2), ("symmetry1to2", 3)))).setMaxAccess("readonly")
if mibBuilder.loadTexts: linkSymmetry.setStatus('current')
if mibBuilder.loadTexts: linkSymmetry.setDescription('Link Symmetry: in fixed symmetric mode the master spends an equal amount of time transmitting and receiving; in fixed asymmetric modes the master transmit and receive times have a fixed ratio; in adaptive mode the transmit and receive periods adjust with applied load.')
linkModeOptimisation = MibScalar((1, 3, 6, 1, 4, 1, 17713, 6, 10, 2), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(0, 1))).clone(namedValues=NamedValues(("iPTraffic", 0), ("tDMTraffic", 1)))).setMaxAccess("readonly")
if mibBuilder.loadTexts: linkModeOptimisation.setStatus('current')
if mibBuilder.loadTexts: linkModeOptimisation.setDescription("Optimises the wireless link behavior for the type of traffic to be carried. In 'IP Traffic' mode, the wireless link will minimise transmission errors by automatically adapting the modulation mode. In 'TDM Traffic' mode, the wireless link will minimise transmission latency and preserve throughput by maintaining a higher modulation mode in the presence of a limited number of errors. The 'TDM Traffic' mode is recommended for E1/T1 applications and connectionless protocols.")
userConfiguredMaxModulationMode = MibScalar((1, 3, 6, 1, 4, 1, 17713, 6, 10, 3), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(0, 1, 2, 3, 4, 5, 6, 7))).clone(namedValues=NamedValues(("modBpsk63percent", 0), ("modQpsk63percent", 1), ("modQpsk87percent", 2), ("mod16qam63percent", 3), ("mod16qam87percent", 4), ("mod64qam75percent", 5), ("mod64qam92percent", 6), ("mod256qam81percent", 7)))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: userConfiguredMaxModulationMode.setStatus('current')
if mibBuilder.loadTexts: userConfiguredMaxModulationMode.setDescription('The maximum receive modulation mode controls the MAX mode used for adaptive modulation. NOTE: This attribute is automatically disabled when the installation agent is armed. To disarm the installation agent use the installation wizard')
remoteMaximumTransmitPower = MibScalar((1, 3, 6, 1, 4, 1, 17713, 6, 10, 4), Integer32().subtype(subtypeSpec=ValueRangeConstraint(-15, 27))).setMaxAccess("readonly")
if mibBuilder.loadTexts: remoteMaximumTransmitPower.setStatus('current')
if mibBuilder.loadTexts: remoteMaximumTransmitPower.setDescription('Maximum transmit power the remote wireless unit is permitted to use when establishing and maintaining the wireless link')
txColorCode = MibScalar((1, 3, 6, 1, 4, 1, 17713, 6, 10, 5), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(0, 1, 2, 3, 4))).clone(namedValues=NamedValues(("a", 0), ("b", 1), ("c", 2), ("d", 3), ("e", 4)))).setMaxAccess("readonly")
if mibBuilder.loadTexts: txColorCode.setStatus('current')
if mibBuilder.loadTexts: txColorCode.setDescription('Transmit Color Code. This attribute need only be considered when the unit is installed in a network of PTP600 units and where some of the units are operating on the same frequency. In this case, the value would normally be derived by a network planner. In all other cases, it is recommended that this attribute is left at the default value. NOTE: For the link to operate, the value of this parameter must in all cases match the value of the Rx Color Code parameter at the far end of the link.')
rxColorCode = MibScalar((1, 3, 6, 1, 4, 1, 17713, 6, 10, 6), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(0, 1, 2, 3, 4))).clone(namedValues=NamedValues(("a", 0), ("b", 1), ("c", 2), ("d", 3), ("e", 4)))).setMaxAccess("readonly")
if mibBuilder.loadTexts: rxColorCode.setStatus('current')
if mibBuilder.loadTexts: rxColorCode.setDescription('Receive Color Code. This attribute need only be considered when the unit is installed in a network of PTP600 units and where some of the units are operating on the same frequency. In this case, the value would normally be derived by a network planner. In all other cases, it is recommended that this attribute is left at the default value. NOTE: For the link to operate, the value of this parameter must in all cases match the value of the Tx Color Code parameter at the far end of the link.')
automaticTxPowerControl = MibScalar((1, 3, 6, 1, 4, 1, 17713, 6, 10, 8), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(0, 1, 2))).clone(namedValues=NamedValues(("disabled", 0), ("enabled", 1), ("enabledwithinitialestimate", 2)))).setMaxAccess("readonly")
if mibBuilder.loadTexts: automaticTxPowerControl.setStatus('current')
if mibBuilder.loadTexts: automaticTxPowerControl.setDescription('When enabled, the transmit power of the local end is automatically adjusted to maintain a configurable target receive signal level at the remote end of the link. When set to Enable with initial estimate, the Slave estimates the optimal transmit power before it responds to the Master.')
remoteRxTargetPower = MibScalar((1, 3, 6, 1, 4, 1, 17713, 6, 10, 9), Integer32().subtype(subtypeSpec=ValueRangeConstraint(-600, -400))).setMaxAccess("readonly")
if mibBuilder.loadTexts: remoteRxTargetPower.setStatus('current')
if mibBuilder.loadTexts: remoteRxTargetPower.setDescription('This is the target receive level for the end of the link which is remote from the end being configured. When Automatic Power Control is enabled, the transmit power of the local end is automatically adjusted in order to maintain the receive signal level at the remote end within a range centered on this value')
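# Scalars under 1.3.6.1.4.1.17713.6.12: wireless link status
# (receive/transmit power, vector error, range, link loss, active
# channels, modulation modes and operating frequencies).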
receivePower = MibScalar((1, 3, 6, 1, 4, 1, 17713, 6, 12, 1), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: receivePower.setStatus('current')
if mibBuilder.loadTexts: receivePower.setDescription('Receive power expressed in tenths of a dBm')
vectorError = MibScalar((1, 3, 6, 1, 4, 1, 17713, 6, 12, 2), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: vectorError.setStatus('current')
if mibBuilder.loadTexts: vectorError.setDescription("The vector error measurement compares the received signal's IQ modulation characteristics to an ideal signal to determine the composite error vector magnitude. The value represented by this attribute is the average vector error over the previous second expressed in tenths of a dB")
transmitPower = MibScalar((1, 3, 6, 1, 4, 1, 17713, 6, 12, 3), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: transmitPower.setStatus('current')
if mibBuilder.loadTexts: transmitPower.setDescription('Transmit power expressed in tenths of a dBm')
range = MibScalar((1, 3, 6, 1, 4, 1, 17713, 6, 12, 4), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: range.setStatus('current')
if mibBuilder.loadTexts: range.setDescription('Distance between the two peer wireless units expressed in tenths of a kilometer')
linkLoss = MibScalar((1, 3, 6, 1, 4, 1, 17713, 6, 12, 5), Integer32().subtype(subtypeSpec=ValueRangeConstraint(-500, 500))).setMaxAccess("readonly")
if mibBuilder.loadTexts: linkLoss.setStatus('current')
if mibBuilder.loadTexts: linkLoss.setDescription('The wireless link loss expressed in tenths of a dB')
receiveChannel = MibScalar((1, 3, 6, 1, 4, 1, 17713, 6, 12, 6), Integer32().subtype(subtypeSpec=ValueRangeConstraint(0, 63))).setMaxAccess("readonly")
if mibBuilder.loadTexts: receiveChannel.setStatus('current')
if mibBuilder.loadTexts: receiveChannel.setDescription('Current active receive channel')
transmitChannel = MibScalar((1, 3, 6, 1, 4, 1, 17713, 6, 12, 7), Integer32().subtype(subtypeSpec=ValueRangeConstraint(0, 63))).setMaxAccess("readonly")
if mibBuilder.loadTexts: transmitChannel.setStatus('current')
if mibBuilder.loadTexts: transmitChannel.setDescription('Current active transmit channel')
receiveModulationMode = MibScalar((1, 3, 6, 1, 4, 1, 17713, 6, 12, 8), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 20, 21, 22, 23, 24))).clone(namedValues=NamedValues(("modAcquisition", 0), ("modBpsk63percent", 1), ("modQpsk63percentSingle", 2), ("modTransient1", 3), ("modQpsk87percentSingle", 4), ("modTransient2", 5), ("mod16qam63percentSingleA", 6), ("modTransient3", 7), ("mod16qam87percentSingle", 8), ("modTransient4", 9), ("mod64qam75percentSingle", 10), ("modTransient5", 11), ("mod64qam92percentSingle", 12), ("modTransient6", 13), ("mod256qam81percentSingle", 14), ("mod16qam63percentSingleB", 15), ("mod16qam63percentDual", 16), ("modTransient7", 17), ("mod16qam87percentDual", 18), ("modTransient8", 19), ("mod64qam75percentDual", 20), ("modTransient9", 21), ("mod64qam92percentDual", 22), ("modTransient10", 23), ("mod256qam81percentDual", 24)))).setMaxAccess("readonly")
if mibBuilder.loadTexts: receiveModulationMode.setStatus('current')
if mibBuilder.loadTexts: receiveModulationMode.setDescription('Current active receive modulation mode')
transmitModulationMode = MibScalar((1, 3, 6, 1, 4, 1, 17713, 6, 12, 9), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 20, 21, 22, 23, 24))).clone(namedValues=NamedValues(("modAcquisition", 0), ("modBpsk63percent", 1), ("modQpsk63percentSingle", 2), ("modTransient1", 3), ("modQpsk87percentSingle", 4), ("modTransient2", 5), ("mod16qam63percentSingleA", 6), ("modTransient3", 7), ("mod16qam87percentSingle", 8), ("modTransient4", 9), ("mod64qam75percentSingle", 10), ("modTransient5", 11), ("mod64qam92percentSingle", 12), ("modTransient6", 13), ("mod256qam81percentSingle", 14), ("mod16qam63percentSingleB", 15), ("mod16qam63percentDual", 16), ("modTransient7", 17), ("mod16qam87percentDual", 18), ("modTransient8", 19), ("mod64qam75percentDual", 20), ("modTransient9", 21), ("mod64qam92percentDual", 22), ("modTransient10", 23), ("mod256qam81percentDual", 24)))).setMaxAccess("readonly")
if mibBuilder.loadTexts: transmitModulationMode.setStatus('current')
if mibBuilder.loadTexts: transmitModulationMode.setDescription('Current active transmit modulation mode')
receiveFreqMHz = MibScalar((1, 3, 6, 1, 4, 1, 17713, 6, 12, 11), Integer32().subtype(subtypeSpec=ValueRangeConstraint(0, 5925))).setMaxAccess("readonly")
if mibBuilder.loadTexts: receiveFreqMHz.setStatus('current')
if mibBuilder.loadTexts: receiveFreqMHz.setDescription('Current receive frequency expressed in integer MHz')
transmitFreqMHz = MibScalar((1, 3, 6, 1, 4, 1, 17713, 6, 12, 12), Integer32().subtype(subtypeSpec=ValueRangeConstraint(0, 5925))).setMaxAccess("readonly")
if mibBuilder.loadTexts: transmitFreqMHz.setStatus('current')
if mibBuilder.loadTexts: transmitFreqMHz.setDescription('Current transmit frequency expressed in integer MHz')
signalStrengthRatio = MibScalar((1, 3, 6, 1, 4, 1, 17713, 6, 12, 13), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: signalStrengthRatio.setStatus('current')
if mibBuilder.loadTexts: signalStrengthRatio.setDescription('Signal strength ratio (Vertical / Horizontal): the number of dB by which the Vertical antenna input exceeds the Horizontal antenna input, expressed in tenths of a dB')
receiveFreqKHz = MibScalar((1, 3, 6, 1, 4, 1, 17713, 6, 12, 14), Integer32().subtype(subtypeSpec=ValueRangeConstraint(0, 5925000))).setMaxAccess("readonly")
if mibBuilder.loadTexts: receiveFreqKHz.setStatus('current')
if mibBuilder.loadTexts: receiveFreqKHz.setDescription('Current receive frequency expressed in kHz')
transmitFreqKHz = MibScalar((1, 3, 6, 1, 4, 1, 17713, 6, 12, 15), Integer32().subtype(subtypeSpec=ValueRangeConstraint(0, 5925000))).setMaxAccess("readonly")
if mibBuilder.loadTexts: transmitFreqKHz.setStatus('current')
if mibBuilder.loadTexts: transmitFreqKHz.setDescription('Current transmit frequency expressed in kHz')
searchState = MibScalar((1, 3, 6, 1, 4, 1, 17713, 6, 12, 16), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(0, 1, 2))).clone(namedValues=NamedValues(("registering", 0), ("searching", 1), ("acquiring", 2)))).setMaxAccess("readonly")
if mibBuilder.loadTexts: searchState.setStatus('current')
if mibBuilder.loadTexts: searchState.setDescription("Search status of the wireless modem. 'Registering' means that the modem has locked to an OFDM signal, and the wireless link is up. 'Searching' means that no wireless signal has been detected. 'Acquiring' means that a wireless signal has been detected, but the modem has not locked to an OFDM signal.")
rawReceivePower = MibScalar((1, 3, 6, 1, 4, 1, 17713, 6, 12, 17), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: rawReceivePower.setStatus('current')
if mibBuilder.loadTexts: rawReceivePower.setDescription('Raw receive power expressed in tenths of a dBm')
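# Scalars under 1.3.6.1.4.1.17713.6.13: diagnostic alarms
# (calibration, encryption/region-code mismatches, port warnings,
# fiber status, TDD synchronization, secure mode and bridging status).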
unitOutOfCalibration = MibScalar((1, 3, 6, 1, 4, 1, 17713, 6, 13, 1), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(0, 1, 2))).clone(namedValues=NamedValues(("calibrated", 0), ("outOfCalibration", 1), ("outOfCalibrationPAsShutdown", 2)))).setMaxAccess("readonly")
if mibBuilder.loadTexts: unitOutOfCalibration.setStatus('current')
if mibBuilder.loadTexts: unitOutOfCalibration.setDescription('The unit is out of calibration')
encryptionEnabledMismatch = MibScalar((1, 3, 6, 1, 4, 1, 17713, 6, 13, 3), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(0, 1))).clone(namedValues=NamedValues(("ok", 0), ("encryptionEnabledMismatch", 1)))).setMaxAccess("readonly")
if mibBuilder.loadTexts: encryptionEnabledMismatch.setStatus('current')
if mibBuilder.loadTexts: encryptionEnabledMismatch.setDescription('Encryption has been enabled on one end of the wireless link but not the other')
incompatibleRegionCodes = MibScalar((1, 3, 6, 1, 4, 1, 17713, 6, 13, 4), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(0, 1))).clone(namedValues=NamedValues(("ok", 0), ("incompatibleLicenceKeys", 1)))).setMaxAccess("readonly")
if mibBuilder.loadTexts: incompatibleRegionCodes.setStatus('current')
if mibBuilder.loadTexts: incompatibleRegionCodes.setDescription('The master and slave units have incompatible region codes. NB: Both units must have compatible license keys')
noWirelessChannelAvailable = MibScalar((1, 3, 6, 1, 4, 1, 17713, 6, 13, 5), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(0, 1))).clone(namedValues=NamedValues(("ok", 0), ("noWirelessChannelAvailable", 1)))).setMaxAccess("readonly")
if mibBuilder.loadTexts: noWirelessChannelAvailable.setStatus('current')
if mibBuilder.loadTexts: noWirelessChannelAvailable.setDescription('Spectrum Management was unable to locate a suitable wireless channel to operate on')
wirelessLinkDisabledWarning = MibScalar((1, 3, 6, 1, 4, 1, 17713, 6, 13, 6), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(0, 1))).clone(namedValues=NamedValues(("ok", 0), ("disabledBySNMPifAdminStatus", 1)))).setMaxAccess("readonly")
if mibBuilder.loadTexts: wirelessLinkDisabledWarning.setStatus('current')
if mibBuilder.loadTexts: wirelessLinkDisabledWarning.setDescription('The SNMP ifAdminStatus of the wireless interface has disabled wireless traffic')
dataPortDisabledWarning = MibScalar((1, 3, 6, 1, 4, 1, 17713, 6, 13, 7), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(0, 1))).clone(namedValues=NamedValues(("ok", 0), ("disabledBySNMPifAdminStatus", 1)))).setMaxAccess("readonly")
if mibBuilder.loadTexts: dataPortDisabledWarning.setStatus('current')
if mibBuilder.loadTexts: dataPortDisabledWarning.setDescription('The SNMP ifAdminStatus of the Ethernet interface has disabled Ethernet traffic')
dataPortFiberStatus = MibScalar((1, 3, 6, 1, 4, 1, 17713, 6, 13, 8), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(0, 1, 2, 3))).clone(namedValues=NamedValues(("ok", 0), ("installedFiberNotLicensed", 1), ("fiberLinkNotEstablishedButLOSNotDetected", 2), ("fiberLinkNotEstablishedAndLOSDetected", 3)))).setMaxAccess("readonly")
if mibBuilder.loadTexts: dataPortFiberStatus.setStatus('current')
if mibBuilder.loadTexts: dataPortFiberStatus.setDescription('If the fiber link is not OK, there are three possible causes: Either the fiber link has been installed but disabled (because the license key does not include fiber support), the link could not be established even though no LOS was detected (i.e. an optical carrier was detected, which could be due to a broken TX fiber, or because the link is disabled at the fiber link partner), or the link could not be established and LOS is detected (i.e. no optical carrier is detected). NB: a change of status may generate an SNMP trap and/or SMTP email alert')
dataPortConfigurationMismatch = MibScalar((1, 3, 6, 1, 4, 1, 17713, 6, 13, 9), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(0, 1))).clone(namedValues=NamedValues(("noError", 0), ("mismatchDetected", 1)))).setMaxAccess("readonly")
if mibBuilder.loadTexts: dataPortConfigurationMismatch.setStatus('current')
if mibBuilder.loadTexts: dataPortConfigurationMismatch.setDescription('The detection of Ethernet fragments (runt packets) when the link is in full duplex is an indication of an auto-negotiation or forced configuration mismatch')
incompatibleMasterAndSlave = MibScalar((1, 3, 6, 1, 4, 1, 17713, 6, 13, 10), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(0, 1, 2))).clone(namedValues=NamedValues(("ok", 0), ("incompatibleProductVariants", 1), ("differentSoftwareVersionsRunning", 2)))).setMaxAccess("readonly")
if mibBuilder.loadTexts: incompatibleMasterAndSlave.setStatus('current')
if mibBuilder.loadTexts: incompatibleMasterAndSlave.setDescription('A non-zero value indicates that the master and slave ends of a link are different hardware products, or have different software versions. NB: It is very unusual to detect this, because incompatible units will normally fail to establish a wireless link. However, some combinations may establish a partial wireless link and detect this situation. NB: A non-zero value may generate an SNMP trap and/or SMTP email alert')
tDDSynchronizationStatus = MibScalar((1, 3, 6, 1, 4, 1, 17713, 6, 13, 11), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(0, 1, 2, 3, 4, 5, 6, 7, 8))).clone(namedValues=NamedValues(("locked", 0), ("holdover", 1), ("holdoverNotConnected", 2), ("acquiringLock", 3), ("noTimingReference", 4), ("timingSystemNotConnected", 5), ("initialising", 6), ("clusterTimingMaster", 7), ("tDDSyncNotActive", 8)))).setMaxAccess("readonly")
if mibBuilder.loadTexts: tDDSynchronizationStatus.setStatus('current')
if mibBuilder.loadTexts: tDDSynchronizationStatus.setDescription('An alarm value (displayed in red) indicates that one of the following conditions has not been met (i) An external timing system is connected or internal timing has been configured. (ii) The timing system is providing a useable reference. (iii) The ODU is locked to this reference.')
managementPortDisabledWarning = MibScalar((1, 3, 6, 1, 4, 1, 17713, 6, 13, 12), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(0, 1))).clone(namedValues=NamedValues(("ok", 0), ("disabledBySNMPifAdminStatus", 1)))).setMaxAccess("readonly")
if mibBuilder.loadTexts: managementPortDisabledWarning.setStatus('current')
if mibBuilder.loadTexts: managementPortDisabledWarning.setDescription('The SNMP ifAdminStatus of the out-of-band management interface has disabled Ethernet traffic')
tDDSynchronizationAlarm = MibScalar((1, 3, 6, 1, 4, 1, 17713, 6, 13, 13), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(0, 1, 2))).clone(namedValues=NamedValues(("ok", 0), ("notSynchronized", 1), ("timingSystemFailure", 2)))).setMaxAccess("readonly")
if mibBuilder.loadTexts: tDDSynchronizationAlarm.setStatus('current')
if mibBuilder.loadTexts: tDDSynchronizationAlarm.setDescription('Not Synchronized indicates failure of the external timing system for a period greater than the holdover timer. TDD frame timing will have drifted, and this link may cause interference with other links in a synchronized network. Timing System Failure is an early warning, indicating that 80% of the holdover period has expired.')
linkModeOptimizationMismatch = MibScalar((1, 3, 6, 1, 4, 1, 17713, 6, 13, 14), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(0, 1))).clone(namedValues=NamedValues(("ok", 0), ("linkModeOptimizationMismatch", 1)))).setMaxAccess("readonly")
if mibBuilder.loadTexts: linkModeOptimizationMismatch.setStatus('current')
if mibBuilder.loadTexts: linkModeOptimizationMismatch.setDescription('The remote node of this link has been configured with a different link mode optimization')
managementPortConfigurationMismatch = MibScalar((1, 3, 6, 1, 4, 1, 17713, 6, 13, 15), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(0, 1))).clone(namedValues=NamedValues(("noError", 0), ("mismatchDetected", 1)))).setMaxAccess("readonly")
if mibBuilder.loadTexts: managementPortConfigurationMismatch.setStatus('current')
if mibBuilder.loadTexts: managementPortConfigurationMismatch.setDescription('The detection of Ethernet fragments (runt packets) when the link is in full duplex is an indication of an auto-negotiation or forced configuration mismatch. This alarm is only raised if an Out-of-Band Management Mode has been configured.')
secureModeAlarm = MibScalar((1, 3, 6, 1, 4, 1, 17713, 6, 13, 16), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(0, 1, 2, 3))).clone(namedValues=NamedValues(("secureModeIsActive", 0), ("secureModeIsNotConfigured", 1), ("secureModeIsConfiguredButNotActive", 2), ("secureModeIsNotSupported", 3)))).setMaxAccess("readonly")
if mibBuilder.loadTexts: secureModeAlarm.setStatus('current')
if mibBuilder.loadTexts: secureModeAlarm.setDescription('The secure mode (e.g. FIPS, UC-APL) state of the unit. The secure mode is not configured(1) state indicates that the unit is capable of secure mode operation, and one or more of the following security materials has not been configured: Key Of Keys, Private Key, Public Certificate, DRNG Entropy Status, Wireless Encryption Key. The secure mode is configured but not active(2) state indicates that the unit is capable of secure mode operation, and the security material has been configured, but the configuration of interfaces is not consistent with secure mode operation. The secure mode is not supported(3) state indicates that the unit is not capable of secure mode operation. The secure mode capability requires appropriate hardware compatibility, license key and software image.')
ethernetBridgingStatusAlarm = MibScalar((1, 3, 6, 1, 4, 1, 17713, 6, 13, 17), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(0, 1))).clone(namedValues=NamedValues(("ethernetBridgingEnabled", 0), ("ethernetBridgingDisabled", 1)))).setMaxAccess("readonly")
if mibBuilder.loadTexts: ethernetBridgingStatusAlarm.setStatus('current')
if mibBuilder.loadTexts: ethernetBridgingStatusAlarm.setDescription('Ethernet traffic bridging is disabled because the wireless link is not operating at the configured Lowest Ethernet Modulation Mode')
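# Scalars under 1.3.6.1.4.1.17713.6.15: SMTP email alerts
# (server address/port, source and destination addresses, enabled messages).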
sMTPEmailAlert = MibScalar((1, 3, 6, 1, 4, 1, 17713, 6, 15, 1), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(0, 1))).clone(namedValues=NamedValues(("disabled", 0), ("enabled", 1)))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: sMTPEmailAlert.setStatus('current')
if mibBuilder.loadTexts: sMTPEmailAlert.setDescription('Simple Mail Transfer Protocol is used to send equipment alerts via email to a specified email address')
sMTPServerIPAddress = MibScalar((1, 3, 6, 1, 4, 1, 17713, 6, 15, 2), IpAddress()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: sMTPServerIPAddress.setStatus('current')
if mibBuilder.loadTexts: sMTPServerIPAddress.setDescription('IP address of the SMTP server')
sMTPServerPortNumber = MibScalar((1, 3, 6, 1, 4, 1, 17713, 6, 15, 3), Integer32().subtype(subtypeSpec=ValueRangeConstraint(0, 65535))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: sMTPServerPortNumber.setStatus('current')
if mibBuilder.loadTexts: sMTPServerPortNumber.setDescription('Port number of the SMTP server')
sMTPSourceEmailAddress = MibScalar((1, 3, 6, 1, 4, 1, 17713, 6, 15, 4), DisplayString().subtype(subtypeSpec=ValueSizeConstraint(0, 63))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: sMTPSourceEmailAddress.setStatus('current')
if mibBuilder.loadTexts: sMTPSourceEmailAddress.setDescription("The 'from' email address used when constructing the automatically generated e-mail message")
sMTPDestinationEmailAddress = MibScalar((1, 3, 6, 1, 4, 1, 17713, 6, 15, 5), DisplayString().subtype(subtypeSpec=ValueSizeConstraint(0, 63))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: sMTPDestinationEmailAddress.setStatus('current')
if mibBuilder.loadTexts: sMTPDestinationEmailAddress.setDescription("The 'to' email address used when constructing the automatically generated e-mail message")
sMTPEnabledMessages = MibScalar((1, 3, 6, 1, 4, 1, 17713, 6, 15, 6), Bits().clone(namedValues=NamedValues(("telecomsChannelUpDown", 1), ("managementPortUpDown", 2), ("dataPortUpDown", 3), ("enabledDiagnosticAlarms", 4), ("dFSImpulseInterference", 5), ("dFSChannelChange", 6), ("wirelessLinkUpDown", 7)))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: sMTPEnabledMessages.setStatus('current')
if mibBuilder.loadTexts: sMTPEnabledMessages.setDescription('This controls which SMTP messages the unit will send')
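# Scalars under 1.3.6.1.4.1.17713.6.16: SNMP agent configuration
# (port, community string, trap version, enabled traps and diagnostic
# alarms), followed by the trap destination table columns.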
sNMPPortNumber = MibScalar((1, 3, 6, 1, 4, 1, 17713, 6, 16, 1), Integer32().subtype(subtypeSpec=ValueRangeConstraint(0, 65535))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: sNMPPortNumber.setStatus('current')
if mibBuilder.loadTexts: sNMPPortNumber.setDescription('The IP port number used to access the SNMP MIB (i.e. Gets and Sets) (default = 161). NB: A system reboot is required to activate changes to this attribute')
sNMPCommunityString = MibScalar((1, 3, 6, 1, 4, 1, 17713, 6, 16, 2), DisplayString().subtype(subtypeSpec=ValueSizeConstraint(0, 63))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: sNMPCommunityString.setStatus('current')
if mibBuilder.loadTexts: sNMPCommunityString.setDescription('The SNMP community string. NB: A system reboot is required to activate changes to this attribute')
sNMPTrapTableNumber = MibScalar((1, 3, 6, 1, 4, 1, 17713, 6, 16, 3), Integer32().subtype(subtypeSpec=ValueRangeConstraint(2, 2))).setMaxAccess("readonly")
if mibBuilder.loadTexts: sNMPTrapTableNumber.setStatus('current')
if mibBuilder.loadTexts: sNMPTrapTableNumber.setDescription('Number of entries in the sNMPTrapTable.')
sNMPTrapVersion = MibScalar((1, 3, 6, 1, 4, 1, 17713, 6, 16, 5), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(0, 1))).clone(namedValues=NamedValues(("v1", 0), ("v2c", 1)))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: sNMPTrapVersion.setStatus('current')
if mibBuilder.loadTexts: sNMPTrapVersion.setDescription('The SNMP protocol version to use for SNMP Traps. NB: A system reboot is required to activate changes to this attribute')
sNMPEnabledTraps = MibScalar((1, 3, 6, 1, 4, 1, 17713, 6, 16, 6), Bits().clone(namedValues=NamedValues(("managementPortUpDown", 0), ("dataPortUpDown", 1), ("authenticationFailure", 2), ("enabledDiagnosticAlarms", 3), ("dFSImpulseInterference", 4), ("dFSChannelChange", 5), ("wirelessLinkUpDown", 6), ("coldStart", 7), ("telecomsChannelUpDown", 15)))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: sNMPEnabledTraps.setStatus('current')
if mibBuilder.loadTexts: sNMPEnabledTraps.setDescription('This controls which SNMP Traps the unit will send')
enabledDiagnosticAlarms = MibScalar((1, 3, 6, 1, 4, 1, 17713, 6, 16, 7), Bits().clone(namedValues=NamedValues(("incompatibleMasterAndSlave", 0), ("incompatibleRegionCodes", 1), ("unknown6", 2), ("unknown5", 3), ("unitOutOfCalibration", 4), ("installArmState", 5), ("installStatus", 6), ("regionCode", 7), ("telecomsChannelAStatus", 8), ("dataPortFiberStatus", 9), ("dataPortStatus", 10), ("dataPortDisabledWarning", 11), ("wirelessLinkDisabledWarning", 12), ("sNTPSyncronisationFailed", 13), ("noWirelessChannelAvailable", 14), ("dataPortConfigurationMismatch", 15), ("syslogDisabledWarning", 16), ("linkModeOptimizationMismatch", 17), ("managementPortStatus", 18), ("managementPortDisabledWarning", 19), ("tDDSynchronizationAlarm", 20), ("telecomsChannelBLoopback", 21), ("telecomsChannelALoopback", 22), ("telecomsChannelBStatus", 23), ("ethernetBridgingStatus", 26), ("secureMode", 27), ("syslogClientDisabledWarning", 28), ("managementPortConfigurationMismatch", 29), ("syslogLocalWrapped", 30), ("syslogLocalNearlyFull", 31)))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: enabledDiagnosticAlarms.setStatus('current')
if mibBuilder.loadTexts: enabledDiagnosticAlarms.setDescription("Controls which individual diagnostic alarms are enabled. The unit may be configured to generate SNMP traps and/or SMTP email alerts for these enabled alarms by selecting 'Enabled Diagnostic Alarms' on the 'SNMP Enabled Traps' and/or 'SMTP Enabled Messages' attributes")
sNMPTrapTableIndex = MibTableColumn((1, 3, 6, 1, 4, 1, 17713, 6, 16, 4, 1, 1), Integer32().subtype(subtypeSpec=ValueRangeConstraint(1, 2)))
if mibBuilder.loadTexts: sNMPTrapTableIndex.setStatus('current')
if mibBuilder.loadTexts: sNMPTrapTableIndex.setDescription('SNMP trap value, used to index the SNMPTrapTable.')
sNMPTrapIPAddress = MibTableColumn((1, 3, 6, 1, 4, 1, 17713, 6, 16, 4, 1, 2), IpAddress()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: sNMPTrapIPAddress.setStatus('current')
if mibBuilder.loadTexts: sNMPTrapIPAddress.setDescription('The IP address to which all SNMP Traps are sent. An IP Address of 0.0.0.0 disables all TRAP generation. NB: A system reboot is required to activate changes to this attribute.')
sNMPTrapPortNumber = MibTableColumn((1, 3, 6, 1, 4, 1, 17713, 6, 16, 4, 1, 3), Integer32().subtype(subtypeSpec=ValueRangeConstraint(0, 65535))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: sNMPTrapPortNumber.setStatus('current')
if mibBuilder.loadTexts: sNMPTrapPortNumber.setDescription('Destination port for SNMP Traps (default=162). A value of 0 will disable the trap receiver. NB: A system reboot is required to activate changes to this attribute.')
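# Scalars under 1.3.6.1.4.1.17713.6.17: SNTP and local time configuration
# (state, polling, time zone, daylight saving), followed by the SNTP
# server table columns.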
sNTPState = MibScalar((1, 3, 6, 1, 4, 1, 17713, 6, 17, 1), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(0, 1))).clone(namedValues=NamedValues(("disabled", 0), ("enabled", 1)))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: sNTPState.setStatus('current')
if mibBuilder.loadTexts: sNTPState.setDescription('SNTP control state')
sNTPPollInterval = MibScalar((1, 3, 6, 1, 4, 1, 17713, 6, 17, 4), Integer32().subtype(subtypeSpec=ValueRangeConstraint(60, 43200))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: sNTPPollInterval.setStatus('current')
if mibBuilder.loadTexts: sNTPPollInterval.setDescription('The SNTP server polling interval')
sNTPSync = MibScalar((1, 3, 6, 1, 4, 1, 17713, 6, 17, 5), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(0, 1))).clone(namedValues=NamedValues(("noSync", 0), ("inSync", 1)))).setMaxAccess("readonly")
if mibBuilder.loadTexts: sNTPSync.setStatus('current')
if mibBuilder.loadTexts: sNTPSync.setDescription('If SNTP Sync fails then check the server settings in the Remote Management page, or disable SNTP')
sNTPLastSync = MibScalar((1, 3, 6, 1, 4, 1, 17713, 6, 17, 6), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: sNTPLastSync.setStatus('current')
if mibBuilder.loadTexts: sNTPLastSync.setDescription('Last SNTP sync time')
systemClock = MibScalar((1, 3, 6, 1, 4, 1, 17713, 6, 17, 7), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: systemClock.setStatus('current')
if mibBuilder.loadTexts: systemClock.setDescription('System clock presented as local time')
timeZone = MibScalar((1, 3, 6, 1, 4, 1, 17713, 6, 17, 8), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 20, 21, 22, 23, 24, 25, 26, 27, 28, 29, 30, 31, 32, 33, 34, 35, 36, 37, 38, 39, 40, 41, 42, 43, 44, 45, 46, 47, 48, 49, 50))).clone(namedValues=NamedValues(("gmtMinus1200", 0), ("gmtMinus1130", 1), ("gmtMinus1100", 2), ("gmtMinus1030", 3), ("gmtMinus1000", 4), ("gmtMinus0930", 5), ("gmtMinus0900", 6), ("gmtMinus0830", 7), ("gmtMinus0800", 8), ("gmtMinus0730", 9), ("gmtMinus0700", 10), ("gmtMinus0630", 11), ("gmtMinus0600", 12), ("gmtMinus0530", 13), ("gmtMinus0500", 14), ("gmtMinus0430", 15), ("gmtMinus0400", 16), ("gmtMinus0330", 17), ("gmtMinus0300", 18), ("gmtMinus0230", 19), ("gmtMinus0200", 20), ("gmtMinus0130", 21), ("gmtMinus0100", 22), ("gmtMinus0030", 23), ("gmtZero", 24), ("gmtPlus0030", 25), ("gmtPlus0100", 26), ("gmtPlus0130", 27), ("gmtPlus0200", 28), ("gmtPlus0230", 29), ("gmtPlus0300", 30), ("gmtPlus0330", 31), ("gmtPlus0400", 32), ("gmtPlus0430", 33), ("gmtPlus0500", 34), ("gmtPlus0530", 35), ("gmtPlus0600", 36), ("gmtPlus0630", 37), ("gmtPlus0700", 38), ("gmtPlus0730", 39), ("gmtPlus0800", 40), ("gmtPlus0830", 41), ("gmtPlus0900", 42), ("gmtPlus0930", 43), ("gmtPlus1000", 44), ("gmtPlus1030", 45), ("gmtPlus1100", 46), ("gmtPlus1130", 47), ("gmtPlus1200", 48), ("gmtPlus1230", 49), ("gmtPlus1300", 50)))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: timeZone.setStatus('current')
if mibBuilder.loadTexts: timeZone.setDescription('Time zone offsets from Greenwich Mean Time (GMT)')
daylightSaving = MibScalar((1, 3, 6, 1, 4, 1, 17713, 6, 17, 9), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(0, 1))).clone(namedValues=NamedValues(("disabled", 0), ("enabled", 1)))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: daylightSaving.setStatus('current')
if mibBuilder.loadTexts: daylightSaving.setDescription('Daylight Saving Time')
sNTPPrimaryServer = MibScalar((1, 3, 6, 1, 4, 1, 17713, 6, 17, 10), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(0, 1))).clone(namedValues=NamedValues(("server1", 0), ("server2", 1)))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: sNTPPrimaryServer.setStatus('current')
if mibBuilder.loadTexts: sNTPPrimaryServer.setDescription('Specifies the primary SNTP server, determining the order in which the servers are tried.')
sNTPPrimaryServerDeadTime = MibScalar((1, 3, 6, 1, 4, 1, 17713, 6, 17, 11), Integer32().subtype(subtypeSpec=ValueRangeConstraint(0, 86400))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: sNTPPrimaryServerDeadTime.setStatus('current')
if mibBuilder.loadTexts: sNTPPrimaryServerDeadTime.setDescription('Time (in seconds) to wait before retrying communications with an unresponsive primary SNTP server. Setting the value to 0 disables the timer.')
sNTPServerRetries = MibScalar((1, 3, 6, 1, 4, 1, 17713, 6, 17, 12), Integer32().subtype(subtypeSpec=ValueRangeConstraint(0, 10))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: sNTPServerRetries.setStatus('current')
if mibBuilder.loadTexts: sNTPServerRetries.setDescription('Number of times the PTP will retry after an SNTP server fails to respond.')
sNTPServerTimeout = MibScalar((1, 3, 6, 1, 4, 1, 17713, 6, 17, 13), Integer32().subtype(subtypeSpec=ValueRangeConstraint(1, 15))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: sNTPServerTimeout.setStatus('current')
if mibBuilder.loadTexts: sNTPServerTimeout.setDescription('Time (in seconds) the PTP will wait for a response from an SNTP server.')
sNTPServerTableNumber = MibScalar((1, 3, 6, 1, 4, 1, 17713, 6, 17, 14), Integer32().subtype(subtypeSpec=ValueRangeConstraint(2, 2))).setMaxAccess("readonly")
if mibBuilder.loadTexts: sNTPServerTableNumber.setStatus('current')
if mibBuilder.loadTexts: sNTPServerTableNumber.setDescription('Number of entries in the SNTPServerTable.')
sNTPServerTableIndex = MibTableColumn((1, 3, 6, 1, 4, 1, 17713, 6, 17, 15, 1, 1), Integer32().subtype(subtypeSpec=ValueRangeConstraint(1, 2)))
if mibBuilder.loadTexts: sNTPServerTableIndex.setStatus('current')
if mibBuilder.loadTexts: sNTPServerTableIndex.setDescription('Index range for the SNTPServerTable.')
sNTPServerIPAddress = MibTableColumn((1, 3, 6, 1, 4, 1, 17713, 6, 17, 15, 1, 2), IpAddress()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: sNTPServerIPAddress.setStatus('current')
if mibBuilder.loadTexts: sNTPServerIPAddress.setDescription('The IP address of a valid SNTP server')
sNTPServerPortNumber = MibTableColumn((1, 3, 6, 1, 4, 1, 17713, 6, 17, 15, 1, 3), Integer32().subtype(subtypeSpec=ValueRangeConstraint(0, 65535))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: sNTPServerPortNumber.setStatus('current')
if mibBuilder.loadTexts: sNTPServerPortNumber.setDescription('The IP port number of the SNTP server. Defaults to port 123.')
sNTPServerStatus = MibTableColumn((1, 3, 6, 1, 4, 1, 17713, 6, 17, 15, 1, 4), DisplayString().subtype(subtypeSpec=ValueSizeConstraint(0, 127))).setMaxAccess("readonly")
if mibBuilder.loadTexts: sNTPServerStatus.setStatus('current')
if mibBuilder.loadTexts: sNTPServerStatus.setDescription('Status message reflecting the state of communications with the SNTP server')
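# Scalar under 1.3.6.1.4.1.17713.6.18: system reboot control.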
systemReset = MibScalar((1, 3, 6, 1, 4, 1, 17713, 6, 18, 1), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(0, 1))).clone(namedValues=NamedValues(("running", 0), ("consoleReboot", 1)))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: systemReset.setStatus('current')
if mibBuilder.loadTexts: systemReset.setDescription("Setting this attribute to '1' will cause a system reboot. NB: a system reboot will apply any pending new settings held in memory")
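# Hedged usage sketch (illustration only; kept commented out so nothing
# executes when this generated MIB module is loaded): one way a manager
# *might* trigger the reboot described above by writing 1 to the
# systemReset instance. The host address and community string below are
# assumed values, not part of this MIB.
#
#   from pysnmp.hlapi import (SnmpEngine, CommunityData, UdpTransportTarget,
#                             ContextData, ObjectType, ObjectIdentity,
#                             Integer32, setCmd)
#
#   next(setCmd(SnmpEngine(),
#               CommunityData('private'),                # assumed write community
#               UdpTransportTarget(('192.0.2.1', 161)),  # assumed ODU address
#               ContextData(),
#               ObjectType(ObjectIdentity('1.3.6.1.4.1.17713.6.18.1.0'),
#                          Integer32(1))))
#
# Scalars under 1.3.6.1.4.1.17713.6.19: software, hardware and boot versions.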
softwareVersion = MibScalar((1, 3, 6, 1, 4, 1, 17713, 6, 19, 1), DisplayString().subtype(subtypeSpec=ValueSizeConstraint(0, 63))).setMaxAccess("readonly")
if mibBuilder.loadTexts: softwareVersion.setStatus('current')
if mibBuilder.loadTexts: softwareVersion.setDescription('Current software version')
hardwareVersion = MibScalar((1, 3, 6, 1, 4, 1, 17713, 6, 19, 2), DisplayString().subtype(subtypeSpec=ValueSizeConstraint(0, 63))).setMaxAccess("readonly")
if mibBuilder.loadTexts: hardwareVersion.setStatus('current')
if mibBuilder.loadTexts: hardwareVersion.setDescription('Hardware platform version')
secondarySoftwareVersion = MibScalar((1, 3, 6, 1, 4, 1, 17713, 6, 19, 3), DisplayString().subtype(subtypeSpec=ValueSizeConstraint(0, 63))).setMaxAccess("readonly")
if mibBuilder.loadTexts: secondarySoftwareVersion.setStatus('current')
if mibBuilder.loadTexts: secondarySoftwareVersion.setDescription('Secondary software version, used when the primary software image is invalid or erased')
bootVersion = MibScalar((1, 3, 6, 1, 4, 1, 17713, 6, 19, 4), DisplayString().subtype(subtypeSpec=ValueSizeConstraint(0, 63))).setMaxAccess("readonly")
if mibBuilder.loadTexts: bootVersion.setStatus('current')
if mibBuilder.loadTexts: bootVersion.setDescription('Boot code software version')
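# Scalars under 1.3.6.1.4.1.17713.6.20: link statistics
# (data rates, link availability, wireless link status, byte error ratio,
# modulation mode detail and Ethernet bridging availability).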
receiveDataRate = MibScalar((1, 3, 6, 1, 4, 1, 17713, 6, 20, 1), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: receiveDataRate.setStatus('current')
if mibBuilder.loadTexts: receiveDataRate.setDescription('Average data rate over the last one second interval (kbps)')
transmitDataRate = MibScalar((1, 3, 6, 1, 4, 1, 17713, 6, 20, 2), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: transmitDataRate.setStatus('current')
if mibBuilder.loadTexts: transmitDataRate.setDescription('Average data rate over the last one second interval (kbps)')
aggregateDataRate = MibScalar((1, 3, 6, 1, 4, 1, 17713, 6, 20, 3), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: aggregateDataRate.setStatus('current')
if mibBuilder.loadTexts: aggregateDataRate.setDescription('Average data rate over the last one second interval (kbps)')
wirelessLinkAvailability = MibScalar((1, 3, 6, 1, 4, 1, 17713, 6, 20, 4), Integer32().subtype(subtypeSpec=ValueRangeConstraint(0, 1000000))).setMaxAccess("readonly")
if mibBuilder.loadTexts: wirelessLinkAvailability.setStatus('current')
if mibBuilder.loadTexts: wirelessLinkAvailability.setDescription('Link availability calculated since the last reset of the system counters, as a percentage multiplied by 10000 to give four decimal places of precision')
wirelessLinkStatus = MibScalar((1, 3, 6, 1, 4, 1, 17713, 6, 20, 5), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(0, 1, 2, 3, 4, 5))).clone(namedValues=NamedValues(("up", 0), ("registering", 1), ("searching", 2), ("acquiring", 3), ("radarCAC", 4), ("initialising", 5)))).setMaxAccess("readonly")
if mibBuilder.loadTexts: wirelessLinkStatus.setStatus('current')
if mibBuilder.loadTexts: wirelessLinkStatus.setDescription('Current status of the wireless link')
byteErrorRatio = MibScalar((1, 3, 6, 1, 4, 1, 17713, 6, 20, 6), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: byteErrorRatio.setStatus('current')
if mibBuilder.loadTexts: byteErrorRatio.setDescription('Reciprocal of the byte error ratio calculated since the last reset of the system counters')
receiveModulationModeDetail = MibScalar((1, 3, 6, 1, 4, 1, 17713, 6, 20, 7), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(0, 1, 2, 3, 4, 5, 6, 7, 8))).clone(namedValues=NamedValues(("runningAtMaximumReceiveMode", 0), ("runningAtUserConfiguredMaxModulationMode", 1), ("restrictedBecauseInstallationIsArmed", 2), ("restrictedBecauseOfByteErrorsOnTheWirelessLink", 3), ("restrictedBecauseTheLinkParametersAreUpdating", 4), ("restrictedBecauseDFSchannelChangeIsInProgress", 5), ("restrictedDueToTheLowEthernetLinkSpeed", 6), ("runningAtMaximumReceiveModeForChannelBandwidth", 7), ("limitedByTheWirelessConditions", 8)))).setMaxAccess("readonly")
if mibBuilder.loadTexts: receiveModulationModeDetail.setStatus('current')
if mibBuilder.loadTexts: receiveModulationModeDetail.setDescription('The reason for the current receive modulation mode')
ethernetBridgingAvailability = MibScalar((1, 3, 6, 1, 4, 1, 17713, 6, 20, 8), Integer32().subtype(subtypeSpec=ValueRangeConstraint(0, 1000000))).setMaxAccess("readonly")
if mibBuilder.loadTexts: ethernetBridgingAvailability.setStatus('current')
if mibBuilder.loadTexts: ethernetBridgingAvailability.setDescription('Link availability bridging Ethernet traffic calculated since the last reset of the system counters, as a percentage multiplied by 10000 to give four decimal places of precision')
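# Hedged usage sketch (illustration only; kept commented out so nothing
# executes when this generated MIB module is loaded): one way a manager
# *might* poll the link statistics defined above with the pysnmp
# high-level API. The host address and community string below are
# assumed values, not part of this MIB.
#
#   from pysnmp.hlapi import (SnmpEngine, CommunityData, UdpTransportTarget,
#                             ContextData, ObjectType, ObjectIdentity, getCmd)
#
#   errInd, errStat, errIdx, varBinds = next(getCmd(
#       SnmpEngine(),
#       CommunityData('public'),                 # assumed read community
#       UdpTransportTarget(('192.0.2.1', 161)),  # assumed ODU address
#       ContextData(),
#       ObjectType(ObjectIdentity('1.3.6.1.4.1.17713.6.20.1.0')),   # receiveDataRate
#       ObjectType(ObjectIdentity('1.3.6.1.4.1.17713.6.20.4.0'))))  # wirelessLinkAvailability
#   if not errInd and not errStat:
#       for name, value in varBinds:
#           print(name.prettyPrint(), '=', value.prettyPrint())
#
# Scalar under 1.3.6.1.4.1.17713.6.22: wireless link encryption algorithm.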
encryptionAlgorithm = MibScalar((1, 3, 6, 1, 4, 1, 17713, 6, 22, 1), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(0, 1, 2))).clone(namedValues=NamedValues(("none", 0), ("aESRijndael", 1), ("aES256bitRijndael", 2)))).setMaxAccess("readonly")
if mibBuilder.loadTexts: encryptionAlgorithm.setStatus('current')
if mibBuilder.loadTexts: encryptionAlgorithm.setDescription('The Encryption Algorithm used by the wireless link. NB: A system reboot is required to activate changes to this attribute. NB: The same Encryption Algorithm must be configured at BOTH ends of the link for it to function correctly')
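# Scalar under 1.3.6.1.4.1.17713.6.23: TDD synchronization mode.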
tDDSynchronizationMode = MibScalar((1, 3, 6, 1, 4, 1, 17713, 6, 23, 1), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(0, 1))).clone(namedValues=NamedValues(("disabled", 0), ("enabled", 1)))).setMaxAccess("readonly")
if mibBuilder.loadTexts: tDDSynchronizationMode.setStatus('current')
if mibBuilder.loadTexts: tDDSynchronizationMode.setDescription('TDD Synchronization Mode selection')
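# Scalars under 1.3.6.1.4.1.17713.6.24: syslog client control and state.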
syslogClient = MibScalar((1, 3, 6, 1, 4, 1, 17713, 6, 24, 1), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(0, 1))).clone(namedValues=NamedValues(("disabled", 0), ("enabled", 1)))).setMaxAccess("readonly")
if mibBuilder.loadTexts: syslogClient.setStatus('current')
if mibBuilder.loadTexts: syslogClient.setDescription('Enable or disable the syslog client')
syslogState = MibScalar((1, 3, 6, 1, 4, 1, 17713, 6, 24, 2), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(0, 1))).clone(namedValues=NamedValues(("disabled", 0), ("enabled", 1)))).setMaxAccess("readonly")
if mibBuilder.loadTexts: syslogState.setStatus('current')
if mibBuilder.loadTexts: syslogState.setDescription('The syslog service has been enabled or disabled')
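# Scalars under 1.3.6.1.4.1.17713.6.96: supplementary location data
# (longitude, latitude, altitude) stored for the SNMP manager only.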
longitude = MibScalar((1, 3, 6, 1, 4, 1, 17713, 6, 96, 1), DisplayString().subtype(subtypeSpec=ValueSizeConstraint(0, 19))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: longitude.setStatus('current')
if mibBuilder.loadTexts: longitude.setDescription('The longitude of the unit, measured in decimal degrees. This object is set by the SNMP manager and has no internal function.')
latitude = MibScalar((1, 3, 6, 1, 4, 1, 17713, 6, 96, 2), DisplayString().subtype(subtypeSpec=ValueSizeConstraint(0, 19))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: latitude.setStatus('current')
if mibBuilder.loadTexts: latitude.setDescription('The latitude of the unit, measured in decimal degrees. This object is set by the SNMP manager and has no internal function.')
altitude = MibScalar((1, 3, 6, 1, 4, 1, 17713, 6, 96, 3), Integer32()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: altitude.setStatus('current')
if mibBuilder.loadTexts: altitude.setDescription('The altitude of the unit, measured in metres. This object is set by the SNMP manager and has no internal function.')
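# Notification (trap) definitions under 1.3.6.1.4.1.17713.6.99.0; most
# NotificationTypes bind the alarm or status scalar reported in the trap
# varbinds via setObjects.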
dfsChannelChangeTrap = NotificationType((1, 3, 6, 1, 4, 1, 17713, 6, 99, 0, 1)).setObjects(("CAMBIUM-PTP600-MIB", "receiveChannel"))
if mibBuilder.loadTexts: dfsChannelChangeTrap.setStatus('current')
if mibBuilder.loadTexts: dfsChannelChangeTrap.setDescription('DFS channel change event')
dfsImpulsiveInterferenceTrap = NotificationType((1, 3, 6, 1, 4, 1, 17713, 6, 99, 0, 2)).setObjects(("CAMBIUM-PTP600-MIB", "receiveChannel"))
if mibBuilder.loadTexts: dfsImpulsiveInterferenceTrap.setStatus('current')
if mibBuilder.loadTexts: dfsImpulsiveInterferenceTrap.setDescription('DFS impulsive interference detected event')
dataPortStatusTrap = NotificationType((1, 3, 6, 1, 4, 1, 17713, 6, 99, 0, 3)).setObjects(("CAMBIUM-PTP600-MIB", "dataPortStatus"))
if mibBuilder.loadTexts: dataPortStatusTrap.setStatus('current')
if mibBuilder.loadTexts: dataPortStatusTrap.setDescription('Current status of the Ethernet link. NB: a change of state may generate an SNMP trap and/or SMTP email alert')
telecomsChannelAStatusTrap = NotificationType((1, 3, 6, 1, 4, 1, 17713, 6, 99, 0, 4)).setObjects(("CAMBIUM-PTP600-MIB", "telecomsChannelAStatus"))
if mibBuilder.loadTexts: telecomsChannelAStatusTrap.setStatus('current')
if mibBuilder.loadTexts: telecomsChannelAStatusTrap.setDescription('Current status of telecoms channel A. NB: a change of state may generate an SNMP trap and/or SMTP email alert')
telecomsChannelBStatusTrap = NotificationType((1, 3, 6, 1, 4, 1, 17713, 6, 99, 0, 5)).setObjects(("CAMBIUM-PTP600-MIB", "telecomsChannelBStatus"))
if mibBuilder.loadTexts: telecomsChannelBStatusTrap.setStatus('current')
if mibBuilder.loadTexts: telecomsChannelBStatusTrap.setDescription('Current status of telecoms channel B. NB: a change of state may generate an SNMP trap and/or SMTP email alert')
channelALoopbackTrap = NotificationType((1, 3, 6, 1, 4, 1, 17713, 6, 99, 0, 6)).setObjects(("CAMBIUM-PTP600-MIB", "channelALoopback"))
if mibBuilder.loadTexts: channelALoopbackTrap.setStatus('current')
if mibBuilder.loadTexts: channelALoopbackTrap.setDescription("The loopback status of telecoms channel A. This is intended for installation testing and should be set to 'None' for normal operation. The wire connections to a unit can be tested by applying a 'Copper' loopback to the local unit. The wireless connection to the remote unit can be tested by applying a 'Wireless' loopback to the remote unit with no loopback on the local unit. NB: a change of state may generate an SNMP trap and/or SMTP email alert")
channelBLoopbackTrap = NotificationType((1, 3, 6, 1, 4, 1, 17713, 6, 99, 0, 7)).setObjects(("CAMBIUM-PTP600-MIB", "channelBLoopback"))
if mibBuilder.loadTexts: channelBLoopbackTrap.setStatus('current')
if mibBuilder.loadTexts: channelBLoopbackTrap.setDescription("The loopback status of telecoms channel B. This is intended for installation testing and should be set to 'None' for normal operation. The wire connections to a unit can be tested by applying a 'Copper' loopback to the local unit. The wireless connection to the remote unit can be tested by applying a 'Wireless' loopback to the remote unit with no loopback on the local unit. NB: a change of state may generate an SNMP trap and/or SMTP email alert")
regionCodeTrap = NotificationType((1, 3, 6, 1, 4, 1, 17713, 6, 99, 0, 8)).setObjects(("CAMBIUM-PTP600-MIB", "regionCode"))
if mibBuilder.loadTexts: regionCodeTrap.setStatus('current')
if mibBuilder.loadTexts: regionCodeTrap.setDescription('The region code prohibits the wireless unit from operating outside the regulated limits. An invalid region code indicates a corrupted license key. NB: a change of state may generate an SNMP trap and/or SMTP email alert')
installStatusTrap = NotificationType((1, 3, 6, 1, 4, 1, 17713, 6, 99, 0, 9)).setObjects(("CAMBIUM-PTP600-MIB", "installStatus"))
if mibBuilder.loadTexts: installStatusTrap.setStatus('current')
if mibBuilder.loadTexts: installStatusTrap.setDescription('A non-zero value indicates that signalling was received with the wrong MAC address or a mismatched link name. NB: It is very unusual to detect this, because units with mis-configured Target MAC Address will normally fail to establish a wireless link. However, rare circumstances may establish a partial wireless link and detect this situation. NB: A non-zero value on start-up, or a change of value during operation, may generate an SNMP trap and/or SMTP email alert')
installArmStateTrap = NotificationType((1, 3, 6, 1, 4, 1, 17713, 6, 99, 0, 10)).setObjects(("CAMBIUM-PTP600-MIB", "installArmState"))
if mibBuilder.loadTexts: installArmStateTrap.setStatus('current')
if mibBuilder.loadTexts: installArmStateTrap.setDescription('Indicates if the unit is being installed. NB: a change of state may generate an SNMP trap and/or SMTP email alert')
unitOutOfCalibrationTrap = NotificationType((1, 3, 6, 1, 4, 1, 17713, 6, 99, 0, 11)).setObjects(("CAMBIUM-PTP600-MIB", "unitOutOfCalibration"))
if mibBuilder.loadTexts: unitOutOfCalibrationTrap.setStatus('current')
if mibBuilder.loadTexts: unitOutOfCalibrationTrap.setDescription('The unit is out of calibration')
encryptionEnabledMismatchTrap = NotificationType((1, 3, 6, 1, 4, 1, 17713, 6, 99, 0, 13)).setObjects(("CAMBIUM-PTP600-MIB", "encryptionEnabledMismatch"))
if mibBuilder.loadTexts: encryptionEnabledMismatchTrap.setStatus('current')
if mibBuilder.loadTexts: encryptionEnabledMismatchTrap.setDescription('Encryption has been enabled on one end of the wireless link but not the other')
incompatibleRegionCodesTrap = NotificationType((1, 3, 6, 1, 4, 1, 17713, 6, 99, 0, 14)).setObjects(("CAMBIUM-PTP600-MIB", "incompatibleRegionCodes"))
if mibBuilder.loadTexts: incompatibleRegionCodesTrap.setStatus('current')
if mibBuilder.loadTexts: incompatibleRegionCodesTrap.setDescription('The master and slave units have incompatible region codes. NB: Both units must have compatible license keys')
noWirelessChannelAvailableTrap = NotificationType((1, 3, 6, 1, 4, 1, 17713, 6, 99, 0, 15)).setObjects(("CAMBIUM-PTP600-MIB", "noWirelessChannelAvailable"))
if mibBuilder.loadTexts: noWirelessChannelAvailableTrap.setStatus('current')
if mibBuilder.loadTexts: noWirelessChannelAvailableTrap.setDescription('Spectrum Management was unable to locate a suitable wireless channel to operate on')
wirelessLinkDisabledWarningTrap = NotificationType((1, 3, 6, 1, 4, 1, 17713, 6, 99, 0, 16)).setObjects(("CAMBIUM-PTP600-MIB", "wirelessLinkDisabledWarning"))
if mibBuilder.loadTexts: wirelessLinkDisabledWarningTrap.setStatus('current')
if mibBuilder.loadTexts: wirelessLinkDisabledWarningTrap.setDescription('The SNMP ifAdminStatus of the wireless interface has disabled wireless traffic')
dataPortDisabledWarningTrap = NotificationType((1, 3, 6, 1, 4, 1, 17713, 6, 99, 0, 17)).setObjects(("CAMBIUM-PTP600-MIB", "dataPortDisabledWarning"))
if mibBuilder.loadTexts: dataPortDisabledWarningTrap.setStatus('current')
if mibBuilder.loadTexts: dataPortDisabledWarningTrap.setDescription('The SNMP ifAdminStatus of the Ethernet interface has disabled Ethernet traffic')
dataPortFiberStatusTrap = NotificationType((1, 3, 6, 1, 4, 1, 17713, 6, 99, 0, 18)).setObjects(("CAMBIUM-PTP600-MIB", "dataPortFiberStatus"))
if mibBuilder.loadTexts: dataPortFiberStatusTrap.setStatus('current')
if mibBuilder.loadTexts: dataPortFiberStatusTrap.setDescription('If the fiber link is not OK, there are three possible causes: Either the fiber link has been installed but disabled (because the license key does not include fiber support), the link could not be established even though no LOS was detected (i.e. an optical carrier was detected, which could be due to a broken TX fiber, or because the link is disabled at the fiber link partner), or the link could not be established and LOS is detected (i.e. no optical carrier is detected). NB: a change of status may generate an SNMP trap and/or SMTP email alert')
dataPortConfigurationMismatchTrap = NotificationType((1, 3, 6, 1, 4, 1, 17713, 6, 99, 0, 19)).setObjects(("CAMBIUM-PTP600-MIB", "dataPortConfigurationMismatch"))
if mibBuilder.loadTexts: dataPortConfigurationMismatchTrap.setStatus('current')
if mibBuilder.loadTexts: dataPortConfigurationMismatchTrap.setDescription('The detection of Ethernet fragments (runt packets) when the link is in full duplex is an indication of an auto-negotiation or forced configuration mismatch')
incompatibleMasterAndSlaveTrap = NotificationType((1, 3, 6, 1, 4, 1, 17713, 6, 99, 0, 20)).setObjects(("CAMBIUM-PTP600-MIB", "incompatibleMasterAndSlave"))
if mibBuilder.loadTexts: incompatibleMasterAndSlaveTrap.setStatus('current')
if mibBuilder.loadTexts: incompatibleMasterAndSlaveTrap.setDescription('A non-zero value indicates that the master and slave ends of a link are different hardware products, or have different software versions. NB: It is very unusual to detect this, because incompatible units will normally fail to establish a wireless link. However, some combinations may establish a partial wireless link and detect this situation. NB: A non-zero value may generate an SNMP trap and/or SMTP email alert')
sNTPSyncTrap = NotificationType((1, 3, 6, 1, 4, 1, 17713, 6, 99, 0, 21)).setObjects(("CAMBIUM-PTP600-MIB", "sNTPSync"))
if mibBuilder.loadTexts: sNTPSyncTrap.setStatus('current')
if mibBuilder.loadTexts: sNTPSyncTrap.setDescription('If SNTP Sync fails then check the server settings in the Remote Management page, or disable SNTP')
tDDSynchronizationAlarmTrap = NotificationType((1, 3, 6, 1, 4, 1, 17713, 6, 99, 0, 22)).setObjects(("CAMBIUM-PTP600-MIB", "tDDSynchronizationAlarm"))
if mibBuilder.loadTexts: tDDSynchronizationAlarmTrap.setStatus('current')
if mibBuilder.loadTexts: tDDSynchronizationAlarmTrap.setDescription('Not Synchronized indicates failure of the external timing system for a period greater than the holdover timer. TDD frame timing will have drifted, and this link may cause interference with other links in a synchronized network. Timing System Failure is an early warning, indicating that 80% of the holdover period has expired.')
managementPortStatusTrap = NotificationType((1, 3, 6, 1, 4, 1, 17713, 6, 99, 0, 23)).setObjects(("CAMBIUM-PTP600-MIB", "managementPortStatus"))
if mibBuilder.loadTexts: managementPortStatusTrap.setStatus('current')
if mibBuilder.loadTexts: managementPortStatusTrap.setDescription('Current status of the out-of-band management link. NB: a change of state may generate an SNMP trap and/or SMTP email alert')
managementPortDisabledWarningTrap = NotificationType((1, 3, 6, 1, 4, 1, 17713, 6, 99, 0, 24)).setObjects(("CAMBIUM-PTP600-MIB", "managementPortDisabledWarning"))
if mibBuilder.loadTexts: managementPortDisabledWarningTrap.setStatus('current')
if mibBuilder.loadTexts: managementPortDisabledWarningTrap.setDescription('The SNMP ifAdminStatus of the out-of-band management interface has disabled Ethernet traffic')
linkModeOptimizationMismatchTrap = NotificationType((1, 3, 6, 1, 4, 1, 17713, 6, 99, 0, 25)).setObjects(("CAMBIUM-PTP600-MIB", "linkModeOptimizationMismatch"))
if mibBuilder.loadTexts: linkModeOptimizationMismatchTrap.setStatus('current')
if mibBuilder.loadTexts: linkModeOptimizationMismatchTrap.setDescription('The remote node of this link has been configured with a different link mode optimization')
managementPortConfigurationMismatchTrap = NotificationType((1, 3, 6, 1, 4, 1, 17713, 6, 99, 0, 28)).setObjects(("CAMBIUM-PTP600-MIB", "managementPortConfigurationMismatch"))
if mibBuilder.loadTexts: managementPortConfigurationMismatchTrap.setStatus('current')
if mibBuilder.loadTexts: managementPortConfigurationMismatchTrap.setDescription('The detection of Ethernet fragments (runt packets) when the link is in full duplex is an indication of an auto-negotiation or forced configuration mismatch. This alarm is only raised if an Out-of-Band Management Mode has been configured.')
syslogStateTrap = NotificationType((1, 3, 6, 1, 4, 1, 17713, 6, 99, 0, 30)).setObjects(("CAMBIUM-PTP600-MIB", "syslogState"))
if mibBuilder.loadTexts: syslogStateTrap.setStatus('current')
if mibBuilder.loadTexts: syslogStateTrap.setDescription('The syslog service has been enabled or disabled')
syslogLocalNearlyFullTrap = NotificationType((1, 3, 6, 1, 4, 1, 17713, 6, 99, 0, 31))
if mibBuilder.loadTexts: syslogLocalNearlyFullTrap.setStatus('current')
if mibBuilder.loadTexts: syslogLocalNearlyFullTrap.setDescription('The syslog local log is nearly full (90%)')
syslogLocalWrappedTrap = NotificationType((1, 3, 6, 1, 4, 1, 17713, 6, 99, 0, 32))
if mibBuilder.loadTexts: syslogLocalWrappedTrap.setStatus('current')
if mibBuilder.loadTexts: syslogLocalWrappedTrap.setDescription('The syslog local log has wrapped')
syslogClientTrap = NotificationType((1, 3, 6, 1, 4, 1, 17713, 6, 99, 0, 33)).setObjects(("CAMBIUM-PTP600-MIB", "syslogClient"))
if mibBuilder.loadTexts: syslogClientTrap.setStatus('current')
if mibBuilder.loadTexts: syslogClientTrap.setDescription('Enable or disable the syslog client')
secureModeAlarmTrap = NotificationType((1, 3, 6, 1, 4, 1, 17713, 6, 99, 0, 34)).setObjects(("CAMBIUM-PTP600-MIB", "secureModeAlarm"))
if mibBuilder.loadTexts: secureModeAlarmTrap.setStatus('current')
if mibBuilder.loadTexts: secureModeAlarmTrap.setDescription('The secure mode (e.g. FIPS, UC-APL) state of the unit. The secure mode is not configured(1) state indicates that the unit is capable of secure mode operation, and one or more of the following security materials has not been configured: Key Of Keys, Private Key, Public Certificate, DRNG Entropy Status, Wireless Encryption Key. The secure mode is configured but not active(2) state indicates that the unit is capable of secure mode operation, and the security material has been configured, but the configuration of interfaces is not consistent with secure mode operation. The secure mode is not supported(3) state indicates that the unit is not capable of secure mode operation. The secure mode capability requires appropriate hardware compatibility, license key and software image.')
ethernetBridgingStatusAlarmTrap = NotificationType((1, 3, 6, 1, 4, 1, 17713, 6, 99, 0, 35)).setObjects(("CAMBIUM-PTP600-MIB", "ethernetBridgingStatusAlarm"))
if mibBuilder.loadTexts: ethernetBridgingStatusAlarmTrap.setStatus('current')
if mibBuilder.loadTexts: ethernetBridgingStatusAlarmTrap.setDescription('Ethernet traffic bridging is disabled because the wireless link is not operating at the configured Lowest Ethernet Modulation Mode')
mibBuilder.exportSymbols("CAMBIUM-PTP600-MIB", enabledDiagnosticAlarms=enabledDiagnosticAlarms, cableLoss=cableLoss, receiveFreqKHz=receiveFreqKHz, syslogLocalNearlyFullTrap=syslogLocalNearlyFullTrap, eIRP=eIRP, productName=productName, sNTPPrimaryServer=sNTPPrimaryServer, linkModeOptimizationMismatch=linkModeOptimizationMismatch, txColorCode=txColorCode, userConfiguredMaxModulationMode=userConfiguredMaxModulationMode, sntpGroup=sntpGroup, tFTPSoftwareUpgradeStatusText=tFTPSoftwareUpgradeStatusText, dfsNineNinePointNinePercentiles=dfsNineNinePointNinePercentiles, dataPortConfigurationMismatch=dataPortConfigurationMismatch, managementPortDisabledWarningTrap=managementPortDisabledWarningTrap, snmpControlGroup=snmpControlGroup, telecomsChannelAStatus=telecomsChannelAStatus, tFTPSoftwareUpgradeStatus=tFTPSoftwareUpgradeStatus, pubStatsGroup=pubStatsGroup, accessMethod=accessMethod, tDDSynchronizationMode=tDDSynchronizationMode, telecomsChannelAStatusTrap=telecomsChannelAStatusTrap, encryptionEnabledMismatchTrap=encryptionEnabledMismatchTrap, subnetMask=subnetMask, linkModeOptimizationMismatchTrap=linkModeOptimizationMismatchTrap, sNTPServerPortNumber=sNTPServerPortNumber, encryptionGroup=encryptionGroup, sMTPServerPortNumber=sMTPServerPortNumber, channelBLoopback=channelBLoopback, managementPortStatusTrap=managementPortStatusTrap, sNMPCommunityString=sNMPCommunityString, remoteIPAddress=remoteIPAddress, tFTPStartSoftwareUpgrade=tFTPStartSoftwareUpgrade, reset=reset, sNTPPrimaryServerDeadTime=sNTPPrimaryServerDeadTime, dfsTableEntry=dfsTableEntry, installArmStateTrap=installArmStateTrap, wirelessLinkDisabledWarningTrap=wirelessLinkDisabledWarningTrap, remoteMaximumTransmitPower=remoteMaximumTransmitPower, dataPortDisabledWarning=dataPortDisabledWarning, unitOutOfCalibrationTrap=unitOutOfCalibrationTrap, PYSNMP_MODULE_ID=cambium, sNTPServerTableEntry=sNTPServerTableEntry, gatewayIPAddress=gatewayIPAddress, phyStatusGroup=phyStatusGroup, altitude=altitude, sNTPLastSync=sNTPLastSync, systemClock=systemClock, linkModeOptimisation=linkModeOptimisation, transmitFreqMHz=transmitFreqMHz, dataPortFiberStatusTrap=dataPortFiberStatusTrap, phyStatus=phyStatus, mPLSTCPriorityTableNumber=mPLSTCPriorityTableNumber, encryptionAlgorithm=encryptionAlgorithm, dfsTableIndex=dfsTableIndex, useVLANForManagementInterfaces=useVLANForManagementInterfaces, encryption=encryption, targetMACAddress=targetMACAddress, signalStrengthRatio=signalStrengthRatio, unitOutOfCalibration=unitOutOfCalibration, licenceGroup=licenceGroup, masterSlaveMode=masterSlaveMode, hAZLOCConfiguration=hAZLOCConfiguration, ethernetBridgingAvailability=ethernetBridgingAvailability, dataPortStatusTrap=dataPortStatusTrap, sNTPState=sNTPState, smtpGroup=smtpGroup, syslogStateTrap=syslogStateTrap, channelBLineCode=channelBLineCode, wirelessLinkAvailability=wirelessLinkAvailability, tDDControl=tDDControl, frequencyVariant=frequencyVariant, transmitFreqKHz=transmitFreqKHz, sNMPTrapTableNumber=sNMPTrapTableNumber, sMTPDestinationEmailAddress=sMTPDestinationEmailAddress, hTTPSPortNumber=hTTPSPortNumber, l2CPPriorityTable=l2CPPriorityTable, dSCPManagementPriority=dSCPManagementPriority, sMTPEmailAlert=sMTPEmailAlert, bandwidthVariant=bandwidthVariant, hardwareVersion=hardwareVersion, secureModeAlarm=secureModeAlarm, constantPowerSpectralDensity=constantPowerSpectralDensity, sMTPServerIPAddress=sMTPServerIPAddress, regionCode=regionCode, sNTPSync=sNTPSync, sNTPServerTimeout=sNTPServerTimeout, sNTPServerIPAddress=sNTPServerIPAddress, 
installArmState=installArmState, receiveDataRate=receiveDataRate, dfsPeaks=dfsPeaks, ptpGroups=ptpGroups, linkSymmetry=linkSymmetry, tDDSynchronizationStatus=tDDSynchronizationStatus, versionsGroup=versionsGroup, ptmp=ptmp, dfsChannelChangeTrap=dfsChannelChangeTrap, sNTPSyncTrap=sNTPSyncTrap, wirelessLinkDisabledWarning=wirelessLinkDisabledWarning, ethernet=ethernet, managementPortSpeedAndDuplex=managementPortSpeedAndDuplex, channelALoopbackTrap=channelALoopbackTrap, bridge=bridge, incompatibleMasterAndSlaveTrap=incompatibleMasterAndSlaveTrap, hTTPPortNumber=hTTPPortNumber, mPLSTCPriorityTableEntry=mPLSTCPriorityTableEntry, ethernetFiberSupport=ethernetFiberSupport, l2CPPriorityTableIndex=l2CPPriorityTableIndex, automaticTxPowerControl=automaticTxPowerControl, iPAddress=iPAddress, timeZone=timeZone, managementGroup=managementGroup, ptpTraps=ptpTraps, configuration=configuration, alarmsGroup=alarmsGroup, l2CPPriorityTableEntry=l2CPPriorityTableEntry, dataPortStatus=dataPortStatus, range=range, tDDSynchronizationAlarm=tDDSynchronizationAlarm, incompatibleRegionCodes=incompatibleRegionCodes, managementPortConfigurationMismatch=managementPortConfigurationMismatch, sntp=sntp, channelALoopback=channelALoopback, receiveChannel=receiveChannel, sNMPTrapTableIndex=sNMPTrapTableIndex, ethernetPriorityQueueMapping=ethernetPriorityQueueMapping, telecomsInterface=telecomsInterface, transmitModulationMode=transmitModulationMode, supplementaryGroup=supplementaryGroup, managementPortDisabledWarning=managementPortDisabledWarning, sNMPTrapVersion=sNMPTrapVersion, installStatus=installStatus, searchState=searchState, receiveModulationModeDetail=receiveModulationModeDetail, tFTPServerPortNumber=tFTPServerPortNumber, longitude=longitude, dataPortConfigurationMismatchTrap=dataPortConfigurationMismatchTrap, dfs=dfs, sNMPTrapPortNumber=sNMPTrapPortNumber, dfsTable=dfsTable, incompatibleMasterAndSlave=incompatibleMasterAndSlave, sNTPServerTableNumber=sNTPServerTableNumber, maximumTransmitPower=maximumTransmitPower, managementPortConfigurationMismatchTrap=managementPortConfigurationMismatchTrap, receiveModulationMode=receiveModulationMode, l2CPPriorityQueueMapping=l2CPPriorityQueueMapping, sNMPv3Enable=sNMPv3Enable, dataPortAutoMdix=dataPortAutoMdix, byteErrorRatio=byteErrorRatio, dataPortAutoNegotiation=dataPortAutoNegotiation, telecomsChannelBStatusTrap=telecomsChannelBStatusTrap, licence=licence, ptpCompliance=ptpCompliance, syslogLocalWrappedTrap=syslogLocalWrappedTrap, notificationsGroup=notificationsGroup, iPDSCPPriorityTableEntry=iPDSCPPriorityTableEntry, sNTPServerTable=sNTPServerTable, securityLevel=securityLevel, sNMPPortNumber=sNMPPortNumber, dataPortSpeedAndDuplex=dataPortSpeedAndDuplex, ethernetCappedMaxWirelessSpeed=ethernetCappedMaxWirelessSpeed, snmpControl=snmpControl, sNTPServerStatus=sNTPServerStatus, hTTPAccessEnabled=hTTPAccessEnabled, versions=versions, ethernetPriorityTableIndex=ethernetPriorityTableIndex, ethernetBridgingStatusAlarm=ethernetBridgingStatusAlarm, wirelessLinkStatus=wirelessLinkStatus, ptpTrapPrefix=ptpTrapPrefix, dataPortWirelessDownAlert=dataPortWirelessDownAlert, sNTPServerTableIndex=sNTPServerTableIndex, vLANManagementPriority=vLANManagementPriority, syslogClientTrap=syslogClientTrap, dataPortAutoNegAdvertisement=dataPortAutoNegAdvertisement, syslogClient=syslogClient, latitude=latitude, vectorError=vectorError, l2CPPriorityTableNumber=l2CPPriorityTableNumber, telecomGroup=telecomGroup, noWirelessChannelAvailableTrap=noWirelessChannelAvailableTrap, 
transmitChannel=transmitChannel, qOSPriorityScheme=qOSPriorityScheme, receivePower=receivePower, unknownNetworkPriorityQueueMapping=unknownNetworkPriorityQueueMapping, telnetAccessEnabled=telnetAccessEnabled, groupID=groupID, telnetPortNumber=telnetPortNumber, ptp=ptp, rxColorCode=rxColorCode, dataPortFiberStatus=dataPortFiberStatus, incompatibleRegionCodesTrap=incompatibleRegionCodesTrap, tDDSynchronizationAlarmTrap=tDDSynchronizationAlarmTrap, configurationGroup=configurationGroup, productVariant=productVariant, supplementary=supplementary, tFTPSoftwareUpgradeStatusAdditionalText=tFTPSoftwareUpgradeStatusAdditionalText, installStatusTrap=installStatusTrap, sNMPTrapTable=sNMPTrapTable, iPDSCPPriorityTableIndex=iPDSCPPriorityTableIndex, ptp600=ptp600, alarms=alarms, dfsTableNumber=dfsTableNumber, syslogControl=syslogControl, ethernetPriorityTable=ethernetPriorityTable, channelACableLength=channelACableLength, phyControl=phyControl, ethernetBridgingStatus=ethernetBridgingStatus, sNMPTrapIPAddress=sNMPTrapIPAddress, dfsImpulsiveInterferenceTrap=dfsImpulsiveInterferenceTrap, bridgeGroup=bridgeGroup, linkName=linkName, resetGroup=resetGroup, sNMPTrapTableEntry=sNMPTrapTableEntry, rangingMode=rangingMode, channelBCableLength=channelBCableLength, iPDSCPPriorityTable=iPDSCPPriorityTable, rawReceivePower=rawReceivePower, dfsMeans=dfsMeans, ethernetPriorityTableNumber=ethernetPriorityTableNumber, managementMode=managementMode, telecomsChannelBStatus=telecomsChannelBStatus, telecomsChannelSelection=telecomsChannelSelection, pubStats=pubStats, hTTPSAccessEnabled=hTTPSAccessEnabled, secondarySoftwareVersion=secondarySoftwareVersion, cambium=cambium, sNTPPollInterval=sNTPPollInterval, dataPortDisabledWarningTrap=dataPortDisabledWarningTrap, regionCodeTrap=regionCodeTrap, syslogState=syslogState, noWirelessChannelAvailable=noWirelessChannelAvailable, sMTPSourceEmailAddress=sMTPSourceEmailAddress, sMTPEnabledMessages=sMTPEnabledMessages, syslogControlGroup=syslogControlGroup, ethernetGroup=ethernetGroup, vLANManagementVID=vLANManagementVID, iPDSCPPriorityQueueMapping=iPDSCPPriorityQueueMapping, targetRange=targetRange, mPLSTCPriorityTableIndex=mPLSTCPriorityTableIndex, encryptionEnabledMismatch=encryptionEnabledMismatch, antennaGain=antennaGain, localPacketFiltering=localPacketFiltering, phyControlGroup=phyControlGroup, ethernetPriorityTableEntry=ethernetPriorityTableEntry, managementPortWirelessDownAlert=managementPortWirelessDownAlert, ethernetBridgingStatusAlarmTrap=ethernetBridgingStatusAlarmTrap, softwareVersion=softwareVersion, mPLSTCPriorityQueueMapping=mPLSTCPriorityQueueMapping, daylightSaving=daylightSaving, channelBandwidth=channelBandwidth, transmitDataRate=transmitDataRate, tFTPSoftwareUpgradeFileName=tFTPSoftwareUpgradeFileName, managementPortStatus=managementPortStatus, siteName=siteName)
mibBuilder.exportSymbols("CAMBIUM-PTP600-MIB", channelALineCode=channelALineCode, smtp=smtp, telecom=telecom, tFTPServerIPAddress=tFTPServerIPAddress, tDDControlGroup=tDDControlGroup, mPLSTCPriorityTable=mPLSTCPriorityTable, dfsGroup=dfsGroup, receiveFreqMHz=receiveFreqMHz, systemReset=systemReset, channelBLoopbackTrap=channelBLoopbackTrap, aggregateDataRate=aggregateDataRate, sNTPServerRetries=sNTPServerRetries, bootVersion=bootVersion, secureModeAlarmTrap=secureModeAlarmTrap, iPDSCPPriorityTableNumber=iPDSCPPriorityTableNumber, management=management, linkLoss=linkLoss, transmitPower=transmitPower, remoteRxTargetPower=remoteRxTargetPower, sNMPEnabledTraps=sNMPEnabledTraps)
|
py | 1a43d2d1d3b1e81cb6b57f0b51a32821cd6e5073 | # -*- coding: utf-8 -*-
from __future__ import absolute_import, unicode_literals
from config.template_middleware import TemplateResponse
from gaebusiness.business import CommandExecutionException
from tekton import router
from gaecookie.decorator import no_csrf
from editar_produto_app import editar_produto_facade
from routes import editar_produtos
from tekton.gae.middleware.redirect import RedirectResponse
@no_csrf
def index():
return TemplateResponse({'save_path': router.to_path(save)}, 'editar_produtos/editar_produto_form.html')
def save(**editar_produto_properties):
cmd = editar_produto_facade.save_editar_produto_cmd(**editar_produto_properties)
try:
cmd()
except CommandExecutionException:
context = {'errors': cmd.errors,
'editar_produto': editar_produto_properties}
return TemplateResponse(context, 'editar_produtos/editar_produto_form.html')
return RedirectResponse(router.to_path(editar_produtos))
|
py | 1a43d416be22f37d1d12beb4c418d49511c12900 | from block_getter import *
from pprint import pprint
import sys
import time
import os
def get_head_to_tail_blocks(tail_block_height, head_block_height, api_interval):
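    # Work out which block heights in the requested range are still missing on disk,
    # then keep retrying the missing ones, sleeping api_interval seconds between API
    # calls. Give up after 10 failed passes over the list; on success, merge the block
    # files and regenerate the transactions file.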
wanted_block_numbers = [block_number for block_number in range(tail_block_height, head_block_height + 1)]
lack_block_numbers = check_lack_blocks(wanted_block_numbers)
task_size = len(lack_block_numbers)
failed_times = 0
while True:
for lack_block_number in lack_block_numbers[:]:
if fetch_block_file(lack_block_number) == True:
lack_block_numbers.remove(lack_block_number)
sys.stdout.write('\rTask ' + str(task_size - len(lack_block_numbers)) + '/' + str(task_size))
sys.stdout.flush()
time.sleep(api_interval)
if len(lack_block_numbers) == 0:
break
else:
failed_times += 1
if failed_times > 10:
return False
merge_blocks_file(os.path.dirname(os.path.abspath(__file__)) + '/Outputs/MONA_Blocks.json')
make_transactions_file(os.path.dirname(os.path.abspath(__file__)) + '/Outputs/MONA_Blocks.json', os.path.dirname(os.path.abspath(__file__)) + '/Outputs/MONA_Transactions.json')
print('')
return True
if __name__ == '__main__':
tail_block_height = int(sys.argv[1])
head_block_height = 0
if len(sys.argv) == 1:
merge_blocks_file(os.path.dirname(os.path.abspath(__file__)) + '/Outputs/MONA_Blocks.json')
make_transactions_file(os.path.dirname(os.path.abspath(__file__)) + '/Outputs/MONA_Blocks.json', os.path.dirname(os.path.abspath(__file__)) + '/Outputs/MONA_Transactions.json')
sys.exit(0)
if len(sys.argv) == 3:
# head to tail
head_block_height = int(sys.argv[2])
get_head_to_tail_blocks(tail_block_height, head_block_height, 1)
sys.exit(0)
else:
# highest to tail
while True:
head_block_height = get_max_height()
if head_block_height == -1:
head_block_height = tail_block_height
get_head_to_tail_blocks(tail_block_height, head_block_height, 1)
time.sleep(10)
sys.exit(-1) |
py | 1a43d564117f80892e41b19c226153af08670d17 | def add_nums(num1, num2, num3, num4, num5):
total = num1 + num2 + num3 + num4 + num5
print(num1, '+', num2, '+', num3, '+', num4, '+', num5, ' = ', total)
def main():
add_nums(1, 2, 0, 0, 0)
add_nums(1, 2, 3, 4, 5)
add_nums(11, 12, 13, 14, 0)
add_nums(101, 201, 301, 0, 0)
main() |
py | 1a43d5737db23f8350c3475701d3ddc58facd14d | #!/usr/bin/env python3
import argparse
from distutils.util import strtobool
import logging
from espnet.transform.transformation import Transformation
from espnet.utils.cli_readers import file_reader_helper
from espnet.utils.cli_utils import get_commandline_args
from espnet.utils.cli_utils import is_scipy_wav_style
from espnet.utils.cli_writers import file_writer_helper
def get_parser():
parser = argparse.ArgumentParser(
description="copy feature with preprocessing",
formatter_class=argparse.ArgumentDefaultsHelpFormatter,
)
parser.add_argument("--verbose", "-V", default=0, type=int, help="Verbose option")
parser.add_argument(
"--in-filetype",
type=str,
default="mat",
choices=["mat", "hdf5", "sound.hdf5", "sound"],
help="Specify the file format for the rspecifier. "
'"mat" is the matrix format in kaldi',
)
parser.add_argument(
"--out-filetype",
type=str,
default="mat",
choices=["mat", "hdf5", "sound.hdf5", "sound"],
help="Specify the file format for the wspecifier. "
'"mat" is the matrix format in kaldi',
)
parser.add_argument(
"--write-num-frames", type=str, help="Specify wspecifer for utt2num_frames"
)
parser.add_argument(
"--compress", type=strtobool, default=False, help="Save in compressed format"
)
parser.add_argument(
"--compression-method",
type=int,
default=2,
help="Specify the method(if mat) or " "gzip-level(if hdf5)",
)
parser.add_argument(
"--preprocess-conf",
type=str,
default=None,
help="The configuration file for the pre-processing",
)
parser.add_argument(
"rspecifier", type=str, help="Read specifier for feats. e.g. ark:some.ark"
)
parser.add_argument(
"wspecifier", type=str, help="Write specifier. e.g. ark:some.ark"
)
return parser
def main():
parser = get_parser()
args = parser.parse_args()
# logging info
logfmt = "%(asctime)s (%(module)s:%(lineno)d) %(levelname)s: %(message)s"
if args.verbose > 0:
logging.basicConfig(level=logging.INFO, format=logfmt)
else:
logging.basicConfig(level=logging.WARN, format=logfmt)
logging.info(get_commandline_args())
if args.preprocess_conf is not None:
preprocessing = Transformation(args.preprocess_conf)
logging.info("Apply preprocessing: {}".format(preprocessing))
else:
preprocessing = None
with file_writer_helper(
args.wspecifier,
filetype=args.out_filetype,
write_num_frames=args.write_num_frames,
compress=args.compress,
compression_method=args.compression_method,
) as writer:
for utt, mat in file_reader_helper(args.rspecifier, args.in_filetype):
if is_scipy_wav_style(mat):
# If data is sound file, then got as Tuple[int, ndarray]
rate, mat = mat
if preprocessing is not None:
mat = preprocessing(mat, uttid_list=utt)
# shape = (Time, Channel)
if args.out_filetype in ["sound.hdf5", "sound"]:
# Write Tuple[int, numpy.ndarray] (scipy style)
writer[utt] = (rate, mat)
else:
writer[utt] = mat
if __name__ == "__main__":
main()
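# Illustrative invocation (the script name, paths and specifiers below are
# placeholders, not fixed by this file):
#   python copy_feats.py --verbose 1 --in-filetype mat --out-filetype hdf5 \
#       --preprocess-conf conf/preprocess.yaml \
#       ark:feats.ark ark:feats.h5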
|
py | 1a43d77a8f55c08fdc483f0e4b710b5215c52863 | from collections import OrderedDict
from inspect import Signature, Parameter
from typing import Any
from typing import List, Set
import torch
from nncf.common.graph.definitions import MODEL_INPUT_OP_NAME
from nncf.common.graph.definitions import MODEL_OUTPUT_OP_NAME
from nncf.torch.dynamic_graph.patch_pytorch import register_operator
from nncf.torch.dynamic_graph.graph_tracer import ModelInputInfo, create_mock_tensor
from nncf.torch.utils import is_tensor, is_traced_tensor
from nncf.torch.nested_objects_traversal import objwalk
from nncf.common.utils.logger import logger as nncf_logger
@register_operator(name=MODEL_INPUT_OP_NAME)
def nncf_model_input(tensor: 'torch.Tensor'):
return tensor
@register_operator(name=MODEL_OUTPUT_OP_NAME)
def nncf_model_output(tensor: 'torch.Tensor'):
return tensor
def wrap_nncf_model_inputs_with_objwalk(model_args, model_kwargs):
model_args = objwalk(model_args, is_tensor, nncf_model_input)
model_kwargs = objwalk(model_kwargs, is_tensor, nncf_model_input)
return model_args, model_kwargs
def wrap_nncf_model_outputs_with_objwalk(model_outputs):
model_outputs = objwalk(model_outputs, is_traced_tensor, nncf_model_output)
return model_outputs
def replicate_same_tensors(obj: Any) -> Any:
"""
Required to handle the situation when multiple references to one and the
same tensor are present in the input. If tensor replication is not done, then
at runtime one and the same tensor could be wrapped by input/output wrappers twice,
which will disrupt the traced graph structure and possibly hook calls.
"""
observed_tensor_object_ids = set() # type: Set[int]
def replicate_fn(tensor: torch.Tensor) -> torch.Tensor:
tensor_object_id = id(tensor)
if tensor_object_id in observed_tensor_object_ids:
return tensor.clone()
observed_tensor_object_ids.add(tensor_object_id)
return tensor
obj = objwalk(obj, is_tensor, replicate_fn)
return obj
class InputInfoWrapManager:
INPUTS_MISMATCH_WARNING_TEXT = "Compression with regards to this input may occur incorrectly. Make sure " \
"you call the compressed model with inputs that correspond to what NNCF was " \
"configured to expect (either via NNCF config's input_infos, or custom" \
"dummy_forward_fn/wrap_inputs_fn parameters), or that you know what you are " \
"doing. This warning will not be shown again."
ARGS_INPUTS_MISMATCH_FORMAT_STRING = "Inputs mismatch - could not find arg with idx {} in NNCF-wrapped model " \
"input args! " + INPUTS_MISMATCH_WARNING_TEXT
KWARGS_INPUTS_MISMATCH_FORMAT_STRING = "Inputs mismatch - could not find kwarg '{}' in NNCF-wrapped model input " \
"kwargs! " + INPUTS_MISMATCH_WARNING_TEXT
def __init__(self, input_infos: List[ModelInputInfo],
fwd_signature: Signature,
module_ref_for_device: torch.nn.Module = None):
self._module_ref_for_device = module_ref_for_device
arg_iis_list = [ii for ii in input_infos if ii.keyword is None]
kwarg_iis_list = [(ii.keyword, ii) for ii in input_infos if ii.keyword is not None]
kwarg_iis = OrderedDict()
arg_iis = tuple(arg_iis_list)
for kw, ii in kwarg_iis_list:
kwarg_iis[kw] = ii
bound_params = fwd_signature.bind(*arg_iis, **kwarg_iis)
self._fwd_params_to_input_infos_odict = bound_params.arguments
self._fwd_signature = fwd_signature # type: Signature
def set_device(self, device: str):
self._device = device
def wrap_inputs(self, model_args, model_kwargs):
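        # Bind the actual call's args/kwargs to the forward() signature and wrap each
        # parameter named in input_infos with nncf_model_input, so the traced graph gets
        # explicit input nodes. Variadic (*args/**kwargs) parameters are skipped with a
        # warning; a missing parameter falls back to its default, and a None value is
        # covered by wrapping a dummy tensor built from its ModelInputInfo.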
bound_model_params = self._fwd_signature.bind(*model_args, **model_kwargs)
for param_name in self._fwd_params_to_input_infos_odict:
param_kind = self._fwd_signature.parameters[param_name].kind
if param_kind is Parameter.VAR_POSITIONAL or param_kind is Parameter.VAR_KEYWORD:
nncf_logger.warning("An input_info tensor was bound to a *args or **kwargs variadic parameter in the"
"forward's signature! This is currently unsupported by NNCF. Input compression may "
"be incorrect.")
# Currently won't support input info mapping to *args or **kwargs-mapped parameters
continue
if param_name not in bound_model_params.arguments:
nncf_logger.warning("A call to a compressed model's forward occured without one of the params"
"specified in input_infos! Input compression may be incorrect. Trying to recover "
"by wrapping the default value for the parameter.")
bound_model_params.apply_defaults()
potential_tensor = bound_model_params.arguments[param_name]
if potential_tensor is not None:
bound_model_params.arguments[param_name] = nncf_model_input(bound_model_params.arguments[param_name])
else:
# Default was None - cannot wrap as-is. Will wrap a dummy tensor as specified in
# input infos - will conserve the call order of nncf_model_input nodes,
# and the post-hooks for the input node will execute. The result won't go anywhere, though.
nncf_logger.warning("Wrapping a dummy tensor for input {}".format(param_name))
info_for_missing_input = self._fwd_params_to_input_infos_odict[param_name]
device = 'cuda'
if self._module_ref_for_device is not None:
device = next(self._module_ref_for_device.parameters()).device
dummy_tensor = create_mock_tensor(info_for_missing_input, device)
_ = nncf_model_input(dummy_tensor)
return bound_model_params.args, bound_model_params.kwargs
|
py | 1a43d77c0f671d75d79135fc9cb473394a718705 | from setuptools import setup, find_packages
from codecs import open
from os import path
here = path.abspath(path.dirname(__file__))
with open(path.join(here, 'README.md'), encoding='utf-8') as f:
long_description = f.read()
setup(
name='NodeFinderGUI',
version='0.5.0',
description=('GUI Tool for node related operations in '
'phylogenetic analyses.'),
author='Haofei Jin',
author_email='[email protected]',
url='https://github.com/zxjsdp/NodeFinderGUI',
license='Apache',
keywords='node phylogenetic tools calibration clade',
packages=['nodefinder_gui'],
install_requires=[],
# $ pip install -e .[dev,test]
extras_require={
'dev': ['pytest', 'tox', 'sphinx'],
'test': ['pytest'],
},
long_description=long_description,
classifiers=[
'Development Status :: 3 - Alpha',
'License :: OSI Approved :: Apache Software License',
'Programming Language :: Python',
'Programming Language :: Python :: 2.6',
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3',
'Programming Language :: Python :: 3.3',
'Programming Language :: Python :: 3.4',
'Programming Language :: Python :: 3.5',
],
)
|
py | 1a43d7ba9bfadb50ecc6b70b16c268ef44614f43 | # -*- coding: utf-8 -*-
import logging
import rest_framework_swagger.renderers as rest_swagger_renderers
from django.core import urlresolvers
from zope.dottedname import resolve
logger = logging.getLogger(__name__)
def resolve_swagger_doc(url, method):
resolve_result = urlresolvers.resolve(url)
swaggerdoc = getattr(resolve_result.func, '__swaggerdoc__', None)
if swaggerdoc:
return swaggerdoc
view_class = resolve.resolve(resolve_result.view_name)
view = getattr(view_class, method.lower(), None)
return getattr(view, '__swaggerdoc__', None)
def overwrite_data(url, method, data, swaggerdoc):
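    # Merge the per-method overrides from the view's __swaggerdoc__ into the generated
    # swagger 'paths' entry; if the URL/method is missing from the data, just log and skip.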
additional_data = dict(swaggerdoc).get(method, {})
try:
data['paths'][url][method].update(additional_data)
except (KeyError, TypeError, AttributeError) as err:
logger.debug('Cannot update swagger data: %r', err)
class SwaggerAdditinalDocRenderer(rest_swagger_renderers.OpenAPIRenderer):
def add_customizations(self, data, renderer_context):
super(SwaggerAdditinalDocRenderer, self).add_customizations(
data, renderer_context)
for url, path in data['paths'].items():
for method, method_data in path.items():
swaggerdoc = resolve_swagger_doc(url, method)
if swaggerdoc:
overwrite_data(url, method, data, swaggerdoc)
|
py | 1a43d8d7bce6d8b4f4e808098480c46ac1e56538 | import pandas as pd
from sklearn.model_selection import train_test_split
from sklearn import ensemble
from sklearn.metrics import mean_absolute_error
from sklearn.externals import joblib
# Load the data set
df = pd.read_csv("ml_house_data_set.csv")
# Remove the fields from the data set that we don't want to include in our model
del df['house_number']
del df['unit_number']
del df['street_name']
del df['zip_code']
# Replace categorical data with one-hot encoded data
features_df = pd.get_dummies(df, columns=['garage_type', 'city'])
# Remove the sale price from the feature data
del features_df['sale_price']
# Create the X and y arrays
X = features_df.as_matrix()
y = df['sale_price'].as_matrix()
# Split the data set in a training set (70%) and a test set (30%)
X_train, X_test, y_train, y_test = train_test_split(X, y, test_size=0.3, random_state=0)
# Fit regression model
model = ensemble.GradientBoostingRegressor(
n_estimators=1000,
learning_rate=0.1,
max_depth=6,
min_samples_leaf=9,
max_features=0.1,
loss='huber',
random_state=0
)
model.fit(X_train, y_train)
# Save the trained model to a file so we can use it in other programs
joblib.dump(model, 'trained_house_classifier_model.pkl')
# Find the error rate on the training set
mse = mean_absolute_error(y_train, model.predict(X_train))
print("Training Set Mean Absolute Error: %.4f" % mse)
# Find the error rate on the test set
mse = mean_absolute_error(y_test, model.predict(X_test))
print("Test Set Mean Absolute Error: %.4f" % mse)
|
py | 1a43d8eda3d68ffe1a4b262159c71654f57c5aa5 | import artm
import numpy as np
import shutil
import pytest
import warnings
from ..cooking_machine.models.topic_model import TopicModel
from ..cooking_machine.dataset import Dataset, W_DIFF_BATCHES_1
from ..viewers import top_documents_viewer
NUM_TOPICS = 5
NUM_DOCUMENT_PASSES = 1
NUM_ITERATIONS = 10
class TestTopDocumentsViewer:
""" """
topic_model = None
theta = None
top_documents_viewer = None
@classmethod
def setup_class(cls):
""" """
with warnings.catch_warnings():
warnings.filterwarnings(action="ignore", message=W_DIFF_BATCHES_1)
dataset = Dataset('tests/test_data/test_dataset.csv')
dictionary = dataset.get_dictionary()
batch_vectorizer = dataset.get_batch_vectorizer()
model_artm = artm.ARTM(
num_topics=NUM_TOPICS,
cache_theta=True,
num_document_passes=NUM_DOCUMENT_PASSES,
dictionary=dictionary,
scores=[artm.PerplexityScore(name='PerplexityScore')],)
cls.topic_model = TopicModel(model_artm, model_id='model_id')
cls.topic_model._fit(batch_vectorizer, num_iterations=NUM_ITERATIONS)
cls.theta = cls.topic_model.get_theta(dataset=dataset)
cls.top_documents_viewer = top_documents_viewer.TopDocumentsViewer(model=cls.topic_model)
@classmethod
def teardown_class(cls):
""" """
shutil.rmtree("tests/test_data/test_dataset_batches")
def test_check_output_format(self):
""" """
topics_documents = TestTopDocumentsViewer.top_documents_viewer.view()
assert isinstance(topics_documents, list), 'Result of view() not of type "list"'
assert all(isinstance(topic_documents, list) for topic_documents in topics_documents),\
'Some elements in the result list of view() not of type "list"'
def test_check_output_content(self):
""" """
num_documents = TestTopDocumentsViewer.theta.shape[1]
documents_indices = list(range(num_documents))
topics_documents_from_viewer = TestTopDocumentsViewer.top_documents_viewer.view()
documents_from_viewer = merge_lists(topics_documents_from_viewer)
assert sorted(documents_from_viewer) == documents_indices,\
            'Viewer returned as documents "{0}". ' \
'But expected to get documents\' indices from "0" to "{1}"'.format(
documents_from_viewer, num_documents - 1)
def test_check_precomputed_distances_parameter_workable(self):
""" """
index_of_topic_to_be_nearest_to_all_documents = 0
distances_all_one_except_to_one_topic = np.ones_like(TestTopDocumentsViewer.theta.values)
distances_all_one_except_to_one_topic[:, index_of_topic_to_be_nearest_to_all_documents] = 0
documents_viewer = top_documents_viewer.TopDocumentsViewer(
model=TestTopDocumentsViewer.topic_model,
precomputed_distances=distances_all_one_except_to_one_topic)
topics_documents = documents_viewer.view()
num_documents_in_nearest_topic = len(
topics_documents[index_of_topic_to_be_nearest_to_all_documents])
num_documents = TestTopDocumentsViewer.theta.shape[1]
assert num_documents_in_nearest_topic == num_documents,\
            'Expected to see all documents in one topic. ' \
'But the topic has "{}" documents instead of "{}"'.format(
num_documents_in_nearest_topic, num_documents)
@pytest.mark.parametrize("max_num_top_documents", [0, 1])
def test_check_max_top_documents_number_parameter_workable(self, max_num_top_documents):
""" """
documents_viewer = top_documents_viewer.TopDocumentsViewer(
model=TestTopDocumentsViewer.topic_model,
max_top_number=max_num_top_documents)
topics_documents = documents_viewer.view()
assert all(len(topic_documents) <= max_num_top_documents
for topic_documents in topics_documents),\
            'Not all top documents lists from "{}" have at most the required "{}" elements'.format(
topics_documents, max_num_top_documents)
def test_check_object_clusters_parameter_workable(self):
""" """
num_documents = TestTopDocumentsViewer.theta.shape[1]
cluster_label_to_be_same_for_all_documents = 0
cluster_labels = list(
cluster_label_to_be_same_for_all_documents for _ in range(num_documents))
documents_viewer = top_documents_viewer.TopDocumentsViewer(
model=TestTopDocumentsViewer.topic_model,
object_clusters=cluster_labels)
topics_documents = documents_viewer.view()
num_documents_with_given_cluster_label = len(
topics_documents[cluster_label_to_be_same_for_all_documents])
assert num_documents_with_given_cluster_label == num_documents,\
            'Marked all documents with label "{}". ' \
            'Expected to see all "{}" documents in that topic, ' \
'but there are only "{}" documents'.format(
cluster_label_to_be_same_for_all_documents, num_documents,
num_documents_with_given_cluster_label)
@pytest.mark.parametrize("illegal_cluster_label", [-1, NUM_TOPICS])
def test_check_object_clusters_parameter_validates_range_of_input_labels(
self, illegal_cluster_label):
""" """
num_documents = TestTopDocumentsViewer.theta.shape[1]
cluster_labels = list(0 for _ in range(num_documents))
cluster_labels[0] = illegal_cluster_label
with pytest.raises(ValueError):
_ = top_documents_viewer.TopDocumentsViewer(
model=TestTopDocumentsViewer.topic_model,
object_clusters=cluster_labels).view()
def merge_lists(iterable_of_lists):
""" """
result = []
for i in iterable_of_lists:
result += i
return result
|
py | 1a43da525f9b9a5b4547cdfe17ec7540191a42c8 | urlpatterns = [] # pragma: no cover
|
py | 1a43da95333f39766f888385da4242590ca76c39 | import json
from unittest import mock
from django.contrib.auth import get_user_model
from django.contrib.auth.models import Group, Permission
from django.core.files.base import ContentFile
from django.core.files.uploadedfile import SimpleUploadedFile
from django.test import TestCase
from django.test.utils import override_settings
from django.urls import reverse
from django.utils.six import b
from wagtail.core.models import Collection, GroupCollectionPermission, Page
from wagtail.documents import models
from wagtail.tests.testapp.models import EventPage, EventPageRelatedLink
from wagtail.tests.utils import WagtailTestUtils
class TestDocumentIndexView(TestCase, WagtailTestUtils):
def setUp(self):
self.login()
def test_simple(self):
response = self.client.get(reverse('wagtaildocs:index'))
self.assertEqual(response.status_code, 200)
self.assertTemplateUsed(response, 'wagtaildocs/documents/index.html')
self.assertContains(response, "Add a document")
def test_search(self):
response = self.client.get(reverse('wagtaildocs:index'), {'q': "Hello"})
self.assertEqual(response.status_code, 200)
self.assertEqual(response.context['query_string'], "Hello")
def make_docs(self):
for i in range(50):
document = models.Document(title="Test " + str(i))
document.save()
def test_pagination(self):
self.make_docs()
response = self.client.get(reverse('wagtaildocs:index'), {'p': 2})
# Check response
self.assertEqual(response.status_code, 200)
self.assertTemplateUsed(response, 'wagtaildocs/documents/index.html')
# Check that we got the correct page
self.assertEqual(response.context['documents'].number, 2)
def test_pagination_invalid(self):
self.make_docs()
response = self.client.get(reverse('wagtaildocs:index'), {'p': 'Hello World!'})
# Check response
self.assertEqual(response.status_code, 200)
self.assertTemplateUsed(response, 'wagtaildocs/documents/index.html')
# Check that we got page one
self.assertEqual(response.context['documents'].number, 1)
def test_pagination_out_of_range(self):
self.make_docs()
response = self.client.get(reverse('wagtaildocs:index'), {'p': 99999})
# Check response
self.assertEqual(response.status_code, 200)
self.assertTemplateUsed(response, 'wagtaildocs/documents/index.html')
# Check that we got the last page
self.assertEqual(response.context['documents'].number, response.context['documents'].paginator.num_pages)
def test_ordering(self):
orderings = ['title', '-created_at']
for ordering in orderings:
response = self.client.get(reverse('wagtaildocs:index'), {'ordering': ordering})
self.assertEqual(response.status_code, 200)
def test_index_without_collections(self):
self.make_docs()
response = self.client.get(reverse('wagtaildocs:index'))
self.assertNotContains(response, '<th>Collection</th>')
self.assertNotContains(response, '<td>Root</td>')
def test_index_with_collection(self):
root_collection = Collection.get_first_root_node()
root_collection.add_child(name="Evil plans")
root_collection.add_child(name="Good plans")
self.make_docs()
response = self.client.get(reverse('wagtaildocs:index'))
self.assertContains(response, '<th>Collection</th>')
self.assertContains(response, '<td>Root</td>')
self.assertEqual(
[collection.name for collection in response.context['collections']],
['Root', 'Evil plans', 'Good plans'])
class TestDocumentAddView(TestCase, WagtailTestUtils):
def setUp(self):
self.login()
def test_get(self):
response = self.client.get(reverse('wagtaildocs:add'))
self.assertEqual(response.status_code, 200)
self.assertTemplateUsed(response, 'wagtaildocs/documents/add.html')
# as standard, only the root collection exists and so no 'Collection' option
# is displayed on the form
self.assertNotContains(response, '<label for="id_collection">')
# Ensure the form supports file uploads
self.assertContains(response, 'enctype="multipart/form-data"')
def test_get_with_collections(self):
root_collection = Collection.get_first_root_node()
root_collection.add_child(name="Evil plans")
response = self.client.get(reverse('wagtaildocs:add'))
self.assertEqual(response.status_code, 200)
self.assertTemplateUsed(response, 'wagtaildocs/documents/add.html')
self.assertContains(response, '<label for="id_collection">')
self.assertContains(response, "Evil plans")
def test_post(self):
# Build a fake file
fake_file = ContentFile(b("A boring example document"))
fake_file.name = 'test.txt'
# Submit
post_data = {
'title': "Test document",
'file': fake_file,
}
response = self.client.post(reverse('wagtaildocs:add'), post_data)
# User should be redirected back to the index
self.assertRedirects(response, reverse('wagtaildocs:index'))
# Document should be created, and be placed in the root collection
document = models.Document.objects.get(title="Test document")
root_collection = Collection.get_first_root_node()
self.assertEqual(
document.collection,
root_collection
)
# Check that the file_size/hash field was set
self.assertTrue(document.file_size)
self.assertTrue(document.file_hash)
def test_post_with_collections(self):
root_collection = Collection.get_first_root_node()
evil_plans_collection = root_collection.add_child(name="Evil plans")
# Build a fake file
fake_file = ContentFile(b("A boring example document"))
fake_file.name = 'test.txt'
# Submit
post_data = {
'title': "Test document",
'file': fake_file,
'collection': evil_plans_collection.id,
}
response = self.client.post(reverse('wagtaildocs:add'), post_data)
# User should be redirected back to the index
self.assertRedirects(response, reverse('wagtaildocs:index'))
# Document should be created, and be placed in the Evil Plans collection
self.assertTrue(models.Document.objects.filter(title="Test document").exists())
root_collection = Collection.get_first_root_node()
self.assertEqual(
models.Document.objects.get(title="Test document").collection,
evil_plans_collection
)
class TestDocumentAddViewWithLimitedCollectionPermissions(TestCase, WagtailTestUtils):
def setUp(self):
add_doc_permission = Permission.objects.get(
content_type__app_label='wagtaildocs', codename='add_document'
)
admin_permission = Permission.objects.get(
content_type__app_label='wagtailadmin', codename='access_admin'
)
root_collection = Collection.get_first_root_node()
self.evil_plans_collection = root_collection.add_child(name="Evil plans")
conspirators_group = Group.objects.create(name="Evil conspirators")
conspirators_group.permissions.add(admin_permission)
GroupCollectionPermission.objects.create(
group=conspirators_group,
collection=self.evil_plans_collection,
permission=add_doc_permission
)
user = get_user_model().objects.create_user(
username='moriarty',
email='[email protected]',
password='password'
)
user.groups.add(conspirators_group)
self.client.login(username='moriarty', password='password')
def test_get(self):
response = self.client.get(reverse('wagtaildocs:add'))
self.assertEqual(response.status_code, 200)
self.assertTemplateUsed(response, 'wagtaildocs/documents/add.html')
# user only has access to one collection, so no 'Collection' option
# is displayed on the form
self.assertNotContains(response, '<label for="id_collection">')
def test_post(self):
# Build a fake file
fake_file = ContentFile(b("A boring example document"))
fake_file.name = 'test.txt'
# Submit
post_data = {
'title': "Test document",
'file': fake_file,
}
response = self.client.post(reverse('wagtaildocs:add'), post_data)
# User should be redirected back to the index
self.assertRedirects(response, reverse('wagtaildocs:index'))
# Document should be created in the 'evil plans' collection,
# despite there being no collection field in the form, because that's the
# only one the user has access to
self.assertTrue(models.Document.objects.filter(title="Test document").exists())
self.assertEqual(
models.Document.objects.get(title="Test document").collection,
self.evil_plans_collection
)
class TestDocumentEditView(TestCase, WagtailTestUtils):
def setUp(self):
self.login()
# Build a fake file
fake_file = ContentFile(b("A boring example document"))
fake_file.name = 'test.txt'
# Create a document to edit
self.document = models.Document.objects.create(title="Test document", file=fake_file)
def test_simple(self):
response = self.client.get(reverse('wagtaildocs:edit', args=(self.document.id,)))
self.assertEqual(response.status_code, 200)
self.assertTemplateUsed(response, 'wagtaildocs/documents/edit.html')
# Ensure the form supports file uploads
self.assertContains(response, 'enctype="multipart/form-data"')
def test_post(self):
# Build a fake file
fake_file = ContentFile(b("A boring example document"))
fake_file.name = 'test.txt'
# Submit title change
post_data = {
'title': "Test document changed!",
'file': fake_file,
}
response = self.client.post(reverse('wagtaildocs:edit', args=(self.document.id,)), post_data)
# User should be redirected back to the index
self.assertRedirects(response, reverse('wagtaildocs:index'))
# Document title should be changed
self.assertEqual(models.Document.objects.get(id=self.document.id).title, "Test document changed!")
def test_with_missing_source_file(self):
# Build a fake file
fake_file = ContentFile(b("An ephemeral document"))
fake_file.name = 'to-be-deleted.txt'
# Create a new document to delete the source for
document = models.Document.objects.create(title="Test missing source document", file=fake_file)
document.file.delete(False)
response = self.client.get(reverse('wagtaildocs:edit', args=(document.id,)), {})
self.assertEqual(response.status_code, 200)
self.assertTemplateUsed(response, 'wagtaildocs/documents/edit.html')
self.assertContains(response, 'File not found')
@override_settings(WAGTAIL_USAGE_COUNT_ENABLED=True)
def test_usage_link(self):
response = self.client.get(reverse('wagtaildocs:edit', args=(self.document.id,)))
self.assertEqual(response.status_code, 200)
self.assertTemplateUsed(response, 'wagtaildocs/documents/edit.html')
self.assertContains(response, self.document.usage_url)
self.assertContains(response, 'Used 0 times')
def test_reupload_same_name(self):
"""
Checks that reuploading the document file with the same file name
changes the file name, to avoid browser cache issues (see #3816).
"""
old_filename = self.document.file.name
new_name = self.document.filename
new_file = SimpleUploadedFile(new_name, b'An updated test content.')
response = self.client.post(reverse('wagtaildocs:edit', args=(self.document.pk,)), {
'title': self.document.title, 'file': new_file,
})
self.assertRedirects(response, reverse('wagtaildocs:index'))
self.document.refresh_from_db()
self.assertFalse(self.document.file.storage.exists(old_filename))
self.assertTrue(self.document.file.storage.exists(self.document.file.name))
self.assertNotEqual(self.document.file.name, 'documents/' + new_name)
self.assertEqual(self.document.file.read(),
b'An updated test content.')
def test_reupload_different_name(self):
"""
Checks that reuploading the document file with a different file name
correctly uses the new file name.
"""
old_filename = self.document.file.name
new_name = 'test_reupload_different_name.txt'
new_file = SimpleUploadedFile(new_name, b'An updated test content.')
response = self.client.post(reverse('wagtaildocs:edit', args=(self.document.pk,)), {
'title': self.document.title, 'file': new_file,
})
self.assertRedirects(response, reverse('wagtaildocs:index'))
self.document.refresh_from_db()
self.assertFalse(self.document.file.storage.exists(old_filename))
self.assertTrue(self.document.file.storage.exists(self.document.file.name))
self.assertEqual(self.document.file.name, 'documents/' + new_name)
self.assertEqual(self.document.file.read(),
b'An updated test content.')
class TestDocumentDeleteView(TestCase, WagtailTestUtils):
def setUp(self):
self.login()
# Create a document to delete
self.document = models.Document.objects.create(title="Test document")
def test_simple(self):
response = self.client.get(reverse('wagtaildocs:delete', args=(self.document.id,)))
self.assertEqual(response.status_code, 200)
self.assertTemplateUsed(response, 'wagtaildocs/documents/confirm_delete.html')
def test_delete(self):
# Submit title change
response = self.client.post(reverse('wagtaildocs:delete', args=(self.document.id,)))
# User should be redirected back to the index
self.assertRedirects(response, reverse('wagtaildocs:index'))
# Document should be deleted
self.assertFalse(models.Document.objects.filter(id=self.document.id).exists())
@override_settings(WAGTAIL_USAGE_COUNT_ENABLED=True)
def test_usage_link(self):
response = self.client.get(reverse('wagtaildocs:delete', args=(self.document.id,)))
self.assertEqual(response.status_code, 200)
self.assertTemplateUsed(response, 'wagtaildocs/documents/confirm_delete.html')
self.assertContains(response, self.document.usage_url)
self.assertContains(response, 'Used 0 times')
class TestMultipleDocumentUploader(TestCase, WagtailTestUtils):
"""
This tests the multiple document upload views located in wagtaildocs/views/multiple.py
"""
edit_post_data = {
'title': "New title!",
'tags': "",
}
def setUp(self):
self.login()
# Create a document for running tests on
self.doc = models.get_document_model().objects.create(
title="Test document",
file=ContentFile(b("Simple text document")),
)
def check_doc_after_edit(self):
self.doc.refresh_from_db()
self.assertEqual(self.doc.title, "New title!")
self.assertFalse(self.doc.tags.all())
def test_add(self):
"""
This tests that the add view responds correctly on a GET request
"""
# Send request
response = self.client.get(reverse('wagtaildocs:add_multiple'))
# Check response
self.assertEqual(response.status_code, 200)
self.assertTemplateUsed(response, 'wagtaildocs/multiple/add.html')
# no collection chooser when only one collection exists
self.assertNotContains(response, '<label for="id_adddocument_collection">')
def test_add_with_collections(self):
root_collection = Collection.get_first_root_node()
root_collection.add_child(name="Evil plans")
# Send request
response = self.client.get(reverse('wagtaildocs:add_multiple'))
# Check response
self.assertEqual(response.status_code, 200)
self.assertTemplateUsed(response, 'wagtaildocs/multiple/add.html')
        # collection chooser should exist
self.assertContains(response, '<label for="id_adddocument_collection">')
self.assertContains(response, 'Evil plans')
def test_add_post(self):
"""
This tests that a POST request to the add view saves the document and returns an edit form
"""
response = self.client.post(reverse('wagtaildocs:add_multiple'), {
'files[]': SimpleUploadedFile('test.png', b"Simple text document"),
}, HTTP_X_REQUESTED_WITH='XMLHttpRequest')
# Check response
self.assertEqual(response.status_code, 200)
self.assertEqual(response['Content-Type'], 'application/json')
self.assertTemplateUsed(response, 'wagtaildocs/multiple/edit_form.html')
# Check document
self.assertIn('doc', response.context)
self.assertEqual(response.context['doc'].title, 'test.png')
self.assertTrue(response.context['doc'].file_size)
self.assertTrue(response.context['doc'].file_hash)
# check that it is in the root collection
doc = models.get_document_model().objects.get(title='test.png')
root_collection = Collection.get_first_root_node()
self.assertEqual(doc.collection, root_collection)
# Check form
self.assertIn('form', response.context)
self.assertEqual(
set(response.context['form'].fields),
set(models.get_document_model().admin_form_fields) - {'file', 'collection'},
)
self.assertEqual(response.context['form'].initial['title'], 'test.png')
# Check JSON
response_json = json.loads(response.content.decode())
self.assertIn('doc_id', response_json)
self.assertIn('form', response_json)
self.assertIn('success', response_json)
self.assertEqual(response_json['doc_id'], response.context['doc'].id)
self.assertTrue(response_json['success'])
# form should not contain a collection chooser
self.assertNotIn('Collection', response_json['form'])
def test_add_post_with_collections(self):
"""
This tests that a POST request to the add view saves the document
and returns an edit form, when collections are active
"""
root_collection = Collection.get_first_root_node()
evil_plans_collection = root_collection.add_child(name="Evil plans")
response = self.client.post(reverse('wagtaildocs:add_multiple'), {
'files[]': SimpleUploadedFile('test.png', b"Simple text document"),
'collection': evil_plans_collection.id
}, HTTP_X_REQUESTED_WITH='XMLHttpRequest')
# Check response
self.assertEqual(response.status_code, 200)
self.assertEqual(response['Content-Type'], 'application/json')
self.assertTemplateUsed(response, 'wagtaildocs/multiple/edit_form.html')
# Check document
self.assertIn('doc', response.context)
self.assertEqual(response.context['doc'].title, 'test.png')
self.assertTrue(response.context['doc'].file_size)
self.assertTrue(response.context['doc'].file_hash)
# check that it is in the 'evil plans' collection
doc = models.get_document_model().objects.get(title='test.png')
root_collection = Collection.get_first_root_node()
self.assertEqual(doc.collection, evil_plans_collection)
# Check form
self.assertIn('form', response.context)
self.assertEqual(
set(response.context['form'].fields),
set(models.get_document_model().admin_form_fields) - {'file'} | {'collection'},
)
self.assertEqual(response.context['form'].initial['title'], 'test.png')
# Check JSON
response_json = json.loads(response.content.decode())
self.assertIn('doc_id', response_json)
self.assertIn('form', response_json)
self.assertIn('success', response_json)
self.assertEqual(response_json['doc_id'], response.context['doc'].id)
self.assertTrue(response_json['success'])
# form should contain a collection chooser
self.assertIn('Collection', response_json['form'])
def test_add_post_noajax(self):
"""
This tests that only AJAX requests are allowed to POST to the add view
"""
response = self.client.post(reverse('wagtaildocs:add_multiple'))
# Check response
self.assertEqual(response.status_code, 400)
def test_add_post_nofile(self):
"""
This tests that the add view checks for a file when a user POSTs to it
"""
response = self.client.post(reverse('wagtaildocs:add_multiple'), HTTP_X_REQUESTED_WITH='XMLHttpRequest')
# Check response
self.assertEqual(response.status_code, 400)
def test_edit_get(self):
"""
This tests that a GET request to the edit view returns a 405 "METHOD NOT ALLOWED" response
"""
# Send request
response = self.client.get(reverse('wagtaildocs:edit_multiple', args=(self.doc.id, )))
# Check response
self.assertEqual(response.status_code, 405)
def test_edit_post(self):
"""
This tests that a POST request to the edit view edits the document
"""
# Send request
response = self.client.post(
reverse('wagtaildocs:edit_multiple', args=(self.doc.id, )),
{'doc-%d-%s' % (self.doc.id, field): data for field, data in self.edit_post_data.items()},
HTTP_X_REQUESTED_WITH='XMLHttpRequest',
)
# Check response
self.assertEqual(response.status_code, 200)
self.assertEqual(response['Content-Type'], 'application/json')
# Check JSON
response_json = json.loads(response.content.decode())
self.assertIn('doc_id', response_json)
self.assertNotIn('form', response_json)
self.assertIn('success', response_json)
self.assertEqual(response_json['doc_id'], self.doc.id)
self.assertTrue(response_json['success'])
self.check_doc_after_edit()
def test_edit_post_noajax(self):
"""
This tests that a POST request to the edit view without AJAX returns a 400 response
"""
# Send request
response = self.client.post(reverse('wagtaildocs:edit_multiple', args=(self.doc.id, )), {
('doc-%d-title' % self.doc.id): "New title!",
('doc-%d-tags' % self.doc.id): "",
})
# Check response
self.assertEqual(response.status_code, 400)
def test_edit_post_validation_error(self):
"""
This tests that a POST request to the edit page returns a json document with "success=False"
and a form with the validation error indicated
"""
# Send request
response = self.client.post(reverse('wagtaildocs:edit_multiple', args=(self.doc.id, )), {
('doc-%d-title' % self.doc.id): "", # Required
('doc-%d-tags' % self.doc.id): "",
}, HTTP_X_REQUESTED_WITH='XMLHttpRequest')
# Check response
self.assertEqual(response.status_code, 200)
self.assertEqual(response['Content-Type'], 'application/json')
self.assertTemplateUsed(response, 'wagtaildocs/multiple/edit_form.html')
# Check that a form error was raised
self.assertFormError(response, 'form', 'title', "This field is required.")
# Check JSON
response_json = json.loads(response.content.decode())
self.assertIn('doc_id', response_json)
self.assertIn('form', response_json)
self.assertIn('success', response_json)
self.assertEqual(response_json['doc_id'], self.doc.id)
self.assertFalse(response_json['success'])
def test_delete_get(self):
"""
This tests that a GET request to the delete view returns a 405 "METHOD NOT ALLOWED" response
"""
# Send request
response = self.client.get(reverse('wagtaildocs:delete_multiple', args=(self.doc.id, )))
# Check response
self.assertEqual(response.status_code, 405)
def test_delete_post(self):
"""
This tests that a POST request to the delete view deletes the document
"""
# Send request
response = self.client.post(reverse('wagtaildocs:delete_multiple', args=(self.doc.id, )), HTTP_X_REQUESTED_WITH='XMLHttpRequest')
# Check response
self.assertEqual(response.status_code, 200)
self.assertEqual(response['Content-Type'], 'application/json')
# Make sure the document is deleted
self.assertFalse(models.get_document_model().objects.filter(id=self.doc.id).exists())
# Check JSON
response_json = json.loads(response.content.decode())
self.assertIn('doc_id', response_json)
self.assertIn('success', response_json)
self.assertEqual(response_json['doc_id'], self.doc.id)
self.assertTrue(response_json['success'])
def test_delete_post_noajax(self):
"""
This tests that a POST request to the delete view without AJAX returns a 400 response
"""
# Send request
response = self.client.post(reverse('wagtaildocs:delete_multiple', args=(self.doc.id, )))
# Check response
self.assertEqual(response.status_code, 400)
@override_settings(WAGTAILDOCS_DOCUMENT_MODEL='tests.CustomDocument')
class TestMultipleCustomDocumentUploader(TestMultipleDocumentUploader):
edit_post_data = dict(TestMultipleDocumentUploader.edit_post_data, description="New description.")
def check_doc_after_edit(self):
super().check_doc_after_edit()
self.assertEqual(self.doc.description, "New description.")
class TestMultipleCustomDocumentUploaderNoCollection(TestMultipleCustomDocumentUploader):
@classmethod
def setUpClass(cls):
super().setUpClass()
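# Patch the custom document model so its admin form omits the 'collection'
# field; the inherited uploader tests should then run without a collection chooser.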
Document = models.get_document_model()
fields = tuple(f for f in Document.admin_form_fields if f != 'collection')
cls.__patcher = mock.patch.object(Document, 'admin_form_fields', fields)
cls.__patcher.start()
@classmethod
def tearDownClass(cls):
cls.__patcher.stop()
super().tearDownClass()
class TestDocumentChooserView(TestCase, WagtailTestUtils):
def setUp(self):
self.user = self.login()
def test_simple(self):
response = self.client.get(reverse('wagtaildocs:chooser'))
self.assertEqual(response.status_code, 200)
self.assertTemplateUsed(response, 'wagtaildocs/chooser/chooser.html')
response_json = json.loads(response.content.decode())
self.assertEqual(response_json['step'], 'chooser')
def test_search(self):
response = self.client.get(reverse('wagtaildocs:chooser'), {'q': "Hello"})
self.assertEqual(response.status_code, 200)
self.assertEqual(response.context['query_string'], "Hello")
def make_docs(self):
for i in range(50):
document = models.Document(title="Test " + str(i))
document.save()
def test_pagination(self):
self.make_docs()
response = self.client.get(reverse('wagtaildocs:chooser'), {'p': 2})
# Check response
self.assertEqual(response.status_code, 200)
self.assertTemplateUsed(response, 'wagtaildocs/documents/list.html')
# Check that we got the correct page
self.assertEqual(response.context['documents'].number, 2)
def test_pagination_invalid(self):
self.make_docs()
response = self.client.get(reverse('wagtaildocs:chooser'), {'p': 'Hello World!'})
# Check response
self.assertEqual(response.status_code, 200)
self.assertTemplateUsed(response, 'wagtaildocs/documents/list.html')
# Check that we got page one
self.assertEqual(response.context['documents'].number, 1)
def test_pagination_out_of_range(self):
self.make_docs()
response = self.client.get(reverse('wagtaildocs:chooser'), {'p': 99999})
# Check response
self.assertEqual(response.status_code, 200)
self.assertTemplateUsed(response, 'wagtaildocs/documents/list.html')
# Check that we got the last page
self.assertEqual(response.context['documents'].number, response.context['documents'].paginator.num_pages)
def test_construct_queryset_hook_browse(self):
document = models.Document.objects.create(
title="Test document shown",
uploaded_by_user=self.user,
)
models.Document.objects.create(
title="Test document not shown",
)
def filter_documents(documents, request):
# Filter on `uploaded_by_user` because it is
# the only default FilterField in search_fields
return documents.filter(uploaded_by_user=self.user)
with self.register_hook('construct_document_chooser_queryset', filter_documents):
response = self.client.get(reverse('wagtaildocs:chooser'))
self.assertEqual(len(response.context['documents']), 1)
self.assertEqual(response.context['documents'][0], document)
def test_construct_queryset_hook_search(self):
document = models.Document.objects.create(
title="Test document shown",
uploaded_by_user=self.user,
)
models.Document.objects.create(
title="Test document not shown",
)
def filter_documents(documents, request):
# Filter on `uploaded_by_user` because it is
# the only default FilterField in search_fields
return documents.filter(uploaded_by_user=self.user)
with self.register_hook('construct_document_chooser_queryset', filter_documents):
response = self.client.get(reverse('wagtaildocs:chooser'), {'q': 'Test'})
self.assertEqual(len(response.context['documents']), 1)
self.assertEqual(response.context['documents'][0], document)
def test_index_without_collections(self):
self.make_docs()
response = self.client.get(reverse('wagtaildocs:index'))
self.assertNotContains(response, '<th>Collection</th>')
self.assertNotContains(response, '<td>Root</td>')
def test_index_with_collection(self):
root_collection = Collection.get_first_root_node()
root_collection.add_child(name="Evil plans")
self.make_docs()
response = self.client.get(reverse('wagtaildocs:index'))
self.assertContains(response, '<th>Collection</th>')
self.assertContains(response, '<td>Root</td>')
class TestDocumentChooserChosenView(TestCase, WagtailTestUtils):
def setUp(self):
self.login()
# Create a document to choose
self.document = models.Document.objects.create(title="Test document")
def test_simple(self):
response = self.client.get(reverse('wagtaildocs:document_chosen', args=(self.document.id,)))
self.assertEqual(response.status_code, 200)
response_json = json.loads(response.content.decode())
self.assertEqual(response_json['step'], 'document_chosen')
class TestDocumentChooserUploadView(TestCase, WagtailTestUtils):
def setUp(self):
self.login()
def test_simple(self):
response = self.client.get(reverse('wagtaildocs:chooser_upload'))
self.assertEqual(response.status_code, 200)
self.assertTemplateUsed(response, 'wagtaildocs/chooser/chooser.html')
response_json = json.loads(response.content.decode())
self.assertEqual(response_json['step'], 'chooser')
def test_post(self):
# Build a fake file
fake_file = ContentFile(b("A boring example document"))
fake_file.name = 'test.txt'
# Submit
post_data = {
'title': "Test document",
'file': fake_file,
}
response = self.client.post(reverse('wagtaildocs:chooser_upload'), post_data)
# Check that the response is the 'document_chosen' step
response_json = json.loads(response.content.decode())
self.assertEqual(response_json['step'], 'document_chosen')
# Document should be created
self.assertTrue(models.Document.objects.filter(title="Test document").exists())
class TestDocumentChooserUploadViewWithLimitedPermissions(TestCase, WagtailTestUtils):
def setUp(self):
add_doc_permission = Permission.objects.get(
content_type__app_label='wagtaildocs', codename='add_document'
)
admin_permission = Permission.objects.get(
content_type__app_label='wagtailadmin', codename='access_admin'
)
root_collection = Collection.get_first_root_node()
self.evil_plans_collection = root_collection.add_child(name="Evil plans")
conspirators_group = Group.objects.create(name="Evil conspirators")
conspirators_group.permissions.add(admin_permission)
GroupCollectionPermission.objects.create(
group=conspirators_group,
collection=self.evil_plans_collection,
permission=add_doc_permission
)
user = get_user_model().objects.create_user(
username='moriarty',
email='[email protected]',
password='password'
)
user.groups.add(conspirators_group)
self.client.login(username='moriarty', password='password')
def test_simple(self):
response = self.client.get(reverse('wagtaildocs:chooser_upload'))
self.assertEqual(response.status_code, 200)
self.assertTemplateUsed(response, 'wagtaildocs/chooser/chooser.html')
response_json = json.loads(response.content.decode())
self.assertEqual(response_json['step'], 'chooser')
# user only has access to one collection -> should not see the collections field
self.assertNotIn('id_collection', response_json['html'])
def test_chooser_view(self):
# The main chooser view also includes the form, so need to test there too
response = self.client.get(reverse('wagtaildocs:chooser'))
self.assertEqual(response.status_code, 200)
self.assertTemplateUsed(response, 'wagtaildocs/chooser/chooser.html')
response_json = json.loads(response.content.decode())
self.assertEqual(response_json['step'], 'chooser')
# user only has access to one collection -> should not see the collections field
self.assertNotIn('id_collection', response_json['html'])
def test_post(self):
# Build a fake file
fake_file = ContentFile(b("A boring example document"))
fake_file.name = 'test.txt'
# Submit
post_data = {
'title': "Test document",
'file': fake_file,
}
response = self.client.post(reverse('wagtaildocs:chooser_upload'), post_data)
# Check that the response is the 'document_chosen' step
response_json = json.loads(response.content.decode())
self.assertEqual(response_json['step'], 'document_chosen')
# Document should be created
doc = models.Document.objects.filter(title="Test document")
self.assertTrue(doc.exists())
# Document should be in the 'evil plans' collection
self.assertEqual(doc.get().collection, self.evil_plans_collection)
class TestUsageCount(TestCase, WagtailTestUtils):
fixtures = ['test.json']
def setUp(self):
self.login()
@override_settings(WAGTAIL_USAGE_COUNT_ENABLED=True)
def test_unused_document_usage_count(self):
doc = models.Document.objects.get(id=1)
self.assertEqual(doc.get_usage().count(), 0)
@override_settings(WAGTAIL_USAGE_COUNT_ENABLED=True)
def test_used_document_usage_count(self):
doc = models.Document.objects.get(id=1)
page = EventPage.objects.get(id=4)
event_page_related_link = EventPageRelatedLink()
event_page_related_link.page = page
event_page_related_link.link_document = doc
event_page_related_link.save()
self.assertEqual(doc.get_usage().count(), 1)
def test_usage_count_does_not_appear(self):
doc = models.Document.objects.get(id=1)
page = EventPage.objects.get(id=4)
event_page_related_link = EventPageRelatedLink()
event_page_related_link.page = page
event_page_related_link.link_document = doc
event_page_related_link.save()
response = self.client.get(reverse('wagtaildocs:edit',
args=(1,)))
self.assertNotContains(response, 'Used 1 time')
@override_settings(WAGTAIL_USAGE_COUNT_ENABLED=True)
def test_usage_count_appears(self):
doc = models.Document.objects.get(id=1)
page = EventPage.objects.get(id=4)
event_page_related_link = EventPageRelatedLink()
event_page_related_link.page = page
event_page_related_link.link_document = doc
event_page_related_link.save()
response = self.client.get(reverse('wagtaildocs:edit',
args=(1,)))
self.assertContains(response, 'Used 1 time')
@override_settings(WAGTAIL_USAGE_COUNT_ENABLED=True)
def test_usage_count_zero_appears(self):
response = self.client.get(reverse('wagtaildocs:edit',
args=(1,)))
self.assertContains(response, 'Used 0 times')
class TestGetUsage(TestCase, WagtailTestUtils):
fixtures = ['test.json']
def setUp(self):
self.login()
def test_document_get_usage_not_enabled(self):
doc = models.Document.objects.get(id=1)
self.assertEqual(list(doc.get_usage()), [])
@override_settings(WAGTAIL_USAGE_COUNT_ENABLED=True)
def test_unused_document_get_usage(self):
doc = models.Document.objects.get(id=1)
self.assertEqual(list(doc.get_usage()), [])
@override_settings(WAGTAIL_USAGE_COUNT_ENABLED=True)
def test_used_document_get_usage(self):
doc = models.Document.objects.get(id=1)
page = EventPage.objects.get(id=4)
event_page_related_link = EventPageRelatedLink()
event_page_related_link.page = page
event_page_related_link.link_document = doc
event_page_related_link.save()
self.assertTrue(issubclass(Page, type(doc.get_usage()[0])))
@override_settings(WAGTAIL_USAGE_COUNT_ENABLED=True)
def test_usage_page(self):
doc = models.Document.objects.get(id=1)
page = EventPage.objects.get(id=4)
event_page_related_link = EventPageRelatedLink()
event_page_related_link.page = page
event_page_related_link.link_document = doc
event_page_related_link.save()
response = self.client.get(reverse('wagtaildocs:document_usage',
args=(1,)))
self.assertContains(response, 'Christmas')
@override_settings(WAGTAIL_USAGE_COUNT_ENABLED=True)
def test_usage_page_no_usage(self):
response = self.client.get(reverse('wagtaildocs:document_usage',
args=(1,)))
# There's no usage so there should be no table rows
self.assertRegex(
response.content.decode('utf-8'),
r'<tbody>(\s|\n)*</tbody>'
)
class TestEditOnlyPermissions(TestCase, WagtailTestUtils):
def setUp(self):
# Build a fake file
fake_file = ContentFile(b("A boring example document"))
fake_file.name = 'test.txt'
self.root_collection = Collection.get_first_root_node()
self.evil_plans_collection = self.root_collection.add_child(name="Evil plans")
self.nice_plans_collection = self.root_collection.add_child(name="Nice plans")
# Create a document to edit
self.document = models.Document.objects.create(
title="Test document", file=fake_file, collection=self.nice_plans_collection
)
# Create a user with change_document permission but not add_document
user = get_user_model().objects.create_user(
username='changeonly',
email='[email protected]',
password='password'
)
change_permission = Permission.objects.get(
content_type__app_label='wagtaildocs', codename='change_document'
)
admin_permission = Permission.objects.get(
content_type__app_label='wagtailadmin', codename='access_admin'
)
self.changers_group = Group.objects.create(name='Document changers')
GroupCollectionPermission.objects.create(
group=self.changers_group, collection=self.root_collection,
permission=change_permission
)
user.groups.add(self.changers_group)
user.user_permissions.add(admin_permission)
self.assertTrue(self.client.login(username='changeonly', password='password'))
def test_get_index(self):
response = self.client.get(reverse('wagtaildocs:index'))
self.assertEqual(response.status_code, 200)
self.assertTemplateUsed(response, 'wagtaildocs/documents/index.html')
# user should not get an "Add a document" button
self.assertNotContains(response, "Add a document")
# user should be able to see documents not owned by them
self.assertContains(response, "Test document")
def test_search(self):
response = self.client.get(reverse('wagtaildocs:index'), {'q': "Hello"})
self.assertEqual(response.status_code, 200)
self.assertEqual(response.context['query_string'], "Hello")
def test_get_add(self):
response = self.client.get(reverse('wagtaildocs:add'))
# permission should be denied
self.assertRedirects(response, reverse('wagtailadmin_home'))
def test_get_edit(self):
response = self.client.get(reverse('wagtaildocs:edit', args=(self.document.id,)))
self.assertEqual(response.status_code, 200)
self.assertTemplateUsed(response, 'wagtaildocs/documents/edit.html')
# documents can only be moved to collections you have add permission for,
# so the 'collection' field is not available here
self.assertNotContains(response, '<label for="id_collection">')
# if the user has add permission on a different collection,
# they should have option to move the document
add_permission = Permission.objects.get(
content_type__app_label='wagtaildocs', codename='add_document'
)
GroupCollectionPermission.objects.create(
group=self.changers_group, collection=self.evil_plans_collection,
permission=add_permission
)
response = self.client.get(reverse('wagtaildocs:edit', args=(self.document.id,)))
self.assertEqual(response.status_code, 200)
self.assertContains(response, '<label for="id_collection">')
self.assertContains(response, 'Nice plans')
self.assertContains(response, 'Evil plans')
def test_post_edit(self):
# Submit title change
response = self.client.post(
reverse('wagtaildocs:edit', args=(self.document.id,)), {
'title': "Test document changed!",
'file': '',
}
)
# User should be redirected back to the index
self.assertRedirects(response, reverse('wagtaildocs:index'))
# Document title should be changed
self.assertEqual(
models.Document.objects.get(id=self.document.id).title,
"Test document changed!"
)
# collection should be unchanged
self.assertEqual(
models.Document.objects.get(id=self.document.id).collection,
self.nice_plans_collection
)
# if the user has add permission on a different collection,
# they should have option to move the document
add_permission = Permission.objects.get(
content_type__app_label='wagtaildocs', codename='add_document'
)
GroupCollectionPermission.objects.create(
group=self.changers_group, collection=self.evil_plans_collection,
permission=add_permission
)
response = self.client.post(
reverse('wagtaildocs:edit', args=(self.document.id,)), {
'title': "Test document changed!",
'collection': self.evil_plans_collection.id,
'file': '',
}
)
self.assertEqual(
models.Document.objects.get(id=self.document.id).collection,
self.evil_plans_collection
)
def test_get_delete(self):
response = self.client.get(reverse('wagtaildocs:delete', args=(self.document.id,)))
self.assertEqual(response.status_code, 200)
self.assertTemplateUsed(response, 'wagtaildocs/documents/confirm_delete.html')
def test_get_add_multiple(self):
response = self.client.get(reverse('wagtaildocs:add_multiple'))
# permission should be denied
self.assertRedirects(response, reverse('wagtailadmin_home'))
|
py | 1a43db0929e254a44756cfcc7634a1e69df7ed56 | import cantera as ct
def test(original_file, new_file):
"""Test written cti file against original cti file.
Arguments
-------------
Original cti file
Newly written cti file
Returns
-----------
List of file properties that do not match
"""
original = ct.Solution(original_file)
new = ct.Solution(new_file)
comparison_list = []
for i, species in enumerate(new.species_names):
if species in original.species_names:
comparison_list.append(species)
def test_species_def():
num = 0
#make sure comparing same species
for i, name1 in enumerate(new.species_names):
#start comparison with same species
new_species= new.species(i)
for j, name2 in enumerate(original.species_names):
if name1.upper().lower() == name2.upper().lower():
original_species=original.species(j)
num += 1
if original_species.name.upper().lower() != new_species.name.upper().lower():
print (j, original_species, i, new_species,)
assert original_species.composition == new_species.composition
assert original_species.thermo.coeffs.all() == new_species.thermo.coeffs.all()
assert original_species.transport.geometry == new_species.transport.geometry
assert original_species.transport.diameter == new_species.transport.diameter
assert original_species.transport.well_depth == new_species.transport.well_depth
assert original_species.transport.polarizability == new_species.transport.polarizability
assert original_species.transport.rotational_relaxation == new_species.transport.rotational_relaxation
assert original_species.transport.dipole == new_species.transport.dipole
print ('done with testing species definition info \n\n\n')
def test_reactions():
c = 4184.0  # J per kcal; used below to convert activation energies for printing
num = 0
print('Any errors shown below:\n')
for k, name1 in enumerate(new.reaction_equations()):
num += 1
new_reaction=new.reaction(k)
new_equation_type = type(new_reaction).__name__
for l, name2 in enumerate(original.reaction_equations()):
if original.reaction(l).equation == new_reaction.equation:
original_reaction=original.reaction(l)
original_equation_type=type(original_reaction).__name__
assert new_equation_type == original_equation_type
try:
if new_reaction.rate.pre_exponential_factor != original_reaction.rate.pre_exponential_factor:
#if new_reaction.rate.pre_exponential_factor/ original_reaction.rate.pre_exponential_factor > .004:
print (k, (new_reaction.rate.pre_exponential_factor/ original_reaction.rate.pre_exponential_factor), new_reaction.reaction_type, new_reaction.rate.temperature_exponent, (new_reaction.rate.activation_energy/c) , new_reaction )
except AttributeError as e:
if str(e) == '\'cantera._cantera.FalloffReaction\' object has no attribute \'rate\'':
continue
#assert new_reaction.efficiencies == original_reaction.efficiencies
print ('\ndone with testing equation info ')
test_species_def()
test_reactions()
test('gri301.cti', 'pym_gri30.cti')
|
py | 1a43dc3a1178c2b7ec43fcbd357897bda2bc5638 | #!/usr/bin/env python
#
# Copyright 2005,2006,2007 Free Software Foundation, Inc.
#
# This file is part of GNU Radio
#
# GNU Radio is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 3, or (at your option)
# any later version.
#
# GNU Radio is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with GNU Radio; see the file COPYING. If not, write to
# the Free Software Foundation, Inc., 51 Franklin Street,
# Boston, MA 02110-1301, USA.
#
from gnuradio import gr, gru, eng_notation, filter
from gnuradio import audio
from gnuradio import analog
from gnuradio import blocks
from gnuradio.eng_option import eng_option
from optparse import OptionParser
import sys
import math
class wfm_rx_block (gr.top_block):
def __init__(self):
gr.top_block.__init__(self)
usage = "usage: %prog [options] input-samples-320kS.dat output.wav"
parser=OptionParser(option_class=eng_option, usage=usage)
parser.add_option("-V", "--volume", type="eng_float", default=None,
help="set volume (default is midpoint)")
(options, args) = parser.parse_args()
if len(args) != 2:
parser.print_help()
sys.exit(1)
input_filename = args[0]
output_filename = args[1]
self.vol = 0
# build graph
self.src = blocks.file_source(gr.sizeof_gr_complex, input_filename, False)
adc_rate = 64e6 # 64 MS/s
usrp_decim = 200
usrp_rate = adc_rate / usrp_decim # 320 kS/s
chanfilt_decim = 1
demod_rate = usrp_rate / chanfilt_decim
audio_decimation = 10
audio_rate = demod_rate / audio_decimation # 32 kHz
chan_filt_coeffs = filter.optfir.low_pass (1, # gain
usrp_rate, # sampling rate
80e3, # passband cutoff
115e3, # stopband cutoff
0.1, # passband ripple
60) # stopband attenuation
#print len(chan_filt_coeffs)
chan_filt = filter.fir_filter_ccf (chanfilt_decim, chan_filt_coeffs)
#self.guts = analog.wfm_rcv (demod_rate, audio_decimation)
self.guts = analog.wfm_rcv_pll (demod_rate, audio_decimation)
# FIXME rework {add,multiply}_const_* to handle multiple streams
self.volume_control_l = blocks.multiply_const_ff(self.vol)
self.volume_control_r = blocks.multiply_const_ff(self.vol)
# wave file as final sink
if 1:
sink = blocks.wavfile_sink(output_filename, 2, int(audio_rate), 16)
else:
sink = audio.sink (int (audio_rate),
options.audio_output,
False) # ok_to_block
# now wire it all together
self.connect (self.src, chan_filt, self.guts)
self.connect ((self.guts, 0), self.volume_control_l, (sink, 0))
self.connect ((self.guts, 1), self.volume_control_r, (sink, 1))
try:
self.guts.stereo_carrier_pll_recovery.squelch_enable(True)
except:
pass
#print "FYI: This implementation of the stereo_carrier_pll_recovery has no squelch implementation yet"
if options.volume is None:
g = self.volume_range()
options.volume = float(g[0]+g[1])/2
# set initial values
self.set_vol(options.volume)
try:
self.guts.stereo_carrier_pll_recovery.set_lock_threshold(options.squelch)
except:
pass
#print "FYI: This implementation of the stereo_carrier_pll_recovery has no squelch implementation yet"
def set_vol (self, vol):
g = self.volume_range()
self.vol = max(g[0], min(g[1], vol))
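# treat the clamped volume as a gain in dB and convert it to the linear
# scale factor (10**(dB/10)) applied to both audio channels below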
self.volume_control_l.set_k(10**(self.vol/10))
self.volume_control_r.set_k(10**(self.vol/10))
def volume_range(self):
return (-20.0, 0.0, 0.5)
if __name__ == '__main__':
tb = wfm_rx_block()
try:
tb.run()
except KeyboardInterrupt:
pass
|
py | 1a43dc6f52c9ef04e265bcd9ba666008d0c7281f |
import tifffile
import tqdm
import os
import numpy as np
import sys
import fnmatch
def scandir(path, pat, cb):
for root, dirs, files in os.walk(path):
head, tail = os.path.split(root)
for file in files:
if fnmatch.fnmatch(file, pat):
fn = os.path.join(root, file)
cb(fn)
def sumframes(tiffinput, tiffoutput, numframes):
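# Sums every `numframes` consecutive pages of the input TIFF and writes each summed
# frame to the output as uint16; a trailing group smaller than `numframes` is dropped.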
print (tiffoutput)
with tifffile.TiffWriter(tiffoutput) as out_tif:
with tifffile.TiffFile(tiffinput) as in_tif:
total = len(in_tif.pages)
framesum = in_tif.pages[0].asarray()*0
n = 0
for f in range(total):
framesum += in_tif.pages[f].asarray()
n += 1
if n == numframes:
out_tif.save(framesum.astype(dtype=np.uint16))
sys.stdout.write(f"\rframe {f}/{total} ({f/total*100:.2f}%)")
n = 0
framesum *= 0
print()
def _process(args):
path, outdir, numframes = args
print(f"pid={os.getpid()}: {path}")
filename = os.path.split(path)[1]
os.makedirs(outdir, exist_ok=True)
outfile = outdir + filename
sumframes(path,outfile,numframes)
def sumframes_dir(inputdir, outputdir, numframes):
params = []
def cb(fn):
args=[ fn, outputdir, numframes]
#params.append(args)
_process(args)
scandir(inputdir, "*.tif", cb)
# p = Pool(8)
# p.map(_process, params)
if __name__ == "__main__":
sumframes_dir('O:/mod/', 'O:/mod-sumframes/', 6)
# split_tiff('../../../SMLM/data/gattaquant 80nm thirdshift/80nm-3rdshift-1_0.tif',
# '../../../SMLM/data/gattaquant 80nm thirdshift/80nm-3rdshift-psplit-1_0.tif', 0.5, 100.2, 1/0.47, 300,
# '../../../SMLM/data/gattaquant 80nm thirdshift/80nm-3rdshift-psplit-1_0-bg.tif') |
py | 1a43dc850c999408b2bfdd05636fc481bf8dc2ea | #!/usr/bin/env python3
# -*- coding: utf-8 -*-
# author: RShirohara
import argparse
import inspect
import sys
import novelconverter
_EXTENSION = (
"markdown",
"ddmarkdown",
"pixiv"
)
def get_args():
_parser = argparse.ArgumentParser(
description=novelconverter.__doc__,
formatter_class=argparse.RawDescriptionHelpFormatter
)
_parser.add_argument(
"from_format", type=str, help="Format of the original text"
)
_parser.add_argument(
"to_format", type=str, help="Format of the output text"
)
_parser.add_argument(
"-o", "--output", type=str, help="File path of the output text"
)
_parser.add_argument(
"-i", "--input", type=str, help="File path of the original text"
)
args = _parser.parse_args()
return args
def load_data(path):
if path:
with open(path, "r") as _f:
source = _f.read()
else:
source = sys.stdin.read()
return source
def export_data(source, path):
if path:
with open(path, "w") as _f:
_f.write(source)
else:
print(source)
def load_extension(ext_name, proc_name):
# check that ext_name is a known extension
if ext_name not in _EXTENSION:
raise ValueError(f"No extension named {ext_name} exists.")
ext = eval(f"novelconverter.extension.{ext_name}")
_in_processor = [
x[0] for x in inspect.getmembers(ext, inspect.isfunction)
]
processor = {
x.replace("build_", ""): eval(f"ext.{x}", {"ext": ext})
for x in _in_processor
}
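# e.g. a module-level function named build_renderer is exposed here as processor["renderer"]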
# check that proc_name exists in the extension
if proc_name not in processor.keys():
sys.stderr.write(f"No processor named {proc_name} exists.\n")
return novelconverter.util.Processor()
return processor[proc_name]()
class NovelConverter(novelconverter.NovelConverter):
def build_registry(self, from_form, to_form):
self.inlineparser = load_extension(from_form, "inlineparser")
self.blockparser = load_extension(from_form, "blockparser")
self.renderer = load_extension(to_form, "renderer")
self.preprocessor = load_extension(from_form, "preprocessor")
self.postprocessor = load_extension(to_form, "postprocessor")
def main():
args = get_args()
nv = NovelConverter()
nv.build_registry(args.from_format, args.to_format)
source = load_data(args.input)
result = nv.convert(source)
export_data(result, args.output)
if __name__ == "__main__":
main()
|
py | 1a43de7bddc714c1ee2f03b82d336b6fea4cc1f2 | # -*- coding: utf-8 -*-
'''
:codeauthor: :email:`Mike Place <[email protected]>`
'''
# Import python libs
from __future__ import absolute_import
import copy
import os
# Import Salt Testing libs
from salttesting import TestCase, skipIf
from salttesting.helpers import ensure_in_syspath
from salttesting.mock import NO_MOCK, NO_MOCK_REASON, patch, MagicMock
# Import salt libs
from salt import minion
from salt.utils import event
from salt.exceptions import SaltSystemExit
import salt.syspaths
import tornado
ensure_in_syspath('../')
__opts__ = {}
@skipIf(NO_MOCK, NO_MOCK_REASON)
class MinionTestCase(TestCase):
def test_invalid_master_address(self):
with patch.dict(__opts__, {'ipv6': False, 'master': float('127.0'), 'master_port': '4555', 'retry_dns': False}):
self.assertRaises(SaltSystemExit, minion.resolve_dns, __opts__)
@skipIf(os.geteuid() != 0, 'You must be logged in as root to run this test')
def test_sock_path_len(self):
'''
This tests whether or not a larger hash causes the sock path to exceed
the system's max sock path length. See the below link for more
information.
https://github.com/saltstack/salt/issues/12172#issuecomment-43903643
'''
opts = {
'id': 'salt-testing',
'hash_type': 'sha512',
'sock_dir': os.path.join(salt.syspaths.SOCK_DIR, 'minion'),
'extension_modules': ''
}
with patch.dict(__opts__, opts):
try:
event_publisher = event.AsyncEventPublisher(__opts__)
result = True
except SaltSystemExit:
result = False
self.assertTrue(result)
# Tests for _handle_decoded_payload in the salt.minion.Minion() class: 3
def test_handle_decoded_payload_jid_match_in_jid_queue(self):
'''
Tests that the _handle_decoded_payload function returns when a jid is given that is already present
in the jid_queue.
Note: This test doesn't contain all of the patch decorators above the function like the other tests
for _handle_decoded_payload below. This is essential to this test as the call to the function must
return None BEFORE any of the processes are spun up because we should be avoiding firing duplicate
jobs.
'''
mock_opts = {'cachedir': '',
'extension_modules': ''}
mock_data = {'fun': 'foo.bar',
'jid': 123}
mock_jid_queue = [123]
try:
minion = salt.minion.Minion(mock_opts, jid_queue=copy.copy(mock_jid_queue), io_loop=tornado.ioloop.IOLoop())
ret = minion._handle_decoded_payload(mock_data)
self.assertEqual(minion.jid_queue, mock_jid_queue)
self.assertIsNone(ret)
finally:
minion.destroy()
@patch('salt.minion.Minion.ctx', MagicMock(return_value={}))
@patch('salt.utils.process.SignalHandlingMultiprocessingProcess.start', MagicMock(return_value=True))
@patch('salt.utils.process.SignalHandlingMultiprocessingProcess.join', MagicMock(return_value=True))
def test_handle_decoded_payload_jid_queue_addition(self):
'''
Tests that the _handle_decoded_payload function adds a jid to the minion's jid_queue when the new
jid isn't already present in the jid_queue.
'''
mock_jid = 11111
mock_opts = {'cachedir': '',
'extension_modules': '',
'minion_jid_queue_hwm': 100}
mock_data = {'fun': 'foo.bar',
'jid': mock_jid}
mock_jid_queue = [123, 456]
try:
minion = salt.minion.Minion(mock_opts, jid_queue=copy.copy(mock_jid_queue), io_loop=tornado.ioloop.IOLoop())
# Assert that the minion's jid_queue attribute matches the mock_jid_queue as a baseline
# This can help debug any test failures if the _handle_decoded_payload call fails.
self.assertEqual(minion.jid_queue, mock_jid_queue)
# Call the _handle_decoded_payload function and update the mock_jid_queue to include the new
# mock_jid. The mock_jid should have been added to the jid_queue since the mock_jid wasn't
# previously included. The minion's jid_queue attribute and the mock_jid_queue should be equal.
minion._handle_decoded_payload(mock_data)
mock_jid_queue.append(mock_jid)
self.assertEqual(minion.jid_queue, mock_jid_queue)
finally:
minion.destroy()
@patch('salt.minion.Minion.ctx', MagicMock(return_value={}))
@patch('salt.utils.process.SignalHandlingMultiprocessingProcess.start', MagicMock(return_value=True))
@patch('salt.utils.process.SignalHandlingMultiprocessingProcess.join', MagicMock(return_value=True))
def test_handle_decoded_payload_jid_queue_reduced_minion_jid_queue_hwm(self):
'''
Tests that the _handle_decoded_payload function removes a jid from the minion's jid_queue when the
minion's jid_queue high water mark (minion_jid_queue_hwm) is hit.
'''
mock_opts = {'cachedir': '',
'extension_modules': '',
'minion_jid_queue_hwm': 2}
mock_data = {'fun': 'foo.bar',
'jid': 789}
mock_jid_queue = [123, 456]
try:
minion = salt.minion.Minion(mock_opts, jid_queue=copy.copy(mock_jid_queue), io_loop=tornado.ioloop.IOLoop())
# Assert that the minion's jid_queue attribute matches the mock_jid_queue as a baseline
# This can help debug any test failures if the _handle_decoded_payload call fails.
self.assertEqual(minion.jid_queue, mock_jid_queue)
# Call the _handle_decoded_payload function and check that the queue is smaller by one item
# and contains the new jid
minion._handle_decoded_payload(mock_data)
self.assertEqual(len(minion.jid_queue), 2)
self.assertEqual(minion.jid_queue, [456, 789])
finally:
minion.destroy()
if __name__ == '__main__':
from integration import run_tests
run_tests(MinionTestCase, needs_daemon=False)
|
py | 1a43de8807f039d656a0a5f550bd0ead9a82f2b2 | from logging import exception
import unittest
import warnings
from perfecto.test import TestResultFactory
import pytest
import sys
import time
from selenium.common.exceptions import NoSuchElementException
from selenium.webdriver.common.by import By
from appium import webdriver
import random
import allure
import string
if 'perfecto_libs' not in sys.path:
sys.path.append(f'../libs/perfecto_libs')
pytestmark = [pytest.mark.sanity, pytest.mark.interop, pytest.mark.android, pytest.mark.interop_and, pytest.mark.client_connect,
pytest.mark.interop_uc_sanity, pytest.mark.nat]
from android_lib import closeApp, set_APconnMobileDevice_android, Toggle_AirplaneMode_android, ForgetWifiConnection, openApp, get_ip_address_and
setup_params_general = {
"mode": "NAT",
"ssid_modes": {
"wpa": [{"ssid_name": "ssid_wpa_2g", "appliedRadios": ["2G"], "security_key": "something"},
{"ssid_name": "ssid_wpa_5g", "appliedRadios": ["5G"],
"security_key": "something"}],
"open": [{"ssid_name": "ssid_open_2g", "appliedRadios": ["2G"]},
{"ssid_name": "ssid_open_5g", "appliedRadios": ["5G"]}],
"wpa2_personal": [
{"ssid_name": "ssid_wpa2_2g", "appliedRadios": ["2G"], "security_key": "something"},
{"ssid_name": "ssid_wpa2_5g", "appliedRadios": ["5G"],
"security_key": "something"}]},
"rf": {},
"radius": False
}
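# The loop below appends a short random suffix and a timestamp fragment so each
# test run advertises unique SSID names.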
for sec_modes in setup_params_general['ssid_modes'].keys():
for i in range(len(setup_params_general['ssid_modes'][sec_modes])):
N = 3
rand_string = (''.join(random.choices(string.ascii_uppercase +
string.digits, k=N)))+str(int(time.time_ns())%10000)
setup_params_general['ssid_modes'][sec_modes][i]['ssid_name'] = setup_params_general['ssid_modes'][sec_modes][i]['ssid_name'] + "_"+ rand_string
@allure.suite(suite_name="interop sanity")
@allure.sub_suite(sub_suite_name="Nat Mode Client Connect : Suite-A")
@pytest.mark.InteropsuiteA
@allure.feature("NAT MODE CLIENT CONNECT")
@pytest.mark.parametrize(
'setup_profiles',
[setup_params_general],
indirect=True,
scope="class"
)
@pytest.mark.usefixtures("setup_profiles")
class TestNatModeConnectSuiteOne(object):
""" Client Connect SuiteA
pytest -m "client_connect and nat and InteropsuiteA"
"""
@allure.testcase(url="https://telecominfraproject.atlassian.net/browse/WIFI-4510", name="WIFI-4510")
@pytest.mark.fiveg
@pytest.mark.wpa2_personal
def test_ClientConnect_5g_WPA2_Personal_Nat(self, request, get_vif_state, get_ap_logs,
get_ToggleAirplaneMode_data, setup_perfectoMobile_android):
profile_data = setup_params_general["ssid_modes"]["wpa2_personal"][1]
ssidName = profile_data["ssid_name"]
ssidPassword = profile_data["security_key"]
print ("SSID_NAME: " + ssidName)
print ("SSID_PASS: " + ssidPassword)
get_vif_state.append(ssidName)
if ssidName not in get_vif_state:
allure.attach(name="retest,vif state ssid not available:", body=str(get_vif_state))
pytest.xfail("SSID NOT AVAILABLE IN VIF STATE")
report = setup_perfectoMobile_android[1]
driver = setup_perfectoMobile_android[0]
connData = get_ToggleAirplaneMode_data
# Set Wifi/AP Mode
ip, is_internet = get_ip_address_and(request, ssidName, ssidPassword, setup_perfectoMobile_android, connData)
if ip:
if is_internet:
text_body = ("connected to " + ssidName + " (" + ip + ") " + "with internet")
else:
text_body = ("connected to " + ssidName + " (" + ip + ") " + "without internet")
print(text_body)
allure.attach(name="Connection Status: ", body=str(text_body))
assert True
else:
allure.attach(name="Connection Status: ", body=str("Device is Unable to connect"))
assert False
@allure.testcase(url="https://telecominfraproject.atlassian.net/browse/WIFI-4509", name="WIFI-4509")
@pytest.mark.twog
@pytest.mark.wpa2_personal
def test_ClientConnect_2g_WPA2_Personal_Nat(self, request, get_vif_state, get_ap_logs,
get_ToggleAirplaneMode_data, setup_perfectoMobile_android):
profile_data = setup_params_general["ssid_modes"]["wpa2_personal"][0]
ssidName = profile_data["ssid_name"]
ssidPassword = profile_data["security_key"]
print ("SSID_NAME: " + ssidName)
print ("SSID_PASS: " + ssidPassword)
get_vif_state.append(ssidName)
if ssidName not in get_vif_state:
allure.attach(name="retest,vif state ssid not available:", body=str(get_vif_state))
pytest.xfail("SSID NOT AVAILABLE IN VIF STATE")
report = setup_perfectoMobile_android[1]
driver = setup_perfectoMobile_android[0]
connData = get_ToggleAirplaneMode_data
# Set Wifi/AP Mode
ip, is_internet = get_ip_address_and(request, ssidName, ssidPassword, setup_perfectoMobile_android, connData)
if ip:
if is_internet:
text_body = ("connected to " + ssidName + " (" + ip + ") " + "with internet")
else:
text_body = ("connected to " + ssidName + " (" + ip + ") " + "without internet")
print(text_body)
allure.attach(name="Connection Status: ", body=str(text_body))
assert True
else:
allure.attach(name="Connection Status: ", body=str("Device is Unable to connect"))
assert False
@allure.testcase(url="https://telecominfraproject.atlassian.net/browse/WIFI-4507", name="WIFI-4507")
@pytest.mark.fiveg
@pytest.mark.wpa
def test_ClientConnect_5g_WPA_Personal_Nat(self, request, get_vif_state, get_ap_logs,
get_ToggleAirplaneMode_data, setup_perfectoMobile_android):
profile_data = setup_params_general["ssid_modes"]["wpa"][1]
ssidName = profile_data["ssid_name"]
ssidPassword = profile_data["security_key"]
print ("SSID_NAME: " + ssidName)
print ("SSID_PASS: " + ssidPassword)
get_vif_state.append(ssidName)
if ssidName not in get_vif_state:
allure.attach(name="retest,vif state ssid not available:", body=str(get_vif_state))
pytest.xfail("SSID NOT AVAILABLE IN VIF STATE")
report = setup_perfectoMobile_android[1]
driver = setup_perfectoMobile_android[0]
connData = get_ToggleAirplaneMode_data
# Set Wifi/AP Mode
ip, is_internet = get_ip_address_and(request, ssidName, ssidPassword, setup_perfectoMobile_android, connData)
if ip:
if is_internet:
text_body = ("connected to " + ssidName + " (" + ip + ") " + "with internet")
else:
text_body = ("connected to " + ssidName + " (" + ip + ") " + "without internet")
print(text_body)
allure.attach(name="Connection Status: ", body=str(text_body))
assert True
else:
allure.attach(name="Connection Status: ", body=str("Device is Unable to connect"))
assert False
@allure.testcase(url="https://telecominfraproject.atlassian.net/browse/WIFI-4505", name="WIFI-4505")
@pytest.mark.twog
@pytest.mark.wpa
def test_ClientConnect_2g_WPA_Personal_Nat(self, request, get_vif_state, get_ap_logs,
get_ToggleAirplaneMode_data, setup_perfectoMobile_android):
profile_data = setup_params_general["ssid_modes"]["wpa"][0]
ssidName = profile_data["ssid_name"]
ssidPassword = profile_data["security_key"]
print ("SSID_NAME: " + ssidName)
print ("SSID_PASS: " + ssidPassword)
get_vif_state.append(ssidName)
if ssidName not in get_vif_state:
allure.attach(name="retest,vif state ssid not available:", body=str(get_vif_state))
pytest.xfail("SSID NOT AVAILABLE IN VIF STATE")
report = setup_perfectoMobile_android[1]
driver = setup_perfectoMobile_android[0]
connData = get_ToggleAirplaneMode_data
# Set Wifi/AP Mode
ip, is_internet = get_ip_address_and(request, ssidName, ssidPassword, setup_perfectoMobile_android, connData)
if ip:
if is_internet:
text_body = ("connected to " + ssidName + " (" + ip + ") " + "with internet")
else:
text_body = ("connected to " + ssidName + " (" + ip + ") " + "without internet")
print(text_body)
allure.attach(name="Connection Status: ", body=str(text_body))
assert True
else:
allure.attach(name="Connection Status: ", body=str("Device is Unable to connect"))
assert False
@allure.testcase(url="https://telecominfraproject.atlassian.net/browse/WIFI-4504", name="WIFI-4504")
@pytest.mark.fiveg
@pytest.mark.open
def test_ClientConnect_5g_Open_Nat(self, request, get_vif_state, get_ap_logs,
get_ToggleAirplaneMode_data, setup_perfectoMobile_android):
profile_data = setup_params_general["ssid_modes"]["open"][1]
ssidName = profile_data["ssid_name"]
ssidPassword = "[BLANK]"
print ("SSID_NAME: " + ssidName)
print ("SSID_PASS: " + ssidPassword)
get_vif_state.append(ssidName)
if ssidName not in get_vif_state:
allure.attach(name="retest,vif state ssid not available:", body=str(get_vif_state))
pytest.xfail("SSID NOT AVAILABLE IN VIF STATE")
report = setup_perfectoMobile_android[1]
driver = setup_perfectoMobile_android[0]
connData = get_ToggleAirplaneMode_data
#Set Wifi/AP Mode
ip, is_internet = get_ip_address_and(request, ssidName, ssidPassword, setup_perfectoMobile_android, connData)
if ip:
if is_internet:
text_body = ("connected to " + ssidName + " (" + ip + ") " + "with internet")
else:
text_body = ("connected to " + ssidName + " (" + ip + ") " + "without internet")
print(text_body)
allure.attach(name="Connection Status: ", body=str(text_body))
assert True
else:
allure.attach(name="Connection Status: ", body=str("Device is Unable to connect"))
assert False
@allure.testcase(url="https://telecominfraproject.atlassian.net/browse/WIFI-4503", name="WIFI-4503")
@pytest.mark.twog
@pytest.mark.open
def test_ClientConnect_2g_Open_Nat(self, request, get_vif_state, get_ap_logs,
get_ToggleAirplaneMode_data, setup_perfectoMobile_android):
profile_data = setup_params_general["ssid_modes"]["open"][0]
ssidName = profile_data["ssid_name"]
ssidPassword = "[BLANK]"
print ("SSID_NAME: " + ssidName)
print ("SSID_PASS: " + ssidPassword)
get_vif_state.append(ssidName)
if ssidName not in get_vif_state:
allure.attach(name="retest,vif state ssid not available:", body=str(get_vif_state))
pytest.xfail("SSID NOT AVAILABLE IN VIF STATE")
report = setup_perfectoMobile_android[1]
driver = setup_perfectoMobile_android[0]
connData = get_ToggleAirplaneMode_data
# Set Wifi/AP Mode
ip, is_internet = get_ip_address_and(request, ssidName, ssidPassword, setup_perfectoMobile_android, connData)
if ip:
if is_internet:
text_body = ("connected to " + ssidName + " (" + ip + ") " + "with internet")
else:
text_body = ("connected to " + ssidName + " (" + ip + ") " + "without internet")
print(text_body)
allure.attach(name="Connection Status: ", body=str(text_body))
assert True
else:
allure.attach(name="Connection Status: ", body=str("Device is Unable to connect"))
assert False
setup_params_general_two = {
"mode": "NAT",
"ssid_modes": {
"wpa3_personal": [
{"ssid_name": "ssid_wpa3_p_2g", "appliedRadios": ["2G"], "security_key": "something"},
{"ssid_name": "ssid_wpa3_p_5g", "appliedRadios": ["5G"],
"security_key": "something"}],
"wpa3_personal_mixed": [
{"ssid_name": "ssid_wpa3_p_m_2g", "appliedRadios": ["2G"], "security_key": "something"},
{"ssid_name": "ssid_wpa3_p_m_5g", "appliedRadios": ["5G"],
"security_key": "something"}],
"wpa_wpa2_personal_mixed": [
{"ssid_name": "ssid_wpa_wpa2_p_m_2g", "appliedRadios": ["2G"], "security_key": "something"},
{"ssid_name": "ssid_wpa_wpa2_p_m_5g", "appliedRadios": ["5G"],
"security_key": "something"}]
},
"rf": {},
"radius": False
}
for sec_modes in setup_params_general_two['ssid_modes'].keys():
for i in range(len(setup_params_general_two['ssid_modes'][sec_modes])):
N = 3
rand_string = (''.join(random.choices(string.ascii_uppercase +
string.digits, k=N)))+str(int(time.time_ns())%10000)
setup_params_general_two['ssid_modes'][sec_modes][i]['ssid_name'] = setup_params_general_two['ssid_modes'][sec_modes][i]['ssid_name'] + "_"+ rand_string
@allure.suite(suite_name="interop sanity")
@allure.sub_suite(sub_suite_name="NAT Mode Client Connect : Suite-B")
@pytest.mark.InteropsuiteB
@allure.feature("NAT MODE CLIENT CONNECT")
@pytest.mark.parametrize(
'setup_profiles',
[setup_params_general_two],
indirect=True,
scope="class"
)
@pytest.mark.usefixtures("setup_profiles")
class TestNatModeConnectSuiteTwo(object):
""" Client Connect SuiteB
pytest -m "client_connect and nat and InteropsuiteB"
"""
@allure.testcase(url="https://telecominfraproject.atlassian.net/browse/WIFI-4516", name="WIFI-4516")
@pytest.mark.fiveg
@pytest.mark.wpa3_personal
def test_ClientConnect_5g_wpa3_personal_Nat(self, request, get_vif_state, get_ap_logs,
get_ToggleAirplaneMode_data, setup_perfectoMobile_android):
profile_data = setup_params_general_two["ssid_modes"]["wpa3_personal"][1]
ssidName = profile_data["ssid_name"]
ssidPassword = profile_data["security_key"]
print ("SSID_NAME: " + ssidName)
print ("SSID_PASS: " + ssidPassword)
get_vif_state.append(ssidName)
if ssidName not in get_vif_state:
allure.attach(name="retest,vif state ssid not available:", body=str(get_vif_state))
pytest.xfail("SSID NOT AVAILABLE IN VIF STATE")
report = setup_perfectoMobile_android[1]
driver = setup_perfectoMobile_android[0]
connData = get_ToggleAirplaneMode_data
# Set Wifi/AP Mode
ip, is_internet = get_ip_address_and(request, ssidName, ssidPassword, setup_perfectoMobile_android, connData)
if ip:
if is_internet:
text_body = ("connected to " + ssidName + " (" + ip + ") " + "with internet")
else:
text_body = ("connected to " + ssidName + " (" + ip + ") " + "without internet")
print(text_body)
allure.attach(name="Connection Status: ", body=str(text_body))
assert True
else:
allure.attach(name="Connection Status: ", body=str("Device is Unable to connect"))
assert False
@allure.testcase(url="https://telecominfraproject.atlassian.net/browse/WIFI-4515", name="WIFI-4515")
@pytest.mark.twog
@pytest.mark.wpa3_personal
def test_ClientConnect_2g_wpa3_personal_Nat(self, request, get_vif_state, get_ap_logs,
get_ToggleAirplaneMode_data, setup_perfectoMobile_android):
profile_data = setup_params_general_two["ssid_modes"]["wpa3_personal"][0]
ssidName = profile_data["ssid_name"]
ssidPassword = profile_data["security_key"]
print ("SSID_NAME: " + ssidName)
print ("SSID_PASS: " + ssidPassword)
get_vif_state.append(ssidName)
if ssidName not in get_vif_state:
allure.attach(name="retest,vif state ssid not available:", body=str(get_vif_state))
pytest.xfail("SSID NOT AVAILABLE IN VIF STATE")
report = setup_perfectoMobile_android[1]
driver = setup_perfectoMobile_android[0]
connData = get_ToggleAirplaneMode_data
# Set Wifi/AP Mode
ip, is_internet = get_ip_address_and(request, ssidName, ssidPassword, setup_perfectoMobile_android, connData)
if ip:
if is_internet:
text_body = ("connected to " + ssidName + " (" + ip + ") " + "with internet")
else:
text_body = ("connected to " + ssidName + " (" + ip + ") " + "without internet")
print(text_body)
allure.attach(name="Connection Status: ", body=str(text_body))
assert True
else:
allure.attach(name="Connection Status: ", body=str("Device is Unable to connect"))
assert False
@allure.testcase(url="https://telecominfraproject.atlassian.net/browse/WIFI-4521", name="WIFI-4521")
@pytest.mark.fiveg
@pytest.mark.wpa3_personal_mixed
def test_ClientConnect_5g_wpa3_personal_mixed_Nat(self, request, get_vif_state, get_ap_logs,
get_ToggleAirplaneMode_data, setup_perfectoMobile_android):
profile_data = setup_params_general_two["ssid_modes"]["wpa3_personal_mixed"][1]
ssidName = profile_data["ssid_name"]
ssidPassword = profile_data["security_key"]
print ("SSID_NAME: " + ssidName)
print ("SSID_PASS: " + ssidPassword)
get_vif_state.append(ssidName)
if ssidName not in get_vif_state:
allure.attach(name="retest,vif state ssid not available:", body=str(get_vif_state))
pytest.xfail("SSID NOT AVAILABLE IN VIF STATE")
report = setup_perfectoMobile_android[1]
driver = setup_perfectoMobile_android[0]
connData = get_ToggleAirplaneMode_data
# Set Wifi/AP Mode
ip, is_internet = get_ip_address_and(request, ssidName, ssidPassword, setup_perfectoMobile_android, connData)
if ip:
if is_internet:
text_body = ("connected to " + ssidName + " (" + ip + ") " + "with internet")
else:
text_body = ("connected to " + ssidName + " (" + ip + ") " + "without internet")
print(text_body)
allure.attach(name="Connection Status: ", body=str(text_body))
assert True
else:
allure.attach(name="Connection Status: ", body=str("Device is Unable to connect"))
assert False
@allure.testcase(url="https://telecominfraproject.atlassian.net/browse/WIFI-4519", name="WIFI-4519")
@pytest.mark.twog
@pytest.mark.wpa3_personal_mixed
def test_ClientConnect_2g_wpa3_personal_mixed_Nat(self, request, get_vif_state, get_ap_logs,
get_ToggleAirplaneMode_data, setup_perfectoMobile_android):
profile_data = setup_params_general_two["ssid_modes"]["wpa3_personal_mixed"][0]
ssidName = profile_data["ssid_name"]
ssidPassword = profile_data["security_key"]
print ("SSID_NAME: " + ssidName)
print ("SSID_PASS: " + ssidPassword)
get_vif_state.append(ssidName)
if ssidName not in get_vif_state:
allure.attach(name="retest,vif state ssid not available:", body=str(get_vif_state))
pytest.xfail("SSID NOT AVAILABLE IN VIF STATE")
report = setup_perfectoMobile_android[1]
driver = setup_perfectoMobile_android[0]
connData = get_ToggleAirplaneMode_data
# Set Wifi/AP Mode
ip, is_internet = get_ip_address_and(request, ssidName, ssidPassword, setup_perfectoMobile_android, connData)
if ip:
if is_internet:
text_body = ("connected to " + ssidName + " (" + ip + ") " + "with internet")
else:
text_body = ("connected to " + ssidName + " (" + ip + ") " + "without internet")
print(text_body)
allure.attach(name="Connection Status: ", body=str(text_body))
assert True
else:
allure.attach(name="Connection Status: ", body=str("Device is Unable to connect"))
assert False
@allure.testcase(url="https://telecominfraproject.atlassian.net/browse/WIFI-4524", name="WIFI-4524")
@pytest.mark.fiveg
@pytest.mark.wpa_wpa2_personal_mixed
def test_ClientConnect_5g_wpa_wpa2_personal_mixed_Nat(self, request, get_vif_state, get_ap_logs,
get_ToggleAirplaneMode_data, setup_perfectoMobile_android):
profile_data = setup_params_general_two["ssid_modes"]["wpa_wpa2_personal_mixed"][1]
ssidName = profile_data["ssid_name"]
ssidPassword = profile_data["security_key"]
print ("SSID_NAME: " + ssidName)
print ("SSID_PASS: " + ssidPassword)
get_vif_state.append(ssidName)
if ssidName not in get_vif_state:
allure.attach(name="retest,vif state ssid not available:", body=str(get_vif_state))
pytest.xfail("SSID NOT AVAILABLE IN VIF STATE")
report = setup_perfectoMobile_android[1]
driver = setup_perfectoMobile_android[0]
connData = get_ToggleAirplaneMode_data
#Set Wifi/AP Mode
ip, is_internet = get_ip_address_and(request, ssidName, ssidPassword, setup_perfectoMobile_android, connData)
if ip:
if is_internet:
text_body = ("connected to " + ssidName + " (" + ip + ") " + "with internet")
else:
text_body = ("connected to " + ssidName + " (" + ip + ") " + "without internet")
print(text_body)
allure.attach(name="Connection Status: ", body=str(text_body))
assert True
else:
allure.attach(name="Connection Status: ", body=str("Device is Unable to connect"))
assert False
@allure.testcase(url="https://telecominfraproject.atlassian.net/browse/WIFI-4523", name="WIFI-4523")
@pytest.mark.twog
@pytest.mark.wpa_wpa2_personal_mixed
def test_ClientConnect_2g_wpa_wpa2_personal_mixed_Nat(self, request, get_vif_state, get_ap_logs,
get_ToggleAirplaneMode_data, setup_perfectoMobile_android):
profile_data = setup_params_general_two["ssid_modes"]["wpa_wpa2_personal_mixed"][0]
ssidName = profile_data["ssid_name"]
ssidPassword = profile_data["security_key"]
print ("SSID_NAME: " + ssidName)
print ("SSID_PASS: " + ssidPassword)
get_vif_state.append(ssidName)
if ssidName not in get_vif_state:
allure.attach(name="retest,vif state ssid not available:", body=str(get_vif_state))
pytest.xfail("SSID NOT AVAILABLE IN VIF STATE")
report = setup_perfectoMobile_android[1]
driver = setup_perfectoMobile_android[0]
connData = get_ToggleAirplaneMode_data
# Set Wifi/AP Mode
ip, is_internet = get_ip_address_and(request, ssidName, ssidPassword, setup_perfectoMobile_android, connData)
if ip:
if is_internet:
text_body = ("connected to " + ssidName + " (" + ip + ") " + "with internet")
else:
text_body = ("connected to " + ssidName + " (" + ip + ") " + "without internet")
print(text_body)
allure.attach(name="Connection Status: ", body=str(text_body))
assert True
else:
allure.attach(name="Connection Status: ", body=str("Device is Unable to connect"))
assert False
|
py | 1a43de95a48a19b0a3d69f84e6fa7938019ee06a | #!/usr/bin/env python
import sys, time, re
from splunklib.searchcommands import \
dispatch, EventingCommand, Configuration, Option, validators
from libtf.logparsers import TFAuthLog, TFHttpLog, TFGenericLog, TFException  # TFException assumed to ship alongside the parsers; it is raised below
import ConfigParser
import os
import StringIO
import subprocess
@Configuration()
class ReaperCommand(EventingCommand):
""" Filters out noise from Splunk queries by leveraging the Threshing Floor
API.
##Syntax
.. code-block::
reaper logtype=<http, auth, generic> <port=<int>:<'udp|tcp'>>
##Description
The :code:`reaper` command filters network security noise from HTTP logs,
ssh access logs, and generic log files.
"""
BASE_URI = "https://api.threshingfloor.io"
API_KEY = ""
logtype = Option(
doc='''**Syntax:** **type'=***<event-type>*
**Description:** The type of events you wish to reduce. Can be `http`, `auth`, or `generic`.''',
name='type',
validate=validators.Set('http', 'auth', 'generic'))
ports = Option()
def get_config(self, conf_file_name, section):
env = dict()
env.update(os.environ)
splunk_home = env.get('SPLUNK_HOME', '/Applications/Splunk')
btool = os.path.join(splunk_home, "bin", "btool")
tmp = subprocess.Popen([btool, conf_file_name, "list"],
stdout=subprocess.PIPE, env=env)
(output, error) = tmp.communicate()
f = StringIO.StringIO()
f.write(output)
f.seek(0)
cfgparse = ConfigParser.RawConfigParser()
cfgparse.readfp(f)
cfg = dict()
for opt in cfgparse.options(section):
cfg[opt] = cfgparse.get(section, opt)
return cfg
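# NB: the values read in __init__ below come from a custom conf file, e.g. an
# (illustrative) threshingfloor.conf stanza:
#   [api-config]
#   base_uri = https://api.threshingfloor.io
#   api_key = <your key>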
def transform(self, events):
# We have like, 3 copies of the events which is not optimal
dictEvent = []
rawEvents = []
# Save off the events so they can be parsed by the library
for event in events:
dictEvent.append(event)
rawEvents.append(event['_raw'])
# Set to generic mode if ports are present and no type is specified
if self.logtype == None and self.ports != None:
self.logtype = 'generic'
else:
self.logtype = self.guessType(rawEvents)
# Raise an error if generic mode was requested without a port list
if self.logtype == 'generic' and self.ports is None:
raise Exception("Generic mode requires the port option.")
# Get the ports of we have them
if self.ports:
ports = self.ports.split(";")
# Initialize the correct log type
if self.logtype == 'auth':
analyzed = TFAuthLog(rawEvents, self.API_KEY, self.BASE_URI)
elif self.logtype == 'http':
analyzed = TFHttpLog(rawEvents, self.API_KEY, self.BASE_URI)
elif self.logtype == 'generic':
analyzed = TFGenericLog(rawEvents, ports, self.API_KEY, self.BASE_URI)
else:
raise TFException("Failed to parse the query.")
reduced = analyzed.reduce()
reducedItem = reduced.next()
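# Walk the original events in order and yield only those whose raw text is still
# present in the reduced stream; both sides preserve the input ordering.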
for i in range(0, len(dictEvent)):
if dictEvent[i]['_raw'] == reducedItem:
yield dictEvent[i]
reducedItem = reduced.next()
return
def guessType(self, logfile, baseName=None):
REGEX_HTTP = "^\[(?P<timestamp>.+)?\]\s\"(?P<request>.+?)\"\s(?P<responseCode>\d+)\s(?P<size>\d+)(?P<combinedFields>.*)"
# Read a line roughly 10 entries into the log, then try to identify its format with a regular expression
logline = logfile[min(10, len(logfile)-1)]
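# Illustrative samples (not from the original input): a combined-format HTTP line such as
#   1.2.3.4 - frank [10/Oct/2000:13:55:36 -0700] "GET /index.html HTTP/1.0" 200 2326
# matches REGEX_HTTP once the first three fields are stripped below, while an auth-log
# line such as "Oct 10 13:55:36 host sshd[1234]: Accepted publickey for root" parses as
# a timestamp in the auth branch.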
try:
# See if it's http
splitLine = logline.split()
m = re.search(REGEX_HTTP, " ".join(splitLine[3:]))
if m:
return 'http'
# See if it's auth
try:
# Try and make a timestamp from the beginning of the line
if int(time.mktime(time.strptime(" ".join(splitLine[0:3]) + " " + "2017", "%b %d %H:%M:%S %Y"))) > 0:
return 'auth'
except Exception as e:
pass
# If we haven't returned by now, we can't figure out the type
raise TFException("Unable to automatically identify the log type. Please specify a type with the -t flag.")
except IOError as e:
exit()
def __init__(self):
EventingCommand.__init__(self)
conf = self.get_config('threshingfloor', 'api-config')
self.BASE_URI = conf.get('base_uri', None)
self.API_KEY = conf.get('api_key', None)
dispatch(ReaperCommand, sys.argv, sys.stdin, sys.stdout, __name__)
|
py | 1a43df1be06741a12fc10e5d3b854ad6d00384d3 | #!/usr/bin/env python
"""Process that loads the datastore"""
__author__ = 'Michael Meisinger, Thomas Lennan'
"""
Possible Features
- load objects into different datastores
- load from a directory of YML files in ion-definitions
- load from a ZIP of YMLs
- load an additional directory (not under GIT control)
- change timestamp for resources
- load a subset of objects by type, etc
"""
from pyon.public import CFG, log, ImmediateProcess, iex
from pyon.datastore import datastore_admin
from pyon.core import bootstrap
from pyon.core.bootstrap import get_sys_name
class DatastoreAdmin(ImmediateProcess):
"""
bin/pycc -x ion.process.bootstrap.datastore_loader.DatastoreLoader op=clear prefix=ion
bin/pycc -x ion.process.bootstrap.datastore_loader.DatastoreLoader op=dump path=res/preload/local/my_dump
bin/pycc -fc -x ion.process.bootstrap.datastore_loader.DatastoreLoader op=load path=res/preload/local/my_dump
bin/pycc -x ion.process.bootstrap.datastore_loader.DatastoreLoader op=dumpres
"""
def on_init(self):
pass
def on_start(self):
# print env temporarily to debug cei
import os
log.info('ENV vars: %s' % str(os.environ))
op = self.CFG.get("op", None)
datastore = self.CFG.get("datastore", None)
path = self.CFG.get("path", None)
prefix = self.CFG.get("prefix", get_sys_name()).lower()
log.info("DatastoreLoader: {op=%s, datastore=%s, path=%s, prefix=%s}" % (op, datastore, path, prefix))
self.da = datastore_admin.DatastoreAdmin()
if op:
if op == "load":
self.da.load_datastore(path, datastore, ignore_errors=False)
elif op == "dump":
self.da.dump_datastore(path, datastore)
elif op == "dumpres":
from ion.util.datastore.resources import ResourceRegistryHelper
rrh = ResourceRegistryHelper()
rrh.dump_resources_as_xlsx(path)
elif op == "blame":
# TODO make generic
self.da.get_blame_objects()
elif op == "clear":
self.da.clear_datastore(datastore, prefix)
else:
raise iex.BadRequest("Operation unknown")
else:
raise iex.BadRequest("No operation specified")
def on_quit(self):
pass
DatastoreLoader = DatastoreAdmin
|
py | 1a43e26a57dfc88070065df34058ae070880ea2e | import time
import numpy as np
from scipy import optimize
from statsmodels.distributions.empirical_distribution import ECDF
from numba import njit, prange, double, int64, boolean
# plotting
import matplotlib.pyplot as plt
plt.style.use('seaborn-whitegrid')
prop_cycle = plt.rcParams['axes.prop_cycle']
colors = prop_cycle.by_key()['color']
# consav
from consav.misc import nonlinspace
from consav.misc import normal_gauss_hermite
from consav import linear_interp
from consav import ModelClass # only used with numba
##########################
# 1. pure Python version #
##########################
class ConsumptionSavingModelClass():
#########
# setup #
#########
def __init__(self,name='baseline',solmethod='EGM',**kwargs):
""" setup model sub-classes and parameters in .par """
# a. set baseline parameters
self.name = name
self.solmethod = solmethod
# parameters and grids
class ParClass: None
self.par = ParClass()
# solution
class SolClass: None
self.sol = SolClass()
# simulation
class SimClass: None
self.sim = SimClass()
self.setup()
# b. update parameters
for key,val in kwargs.items():
setattr(self.par,key,val) # like par.key = val
def setup(self):
""" baseline parameters in .par """
par = self.par
# a. demographics
par.T = 200
par.TR = par.T # retirement age (end-of-period), no retirement if TR = T
par.age_min = 25 # only relevant for figures
# b. preferences
par.rho = 2
par.beta = 0.96
# c. income parameters
# growth
par.G = 1.02
# standard deviations
par.sigma_xi = 0.1
par.sigma_psi = 0.1
# low income shock
par.low_p = 0.005 # called pi in slides
par.low_val = 0.0 # called mu in slides
# life-cycle
par.L = np.ones(par.T) # if ones then no life-cycle
# d. saving and borrowing
par.R = 1.04
par.borrowingfac = 0.0
# e. numerical integration and grids
par.a_max = 20.0 # maximum point in grid for a
par.a_phi = 1.1 # curvature parameters
par.m_max = 20.0 # maximum point in grid for m
par.m_phi = 1.1 # curvature parameters
# number of elements
par.Nxi = 8 # number of quadrature points for xi
par.Npsi = 8 # number of quadrature points for psi
par.Na = 500 # number of points in grid for a
par.Nm = 100 # number of points in grid for m
# f. simulation
par.sim_mini = 2.5 # initial m in simulation
par.simN = 100_000 # number of persons in simulation
par.simT = 100 # number of periods in simulation
par.simlifecycle = 0 # = 0 simulate infinite horizon model
def create_grids(self):
""" create grids and other preperations for solving the model"""
par = self.par
# a. perfect foresight or buffer-stock model
if par.sigma_xi == 0 and par.sigma_psi == 0 and par.low_p == 0: # no risk
self.model = 'pf' # perfect foresight
else:
self.model = 'bs' # buffer-stock
# b. shocks
# i. basic Gauss-Hermite
psi, psi_w = normal_gauss_hermite(sigma=par.sigma_psi,n=par.Npsi)
xi, xi_w = normal_gauss_hermite(sigma=par.sigma_xi,n=par.Nxi)
# ii. add low income shock to xi
if par.low_p > 0:
# a. weights
xi_w *= (1.0-par.low_p)
xi_w = np.insert(xi_w,0,par.low_p)
# b. values
xi = (xi-par.low_val*par.low_p)/(1.0-par.low_p)
xi = np.insert(xi,0,par.low_val)
# iii. vectorize tensor product of shocks and total weight
psi_vec,xi_vec = np.meshgrid(psi,xi,indexing='ij')
psi_w_vec,xi_w_vec = np.meshgrid(psi_w,xi_w,indexing='ij')
par.psi_vec = psi_vec.ravel()
par.xi_vec = xi_vec.ravel()
par.w = xi_w_vec.ravel()*psi_w_vec.ravel()
assert abs(1-np.sum(par.w)) < 1e-8 # weights sum to one
# iv. count number of shock nodes
par.Nshocks = par.w.size
# c. minimum a
if par.borrowingfac == 0:
par.a_min = np.zeros(par.T) # never any borrowing
else:
# using formula from slides
psi_min = np.min(par.psi_vec)
xi_min = np.min(par.xi_vec)
par.a_min = np.nan*np.ones(par.T)
for t in reversed(range(par.T-1)):
if t >= par.TR-1: # in retirement
Omega = 0
elif t == par.TR-2: # next period is retirement
Omega = par.R**(-1)*par.G*par.L[t+1]*psi_min*xi_min
else: # before retirement
Omega = par.R**(-1)*(np.fmin(Omega,par.borrowingfac)+xi_min)*par.G*par.L[t+1]*psi_min
par.a_min[t] = -np.fmin(Omega,par.borrowingfac)*par.G*par.L[t+1]*psi_min
# d. end-of-period assets and cash-on-hand
par.grid_a = np.nan*np.ones((par.T,par.Na))
par.grid_m = np.nan*np.ones((par.T,par.Nm))
for t in range(par.T):
par.grid_a[t,:] = nonlinspace(par.a_min[t]+1e-6,par.a_max,par.Na,par.a_phi)
par.grid_m[t,:] = nonlinspace(par.a_min[t]+1e-6,par.m_max,par.Nm,par.m_phi)
# e. conditions
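# these are the standard buffer-stock (Carroll-style) conditions:
#   FHW = finite human wealth factor, G/R
#   AI  = absolute impatience factor, (R*beta)^(1/rho)
#   GI  = growth impatience factor, adjusted for permanent shocks
#   RI  = return impatience factor, AI/R
#   WRI = weak return impatience factor (accounts for the low-income shock probability)
#   FVA = finite value of autarky condition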
par.FHW = par.G/par.R
par.AI = (par.R*par.beta)**(1/par.rho)
par.GI = par.AI*np.sum(par.w*par.psi_vec**(-1))/par.G
par.RI = par.AI/par.R
par.WRI = par.low_p**(1/par.rho)*par.AI/par.R
par.FVA = par.beta*np.sum(par.w*(par.G*par.psi_vec)**(1-par.rho))
# f. fast solution with EGM
# grid_a tiled with the number of shocks
par.grid_a_tile = np.ones((par.TR,par.Na*par.Nshocks))
for t in range(par.TR):
par.grid_a_tile[t,:] = np.tile(par.grid_a[t,:],par.Nshocks)
# xi, psi and w repeated with the number of grid points for a
par.xi_vec_rep = np.repeat(par.xi_vec,par.Na)
par.psi_vec_rep = np.repeat(par.psi_vec,par.Na)
par.w_rep = np.repeat(par.w,par.Na)
# g. check for existence of solution
self.print_and_check_parameters(do_print=False)
def print_and_check_parameters(self,do_print=True):
""" print and check parameters """
par = self.par
if do_print:
print(f'FHW = {par.FHW:.3f}, AI = {par.AI:.3f}, GI = {par.GI:.3f}, RI = {par.RI:.3f}, WRI = {par.WRI:.3f}, FVA = {par.FVA:.3f}')
# check for existence of solution
if self.model == 'pf' and par.GI >= 1 and par.RI >= 1:
raise Exception('GI >= 1 and RI >= 1: no solution')
if self.model == 'bs' and (par.FVA >= 1 or par.WRI >= 1):
raise Exception('FVA >= 1 or WRI >= 1: no solution')
def utility(self,c):
""" utility function """
return c**(1-self.par.rho)/(1-self.par.rho)
def marg_utility(self,c):
""" marginal utility function """
return c**(-self.par.rho)
def inv_marg_utility(self,u):
""" inverse marginal utility funciton """
return u**(-1/self.par.rho)
#########
# solve #
#########
def solve(self,do_print=True):
""" gateway for solving the model """
# a. create (or re-create) grids
self.create_grids()
# b. solve
if self.solmethod in ['EGM','EGMvec']:
self.solve_EGM(do_print=do_print)
elif self.solmethod == 'VFI':
self.solve_VFI(do_print=do_print)
else:
raise Exception(f'{self.solmethod} is an unknown solution method')
def solve_EGM(self,do_print):
""" solve model using EGM """
t0 = time.time()
par = self.par
sol = self.sol
# a. allocate
sol.m = np.zeros((par.T,par.Na+1))
sol.c = np.zeros((par.T,par.Na+1))
sol.inv_v = np.zeros((par.T,par.Na+1))
# working memory
m = np.zeros(par.Na)
c = np.zeros(par.Na)
inv_v = np.zeros(par.Na)
# b. last period (= consume all)
sol.m[-1,:] = np.linspace(0,par.a_max,par.Na+1)
sol.c[-1,:] = sol.m[-1,:]
sol.inv_v[-1,0] = 0
sol.inv_v[-1,1:] = 1.0/self.utility(sol.c[-1,1:])
# c. before last period
for t in reversed(range(par.T-1)):
# i. solve by EGM
if self.solmethod == 'EGM':
self.EGM(t,m,c,inv_v)
elif self.solmethod == 'EGMvec':
self.EGMvec(t,m,c,inv_v)
# ii. add zero consumption
sol.m[t,0] = par.a_min[t]
sol.m[t,1:] = m
sol.c[t,0] = 0
sol.c[t,1:] = c
sol.inv_v[t,0] = 0
sol.inv_v[t,1:] = inv_v
if do_print:
print(f'model solved in {time.time()-t0:.1f} secs')
def EGM(self,t,m,c,inv_v):
""" EGM with partly vectorized code """
par = self.par
sol = self.sol
# loop over end-of-period assets
for i_a in range(par.Na):
# a. prep
a = par.grid_a[t,i_a]
if t+1 <= par.TR-1: # still working in next-period
fac = par.G*par.L[t]*par.psi_vec
w = par.w
xi = par.xi_vec
else:
fac = par.G*par.L[t]
w = 1
xi = 1
inv_fac = 1.0/fac
# b. future m and c (vectors)
m_plus = inv_fac*par.R*a + xi
c_plus = np.zeros(m_plus.size)
linear_interp.interp_1d_vec(sol.m[t+1,:],sol.c[t+1,:],m_plus,c_plus)
inv_v_plus = np.zeros(m_plus.size)
linear_interp.interp_1d_vec(sol.m[t+1,:],sol.inv_v[t+1,:],m_plus,inv_v_plus)
v_plus = 1.0/inv_v_plus
# c. average future marginal utility (number)
marg_u_plus = self.marg_utility(fac*c_plus)
avg_marg_u_plus = np.sum(w*marg_u_plus)
avg_v_plus = np.sum(w*(fac**(1-par.rho))*v_plus)
# d. current c
c[i_a] = self.inv_marg_utility(par.beta*par.R*avg_marg_u_plus)
# e. current m
m[i_a] = a + c[i_a]
# f. current v
if c[i_a] > 0:
inv_v[i_a] = 1.0/(self.utility(c[i_a]) + par.beta*avg_v_plus)
else:
inv_v[i_a] = 0
def EGMvec(self,t,m,c,inv_v):
""" EGM with fully vectorized code """
par = self.par
sol = self.sol
# a. prep
if t+1 <= par.TR-1: # still working in next-period
a = par.grid_a_tile[t,:]
fac = par.G*par.L[t]*par.psi_vec_rep
w = par.w_rep
xi = par.xi_vec_rep
Nshocks = par.Nshocks
else:
a = par.grid_a
fac = par.G*par.L[t]
w = 1
xi = 1
Nshocks = par.Nshocks
inv_fac = 1.0/fac
# b. future m and c
m_plus = inv_fac*par.R*a + xi
c_plus = np.zeros(m_plus.size)
linear_interp.interp_1d_vec(sol.m[t+1,:],sol.c[t+1,:],m_plus,c_plus)
inv_v_plus = np.zeros(m_plus.size)
linear_interp.interp_1d_vec(sol.m[t+1,:],sol.inv_v[t+1,:],m_plus,inv_v_plus)
v_plus = 1.0/inv_v_plus
# c. average future marginal utility
marg_u_plus = self.marg_utility(fac*c_plus)
avg_marg_u_plus = np.sum( (w*marg_u_plus).reshape((Nshocks,par.Na) ),axis=0)
avg_v_plus = np.sum( (w*(fac**(1-par.rho))*v_plus).reshape((Nshocks,par.Na) ),axis=0)
# d. current c
c[:] = self.inv_marg_utility(par.beta*par.R*avg_marg_u_plus)
# e. current m
m[:] = par.grid_a[t,:] + c
# f. current v
I = c > 0
inv_v[I] = 1.0/(self.utility(c[I]) + par.beta*avg_v_plus[I])
inv_v[~I] = 0.0
def solve_VFI(self,do_print):
""" solve model with VFI """
t0 = time.time()
par = self.par
sol = self.sol
# a. allocate solution
sol.m = np.nan*np.ones((par.T,par.Nm))
sol.c = np.nan*np.ones((par.T,par.Nm))
sol.inv_v = np.nan*np.ones((par.T,par.Nm))
# b. last period (= consume all)
sol.m[-1,:] = par.grid_m[-1,:]
sol.c[-1,:] = sol.m[-1,:]
sol.inv_v[-1,:] = 1.0/self.utility(sol.c[-1,:])
# c. before last period
for t in reversed(range(par.T-1)):
for i_m in range(par.Nm):
m = par.grid_m[t,i_m]
result = optimize.minimize_scalar(
lambda c: self.value_of_choice(c,t,m),method='bounded',
bounds=(0,m))
sol.c[t,i_m] = result.x
sol.inv_v[t,i_m]= -1/result.fun
# save grid for m
sol.m[t,:] = par.grid_m[t,:]
if do_print:
print(f'model solved in {time.time()-t0:.1f} secs')
def value_of_choice(self,c,t,m):
""" value of choice of c used in VFI """
par = self.par
sol = self.sol
# a. end-of-period assets
a = m-c
# b. next-period cash-on-hand
if t+1 <= par.TR-1: # still working in next-period
fac = par.G*par.L[t]*par.psi_vec
w = par.w
xi = par.xi_vec
else:
fac = par.G*par.L[t]
w = 1
xi = 1
m_plus = (par.R/fac)*a + xi
# c. continuation value
inv_v_plus = np.zeros(m_plus.size)
linear_interp.interp_1d_vec(sol.m[t+1,:],sol.inv_v[t+1,:],m_plus,inv_v_plus)
v_plus = 1/inv_v_plus
# d. value-of-choice
total = self.utility(c) + par.beta*np.sum(w*fac**(1-par.rho)*v_plus)
return -total
############
# simulate #
############
def simulate(self,seed=2017, do_print = True):
""" simulate the model """
np.random.seed(seed)
par = self.par
sim = self.sim
t0 = time.time()
# a. allocate
sim.m = np.nan*np.zeros((par.simN,par.simT))
sim.c = np.nan*np.zeros((par.simN,par.simT))
sim.a = np.nan*np.zeros((par.simN,par.simT))
sim.p = np.nan*np.zeros((par.simN,par.simT))
sim.y = np.nan*np.zeros((par.simN,par.simT))
# b. shocks
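# each person/period draws a quadrature-node index with probability equal to its weight,
# so psi and xi are i.i.d. draws from the discretized shock distribution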
_shocki = np.random.choice(par.Nshocks,size=(par.simN,par.simT),p=par.w)
sim.psi = par.psi_vec[_shocki]
sim.xi = par.xi_vec[_shocki]
# c. initial values
sim.m[:,0] = par.sim_mini
sim.p[:,0] = 0.0
# d. simulation
self.simulate_timeloop()
# e. renormalize from the normalized model back to levels
sim.P = np.exp(sim.p)
sim.Y = np.exp(sim.y)
sim.M = sim.m*sim.P
sim.C = sim.c*sim.P
sim.A = sim.a*sim.P
if do_print:
print(f'model simulated in {time.time()-t0:.1f} secs')
def simulate_timeloop(self):
""" simulate model with loop over time """
par = self.par
sol = self.sol
sim = self.sim
# loop over time
for t in range(par.simT):
# a. solution
if par.simlifecycle == 0:
grid_m = sol.m[0,:]
grid_c = sol.c[0,:]
else:
grid_m = sol.m[t,:]
grid_c = sol.c[t,:]
# b. consumption
linear_interp.interp_1d_vec(grid_m,grid_c,sim.m[:,t],sim.c[:,t])
sim.a[:,t] = sim.m[:,t] - sim.c[:,t]
# c. next-period states
if t < par.simT-1:
if t+1 > par.TR-1:
sim.m[:,t+1] = par.R*sim.a[:,t] / (par.G*par.L[t]) + 1
sim.p[:,t+1] = np.log(par.G) + np.log(par.L[t]) + sim.p[:,t]
sim.y[:,t+1] = sim.p[:,t+1]
else:
sim.m[:,t+1] = par.R*sim.a[:,t] / (par.G*par.L[t]*sim.psi[:,t+1]) + sim.xi[:,t+1]
sim.p[:,t+1] = np.log(par.G) + np.log(par.L[t]) + sim.p[:,t] + np.log(sim.psi[:,t+1])
I = sim.xi[:,t+1] > 0
sim.y[I,t+1] = sim.p[I,t+1] + np.log(sim.xi[I,t+1])
##################
# solution plots #
##################
def plot_value_function_convergence(self):
par = self.par
sol = self.sol
fig = plt.figure(figsize=(6,4),dpi=100)
ax = fig.add_subplot(1,1,1)
for t in [par.T-1, par.T-2, par.T-6, par.T-11, 100, 50, 0]:
if t > par.T-1 or t < 0: continue
ax.plot(sol.m[t,:],-sol.inv_v[t,:],label=f'$n = {par.T-t}$')
# limits
ax.set_xlim([np.min(par.a_min), 5])
ax.set_ylim([0, 1])
# layout
bbox = {'boxstyle':'square','ec':'white','fc':'white'}
ax.text(1.5,0.4,f'$\\beta = {par.beta:.2f}$, $R = {par.R:.2f}$, $G = {par.G:.2f}$',bbox=bbox)
ax.set_xlabel('$m_t$')
ax.set_ylabel('$-1.0/v_t(m_t)$')
ax.legend(loc='upper right',frameon=True)
fig.savefig(f'figs/val_converge_{self.name}.pdf')
def plot_consumption_function_convergence(self):
par = self.par
sol = self.sol
fig = plt.figure(figsize=(6,4),dpi=100)
ax = fig.add_subplot(1,1,1)
for t in [par.T-1, par.T-2, par.T-6, par.T-11, 100, 50, 0]:
if t > par.T-1 or t < 0: continue
ax.plot(sol.m[t,:],sol.c[t,:],label=f'$n = {par.T-t}$')
# limits
ax.set_xlim([np.min(par.a_min), 5])
ax.set_ylim([0, 5])
# layout
bbox = {'boxstyle':'square','ec':'white','fc':'white'}
ax.text(1.5,0.5,f'$\\beta = {par.beta:.2f}$, $R = {par.R:.2f}$, $G = {par.G:.2f}$',bbox=bbox)
ax.set_xlabel('$m_t$')
ax.set_ylabel('$c(m_t)$')
ax.legend(loc='upper left',frameon=True)
fig.savefig(f'figs/cons_converge_{self.name}.pdf')
def plot_consumption_function_convergence_age(self):
par = self.par
sol = self.sol
fig = plt.figure(figsize=(6,4),dpi=100)
ax = fig.add_subplot(1,1,1)
# consumption function for various ages
for age in [25, 35, 45, 55, 65, 75, par.T+par.age_min-2, par.T+par.age_min-1]:
ax.plot(sol.m[age-par.age_min],sol.c[age-par.age_min],label=f'age = {age}')
# limits
ax.set_xlim([min(par.a_min), 5])
ax.set_ylim([0, 5])
# layout
bbox = {'boxstyle':'square','ec':'white','fc':'white'}
ax.text(1.5,0.5,f'$\\beta = {par.beta:.2f}$, $R = {par.R:.2f}$, $G = {par.G:.2f}$',bbox=bbox)
ax.set_xlabel('$m_t$')
ax.set_ylabel('$c(m_t)$')
ax.legend(loc='upper left',frameon=True)
fig.savefig(f'figs/cons_converge_age_{self.name}.pdf')
def plot_consumption_function_pf(self):
par = self.par
sol = self.sol
fig = plt.figure(figsize=(6,4),dpi=100)
ax = fig.add_subplot(1,1,1)
# perfect foresight consumption
c_pf = (1-par.RI)*(sol.m[0,:]+(1-par.FHW)**(-1)-1)
# consumption function deviation from perfect foresight
ax.plot(sol.m[0,:],sol.c[0,:]-c_pf,'-',lw=1.5)
# limits
ax.set_xlim([1, 500])
ylim_now = ax.get_ylim()
if np.max(np.abs(ylim_now)) < 1e-4:
ax.set_ylim([-1,1])
# layout
ax.set_xlabel('$m_t$')
ax.set_ylabel('$c(m_t) - c^{PF}(m_t)$')
fig.savefig(f'figs/cons_converge_pf_{self.name}.pdf')
def plot_buffer_stock_target(self):
par = self.par
sol = self.sol
# a. find a and avg. m_plus and c_plus
# allocate
a = np.nan*np.ones(par.Na+1)
m_plus = np.nan*np.ones(par.Na+1)
C_plus = np.nan*np.ones(par.Na+1)
delta_log_C_plus = np.nan*np.ones(par.Na+1)
delta_log_C_plus_approx_2 = np.nan*np.ones(par.Na+1)
fac = 1.0/(par.G*par.psi_vec)
for i_a in range(par.Na+1):
# a. a and m
a[i_a] = sol.m[0,i_a]-sol.c[0,i_a]
m_plus[i_a] = np.sum(par.w*(fac*par.R*a[i_a] + par.xi_vec))
# b. C_plus
m_plus_vec = fac*par.R*a[i_a] + par.xi_vec
c_plus_vec = np.zeros(m_plus_vec.size)
linear_interp.interp_1d_vec(sol.m[0,:],sol.c[0,:],m_plus_vec,c_plus_vec)
C_plus_vec = par.G*par.psi_vec*c_plus_vec
C_plus[i_a] = np.sum(par.w*C_plus_vec)
# c. approx
if self.model == 'bs' and sol.c[0,i_a] > 0:
delta_log_C_plus[i_a] = np.sum(par.w*(np.log(par.G*C_plus_vec)))-np.log(sol.c[0,i_a])
var_C_plus = np.sum(par.w*(np.log(par.G*C_plus_vec) - np.log(sol.c[0,i_a]) - delta_log_C_plus[i_a])**2)
delta_log_C_plus_approx_2[i_a] = par.rho**(-1)*(np.log(par.R*par.beta)) + 2/par.rho*var_C_plus + np.log(par.G)
# b. find target
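# the buffer-stock target m* is the fixed point where expected next-period
# cash-on-hand equals current cash-on-hand: E[m_{t+1} | m_t = m*] = m*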
i = np.argmin(np.abs(m_plus-sol.m[0,:]))
m_target = sol.m[0,i]
# c. figure 1 - buffer-stock target
fig = plt.figure(figsize=(6,4),dpi=100)
ax = fig.add_subplot(1,1,1)
# limits
ax.set_xlim([np.min(par.a_min), 5])
ax.set_ylim([0, 5])
# layout
bbox = {'boxstyle':'square','ec':'white','fc':'white'}
ax.text(2.1,0.25,f'$\\beta = {par.beta:.2f}$, $R = {par.R:.2f}$, $G = {par.G:.2f}$',bbox=bbox)
ax.set_xlabel('$m_t$')
ax.set_ylabel('')
# i. consumption
ax.plot(sol.m[0,:],sol.c[0,:],'-',lw=1.5,label='$c(m_t)$')
ax.legend(loc='upper left',frameon=True)
fig.savefig(f'figs/buffer_stock_target_{self.name}_c.pdf')
# ii. perfect foresight solution
if par.FHW < 1 and par.RI < 1:
c_pf = (1-par.RI)*(sol.m[0,:]+(1-par.FHW)**(-1)-1)
ax.plot(sol.m[0,:],c_pf,':',lw=1.5,color='black',label='$c^{PF}(m_t)$')
ax.legend(loc='upper left',frameon=True)
fig.savefig(f'figs/buffer_stock_target_{self.name}_pf.pdf')
# iii. a
ax.plot(sol.m[0,:],a,'-',lw=1.5,label=r'$a_t=m_t-c^{\star}(m_t)$')
ax.legend(loc='upper left',frameon=True)
fig.savefig(f'figs/buffer_stock_target_{self.name}_a.pdf')
# iv. m_plus
ax.plot(sol.m[0,:],m_plus,'-',lw=1.5,label='$E[m_{t+1} | a_t]$')
ax.legend(loc='upper left',frameon=True)
fig.savefig(f'figs/buffer_stock_target_{self.name}_m_plus.pdf')
# v. 45
ax.plot([0,5],[0,5],'-',lw=1.5,color='black',label='45 degree')
ax.legend(loc='upper left',frameon=True)
fig.savefig(f'figs/buffer_stock_target_{self.name}_45.pdf')
# vi. target
if self.model == 'bs' and par.GI < 1:
ax.plot([m_target,m_target],[0,5],'--',lw=1.5,color='black',label=f'target = {m_target:.2f}')
ax.legend(loc='upper left',frameon=True)
fig.savefig(f'figs/buffer_stock_target_{self.name}.pdf')
# STOP
if self.model == 'pf':
return
# d. figure 2 - C ratio
fig = plt.figure(figsize=(6,4),dpi=100)
ax = fig.add_subplot(1,1,1)
I = sol.c[0,:] > 0
ax.plot(sol.m[0,I],(C_plus[I]/sol.c[0,I]),'-',lw=1.5,label='$E[C_{t+1}/C_t]$')
ax.plot([m_target,m_target],[0,10],'--',lw=1.5,color='black',label='target')
ax.plot([np.min(par.a_min),500],[par.G,par.G],':',lw=1.5,color='black',label='$G$')
ax.plot([np.min(par.a_min),500],[(par.R*par.beta)**(1/par.rho),(par.R*par.beta)**(1/par.rho)],
'-',lw=1.5,color='black',label=r'$(\beta R)^{1/\rho}$')
# limit
ax.set_xlim([np.min(par.a_min),10])
ax.set_ylim([0.95,1.1])
# layout
ax.set_xlabel('$m_t$')
ax.set_ylabel('$C_{t+1}/C_t$')
ax.legend(loc='upper right',frameon=True)
fig.savefig(f'figs/cons_growth_{self.name}.pdf')
# e. figure 3 - euler approx
fig = plt.figure(figsize=(6,4),dpi=100)
ax = fig.add_subplot(1,1,1)
ax.plot(sol.m[0,:],delta_log_C_plus,'-',lw=1.5,label=r'$E[\Delta \log C_{t+1}]$')
ax.plot(sol.m[0,:],par.rho**(-1)*np.log(par.R*par.beta)*np.ones(par.Na+1)+np.log(par.G),'-',lw=1.5,label='1st order approx.')
ax.plot(sol.m[0,:],delta_log_C_plus_approx_2,'-',lw=1.5,label='2nd order approx.')
ax.plot([m_target,m_target],[-10 ,10],'--',lw=1.5,color='black',label='target')
# limit
ax.set_xlim([np.min(par.a_min),10])
ax.set_ylim([-0.03,0.12])
# layout
ax.set_xlabel('$m_t$')
ax.set_ylabel(r'$E[\Delta \log C_{t+1}]$')
ax.legend(loc='upper right',frameon=True)
fig.savefig(f'figs/euler_approx_{self.name}.pdf')
####################
# simulation plots #
####################
def plot_simulate_cdf_cash_on_hand(self):
par = self.par
sim = self.sim
# figure
fig = plt.figure(figsize=(6,4),dpi=100)
ax = fig.add_subplot(1,1,1)
for t in [0,1,2,4,9,29,49,par.simT-1]:
ecdf = ECDF(sim.m[:,t])
ax.plot(ecdf.x,ecdf.y,lw=1.5,label=f'$t = {t}$')
# limits
ax.set_xlim([np.min(par.a_min),4])
# layout
ax.set_xlabel('$m_t$')
ax.set_ylabel('CDF')
ax.legend(loc='upper right',frameon=True)
fig.savefig(f'figs/sim_cdf_cash_on_hand_{self.name}.pdf')
def plot_simulate_consumption_growth(self):
par = self.par
sim = self.sim
# 1. consumption growth
fig = plt.figure(figsize=(6,4),dpi=100)
ax = fig.add_subplot(1,1,1)
y = np.mean(np.log(sim.C[:,1:])-np.log(sim.C[:,:-1]),axis=0)
ax.plot(np.arange(par.simT-1),y,'-',lw=1.5,label=r'$E[\Delta\log(C_t)]$')
y = np.log(np.mean(sim.C[:,1:],axis=0))-np.log(np.mean(sim.C[:,:-1],axis=0))
ax.plot(np.arange(par.simT-1),y,'-',lw=1.5,
label=r'$\Delta\log(E[C_t])$')
ax.axhline(np.log(par.G),ls='-',lw=1.5,color='black',label='$\\log(G)$')
ax.axhline(np.log(par.G)-0.5*par.sigma_psi**2,ls='--',lw=1.5,color='black',label=r'$\log(G)-0.5\sigma_{\psi}^2$')
# layout
ax.set_xlabel('time')
ax.set_ylabel('')
ax.legend(loc='lower right',frameon=True)
fig.savefig(f'figs/sim_cons_growth_{self.name}.pdf')
# b. cash-on-hand
fig = plt.figure(figsize=(6,4),dpi=100)
ax = fig.add_subplot(1,1,1)
ax.plot(np.arange(par.simT),np.mean(sim.m,axis=0),'-',lw=1.5,label='mean')
ax.plot(np.arange(par.simT),np.percentile(sim.m,25,axis=0),'--',lw=1.5,color='black',label='25th percentile')
ax.plot(np.arange(par.simT),np.percentile(sim.m,75,axis=0),'--',lw=1.5,color='black',label='75th percentile')
# layout
ax.set_xlabel('time')
ax.set_ylabel('$m_t$')
ax.legend(loc='upper right',frameon=True)
fig.savefig(f'figs/sim_cash_on_hand_{self.name}.pdf')
####################
# life-cycle plots #
####################
def plot_life_cycle_income(self):
par = self.par
sim = self.sim
fig = plt.figure(figsize=(6,4),dpi=100)
ax = fig.add_subplot(1,1,1)
ax.plot(par.age_min+np.arange(1,par.simT),np.nanmean(sim.Y[:,1:],axis=0),'-',lw=1.5)
# layout
ax.set_ylabel('income, $Y_t$')
ax.set_xlabel('age')
fig.savefig(f'figs/sim_Y_{self.name}.pdf')
def plot_life_cycle_cashonhand(self):
par = self.par
sim = self.sim
fig = plt.figure(figsize=(6,4),dpi=100)
ax = fig.add_subplot(1,1,1)
ax.plot(par.age_min+np.arange(par.simT),np.mean(sim.M,axis=0),'-',lw=1.5)
# layout
ax.set_ylabel('cash-on-hand, $M_t$')
ax.set_xlabel('age')
fig.savefig(f'figs/sim_M_{self.name}.pdf')
def plot_life_cycle_consumption(self):
par = self.par
sim = self.sim
fig = plt.figure(figsize=(6,4),dpi=100)
ax = fig.add_subplot(1,1,1)
ax.plot(par.age_min+np.arange(par.simT),np.mean(sim.C,axis=0),'-',lw=1.5)
# layout
ax.set_ylabel('consumption, $C_t$')
ax.set_xlabel('age')
fig.savefig(f'figs/sim_C_{self.name}.pdf')
def plot_life_cycle_assets(self):
par = self.par
sim = self.sim
fig = plt.figure(figsize=(6,4),dpi=100)
ax = fig.add_subplot(1,1,1)
ax.plot(par.age_min+np.arange(par.simT),np.mean(sim.A,axis=0),'-',lw=1.5)
# layout
ax.set_ylabel('assets, $A_t$')
ax.set_xlabel('age')
fig.savefig(f'figs/sim_A_{self.name}.pdf')
####################
# 2. numba version #
####################
## same results with faster code
class ConsumptionSavingModelClassNumba(ModelClass,ConsumptionSavingModelClass):
def __init__(self,name='baseline',solmethod='EGM',**kwargs):
# a. set baseline parameters
self.name = name
self.solmethod = solmethod
# b. define subclasses
parlist = [
# setup
('T',int64),
('TR',int64),
('age_min',int64),
('rho',double),
('beta',double),
('G',double),
('sigma_xi',double),
('sigma_psi',double),
('low_p',double),
('low_val',double),
('L',double[:]),
('R',double),
('borrowingfac',double),
('a_max',double),
('a_phi',double),
('m_max',double),
('m_phi',double),
('Npsi',int64),
('Nxi',int64),
('Na',int64),
('Nm',int64),
('sim_mini',double),
('simN',int64),
('simT',int64),
('simlifecycle',boolean),
# create grids
('psi_vec',double[:]),
('psi_w_vec',double[:]),
('xi_vec',double[:]),
('xi_w_vec',double[:]),
('w',double[:]),
('Nshocks',int64),
('a_min',double[:]),
('grid_a',double[:,:]),
('grid_m',double[:,:]),
('FHW',double),
('AI',double),
('GI',double),
('RI',double),
('WRI',double),
('FVA',double),
('grid_a_tile',double[:,:]),
('psi_vec_rep',double[:]),
('xi_vec_rep',double[:]),
('w_rep',double[:]),
]
sollist = [
('m',double[:,:]),
('c',double[:,:]),
('inv_v',double[:,:]),
]
simlist = [
('m',double[:,:]),
('c',double[:,:]),
('a',double[:,:]),
('p',double[:,:]),
('y',double[:,:]),
('psi',double[:,:]),
('xi',double[:,:]),
('P',double[:,:]),
('Y',double[:,:]),
('M',double[:,:]),
('C',double[:,:]),
('A',double[:,:]),
]
# c. create subclasses
self.par,self.sol,self.sim = self.create_subclasses(parlist,sollist,simlist)
self.setup()
# b. update parameters
for key,val in kwargs.items():
setattr(self.par,key,val) # like par.key = val
def EGM(self,t,m,c,inv_v):
""" overwrite method with numba version """
EGM(self.par,self.sol,t,m,c,inv_v)
def simulate_timeloop(self):
""" overwrite method with numba version """
simulate_timeloop(self.par,self.sol,self.sim)
# jitted utility function
@njit
def utility(par,c):
return c**(1-par.rho)/(1-par.rho)
@njit
def marg_utility(par,c):
return c**(-par.rho)
@njit
def inv_marg_utility(par,u):
return u**(-1/par.rho)
# jitted EGM
@njit(parallel=True)
def EGM(par,sol,t,m,c,inv_v):
""" EGM with fully unrolled loops """
# loop over end-of-period assets
for i_a in prange(par.Na):
a = par.grid_a[t,i_a]
still_working_next_period = t+1 <= par.TR-1
Nshocks = par.Nshocks if still_working_next_period else 1
# loop over shocks
avg_marg_u_plus = 0
avg_v_plus = 0
for i_shock in range(Nshocks):
# a. prep
if still_working_next_period:
fac = par.G*par.L[t]*par.psi_vec[i_shock]
w = par.w[i_shock]
xi = par.xi_vec[i_shock]
else:
fac = par.G*par.L[t]
w = 1
xi = 1
inv_fac = 1.0/fac
# b. future m and c
m_plus = inv_fac*par.R*a + xi
c_plus = linear_interp.interp_1d(sol.m[t+1,:],sol.c[t+1,:],m_plus)
inv_v_plus = linear_interp.interp_1d(sol.m[t+1,:],sol.inv_v[t+1,:],m_plus)
v_plus = 1.0/inv_v_plus
# c. average future marginal utility
marg_u_plus = marg_utility(par,fac*c_plus)
avg_marg_u_plus += w*marg_u_plus
avg_v_plus += w*(fac**(1-par.rho))*v_plus
# d. current c
c[i_a] = inv_marg_utility(par,par.beta*par.R*avg_marg_u_plus)
# e. current m
m[i_a] = a + c[i_a]
# f. current v
if c[i_a] > 0:
inv_v[i_a] = 1.0/(utility(par,c[i_a]) + par.beta*avg_v_plus)
else:
inv_v[i_a] = 0
# jitted simulate_timeloop
@njit(parallel=True)
def simulate_timeloop(par,sol,sim):
""" simulate model with parallization over households """
# unpack (helps numba)
m = sim.m
p = sim.p
y = sim.y
c = sim.c
a = sim.a
# loop over first households and then time
for i in prange(par.simN):
for t in range(par.simT):
# a. solution
if par.simlifecycle == 0:
grid_m = sol.m[0,:]
grid_c = sol.c[0,:]
else:
grid_m = sol.m[t,:]
grid_c = sol.c[t,:]
# b. consumption
c[i,t] = linear_interp.interp_1d(grid_m,grid_c,m[i,t])
a[i,t] = m[i,t] - c[i,t]
# c. next-period
if t < par.simT-1:
if t+1 > par.TR-1:
m[i,t+1] = par.R*a[i,t] / (par.G*par.L[t]) + 1
p[i,t+1] = np.log(par.G) + np.log(par.L[t]) + p[i,t]
y[i,t+1] = p[i,t+1]
else:
m[i,t+1] = par.R*a[i,t] / (par.G*par.L[t]*sim.psi[i,t+1]) + sim.xi[i,t+1]
p[i,t+1] = np.log(par.G) + np.log(par.L[t]) + p[i,t] + np.log(sim.psi[i,t+1])
if sim.xi[i,t+1] > 0:
y[i,t+1] = p[i,t+1] + np.log(sim.xi[i,t+1]) |
py | 1a43e307b508c316e3583dc14ec1e65d8c2486ce | # Copyright Contributors to the Pyro project.
# SPDX-License-Identifier: Apache-2.0
"""
Gaussian Process
================
In this example we show how to use NUTS to sample from the posterior
over the hyperparameters of a gaussian process.
"""
import argparse
import os
import time
import matplotlib
import matplotlib.pyplot as plt
import numpy as np
import jax
from jax import vmap
import jax.numpy as jnp
import jax.random as random
import numpyro
import numpyro.distributions as dist
from numpyro.infer import MCMC, NUTS
matplotlib.use('Agg') # noqa: E402
# squared exponential kernel with diagonal noise term
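# k(x, z) = var * exp(-(x - z)^2 / (2 * length^2)), with (noise + jitter) added on the diagonal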
def kernel(X, Z, var, length, noise, jitter=1.0e-6, include_noise=True):
deltaXsq = jnp.power((X[:, None] - Z) / length, 2.0)
k = var * jnp.exp(-0.5 * deltaXsq)
if include_noise:
k += (noise + jitter) * jnp.eye(X.shape[0])
return k
def model(X, Y):
# set uninformative log-normal priors on our three kernel hyperparameters
var = numpyro.sample("kernel_var", dist.LogNormal(0.0, 10.0))
noise = numpyro.sample("kernel_noise", dist.LogNormal(0.0, 10.0))
length = numpyro.sample("kernel_length", dist.LogNormal(0.0, 10.0))
# compute kernel
k = kernel(X, X, var, length, noise)
# sample Y according to the standard gaussian process formula
numpyro.sample("Y", dist.MultivariateNormal(loc=jnp.zeros(X.shape[0]), covariance_matrix=k),
obs=Y)
# helper function for doing hmc inference
def run_inference(model, args, rng_key, X, Y):
start = time.time()
kernel = NUTS(model)
mcmc = MCMC(kernel, args.num_warmup, args.num_samples, num_chains=args.num_chains,
progress_bar=False if "NUMPYRO_SPHINXBUILD" in os.environ else True)
mcmc.run(rng_key, X, Y)
mcmc.print_summary()
print('\nMCMC elapsed time:', time.time() - start)
return mcmc.get_samples()
# do GP prediction for a given set of hyperparameters. this makes use of the well-known
# formula for gaussian process predictions
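# with K denoting kernel matrices, the posterior at the test inputs X* is
#   mean = K_{*X} K_{XX}^{-1} Y
#   cov  = K_{**} - K_{*X} K_{XX}^{-1} K_{X*}
# only the diagonal of cov is used below, i.e. an independent noise draw per test point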
def predict(rng_key, X, Y, X_test, var, length, noise):
# compute kernels between train and test data, etc.
k_pp = kernel(X_test, X_test, var, length, noise, include_noise=True)
k_pX = kernel(X_test, X, var, length, noise, include_noise=False)
k_XX = kernel(X, X, var, length, noise, include_noise=True)
K_xx_inv = jnp.linalg.inv(k_XX)
K = k_pp - jnp.matmul(k_pX, jnp.matmul(K_xx_inv, jnp.transpose(k_pX)))
sigma_noise = jnp.sqrt(jnp.clip(jnp.diag(K), a_min=0.)) * jax.random.normal(rng_key, X_test.shape[:1])
mean = jnp.matmul(k_pX, jnp.matmul(K_xx_inv, Y))
# we return both the mean function and a sample from the posterior predictive for the
# given set of hyperparameters
return mean, mean + sigma_noise
# create artificial regression dataset
def get_data(N=30, sigma_obs=0.15, N_test=400):
np.random.seed(0)
X = jnp.linspace(-1, 1, N)
Y = X + 0.2 * jnp.power(X, 3.0) + 0.5 * jnp.power(0.5 + X, 2.0) * jnp.sin(4.0 * X)
Y += sigma_obs * np.random.randn(N)
Y -= jnp.mean(Y)
Y /= jnp.std(Y)
assert X.shape == (N,)
assert Y.shape == (N,)
X_test = jnp.linspace(-1.3, 1.3, N_test)
return X, Y, X_test
def main(args):
X, Y, X_test = get_data(N=args.num_data)
# do inference
rng_key, rng_key_predict = random.split(random.PRNGKey(0))
samples = run_inference(model, args, rng_key, X, Y)
# do prediction
vmap_args = (random.split(rng_key_predict, args.num_samples * args.num_chains), samples['kernel_var'],
samples['kernel_length'], samples['kernel_noise'])
means, predictions = vmap(lambda rng_key, var, length, noise:
predict(rng_key, X, Y, X_test, var, length, noise))(*vmap_args)
mean_prediction = np.mean(means, axis=0)
percentiles = np.percentile(predictions, [5.0, 95.0], axis=0)
# make plots
fig, ax = plt.subplots(1, 1)
# plot training data
ax.plot(X, Y, 'kx')
# plot 90% confidence level of predictions
ax.fill_between(X_test, percentiles[0, :], percentiles[1, :], color='lightblue')
# plot mean prediction
ax.plot(X_test, mean_prediction, 'blue', ls='solid', lw=2.0)
ax.set(xlabel="X", ylabel="Y", title="Mean predictions with 90% CI")
plt.savefig("gp_plot.pdf")
plt.tight_layout()
if __name__ == "__main__":
assert numpyro.__version__.startswith('0.4.0')
parser = argparse.ArgumentParser(description="Gaussian Process example")
parser.add_argument("-n", "--num-samples", nargs="?", default=1000, type=int)
parser.add_argument("--num-warmup", nargs='?', default=1000, type=int)
parser.add_argument("--num-chains", nargs='?', default=1, type=int)
parser.add_argument("--num-data", nargs='?', default=25, type=int)
parser.add_argument("--device", default='cpu', type=str, help='use "cpu" or "gpu".')
args = parser.parse_args()
numpyro.set_platform(args.device)
numpyro.set_host_device_count(args.num_chains)
main(args)
|
py | 1a43e326862dfb930eef5cc5b9f05dc7308b21f9 | #!/usr/bin/env python3
# Copyright (c) 2014-2020 The Bitcoin Core developers
# Distributed under the MIT software license, see the accompanying
# file COPYING or http://www.opensource.org/licenses/mit-license.php.
"""Test wallet import RPCs.
Test rescan behavior of importaddress, importpubkey, importprivkey, and
importmulti RPCs with different types of keys and rescan options.
In the first part of the test, node 0 creates an address for each type of
import RPC call and sends XCBC to it. Then other nodes import the addresses,
and the test makes listtransactions and getbalance calls to confirm that the
importing node either did or did not execute rescans picking up the send
transactions.
In the second part of the test, node 0 sends more XCBC to each address, and the
test makes more listtransactions and getbalance calls to confirm that the
importing nodes pick up the new transactions regardless of whether rescans
happened previously.
"""
from test_framework.test_framework import XCBCTestFramework
from test_framework.address import AddressType
from test_framework.util import (
assert_equal,
set_node_times,
)
import collections
from decimal import Decimal
import enum
import itertools
import random
Call = enum.Enum("Call", "single multiaddress multiscript")
Data = enum.Enum("Data", "address pub priv")
Rescan = enum.Enum("Rescan", "no yes late_timestamp")
class Variant(collections.namedtuple("Variant", "call data address_type rescan prune")):
"""Helper for importing one key and verifying scanned transactions."""
def do_import(self, timestamp):
"""Call one key import RPC."""
rescan = self.rescan == Rescan.yes
assert_equal(self.address["solvable"], True)
assert_equal(self.address["isscript"], self.address_type == AddressType.p2sh_segwit)
assert_equal(self.address["iswitness"], self.address_type == AddressType.bech32)
if self.address["isscript"]:
assert_equal(self.address["embedded"]["isscript"], False)
assert_equal(self.address["embedded"]["iswitness"], True)
if self.call == Call.single:
if self.data == Data.address:
response = self.node.importaddress(address=self.address["address"], label=self.label, rescan=rescan)
elif self.data == Data.pub:
response = self.node.importpubkey(pubkey=self.address["pubkey"], label=self.label, rescan=rescan)
elif self.data == Data.priv:
response = self.node.importprivkey(privkey=self.key, label=self.label, rescan=rescan)
assert_equal(response, None)
elif self.call in (Call.multiaddress, Call.multiscript):
request = {
"scriptPubKey": {
"address": self.address["address"]
} if self.call == Call.multiaddress else self.address["scriptPubKey"],
"timestamp": timestamp + TIMESTAMP_WINDOW + (1 if self.rescan == Rescan.late_timestamp else 0),
"pubkeys": [self.address["pubkey"]] if self.data == Data.pub else [],
"keys": [self.key] if self.data == Data.priv else [],
"label": self.label,
"watchonly": self.data != Data.priv
}
if self.address_type == AddressType.p2sh_segwit and self.data != Data.address:
# We need solving data when providing a pubkey or privkey as data
request.update({"redeemscript": self.address['embedded']['scriptPubKey']})
response = self.node.importmulti(
requests=[request],
options={"rescan": self.rescan in (Rescan.yes, Rescan.late_timestamp)},
)
assert_equal(response, [{"success": True}])
def check(self, txid=None, amount=None, confirmation_height=None):
"""Verify that listtransactions/listreceivedbyaddress return expected values."""
txs = self.node.listtransactions(label=self.label, count=10000, include_watchonly=True)
current_height = self.node.getblockcount()
assert_equal(len(txs), self.expected_txs)
addresses = self.node.listreceivedbyaddress(minconf=0, include_watchonly=True, address_filter=self.address['address'])
if self.expected_txs:
assert_equal(len(addresses[0]["txids"]), self.expected_txs)
if txid is not None:
tx, = [tx for tx in txs if tx["txid"] == txid]
assert_equal(tx["label"], self.label)
assert_equal(tx["address"], self.address["address"])
assert_equal(tx["amount"], amount)
assert_equal(tx["category"], "receive")
assert_equal(tx["label"], self.label)
assert_equal(tx["txid"], txid)
assert_equal(tx["confirmations"], 1 + current_height - confirmation_height)
assert_equal("trusted" not in tx, True)
address, = [ad for ad in addresses if txid in ad["txids"]]
assert_equal(address["address"], self.address["address"])
assert_equal(address["amount"], self.expected_balance)
assert_equal(address["confirmations"], 1 + current_height - confirmation_height)
# Verify the transaction is correctly marked watchonly depending on
# whether the transaction pays to an imported public key or
# imported private key. The test setup ensures that transaction
# inputs will not be from watchonly keys (important because
# involvesWatchonly will be true if either the transaction output
# or inputs are watchonly).
if self.data != Data.priv:
assert_equal(address["involvesWatchonly"], True)
else:
assert_equal("involvesWatchonly" not in address, True)
# List of Variants for each way a key or address could be imported.
IMPORT_VARIANTS = [Variant(*variants) for variants in itertools.product(Call, Data, AddressType, Rescan, (False, True))]
# List of nodes to import keys to. Half the nodes will have pruning disabled,
# half will have it enabled. Different nodes will be used for imports that are
# expected to cause rescans, and imports that are not expected to cause
# rescans, in order to prevent rescans during later imports picking up
# transactions associated with earlier imports. This makes it easier to keep
# track of expected balances and transactions.
ImportNode = collections.namedtuple("ImportNode", "prune rescan")
IMPORT_NODES = [ImportNode(*fields) for fields in itertools.product((False, True), repeat=2)]
# Rescans start at the earliest block up to 2 hours before the key timestamp.
TIMESTAMP_WINDOW = 2 * 60 * 60
AMOUNT_DUST = 0.00000546
def get_rand_amount():
r = random.uniform(AMOUNT_DUST, 1)
return Decimal(str(round(r, 8)))
class ImportRescanTest(XCBCTestFramework):
def set_test_params(self):
self.num_nodes = 2 + len(IMPORT_NODES)
self.supports_cli = False
self.rpc_timeout = 120
def skip_test_if_missing_module(self):
self.skip_if_no_wallet()
def setup_network(self):
self.extra_args = [[] for _ in range(self.num_nodes)]
for i, import_node in enumerate(IMPORT_NODES, 2):
if import_node.prune:
self.extra_args[i] += ["-prune=1"]
self.add_nodes(self.num_nodes, extra_args=self.extra_args)
# Import keys with pruning disabled
self.start_nodes(extra_args=[[]] * self.num_nodes)
self.import_deterministic_coinbase_privkeys()
self.stop_nodes()
self.start_nodes()
for i in range(1, self.num_nodes):
self.connect_nodes(i, 0)
def run_test(self):
# Create one transaction on node 0 with a unique amount for
# each possible type of wallet import RPC.
for i, variant in enumerate(IMPORT_VARIANTS):
variant.label = "label {} {}".format(i, variant)
variant.address = self.nodes[1].getaddressinfo(self.nodes[1].getnewaddress(
label=variant.label,
address_type=variant.address_type.value,
))
variant.key = self.nodes[1].dumpprivkey(variant.address["address"])
variant.initial_amount = get_rand_amount()
variant.initial_txid = self.nodes[0].sendtoaddress(variant.address["address"], variant.initial_amount)
self.nodes[0].generate(1) # Generate one block for each send
variant.confirmation_height = self.nodes[0].getblockcount()
variant.timestamp = self.nodes[0].getblockheader(self.nodes[0].getbestblockhash())["time"]
self.sync_all() # Conclude sync before calling setmocktime to avoid timeouts
# Generate a block further in the future (past the rescan window).
assert_equal(self.nodes[0].getrawmempool(), [])
set_node_times(
self.nodes,
self.nodes[0].getblockheader(self.nodes[0].getbestblockhash())["time"] + TIMESTAMP_WINDOW + 1,
)
self.nodes[0].generate(1)
self.sync_all()
# For each variation of wallet key import, invoke the import RPC and
# check the results from getbalance and listtransactions.
for variant in IMPORT_VARIANTS:
self.log.info('Run import for variant {}'.format(variant))
expect_rescan = variant.rescan == Rescan.yes
variant.node = self.nodes[2 + IMPORT_NODES.index(ImportNode(variant.prune, expect_rescan))]
variant.do_import(variant.timestamp)
if expect_rescan:
variant.expected_balance = variant.initial_amount
variant.expected_txs = 1
variant.check(variant.initial_txid, variant.initial_amount, variant.confirmation_height)
else:
variant.expected_balance = 0
variant.expected_txs = 0
variant.check()
# Create new transactions sending to each address.
for i, variant in enumerate(IMPORT_VARIANTS):
variant.sent_amount = get_rand_amount()
variant.sent_txid = self.nodes[0].sendtoaddress(variant.address["address"], variant.sent_amount)
self.nodes[0].generate(1) # Generate one block for each send
variant.confirmation_height = self.nodes[0].getblockcount()
assert_equal(self.nodes[0].getrawmempool(), [])
self.sync_all()
# Check the latest results from getbalance and listtransactions.
for variant in IMPORT_VARIANTS:
self.log.info('Run check for variant {}'.format(variant))
variant.expected_balance += variant.sent_amount
variant.expected_txs += 1
variant.check(variant.sent_txid, variant.sent_amount, variant.confirmation_height)
if __name__ == "__main__":
ImportRescanTest().main()
|
py | 1a43e33d04e3f0eaf830b974bf544c32498255dc | import matplotlib as mpl
import matplotlib.pyplot as plt
import numpy as np
import os
import pdb
from tqdm import tqdm
import argparse
import pandas as pd
import sys
BASE_DIR=os.path.dirname(os.getcwd())
sys.path.append(BASE_DIR)
sys.path.append('/home/tam63/geometric-js')
import torch
import scipy.stats
from scipy.stats import norm
from scipy.special import logsumexp
from vae.utils.modelIO import save_model, load_model, load_metadata
from notebooks.utils import PlotParams
# from utils.helpers import (create_safe_directory, get_device, set_seed,
# get_n_param)
TRAIN_MODELS_DIR = "/home/tam63/results/alpha-experiments"
DATA_DIR = "/home/tam63/geometric-js/data"
SAVE_DIR = "/home/tam63/figures/alpha-experiments"
def parse_arguments(args_to_parse):
"""Parse the command line arguments.
Parameters
----------
args_to_parse: list of str
Arguments to parse (splitted on whitespaces).
"""
description = "PyTorch implementation and evaluation of Variational" + \
"AutoEncoders and metrics."
parser = argparse.ArgumentParser(description=description)
# General options
general = parser.add_argument_group('General options')
general.add_argument('--dataset', type=str, choices=['mnist', 'fashion', 'dsprites'],
help="Name of the dataset being plotted.")
general.add_argument('--divergence', type=str, choices=['dGJS', 'GJS', 'both'],
help="Type of geometric-JS divergence to be plotted on comparison plot.")
general.add_argument('--model-loc', type=str,
help="Location of the trained models to be used to generate plots.")
args = parser.parse_args(args_to_parse)
print(args)
return args
def bootstrap(x, low, high, n_samples):
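# percentile bootstrap for the mean of x: resample n_samples times with replacement and
# return (mean, lower, upper), the bounds being the low/high quantiles of the resampled means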
mu = x.mean()
n = len(x)
X = np.random.choice(x, size=n_samples*n).reshape(n_samples, n)
mu_star = X.mean(axis=1)
d_star = np.sort(mu_star - mu)
return mu, mu + d_star[int(low*n_samples)], mu + d_star[int(high*n_samples)]
def compute_samples(model, data, num_samples, debug=False):
"""
Description
---------------------------------------------------------------
Sample from importance distribution z_samples ~ q(z|X) and
compute p(z_samples), q(z_samples) for importance sampling
Inputs
---------------------------------------------------------------
model : pytorch nn.Module
VAE model implemented in PyTorch which has been
trained on the training data corresponding to the
passed test data, which is contained in the variable
'data'.
data : pytorch Tensor
Tensor of shape [batch_size, 1, im_size, im_size],
where im_size is the dimension size of the images used
to train the model, and batch size is the number of
data instances passed, which is therefore also the
number of estimates of the probability distribution
which will be produced.
num_samples : int
For each passed data instance, the probability
distribution p(x|z) will be estimated using a monte
carlo integration with num_samples samples.
returns
---------------------------------------------------------------
z_samples, pz, qz : numpy array
Arrays containing, for each passed input image, the sampled
latent representations (z_samples), the importance density
q(z|x) evaluated at those samples (qz), and the standard
normal prior p(z) evaluated at the same samples (pz).
"""
data = data.cuda()
z_mean, z_log_sigma = model.encoder(data)
z_mean = z_mean.cpu().detach().numpy()
z_log_sigma = z_log_sigma.cpu().detach().numpy()
z_samples = []
qz = []
for m, s in zip(z_mean, z_log_sigma):
# len(s) = len(s) = 10 = size of the latent space dimension
#
# z_vals is num_samples (= 128) samples drawn from the normal
# distribution defined by the mean and std (m[i], s[i])
#
# qz_vals is the normal distribution defined by the samples
# in the vector z_vals
z_vals = [np.random.normal(m[i], np.exp(s[i]), num_samples) for i in range(len(m))]
qz_vals = [norm.pdf(z_vals[i], loc=m[i], scale=np.exp(s[i])) for i in range(len(m))]
z_samples.append(z_vals)
qz.append(qz_vals)
z_samples = np.array(z_samples)
pz = norm.pdf(z_samples)
qz = np.array(qz)
# pdb.set_trace()
# Check why the axes are being swapped
z_samples = np.swapaxes(z_samples, 1, 2)
pz = np.swapaxes(pz, 1, 2)
qz = np.swapaxes(qz, 1, 2)
return z_samples, pz, qz
def estimate_logpx_batch(model, data, num_samples, debug=False, digit_size=32):
"""
"""
z_samples, pz, qz = compute_samples(model, data, num_samples)
assert len(z_samples) == len(data)
assert len(z_samples) == len(pz)
assert len(z_samples) == len(qz)
z_samples = torch.tensor(z_samples).float().cuda()
result = []
for i in range(len(data)):
x_predict = model.decoder(z_samples[i]).reshape(-1, digit_size ** 2)
x_predict = x_predict.cpu().detach().numpy()
x_predict = np.clip(x_predict, np.finfo(float).eps, 1. - np.finfo(float).eps)
p_vals = pz[i]
q_vals = qz[i]
# pdb.set_trace()
datum = data[i].cpu().reshape(digit_size ** 2).numpy() #.reshape(digit_size ** 2)
# \log p(x|z) = Binary cross entropy
logp_xz = np.sum(datum * np.log(x_predict + 1e-9) + (1. - datum) * np.log(1.0 - x_predict + 1e-9), axis=-1)
logpz = np.sum(np.log(p_vals + 1e-9), axis=-1)
logqz = np.sum(np.log(q_vals + 1e-9), axis=-1)
argsum = logp_xz + logpz - logqz
logpx = -np.log(num_samples + 1e-9) + logsumexp(argsum)
result.append(logpx)
return np.array(result)
def estimate_logpx(model, data, num_samples, verbosity=0, digit_size=32):
batches = []
iterations = int(np.ceil(1. * len(data) / 100))
for b in tqdm(range(iterations)):
batch_data = data[b * 100:(b + 1) * 100]
batches.append(estimate_logpx_batch(model, batch_data, num_samples, digit_size=digit_size))
if verbosity and b % max(11 - verbosity, 1) == 0:
print("Batch %d [%d, %d): %.2f" % (b, b * 100, (b+1) * 100, np.mean(np.concatenate(batches))))
log_probs = np.concatenate(batches)
mu, lb, ub = bootstrap(log_probs, 0.025, 0.975, 1000)
return mu, lb, ub
def main(args):
device = 'cuda'
plotter = PlotParams()
plotter.set_params()
DATA_DIR = os.path.join(os.pardir, 'data')
FIG_DIR = os.path.join(os.pardir, 'figs')
RES_DIR = os.path.join(os.pardir, 'results')
# 1) select dataset to load:
if args.dataset == 'dsprites':
X_test = np.load(os.path.join(DATA_DIR, 'dsprites', 'dsprite_train.npz'))['imgs']
X_test = torch.tensor(X_test).unsqueeze(1).float() / 255.0
digit_size = 64
X_test = X_test[:10000]
X_test = X_test.to(device)
elif args.dataset == 'fashion':
X_test = torch.load(os.path.join(DATA_DIR, 'fashionMnist', 'FashionMNIST', 'processed', 'test.pt'))
digit_size = 32
X_test = X_test[0].unsqueeze(1).float() / 255.0
X_test = torch.nn.functional.pad(X_test, pad=(2, 2, 2, 2))
X_test = X_test[:10000]
X_test = X_test.to(device)
elif args.dataset == 'mnist':
X_test = torch.load(os.path.join(DATA_DIR, 'mnist', 'MNIST', 'processed', 'test.pt'))
digit_size = 32
X_test = X_test[0].unsqueeze(1).float() / 255.0
X_test = torch.nn.functional.pad(X_test, pad=(2, 2, 2, 2))
X_test = X_test[:10000]
X_test = X_test.to(device)
# 2) Get the trained alpha dGJS probabilities:
av_a = []
log_probs_lb = []
log_probs_ub = []
log_probs_mu = []
log_probs_best = -np.inf
if args.divergence in ['GJS', 'dGJS']:
divergence = args.divergence
for initial_a in [i/10 for i in range(11)]:
model_path = f"{TRAIN_MODELS_DIR}/{args.dataset}/{args.model_loc}/{divergence}-A_0={initial_a}"
model = load_model(model_path)
logpx_mu, logpx_lb, logpx_ub = estimate_logpx(model, X_test, num_samples=128, verbosity=0, digit_size=digit_size)
log_probs_mu += [logpx_mu]
log_probs_lb += [logpx_lb]
log_probs_ub += [logpx_ub]
if logpx_mu > log_probs_best:
model_best = model_path
log_probs_best = logpx_mu
# break
print(model_path)
print("log p(x) = %.2f (%.2f, %.2f)" % (logpx_mu, logpx_lb, logpx_ub))
# 3) Get the comparison divergences probabilities:
av_a_i = []
log_probs_lb_i = []
log_probs_ub_i = []
log_probs_mu_i = []
log_probs_best_i = -np.inf
model_names = []
# KL:
model_path = f"{TRAIN_MODELS_DIR}/{args.dataset}/{args.model_loc}/KL"
model = load_model(model_path)
logpx_mu, logpx_lb, logpx_ub = estimate_logpx(model, X_test, num_samples=128, verbosity=0, digit_size=digit_size)
log_probs_mu_i += [logpx_mu]
log_probs_lb_i += [logpx_lb]
log_probs_ub_i += [logpx_ub]
model_names.append("KL")
# break
print(model_path)
print("log p(x) = %.2f (%.2f, %.2f)" % (logpx_mu, logpx_lb, logpx_ub))
# fwdKL:
model_path = f"{TRAIN_MODELS_DIR}/{args.dataset}/{args.model_loc}/fwdKL"
model = load_model(model_path)
logpx_mu, logpx_lb, logpx_ub = estimate_logpx(model, X_test, num_samples=128, verbosity=0, digit_size=digit_size)
log_probs_mu_i += [logpx_mu]
log_probs_lb_i += [logpx_lb]
log_probs_ub_i += [logpx_ub]
model_names.append("fwdKL")
# break
print(model_path)
print("log p(x) = %.2f (%.2f, %.2f)" % (logpx_mu, logpx_lb, logpx_ub))
# MMD:
model_path = f"{TRAIN_MODELS_DIR}/{args.dataset}/{args.model_loc}/MMD"
model = load_model(model_path)
logpx_mu, logpx_lb, logpx_ub = estimate_logpx(model, X_test, num_samples=128, verbosity=0, digit_size=digit_size)
log_probs_mu_i += [logpx_mu]
log_probs_lb_i += [logpx_lb]
log_probs_ub_i += [logpx_ub]
model_names.append("MMD")
# break
print(model_path)
print("log p(x) = %.2f (%.2f, %.2f)" % (logpx_mu, logpx_lb, logpx_ub))
# no-constraint:
# model_path = f"{TRAIN_MODELS_DIR}/{args.dataset}/{args.model_loc}/no-constraint"
# model = load_model(model_path)
# logpx_mu, logpx_lb, logpx_ub = estimate_logpx(model, X_test, num_samples=128, verbosity=0, digit_size=digit_size)
# log_probs_mu_i += [logpx_mu]
# log_probs_lb_i += [logpx_lb]
# log_probs_ub_i += [logpx_ub]
# model_names.append("no-constraint")
# print(model_path)
# print("log p(x) = %.2f (%.2f, %.2f)" % (logpx_mu, logpx_lb, logpx_ub))
# 4) Plot:
fig = plt.figure(figsize=(10, 10))
yerr_bar = np.array(log_probs_ub) - np.array(log_probs_lb)
yerr_bar_i = np.array(log_probs_ub_i) - np.array(log_probs_lb_i)
initial_a = [i/10 for i in range(11)]
plt.errorbar(initial_a, log_probs_mu, yerr=yerr_bar, label=args.divergence)
for i in range(len(model_names)):
plt.errorbar(initial_a, [log_probs_mu_i[i]] * len(initial_a), yerr=[yerr_bar_i[i]] * len(initial_a), label=model_names[i])
plt.xlabel(r'Initial $\alpha$')
plt.ylabel(r'$\log(p_{\theta}(X))$')
plt.legend()
plt.title("Log model evidence vs initial alpha")
plt.savefig(f"{SAVE_DIR}/{args.dataset}/{args.divergence}/{args.divergence}-generative-performance.pdf")
plt.savefig(f"{SAVE_DIR}/{args.dataset}/{args.divergence}/{args.divergence}-generative-performance.png", dpi=200)
# save tight layout version:
fig = plt.figure(figsize=(10, 10))
yerr_bar = np.array(log_probs_ub) - np.array(log_probs_lb)
yerr_bar_i = np.array(log_probs_ub_i) - np.array(log_probs_lb_i)
initial_a = [i/10 for i in range(11)]
plt.errorbar(initial_a, log_probs_mu, yerr=yerr_bar, label=args.divergence)
for i in range(len(model_names)):
plt.errorbar(initial_a, [log_probs_mu_i[i]] * len(initial_a), yerr=[yerr_bar_i[i]] * len(initial_a), label=model_names[i])
plt.xlabel(r'Initial $\alpha$')
plt.ylabel(r'$\log(p_{\theta}(X))$')
plt.legend()
plt.tight_layout(pad=1.0, w_pad=1.0, h_pad=1.0)
plt.savefig(f"{SAVE_DIR}/{args.dataset}/{args.divergence}/{args.divergence}-generative-performance-tight-layout.pdf")
plt.savefig(f"{SAVE_DIR}/{args.dataset}/{args.divergence}/{args.divergence}-generative-performance-tight-layout.png", dpi=200)
if __name__ == '__main__':
args = parse_arguments(sys.argv[1:])
main(args) |
py | 1a43e371d5557e821c3454b75bf44d600f9dd464 | import argparse
from utils.helpers import read_lines
from gector.gec_model import GecBERTModel
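# Example invocation (hypothetical file name and paths; see the argparse options below):
#   python predict.py --model_path roberta_1_gector.th --vocab_path data/output_vocabulary \
#       --input_file input.txt --output_file corrected.txt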
def predict_for_file(input_file, output_file, model, batch_size=32):
test_data = read_lines(input_file)
predictions = []
cnt_corrections = 0
batch = []
count = 0
for sent in test_data:
batch.append(sent.split())
if len(batch) == batch_size:
preds, cnt = model.handle_batch(batch, count, batch_size)
predictions.extend(preds)
cnt_corrections += cnt
batch = []
count += 1
if batch:
preds, cnt = model.handle_batch(batch, count, batch_size)
predictions.extend(preds)
cnt_corrections += cnt
with open(output_file, 'w') as f:
f.write("\n".join([" ".join(x) for x in predictions]) + '\n')
return cnt_corrections
def main(args):
# build the model from the given checkpoint(s) and vocabulary
model = GecBERTModel(vocab_path=args.vocab_path,
model_paths=args.model_path,
max_len=args.max_len, min_len=args.min_len,
iterations=args.iteration_count,
min_error_probability=args.min_error_probability,
lowercase_tokens=args.lowercase_tokens,
model_name=args.transformer_model,
special_tokens_fix=args.special_tokens_fix,
log=False,
confidence=args.additional_confidence,
is_ensemble=args.is_ensemble,
weigths=args.weights)
cnt_corrections = predict_for_file(args.input_file, args.output_file, model,
batch_size=args.batch_size)
# evaluate with m2 or ERRANT
print(f"Produced overall corrections: {cnt_corrections}")
if __name__ == '__main__':
# read parameters
parser = argparse.ArgumentParser()
parser.add_argument('--model_path',
help='Path to the model file.', nargs='+',
required=True)
parser.add_argument('--vocab_path',
help='Path to the vocabulary directory.',
default='data/output_vocabulary' # to use pretrained models
)
parser.add_argument('--input_file',
help='Path to the evalset file',
required=True)
parser.add_argument('--output_file',
help='Path to the output file',
required=True)
parser.add_argument('--max_len',
type=int,
help='The max sentence length'
'(all longer will be truncated)',
default=50)
parser.add_argument('--min_len',
type=int,
help='The minimum sentence length'
'(all shorter will be returned w/o changes)',
default=3)
parser.add_argument('--batch_size',
type=int,
help='The number of sentences processed at once.',
default=128)
parser.add_argument('--lowercase_tokens',
type=int,
help='Whether to lowercase tokens.',
default=0)
parser.add_argument('--transformer_model',
choices=['bert', 'gpt2', 'transformerxl', 'xlnet', 'distilbert', 'roberta', 'albert'],
help='Name of the transformer model.',
default='roberta')
parser.add_argument('--iteration_count',
type=int,
help='The number of iterations of the model.',
default=5)
parser.add_argument('--additional_confidence',
type=float,
help='How many probability to add to $KEEP token.',
default=0)
parser.add_argument('--min_error_probability',
type=float,
help='Minimum probability for each action to apply. '
'Also, minimum error probability, as described in the paper.',
default=0.0)
parser.add_argument('--special_tokens_fix',
type=int,
help='Whether to fix problem with [CLS], [SEP] tokens tokenization. '
'For reproducing reported results it should be 0 for BERT/XLNet and 1 for RoBERTa.',
default=1)
parser.add_argument('--is_ensemble',
type=int,
help='Whether to do ensembling.',
default=0)
parser.add_argument('--weights',
help='Used to calculate weighted average', nargs='+',
default=None)
args = parser.parse_args()
main(args)
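    # Example invocation (added for illustration; the script name, checkpoint
    # path and file names below are placeholders, not part of the original):
    #   python predict.py --model_path MODEL.th \
    #       --vocab_path data/output_vocabulary \
    #       --input_file input.txt --output_file output.txt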
|
py | 1a43e4e02196fc7af0d858001b38d84c05a3923c |
class BinarySearchTree(object):
class Node(object):
def __init__(self, key, value):
self.left = None
self.right = None
self.key = key
self.value = value
def __repr__(self):
return "Node(key={}, value={}, left={}, right={})".format(self.key, self.value, self.left, self.right)
def match_and_parent(self, key, parent=None):
if self.key == key:
return self, parent
elif self.key < key and self.right is not None:
return self.right.match_and_parent(key, self)
elif self.key > key and self.left is not None:
return self.left.match_and_parent(key, self)
else:
return None, self
def add_child(self, node):
if self.key < node.key:
assert self.right is None
self.right = node
elif self.key > node.key:
assert self.left is None
self.left = node
else:
raise ValueError('Adding child with equal key')
def remove_child(self, node):
if node is self.left:
self.left = None
elif node is self.right:
self.right = None
else:
raise ValueError("Not this node's child")
def __init__(self):
self.root = None
self.size = 0
def get(self, key):
if self.root is None:
raise IndexError('Key {} not Found'.format(key))
node, _ = self.root.match_and_parent(key)
if node is None:
raise IndexError('Key {} not Found'.format(key))
return node.value
def set(self, key, value):
if self.root is None:
self.root = self.Node(key, value)
self.size += 1
return
node, parent = self.root.match_and_parent(key)
if node is None:
node = self.Node(key, value)
parent.add_child(node)
self.size += 1
else:
node.value = value
def remove(self, key):
if self.root is None:
raise IndexError('Key {} not Found'.format(key))
node, parent = self.root.match_and_parent(key, self.root)
if node is None:
raise IndexError('Key {} not Found'.format(key))
elif node is parent:
self.root = None
self.size = 0
else:
parent.remove_child(node)
self.size -= 1
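        # Note (added comment): removing the root discards the entire tree rather
        # than re-attaching its children; BSTString below never calls remove(),
        # so this simplification is not exercised here.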
def __len__(self):
return self.size
def __eq__(self, other):
if self.root is None and other.root is None:
return True
elif self.root is None or other.root is None:
return False
elif len(self) != len(other):
return False
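        # The element-wise comparison below assumes keys are the integers
        # 0..len-1, which is how BSTString (below) populates the tree.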
for i in range(len(self)):
a = self.get(i)
b = other.get(i)
if a != b:
return False
return True
class BSTString(object):
"""Implementation of String using a binary search tree
This is pretty ineffective for everything, is intended as
using a barebones data structure for a string implementation
that isn't trivial like List.
"""
def __init__(self, s=""):
self._data = BinarySearchTree()
for i, c in enumerate(s):
self._data.set(i, c)
def __getitem__(self, position):
return self._data.get(position)
def __add__(self, other):
result = BSTString("")
n = len(self)
for i in range(n):
result._data.set(i, self[i])
for i in range(n, n + len(other)):
result._data.set(i, other[i - n])
return result
def __eq__(self, other):
return self._data == other._data
def __len__(self):
return len(self._data)
def __repr__(self):
return ''.join([self._data.get(i) for i in range(len(self))])
def split(self, n):
indices = [i for i in range(len(self))]
index_list = [indices[i:i+n] for i in range(0, len(self), n)]
result = []
for indices in index_list:
result.append(BSTString([self._data.get(i) for i in indices]))
return result
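# Illustrative usage (an added sketch, not part of the original file):
if __name__ == "__main__":
    s = BSTString("hello") + BSTString(" world")
    assert s[0] == "h" and len(s) == 11
    assert [str(part) for part in s.split(5)] == ["hello", " worl", "d"]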
|
py | 1a43e4ef43cc2725a8d1c8a5e91ec869b620b812 | # User role
ADMIN = 0
STAFF = 1
USER = 2
ROLE = {
ADMIN: 'admin',
STAFF: 'staff',
USER: 'user',
}
# User status
INACTIVE = 0
LOGOUT = 1
LOGIN = 2
PLAY = 3
STATUS = {
INACTIVE: 'inactive',
LOGOUT: 'logout',
LOGIN: 'login',
PLAY: 'play',
}
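# Example lookups (added for illustration):
#   ROLE[ADMIN]  -> 'admin'
#   STATUS[PLAY] -> 'play'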
|
py | 1a43e58ed87f52fd7f577e69c9baea7abbea619d | import math
def fibonacciIterative(n):
    if n == 0:
        return 0
    if n == 1:
        return 1
first = 0
second = 1
for i in range(1,n):
tmp = first + second
first = second
second = tmp
return second
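# Worked example (added for illustration): for n = 5 the loop runs i = 1..4 and
# (first, second) evolves 0,1 -> 1,1 -> 1,2 -> 2,3 -> 3,5, so the function
# returns 5, matching the sequence 0, 1, 1, 2, 3, 5, ...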
def main():
n = int(input("Enter a number: "))
    if n >= 0:
        print(f"Fibonacci of {n} is: {fibonacciIterative(n)}")
else:
print("Choose another number")
if __name__ == "__main__":
main()
|
py | 1a43e5bfc8ec305fbcc23faec624a0877490c5cf | # Generated by Django 2.2 on 2019-05-25 13:56
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
dependencies = [
('build', '0003_auto_20190525_2355'),
]
operations = [
migrations.AlterField(
model_name='build',
name='part',
field=models.ForeignKey(help_text='Select part to build', limit_choices_to={'active': True, 'buildable': True, 'is_template': False}, on_delete=django.db.models.deletion.CASCADE, related_name='builds', to='part.Part'),
),
]
|
py | 1a43e6b8bfc23b9740e7c00a128c7e7154c5bdad | #!/usr/bin/python3
import os
import sys
import time
import shutil
import hashlib
projectRoot = "https://www.sansay.co.uk/jamstack"
# Parse any options set by the user on the command line.
validBooleanOptions = []
validValueOptions = ["-domainName", "-contentFolderPath", "-jekyllFolderPath", "-buildPassword"]
userOptions = {}
optionCount = 1
while optionCount < len(sys.argv):
if sys.argv[optionCount] in validBooleanOptions:
userOptions[sys.argv[optionCount]] = True
elif sys.argv[optionCount] in validValueOptions:
userOptions[sys.argv[optionCount]] = sys.argv[optionCount+1]
optionCount = optionCount + 1
optionCount = optionCount + 1
def runIfPathMissing(thePath, theCommand):
if not os.path.exists(thePath):
print("Running: " + theCommand)
os.system(theCommand)
def downloadFile(src, dest, mode=None):
print("Copying file " + src + " to " + dest)
os.system("curl -s " + projectRoot + "/" + src + " -o " + dest)
    if mode is not None:
os.system("chmod " + mode + " " + dest)
def getUserOption(optionName, theMessage):
if not optionName in userOptions.keys():
userOptions[optionName] = input(theMessage + ": ")
return(userOptions[optionName])
def askUserMenu(theOptions):
for optionCount in range(0, len(theOptions)):
print(str(optionCount+1) + ": " + theOptions[optionCount])
userSelection = input("Selection: ")
return(int(userSelection))
def readFile(theFilename):
fileDataHandle = open(theFilename, "r")
fileData = fileDataHandle.read()
fileDataHandle.close()
return(fileData)
def writeFile(theFilename, theFileData):
fileDataHandle = open(theFilename, "w")
if isinstance(theFileData, list):
fileDataHandle.write("\n".join(theFileData))
else:
fileDataHandle.write(theFileData)
fileDataHandle.close()
def replaceVariables(theFile, theKeyValues):
fileData = readFile(theFile)
for keyValue in theKeyValues.keys():
fileData = fileData.replace("<<" + keyValue + ">>", theKeyValues[keyValue])
writeFile(theFile, fileData)
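# Example (added for illustration): a template line "https://<<DOMAINNAME>>/"
# becomes "https://example.org/" after calling
#   replaceVariables("Caddyfile", {"DOMAINNAME": "example.org"})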
def runExpect(inputArray):
writeFile("temp.expect", inputArray)
os.system("expect temp.expect")
os.system("rm temp.expect")
print("Installing JAMStack...")
# Make sure dos2unix (line-end conversion utility) is installed.
runIfPathMissing("/usr/bin/dos2unix", "apt-get install -y dos2unix")
# Make sure Pip3 (Python 3 package manager) is installed.
runIfPathMissing("/usr/bin/pip3", "apt-get install -y python3-pip")
# Figure out what version of Python3 we have installed.
pythonVersion = os.popen("ls /usr/local/lib | grep python3").read().strip()
# Make sure Git (source code control client) is installed.
runIfPathMissing("/usr/bin/git", "apt-get install -y git")
# Make sure curl (utility to get files from the web) is installed.
runIfPathMissing("/usr/bin/curl", "apt-get install -y curl")
# Make sure build-essential (Debian build environment, should include most tools you need to build other packages) is installed.
runIfPathMissing("/usr/share/doc/build-essential", "apt-get install -y build-essential")
# Make sure ZLib (compression library, required for building other packages) is installed.
runIfPathMissing("/usr/share/doc/zlib1g-dev", "apt-get install -y zlib1g-dev")
# Make sure ruby-dev (the Ruby development environment, needed for Jekyll) is installed.
runIfPathMissing("/usr/share/doc/ruby-dev", "apt-get install -y ruby-dev")
# Make sure Jekyll (static site generation tool) is installed.
runIfPathMissing("/usr/local/bin/jekyll", "gem install bundler jekyll concurrent-ruby")
runIfPathMissing("/root/.bundle", "bundle install")
os.system("mkdir /.bundle > /dev/null 2>&1")
os.system("chown www-data:www-data /.bundle > /dev/null 2>&1")
# Make sure Pandoc (conversion utility for converting various file formats, in this case DOCX to Markdown) is installed.
# Note that we need at least version 2.7.1, released March 2019, as it contains a bug fix to handle O365-created DOCX files properly - the version included by Debian Stretch is not yet up to date.
runIfPathMissing("/usr/bin/pandoc", "wget https://github.com/jgm/pandoc/releases/download/2.7.1/pandoc-2.7.1-1-amd64.deb; dpkg -i pandoc-2.7.1-1-amd64.deb; rm pandoc-2.7.1-1-amd64.deb")
# Make sure Flask (Python web-publishing framework) is installed.
runIfPathMissing("/usr/local/lib/"+pythonVersion+"/dist-packages/flask", "pip3 install flask")
# Make sure XLRD (Python library for handling Excel files, required for Excel support in Pandas) is installed.
runIfPathMissing("/usr/local/lib/"+pythonVersion+"/dist-packages/xlrd", "pip3 install xlrd")
# Make sure Pandas (Python data-analysis library) is installed.
runIfPathMissing("/usr/local/lib/"+pythonVersion+"/dist-packages/pandas", "pip3 install pandas")
# Make sure Numpy (Python maths library) is installed.
runIfPathMissing("/usr/local/lib/"+pythonVersion+"/dist-packages/numpy", "pip3 install numpy")
# Make sure Expect (command-line automation utility) is installed.
runIfPathMissing("/usr/bin/expect", "apt-get -y install expect")
# Make sure rclone (for mounting cloud-based filesystems such as Google Drive) is installed.
runIfPathMissing("/usr/bin/rclone", "curl https://rclone.org/install.sh | sudo bash")
# Make sure FUSE (for mounting user filesystems, used by rclone) is installed.
runIfPathMissing("/usr/bin/fusermount", "apt-get -y install fuse")
# Make sure Caddy (web server) is installed.
runIfPathMissing("/usr/bin/caddy", "echo \"deb [trusted=yes] https://apt.fury.io/caddy/ /\" | sudo tee -a /etc/apt/sources.list.d/caddy-fury.list; apt-get update; apt-get install caddy")
getUserOption("-domainName", "Please enter this site's domain name")
# Copy over the Caddy configuration file.
downloadFile("Caddyfile", "/etc/caddy/Caddyfile", mode="0744")
replaceVariables("/etc/caddy/Caddyfile", {"DOMAINNAME":userOptions["-domainName"]})
# Make sure Web Console (simple web user interface for command-line applications) is installed...
os.system("curl -s https://www.sansay.co.uk/web-console/install.sh | sudo bash")
# ...and configured.
if not os.path.exists("/etc/webconsole/tasks/build"):
getUserOption("-buildPassword", "Please enter this site's build password")
os.system("webconsole --new --newTaskID build --newTaskTitle \"Build Site\" --newTaskSecret " + userOptions["-buildPassword"] + " --newTaskPublic N --newTaskCommand \"bash build.sh\"")
downloadFile("webconsoleConfig.csv", "/etc/webconsole/config.csv", mode="0744")
# Make sure Rclone is set up to connect to the user's cloud storage - we might need to ask the user for some details.
if not os.path.exists("/root/.config/rclone/rclone.conf"):
print("Configuring rclone...")
getUserOption("-contentFolderPath", "Please enter the path that contains the content")
getUserOption("-jekyllFolderPath", "Please enter the path that contains the Jekyll setup")
runExpect([
"spawn /usr/bin/rclone config",
"expect \"n/s/q>\"",
"send \"n\\r\"",
"expect \"name>\"",
"send \"drive\\r\"",
"expect \"Storage>\"",
"send \"drive\\r\"",
"expect \"client_id>\"",
"expect_user -timeout 3600 -re \"(.*)\\n\"",
"send \"$expect_out(1,string)\\r\"",
"expect \"client_secret>\"",
"expect_user -timeout 3600 -re \"(.*)\\n\"",
"send \"$expect_out(1,string)\\r\"",
"expect \"scope>\"",
"send \"drive.readonly\\r\"",
"expect \"root_folder_id>\"",
"send \"\\r\"",
"expect \"service_account_file>\"",
"send \"\\r\"",
"expect \"y/n>\"",
"send \"n\\r\"",
"expect \"y/n>\"",
"send \"n\\r\"",
"expect \"Enter verification code>\"",
"expect_user -timeout 3600 -re \"(.*)\\n\"",
"send \"$expect_out(1,string)\\r\"",
"expect \"y/n>\"",
"send \"n\\r\"",
"expect \"y/e/d>\"",
"send \"y\\r\"",
"expect \"e/n/d/r/c/s/q>\"",
"send \"n\\r\"",
"expect \"name>\"",
"send \"content\\r\"",
"expect \"Storage>\"",
"send \"cache\\r\"",
"expect \"remote>\"",
"send \"drive:"+userOptions["-contentFolderPath"]+"\\r\"",
"expect \"plex_url>\"",
"send \"\\r\"",
"expect \"plex_username>\"",
"send \"\\r\"",
"expect \"y/g/n>\"",
"send \"n\\r\"",
"expect \"chunk_size>\"",
"send \"10M\\r\"",
"expect \"info_age>\"",
"send \"1y\\r\"",
"expect \"chunk_total_size>\"",
"send \"1G\\r\"",
"expect \"y/n>\"",
"send \"n\\r\"",
"expect \"y/e/d>\"",
"send \"y\\r\"",
"expect \"e/n/d/r/c/s/q>\"",
"send \"n\\r\"",
"expect \"name>\"",
"send \"jekyll\\r\"",
"expect \"Storage>\"",
"send \"cache\\r\"",
"expect \"remote>\"",
"send \"drive:"+userOptions["-jekyllFolderPath"]+"\\r\"",
"expect \"plex_url>\"",
"send \"\\r\"",
"expect \"plex_username>\"",
"send \"\\r\"",
"expect \"y/g/n>\"",
"send \"n\\r\"",
"expect \"chunk_size>\"",
"send \"10M\\r\"",
"expect \"info_age>\"",
"send \"1y\\r\"",
"expect \"chunk_total_size>\"",
"send \"1G\\r\"",
"expect \"y/n>\"",
"send \"n\\r\"",
"expect \"y/e/d>\"",
"send \"y\\r\"",
"send \"q\\r\""
])
# Set up rclone to mount the user's cloud storage - first, stop any existing rclone mount process...
os.system("systemctl stop rclone-content")
os.system("systemctl stop rclone-jekyll")
# ...make sure FUSE is configured to allow non-root users to access mounts...
downloadFile("fuse.conf", "/etc/fuse.conf", mode="644")
# ...make sure the mount point and cache folders exist...
os.makedirs("/mnt/content", exist_ok=True)
os.makedirs("/mnt/jekyll", exist_ok=True)
os.makedirs("/var/cache/rclone-content", exist_ok=True)
os.makedirs("/var/cache/rclone-jekyll", exist_ok=True)
# ...then set up systemd to mount the repository.
downloadFile("rclone-content.service", "/etc/systemd/system/rclone-content.service", mode="644")
downloadFile("rclone-jekyll.service", "/etc/systemd/system/rclone-jekyll.service", mode="644")
os.system("systemctl start rclone-content")
os.system("systemctl start rclone-jekyll")
os.system("systemctl enable rclone-content")
os.system("systemctl enable rclone-jekyll")
# Copy across the build.sh script.
downloadFile("build.sh", "/etc/webconsole/tasks/build/build.sh", mode="755")
# Copy over the Python script that cleans up HTML files.
downloadFile("tidyHTML.py", "/usr/local/bin/tidyHTML.py", mode="0755")
os.system("chown www-data:www-data /usr/local/bin/tidyHTML.py")
# Install DocsToMarkdown.
runIfPathMissing("/usr/local/bin/docsToMarkdown.py", "curl https://raw.githubusercontent.com/dhicks6345789/docs-to-markdown/master/docsToMarkdown.py -o /usr/local/bin/docsToMarkdown.py; chmod a+x /usr/local/bin/docsToMarkdown.py; echo > /var/log/build.log; chown www-data:www-data /var/log/build.log")
runIfPathMissing("/var/local/jekyll", "mkdir /var/local/jekyll; chown www-data:www-data /var/local/jekyll")
downloadFile("docsToMarkdown.json", "/var/local/docsToMarkdown.json", mode="644")
os.system("chown www-data:www-data /var/local/docsToMarkdown.json")
|
py | 1a43e71e24205cf403ed5943cda113366d1c0607 | from socket import *
serverName = '0-pc'
serverPort = 12001
clientSocket = socket(AF_INET, SOCK_STREAM)
clientSocket.connect((serverName, serverPort))
sentence = input("Input a lowercase sentence: ")
clientSocket.send(sentence.encode())  # convert the string to bytes before sending
modifiedSentence = clientSocket.recv(1024)
print("from server:", modifiedSentence.decode())
clientSocket.close()
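# A minimal matching server sketch (added for illustration, not part of the
# original file) would bind to the same port and send back a modified sentence,
# e.g. upper-cased:
#   serverSocket = socket(AF_INET, SOCK_STREAM)
#   serverSocket.bind(('', serverPort))
#   serverSocket.listen(1)
#   connectionSocket, addr = serverSocket.accept()
#   received = connectionSocket.recv(1024).decode()
#   connectionSocket.send(received.upper().encode())
#   connectionSocket.close()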
|
py | 1a43e7c7fc7af67cf1dac192ef07f0e0c4db0fee | import json
import os
from torch.utils.data import Dataset
from config import global_config
from pipeline.actor import Actor
class EvalDataset(Dataset):
def __init__(self, dataset_file, memory, controller):
with open(dataset_file, "r") as f:
self.dataset = json.load(f)
self.folder = os.path.dirname(dataset_file)
self.memory = memory
self.lut = Actor._make_depth_correction(global_config.resolution, global_config.resolution, 90)
self.controller = controller
def __len__(self):
return len(self.dataset)
def load_meta(self, thor_meta):
scene = thor_meta["scene"]
seed = thor_meta["seed"]
position = thor_meta["position"]
rotation = thor_meta["rotation"]
horizon = thor_meta["horizon"]
self.controller.reset(scene)
self.controller.step(action='InitialRandomSpawn', seed=seed,
forceVisible=True, numPlacementAttempts=5)
self.controller.step(action='MakeAllObjectsMoveable')
event = self.controller.step(action='TeleportFull', x=position['x'], y=position['y'],
z=position['z'], rotation=rotation, horizon=horizon)
return event
def __getitem__(self, item):
entry = self.dataset[item]
evt = self.load_meta(entry["thor_meta"])
rgb = evt.frame.copy()
if global_config.depth:
dist = (evt.depth_frame.copy() - .1) * self.lut
rgbd = self.memory.base_image_transform((rgb, dist))
else:
rgbd = self.memory.base_image_transform(rgb)
return rgbd, entry["image_id"]
class ActiveDataset(EvalDataset):
def __init__(self, dataset_file, memory, controller, conn):
super().__init__(dataset_file, memory, controller)
self.conn = conn
def process(self):
while True:
item = self.conn.recv()
if item is None:
break
self.conn.send(self.__getitem__(item))
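# Illustrative usage (an added sketch; constructing `memory` and the THOR
# `controller` is assumed to follow the surrounding project's conventions):
#   dataset = EvalDataset("eval/dataset.json", memory, controller)
#   loader = torch.utils.data.DataLoader(dataset, batch_size=1, num_workers=0)
#   for rgbd, image_id in loader:
#       ...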
|
py | 1a43e7f44aee3595d2e92b4ce29dffb33b8a8d88 | from datasette.plugins import DEFAULT_PLUGINS
from datasette.utils import detect_json1
from datasette.version import __version__
from .fixtures import ( # noqa
app_client,
app_client_no_files,
app_client_with_hash,
app_client_shorter_time_limit,
app_client_larger_cache_size,
app_client_returned_rows_matches_page_size,
app_client_two_attached_databases,
app_client_two_attached_databases_one_immutable,
app_client_conflicting_database_names,
app_client_with_cors,
app_client_with_dot,
app_client_immutable_and_inspect_file,
generate_compound_rows,
generate_sortable_rows,
make_app_client,
EXPECTED_PLUGINS,
METADATA,
)
import json
import pytest
import sys
import urllib
def test_homepage(app_client):
response = app_client.get("/.json")
assert response.status == 200
assert "application/json; charset=utf-8" == response.headers["content-type"]
assert response.json.keys() == {"fixtures": 0}.keys()
d = response.json["fixtures"]
assert d["name"] == "fixtures"
assert d["tables_count"] == 24
assert len(d["tables_and_views_truncated"]) == 5
assert d["tables_and_views_more"] is True
# 4 hidden FTS tables + no_primary_key (hidden in metadata)
assert d["hidden_tables_count"] == 5
# 201 in no_primary_key, plus 5 in other hidden tables:
assert d["hidden_table_rows_sum"] == 206
assert d["views_count"] == 4
def test_homepage_sort_by_relationships(app_client):
response = app_client.get("/.json?_sort=relationships")
assert response.status == 200
tables = [
t["name"] for t in response.json["fixtures"]["tables_and_views_truncated"]
]
assert [
"simple_primary_key",
"complex_foreign_keys",
"roadside_attraction_characteristics",
"searchable_tags",
"foreign_key_references",
] == tables
def test_database_page(app_client):
response = app_client.get("/fixtures.json")
assert response.status == 200
data = response.json
assert "fixtures" == data["database"]
assert [
{
"name": "123_starts_with_digits",
"columns": ["content"],
"primary_keys": [],
"count": 0,
"hidden": False,
"fts_table": None,
"foreign_keys": {"incoming": [], "outgoing": []},
"private": False,
},
{
"name": "Table With Space In Name",
"columns": ["pk", "content"],
"primary_keys": ["pk"],
"count": 0,
"hidden": False,
"fts_table": None,
"foreign_keys": {"incoming": [], "outgoing": []},
"private": False,
},
{
"name": "attraction_characteristic",
"columns": ["pk", "name"],
"primary_keys": ["pk"],
"count": 2,
"hidden": False,
"fts_table": None,
"foreign_keys": {
"incoming": [
{
"other_table": "roadside_attraction_characteristics",
"column": "pk",
"other_column": "characteristic_id",
}
],
"outgoing": [],
},
"private": False,
},
{
"name": "binary_data",
"columns": ["data"],
"primary_keys": [],
"count": 3,
"hidden": False,
"fts_table": None,
"foreign_keys": {"incoming": [], "outgoing": []},
"private": False,
},
{
"name": "complex_foreign_keys",
"columns": ["pk", "f1", "f2", "f3"],
"primary_keys": ["pk"],
"count": 1,
"hidden": False,
"fts_table": None,
"foreign_keys": {
"incoming": [],
"outgoing": [
{
"other_table": "simple_primary_key",
"column": "f3",
"other_column": "id",
},
{
"other_table": "simple_primary_key",
"column": "f2",
"other_column": "id",
},
{
"other_table": "simple_primary_key",
"column": "f1",
"other_column": "id",
},
],
},
"private": False,
},
{
"name": "compound_primary_key",
"columns": ["pk1", "pk2", "content"],
"primary_keys": ["pk1", "pk2"],
"count": 1,
"hidden": False,
"fts_table": None,
"foreign_keys": {"incoming": [], "outgoing": []},
"private": False,
},
{
"name": "compound_three_primary_keys",
"columns": ["pk1", "pk2", "pk3", "content"],
"primary_keys": ["pk1", "pk2", "pk3"],
"count": 1001,
"hidden": False,
"fts_table": None,
"foreign_keys": {"incoming": [], "outgoing": []},
"private": False,
},
{
"name": "custom_foreign_key_label",
"columns": ["pk", "foreign_key_with_custom_label"],
"primary_keys": ["pk"],
"count": 1,
"hidden": False,
"fts_table": None,
"foreign_keys": {
"incoming": [],
"outgoing": [
{
"other_table": "primary_key_multiple_columns_explicit_label",
"column": "foreign_key_with_custom_label",
"other_column": "id",
}
],
},
"private": False,
},
{
"name": "facet_cities",
"columns": ["id", "name"],
"primary_keys": ["id"],
"count": 4,
"hidden": False,
"fts_table": None,
"foreign_keys": {
"incoming": [
{
"other_table": "facetable",
"column": "id",
"other_column": "city_id",
}
],
"outgoing": [],
},
"private": False,
},
{
"name": "facetable",
"columns": [
"pk",
"created",
"planet_int",
"on_earth",
"state",
"city_id",
"neighborhood",
"tags",
"complex_array",
"distinct_some_null",
],
"primary_keys": ["pk"],
"count": 15,
"hidden": False,
"fts_table": None,
"foreign_keys": {
"incoming": [],
"outgoing": [
{
"other_table": "facet_cities",
"column": "city_id",
"other_column": "id",
}
],
},
"private": False,
},
{
"name": "foreign_key_references",
"columns": ["pk", "foreign_key_with_label", "foreign_key_with_no_label"],
"primary_keys": ["pk"],
"count": 2,
"hidden": False,
"fts_table": None,
"foreign_keys": {
"incoming": [],
"outgoing": [
{
"other_table": "primary_key_multiple_columns",
"column": "foreign_key_with_no_label",
"other_column": "id",
},
{
"other_table": "simple_primary_key",
"column": "foreign_key_with_label",
"other_column": "id",
},
],
},
"private": False,
},
{
"name": "infinity",
"columns": ["value"],
"primary_keys": [],
"count": 3,
"hidden": False,
"fts_table": None,
"foreign_keys": {"incoming": [], "outgoing": []},
"private": False,
},
{
"name": "primary_key_multiple_columns",
"columns": ["id", "content", "content2"],
"primary_keys": ["id"],
"count": 1,
"hidden": False,
"fts_table": None,
"foreign_keys": {
"incoming": [
{
"other_table": "foreign_key_references",
"column": "id",
"other_column": "foreign_key_with_no_label",
}
],
"outgoing": [],
},
"private": False,
},
{
"name": "primary_key_multiple_columns_explicit_label",
"columns": ["id", "content", "content2"],
"primary_keys": ["id"],
"count": 1,
"hidden": False,
"fts_table": None,
"foreign_keys": {
"incoming": [
{
"other_table": "custom_foreign_key_label",
"column": "id",
"other_column": "foreign_key_with_custom_label",
}
],
"outgoing": [],
},
"private": False,
},
{
"name": "roadside_attraction_characteristics",
"columns": ["attraction_id", "characteristic_id"],
"primary_keys": [],
"count": 5,
"hidden": False,
"fts_table": None,
"foreign_keys": {
"incoming": [],
"outgoing": [
{
"other_table": "attraction_characteristic",
"column": "characteristic_id",
"other_column": "pk",
},
{
"other_table": "roadside_attractions",
"column": "attraction_id",
"other_column": "pk",
},
],
},
"private": False,
},
{
"name": "roadside_attractions",
"columns": ["pk", "name", "address", "latitude", "longitude"],
"primary_keys": ["pk"],
"count": 4,
"hidden": False,
"fts_table": None,
"foreign_keys": {
"incoming": [
{
"other_table": "roadside_attraction_characteristics",
"column": "pk",
"other_column": "attraction_id",
}
],
"outgoing": [],
},
"private": False,
},
{
"name": "searchable",
"columns": ["pk", "text1", "text2", "name with . and spaces"],
"primary_keys": ["pk"],
"count": 2,
"hidden": False,
"fts_table": "searchable_fts",
"foreign_keys": {
"incoming": [
{
"other_table": "searchable_tags",
"column": "pk",
"other_column": "searchable_id",
}
],
"outgoing": [],
},
"private": False,
},
{
"name": "searchable_tags",
"columns": ["searchable_id", "tag"],
"primary_keys": ["searchable_id", "tag"],
"count": 2,
"hidden": False,
"fts_table": None,
"foreign_keys": {
"incoming": [],
"outgoing": [
{"other_table": "tags", "column": "tag", "other_column": "tag"},
{
"other_table": "searchable",
"column": "searchable_id",
"other_column": "pk",
},
],
},
"private": False,
},
{
"name": "select",
"columns": ["group", "having", "and", "json"],
"primary_keys": [],
"count": 1,
"hidden": False,
"fts_table": None,
"foreign_keys": {"incoming": [], "outgoing": []},
"private": False,
},
{
"name": "simple_primary_key",
"columns": ["id", "content"],
"primary_keys": ["id"],
"count": 4,
"hidden": False,
"fts_table": None,
"foreign_keys": {
"incoming": [
{
"other_table": "foreign_key_references",
"column": "id",
"other_column": "foreign_key_with_label",
},
{
"other_table": "complex_foreign_keys",
"column": "id",
"other_column": "f3",
},
{
"other_table": "complex_foreign_keys",
"column": "id",
"other_column": "f2",
},
{
"other_table": "complex_foreign_keys",
"column": "id",
"other_column": "f1",
},
],
"outgoing": [],
},
"private": False,
},
{
"name": "sortable",
"columns": [
"pk1",
"pk2",
"content",
"sortable",
"sortable_with_nulls",
"sortable_with_nulls_2",
"text",
],
"primary_keys": ["pk1", "pk2"],
"count": 201,
"hidden": False,
"fts_table": None,
"foreign_keys": {"incoming": [], "outgoing": []},
"private": False,
},
{
"name": "table/with/slashes.csv",
"columns": ["pk", "content"],
"primary_keys": ["pk"],
"count": 1,
"hidden": False,
"fts_table": None,
"foreign_keys": {"incoming": [], "outgoing": []},
"private": False,
},
{
"name": "tags",
"columns": ["tag"],
"primary_keys": ["tag"],
"count": 2,
"hidden": False,
"fts_table": None,
"foreign_keys": {
"incoming": [
{
"other_table": "searchable_tags",
"column": "tag",
"other_column": "tag",
}
],
"outgoing": [],
},
"private": False,
},
{
"name": "units",
"columns": ["pk", "distance", "frequency"],
"primary_keys": ["pk"],
"count": 3,
"hidden": False,
"fts_table": None,
"foreign_keys": {"incoming": [], "outgoing": []},
"private": False,
},
{
"name": "no_primary_key",
"columns": ["content", "a", "b", "c"],
"primary_keys": [],
"count": 201,
"hidden": True,
"fts_table": None,
"foreign_keys": {"incoming": [], "outgoing": []},
"private": False,
},
{
"name": "searchable_fts",
"columns": ["text1", "text2", "name with . and spaces", "content"],
"primary_keys": [],
"count": 2,
"hidden": True,
"fts_table": "searchable_fts",
"foreign_keys": {"incoming": [], "outgoing": []},
"private": False,
},
{
"name": "searchable_fts_content",
"columns": [
"docid",
"c0text1",
"c1text2",
"c2name with . and spaces",
"c3content",
],
"primary_keys": ["docid"],
"count": 2,
"hidden": True,
"fts_table": None,
"foreign_keys": {"incoming": [], "outgoing": []},
"private": False,
},
{
"name": "searchable_fts_segdir",
"columns": [
"level",
"idx",
"start_block",
"leaves_end_block",
"end_block",
"root",
],
"primary_keys": ["level", "idx"],
"count": 1,
"hidden": True,
"fts_table": None,
"foreign_keys": {"incoming": [], "outgoing": []},
"private": False,
},
{
"name": "searchable_fts_segments",
"columns": ["blockid", "block"],
"primary_keys": ["blockid"],
"count": 0,
"hidden": True,
"fts_table": None,
"foreign_keys": {"incoming": [], "outgoing": []},
"private": False,
},
] == data["tables"]
def test_no_files_uses_memory_database(app_client_no_files):
response = app_client_no_files.get("/.json")
assert response.status == 200
assert {
":memory:": {
"hash": None,
"color": "f7935d",
"hidden_table_rows_sum": 0,
"hidden_tables_count": 0,
"name": ":memory:",
"show_table_row_counts": False,
"path": "/:memory:",
"table_rows_sum": 0,
"tables_count": 0,
"tables_and_views_more": False,
"tables_and_views_truncated": [],
"views_count": 0,
"private": False,
}
} == response.json
# Try that SQL query
response = app_client_no_files.get(
"/:memory:.json?sql=select+sqlite_version()&_shape=array"
)
assert 1 == len(response.json)
assert ["sqlite_version()"] == list(response.json[0].keys())
def test_database_page_for_database_with_dot_in_name(app_client_with_dot):
response = app_client_with_dot.get("/fixtures.dot.json")
assert 200 == response.status
def test_custom_sql(app_client):
response = app_client.get(
"/fixtures.json?sql=select+content+from+simple_primary_key&_shape=objects"
)
data = response.json
assert {"sql": "select content from simple_primary_key", "params": {}} == data[
"query"
]
assert [
{"content": "hello"},
{"content": "world"},
{"content": ""},
{"content": "RENDER_CELL_DEMO"},
] == data["rows"]
assert ["content"] == data["columns"]
assert "fixtures" == data["database"]
assert not data["truncated"]
def test_sql_time_limit(app_client_shorter_time_limit):
response = app_client_shorter_time_limit.get("/fixtures.json?sql=select+sleep(0.5)")
assert 400 == response.status
assert "SQL Interrupted" == response.json["title"]
def test_custom_sql_time_limit(app_client):
response = app_client.get("/fixtures.json?sql=select+sleep(0.01)")
assert 200 == response.status
response = app_client.get("/fixtures.json?sql=select+sleep(0.01)&_timelimit=5")
assert 400 == response.status
assert "SQL Interrupted" == response.json["title"]
def test_invalid_custom_sql(app_client):
response = app_client.get("/fixtures.json?sql=.schema")
assert response.status == 400
assert response.json["ok"] is False
assert "Statement must be a SELECT" == response.json["error"]
def test_table_json(app_client):
response = app_client.get("/fixtures/simple_primary_key.json?_shape=objects")
assert response.status == 200
data = response.json
assert (
data["query"]["sql"]
== "select id, content from simple_primary_key order by id limit 51"
)
assert data["query"]["params"] == {}
assert data["rows"] == [
{"id": "1", "content": "hello"},
{"id": "2", "content": "world"},
{"id": "3", "content": ""},
{"id": "4", "content": "RENDER_CELL_DEMO"},
]
def test_table_not_exists_json(app_client):
assert {
"ok": False,
"error": "Table not found: blah",
"status": 404,
"title": None,
} == app_client.get("/fixtures/blah.json").json
def test_jsono_redirects_to_shape_objects(app_client_with_hash):
response_1 = app_client_with_hash.get(
"/fixtures/simple_primary_key.jsono", allow_redirects=False
)
response = app_client_with_hash.get(
response_1.headers["Location"], allow_redirects=False
)
assert response.status == 302
assert response.headers["Location"].endswith("?_shape=objects")
def test_table_shape_arrays(app_client):
response = app_client.get("/fixtures/simple_primary_key.json?_shape=arrays")
assert [
["1", "hello"],
["2", "world"],
["3", ""],
["4", "RENDER_CELL_DEMO"],
] == response.json["rows"]
def test_table_shape_arrayfirst(app_client):
response = app_client.get(
"/fixtures.json?"
+ urllib.parse.urlencode(
{
"sql": "select content from simple_primary_key order by id",
"_shape": "arrayfirst",
}
)
)
assert ["hello", "world", "", "RENDER_CELL_DEMO"] == response.json
def test_table_shape_objects(app_client):
response = app_client.get("/fixtures/simple_primary_key.json?_shape=objects")
assert [
{"id": "1", "content": "hello"},
{"id": "2", "content": "world"},
{"id": "3", "content": ""},
{"id": "4", "content": "RENDER_CELL_DEMO"},
] == response.json["rows"]
def test_table_shape_array(app_client):
response = app_client.get("/fixtures/simple_primary_key.json?_shape=array")
assert [
{"id": "1", "content": "hello"},
{"id": "2", "content": "world"},
{"id": "3", "content": ""},
{"id": "4", "content": "RENDER_CELL_DEMO"},
] == response.json
def test_table_shape_array_nl(app_client):
response = app_client.get("/fixtures/simple_primary_key.json?_shape=array&_nl=on")
lines = response.text.split("\n")
results = [json.loads(line) for line in lines]
assert [
{"id": "1", "content": "hello"},
{"id": "2", "content": "world"},
{"id": "3", "content": ""},
{"id": "4", "content": "RENDER_CELL_DEMO"},
] == results
def test_table_shape_invalid(app_client):
response = app_client.get("/fixtures/simple_primary_key.json?_shape=invalid")
assert {
"ok": False,
"error": "Invalid _shape: invalid",
"status": 400,
"title": None,
} == response.json
def test_table_shape_object(app_client):
response = app_client.get("/fixtures/simple_primary_key.json?_shape=object")
assert {
"1": {"id": "1", "content": "hello"},
"2": {"id": "2", "content": "world"},
"3": {"id": "3", "content": ""},
"4": {"id": "4", "content": "RENDER_CELL_DEMO"},
} == response.json
def test_table_shape_object_compound_primary_Key(app_client):
response = app_client.get("/fixtures/compound_primary_key.json?_shape=object")
assert {"a,b": {"pk1": "a", "pk2": "b", "content": "c"}} == response.json
@pytest.mark.xfail
def test_table_with_slashes_in_name(app_client):
response = app_client.get(
"/fixtures/table%2Fwith%2Fslashes.csv?_shape=objects&_format=json"
)
assert response.status == 200
data = response.json
assert data["rows"] == [{"pk": "3", "content": "hey"}]
def test_table_with_reserved_word_name(app_client):
response = app_client.get("/fixtures/select.json?_shape=objects")
assert response.status == 200
data = response.json
assert data["rows"] == [
{
"rowid": 1,
"group": "group",
"having": "having",
"and": "and",
"json": '{"href": "http://example.com/", "label":"Example"}',
}
]
@pytest.mark.parametrize(
"path,expected_rows,expected_pages",
[
("/fixtures/no_primary_key.json", 201, 5),
("/fixtures/paginated_view.json", 201, 9),
("/fixtures/no_primary_key.json?_size=25", 201, 9),
("/fixtures/paginated_view.json?_size=50", 201, 5),
("/fixtures/paginated_view.json?_size=max", 201, 3),
("/fixtures/123_starts_with_digits.json", 0, 1),
# Ensure faceting doesn't break pagination:
("/fixtures/compound_three_primary_keys.json?_facet=pk1", 1001, 21),
# Paginating while sorted by an expanded foreign key should work
(
"/fixtures/roadside_attraction_characteristics.json?_size=2&_sort=attraction_id&_labels=on",
5,
3,
),
],
)
def test_paginate_tables_and_views(app_client, path, expected_rows, expected_pages):
fetched = []
count = 0
while path:
response = app_client.get(path)
assert 200 == response.status
count += 1
fetched.extend(response.json["rows"])
path = response.json["next_url"]
if path:
assert urllib.parse.urlencode({"_next": response.json["next"]}) in path
path = path.replace("http://localhost", "")
assert count < 30, "Possible infinite loop detected"
assert expected_rows == len(fetched)
assert expected_pages == count
@pytest.mark.parametrize(
"path,expected_error",
[
("/fixtures/no_primary_key.json?_size=-4", "_size must be a positive integer"),
("/fixtures/no_primary_key.json?_size=dog", "_size must be a positive integer"),
("/fixtures/no_primary_key.json?_size=1001", "_size must be <= 100"),
],
)
def test_validate_page_size(app_client, path, expected_error):
response = app_client.get(path)
assert expected_error == response.json["error"]
assert 400 == response.status
def test_page_size_zero(app_client):
"For _size=0 we return the counts, empty rows and no continuation token"
response = app_client.get("/fixtures/no_primary_key.json?_size=0")
assert 200 == response.status
assert [] == response.json["rows"]
assert 201 == response.json["filtered_table_rows_count"]
assert None is response.json["next"]
assert None is response.json["next_url"]
def test_paginate_compound_keys(app_client):
fetched = []
path = "/fixtures/compound_three_primary_keys.json?_shape=objects"
page = 0
while path:
page += 1
response = app_client.get(path)
fetched.extend(response.json["rows"])
path = response.json["next_url"]
if path:
path = path.replace("http://localhost", "")
assert page < 100
assert 1001 == len(fetched)
assert 21 == page
# Should be correctly ordered
contents = [f["content"] for f in fetched]
expected = [r[3] for r in generate_compound_rows(1001)]
assert expected == contents
def test_paginate_compound_keys_with_extra_filters(app_client):
fetched = []
path = (
"/fixtures/compound_three_primary_keys.json?content__contains=d&_shape=objects"
)
page = 0
while path:
page += 1
assert page < 100
response = app_client.get(path)
fetched.extend(response.json["rows"])
path = response.json["next_url"]
if path:
path = path.replace("http://localhost", "")
assert 2 == page
expected = [r[3] for r in generate_compound_rows(1001) if "d" in r[3]]
assert expected == [f["content"] for f in fetched]
@pytest.mark.parametrize(
"query_string,sort_key,human_description_en",
[
("_sort=sortable", lambda row: row["sortable"], "sorted by sortable"),
(
"_sort_desc=sortable",
lambda row: -row["sortable"],
"sorted by sortable descending",
),
(
"_sort=sortable_with_nulls",
lambda row: (
1 if row["sortable_with_nulls"] is not None else 0,
row["sortable_with_nulls"],
),
"sorted by sortable_with_nulls",
),
(
"_sort_desc=sortable_with_nulls",
lambda row: (
1 if row["sortable_with_nulls"] is None else 0,
-row["sortable_with_nulls"]
if row["sortable_with_nulls"] is not None
else 0,
row["content"],
),
"sorted by sortable_with_nulls descending",
),
# text column contains '$null' - ensure it doesn't confuse pagination:
("_sort=text", lambda row: row["text"], "sorted by text"),
],
)
def test_sortable(app_client, query_string, sort_key, human_description_en):
path = "/fixtures/sortable.json?_shape=objects&{}".format(query_string)
fetched = []
page = 0
while path:
page += 1
assert page < 100
response = app_client.get(path)
assert human_description_en == response.json["human_description_en"]
fetched.extend(response.json["rows"])
path = response.json["next_url"]
if path:
path = path.replace("http://localhost", "")
assert 5 == page
expected = list(generate_sortable_rows(201))
expected.sort(key=sort_key)
assert [r["content"] for r in expected] == [r["content"] for r in fetched]
def test_sortable_and_filtered(app_client):
path = (
"/fixtures/sortable.json"
"?content__contains=d&_sort_desc=sortable&_shape=objects"
)
response = app_client.get(path)
fetched = response.json["rows"]
assert (
'where content contains "d" sorted by sortable descending'
== response.json["human_description_en"]
)
expected = [row for row in generate_sortable_rows(201) if "d" in row["content"]]
assert len(expected) == response.json["filtered_table_rows_count"]
expected.sort(key=lambda row: -row["sortable"])
assert [r["content"] for r in expected] == [r["content"] for r in fetched]
def test_sortable_argument_errors(app_client):
response = app_client.get("/fixtures/sortable.json?_sort=badcolumn")
assert "Cannot sort table by badcolumn" == response.json["error"]
response = app_client.get("/fixtures/sortable.json?_sort_desc=badcolumn2")
assert "Cannot sort table by badcolumn2" == response.json["error"]
response = app_client.get(
"/fixtures/sortable.json?_sort=sortable_with_nulls&_sort_desc=sortable"
)
assert "Cannot use _sort and _sort_desc at the same time" == response.json["error"]
def test_sortable_columns_metadata(app_client):
response = app_client.get("/fixtures/sortable.json?_sort=content")
assert "Cannot sort table by content" == response.json["error"]
# no_primary_key has ALL sort options disabled
for column in ("content", "a", "b", "c"):
response = app_client.get("/fixtures/sortable.json?_sort={}".format(column))
assert "Cannot sort table by {}".format(column) == response.json["error"]
@pytest.mark.parametrize(
"path,expected_rows",
[
(
"/fixtures/searchable.json?_search=dog",
[
[1, "barry cat", "terry dog", "panther"],
[2, "terry dog", "sara weasel", "puma"],
],
),
(
# Special keyword shouldn't break FTS query
"/fixtures/searchable.json?_search=AND",
[],
),
(
# Without _searchmode=raw this should return no results
"/fixtures/searchable.json?_search=te*+AND+do*",
[],
),
(
# _searchmode=raw
"/fixtures/searchable.json?_search=te*+AND+do*&_searchmode=raw",
[
[1, "barry cat", "terry dog", "panther"],
[2, "terry dog", "sara weasel", "puma"],
],
),
(
"/fixtures/searchable.json?_search=weasel",
[[2, "terry dog", "sara weasel", "puma"]],
),
(
"/fixtures/searchable.json?_search_text2=dog",
[[1, "barry cat", "terry dog", "panther"]],
),
(
"/fixtures/searchable.json?_search_name%20with%20.%20and%20spaces=panther",
[[1, "barry cat", "terry dog", "panther"]],
),
],
)
def test_searchable(app_client, path, expected_rows):
response = app_client.get(path)
assert expected_rows == response.json["rows"]
@pytest.mark.parametrize(
"path,expected_rows",
[
(
"/fixtures/searchable_view_configured_by_metadata.json?_search=weasel",
[[2, "terry dog", "sara weasel", "puma"]],
),
# This should return all results because search is not configured:
(
"/fixtures/searchable_view.json?_search=weasel",
[
[1, "barry cat", "terry dog", "panther"],
[2, "terry dog", "sara weasel", "puma"],
],
),
(
"/fixtures/searchable_view.json?_search=weasel&_fts_table=searchable_fts&_fts_pk=pk",
[[2, "terry dog", "sara weasel", "puma"]],
),
],
)
def test_searchable_views(app_client, path, expected_rows):
response = app_client.get(path)
assert expected_rows == response.json["rows"]
def test_searchable_invalid_column(app_client):
response = app_client.get("/fixtures/searchable.json?_search_invalid=x")
assert 400 == response.status
assert {
"ok": False,
"error": "Cannot search by that column",
"status": 400,
"title": None,
} == response.json
@pytest.mark.parametrize(
"path,expected_rows",
[
("/fixtures/simple_primary_key.json?content=hello", [["1", "hello"]]),
(
"/fixtures/simple_primary_key.json?content__contains=o",
[["1", "hello"], ["2", "world"], ["4", "RENDER_CELL_DEMO"]],
),
("/fixtures/simple_primary_key.json?content__exact=", [["3", ""]]),
(
"/fixtures/simple_primary_key.json?content__not=world",
[["1", "hello"], ["3", ""], ["4", "RENDER_CELL_DEMO"]],
),
],
)
def test_table_filter_queries(app_client, path, expected_rows):
response = app_client.get(path)
assert expected_rows == response.json["rows"]
def test_table_filter_queries_multiple_of_same_type(app_client):
response = app_client.get(
"/fixtures/simple_primary_key.json?content__not=world&content__not=hello"
)
assert [["3", ""], ["4", "RENDER_CELL_DEMO"]] == response.json["rows"]
@pytest.mark.skipif(not detect_json1(), reason="Requires the SQLite json1 module")
def test_table_filter_json_arraycontains(app_client):
response = app_client.get("/fixtures/facetable.json?tags__arraycontains=tag1")
assert [
[
1,
"2019-01-14 08:00:00",
1,
1,
"CA",
1,
"Mission",
'["tag1", "tag2"]',
'[{"foo": "bar"}]',
"one",
],
[
2,
"2019-01-14 08:00:00",
1,
1,
"CA",
1,
"Dogpatch",
'["tag1", "tag3"]',
"[]",
"two",
],
] == response.json["rows"]
def test_table_filter_extra_where(app_client):
response = app_client.get("/fixtures/facetable.json?_where=neighborhood='Dogpatch'")
assert [
[
2,
"2019-01-14 08:00:00",
1,
1,
"CA",
1,
"Dogpatch",
'["tag1", "tag3"]',
"[]",
"two",
]
] == response.json["rows"]
def test_table_filter_extra_where_invalid(app_client):
response = app_client.get("/fixtures/facetable.json?_where=neighborhood=Dogpatch'")
assert 400 == response.status
assert "Invalid SQL" == response.json["title"]
def test_table_filter_extra_where_disabled_if_no_sql_allowed():
with make_app_client(metadata={"allow_sql": {}}) as client:
response = client.get("/fixtures/facetable.json?_where=neighborhood='Dogpatch'")
assert 403 == response.status
assert "_where= is not allowed" == response.json["error"]
def test_table_through(app_client):
# Just the museums:
response = app_client.get(
'/fixtures/roadside_attractions.json?_through={"table":"roadside_attraction_characteristics","column":"characteristic_id","value":"1"}'
)
assert [
[
3,
"Burlingame Museum of PEZ Memorabilia",
"214 California Drive, Burlingame, CA 94010",
37.5793,
-122.3442,
],
[
4,
"Bigfoot Discovery Museum",
"5497 Highway 9, Felton, CA 95018",
37.0414,
-122.0725,
],
] == response.json["rows"]
assert (
'where roadside_attraction_characteristics.characteristic_id = "1"'
== response.json["human_description_en"]
)
def test_max_returned_rows(app_client):
response = app_client.get("/fixtures.json?sql=select+content+from+no_primary_key")
data = response.json
assert {"sql": "select content from no_primary_key", "params": {}} == data["query"]
assert data["truncated"]
assert 100 == len(data["rows"])
def test_view(app_client):
response = app_client.get("/fixtures/simple_view.json?_shape=objects")
assert response.status == 200
data = response.json
assert data["rows"] == [
{"upper_content": "HELLO", "content": "hello"},
{"upper_content": "WORLD", "content": "world"},
{"upper_content": "", "content": ""},
{"upper_content": "RENDER_CELL_DEMO", "content": "RENDER_CELL_DEMO"},
]
def test_row(app_client):
response = app_client.get("/fixtures/simple_primary_key/1.json?_shape=objects")
assert response.status == 200
assert [{"id": "1", "content": "hello"}] == response.json["rows"]
def test_row_format_in_querystring(app_client):
# regression test for https://github.com/simonw/datasette/issues/563
response = app_client.get(
"/fixtures/simple_primary_key/1?_format=json&_shape=objects"
)
assert response.status == 200
assert [{"id": "1", "content": "hello"}] == response.json["rows"]
@pytest.mark.xfail
def test_row_strange_table_name(app_client):
response = app_client.get(
"/fixtures/table%2Fwith%2Fslashes.csv/3.json?_shape=objects"
)
assert response.status == 200
assert [{"pk": "3", "content": "hey"}] == response.json["rows"]
def test_row_foreign_key_tables(app_client):
response = app_client.get(
"/fixtures/simple_primary_key/1.json?_extras=foreign_key_tables"
)
assert response.status == 200
assert [
{
"column": "id",
"count": 1,
"other_column": "foreign_key_with_label",
"other_table": "foreign_key_references",
},
{
"column": "id",
"count": 1,
"other_column": "f3",
"other_table": "complex_foreign_keys",
},
{
"column": "id",
"count": 0,
"other_column": "f2",
"other_table": "complex_foreign_keys",
},
{
"column": "id",
"count": 1,
"other_column": "f1",
"other_table": "complex_foreign_keys",
},
] == response.json["foreign_key_tables"]
def test_unit_filters(app_client):
response = app_client.get(
"/fixtures/units.json?distance__lt=75km&frequency__gt=1kHz"
)
assert response.status == 200
data = response.json
assert data["units"]["distance"] == "m"
assert data["units"]["frequency"] == "Hz"
assert len(data["rows"]) == 1
assert data["rows"][0][0] == 2
def test_databases_json(app_client_two_attached_databases_one_immutable):
response = app_client_two_attached_databases_one_immutable.get("/-/databases.json")
databases = response.json
assert 2 == len(databases)
extra_database, fixtures_database = databases
assert "extra database" == extra_database["name"]
assert None == extra_database["hash"]
assert True == extra_database["is_mutable"]
assert False == extra_database["is_memory"]
assert "fixtures" == fixtures_database["name"]
assert fixtures_database["hash"] is not None
assert False == fixtures_database["is_mutable"]
assert False == fixtures_database["is_memory"]
def test_metadata_json(app_client):
response = app_client.get("/-/metadata.json")
assert METADATA == response.json
def test_threads_json(app_client):
response = app_client.get("/-/threads.json")
expected_keys = {"threads", "num_threads"}
if sys.version_info >= (3, 7, 0):
expected_keys.update({"tasks", "num_tasks"})
assert expected_keys == set(response.json.keys())
def test_plugins_json(app_client):
response = app_client.get("/-/plugins.json")
assert EXPECTED_PLUGINS == sorted(response.json, key=lambda p: p["name"])
# Try with ?all=1
response = app_client.get("/-/plugins.json?all=1")
names = {p["name"] for p in response.json}
assert names.issuperset(p["name"] for p in EXPECTED_PLUGINS)
assert names.issuperset(DEFAULT_PLUGINS)
def test_versions_json(app_client):
response = app_client.get("/-/versions.json")
assert "python" in response.json
assert "3.0" == response.json.get("asgi")
assert "version" in response.json["python"]
assert "full" in response.json["python"]
assert "datasette" in response.json
assert "version" in response.json["datasette"]
assert response.json["datasette"]["version"] == __version__
assert "sqlite" in response.json
assert "version" in response.json["sqlite"]
assert "fts_versions" in response.json["sqlite"]
assert "compile_options" in response.json["sqlite"]
def test_config_json(app_client):
response = app_client.get("/-/config.json")
assert {
"default_page_size": 50,
"default_facet_size": 30,
"facet_suggest_time_limit_ms": 50,
"facet_time_limit_ms": 200,
"max_returned_rows": 100,
"sql_time_limit_ms": 200,
"allow_download": True,
"allow_facet": True,
"suggest_facets": True,
"default_cache_ttl": 5,
"default_cache_ttl_hashed": 365 * 24 * 60 * 60,
"num_sql_threads": 1,
"cache_size_kb": 0,
"allow_csv_stream": True,
"max_csv_mb": 100,
"truncate_cells_html": 2048,
"force_https_urls": False,
"hash_urls": False,
"template_debug": False,
"base_url": "/",
} == response.json
def test_page_size_matching_max_returned_rows(
app_client_returned_rows_matches_page_size,
):
fetched = []
path = "/fixtures/no_primary_key.json"
while path:
response = app_client_returned_rows_matches_page_size.get(path)
fetched.extend(response.json["rows"])
assert len(response.json["rows"]) in (1, 50)
path = response.json["next_url"]
if path:
path = path.replace("http://localhost", "")
assert 201 == len(fetched)
@pytest.mark.parametrize(
"path,expected_facet_results",
[
(
"/fixtures/facetable.json?_facet=state&_facet=city_id",
{
"state": {
"name": "state",
"hideable": True,
"type": "column",
"toggle_url": "/fixtures/facetable.json?_facet=city_id",
"results": [
{
"value": "CA",
"label": "CA",
"count": 10,
"toggle_url": "_facet=state&_facet=city_id&state=CA",
"selected": False,
},
{
"value": "MI",
"label": "MI",
"count": 4,
"toggle_url": "_facet=state&_facet=city_id&state=MI",
"selected": False,
},
{
"value": "MC",
"label": "MC",
"count": 1,
"toggle_url": "_facet=state&_facet=city_id&state=MC",
"selected": False,
},
],
"truncated": False,
},
"city_id": {
"name": "city_id",
"hideable": True,
"type": "column",
"toggle_url": "/fixtures/facetable.json?_facet=state",
"results": [
{
"value": 1,
"label": "San Francisco",
"count": 6,
"toggle_url": "_facet=state&_facet=city_id&city_id=1",
"selected": False,
},
{
"value": 2,
"label": "Los Angeles",
"count": 4,
"toggle_url": "_facet=state&_facet=city_id&city_id=2",
"selected": False,
},
{
"value": 3,
"label": "Detroit",
"count": 4,
"toggle_url": "_facet=state&_facet=city_id&city_id=3",
"selected": False,
},
{
"value": 4,
"label": "Memnonia",
"count": 1,
"toggle_url": "_facet=state&_facet=city_id&city_id=4",
"selected": False,
},
],
"truncated": False,
},
},
),
(
"/fixtures/facetable.json?_facet=state&_facet=city_id&state=MI",
{
"state": {
"name": "state",
"hideable": True,
"type": "column",
"toggle_url": "/fixtures/facetable.json?_facet=city_id&state=MI",
"results": [
{
"value": "MI",
"label": "MI",
"count": 4,
"selected": True,
"toggle_url": "_facet=state&_facet=city_id",
}
],
"truncated": False,
},
"city_id": {
"name": "city_id",
"hideable": True,
"type": "column",
"toggle_url": "/fixtures/facetable.json?_facet=state&state=MI",
"results": [
{
"value": 3,
"label": "Detroit",
"count": 4,
"selected": False,
"toggle_url": "_facet=state&_facet=city_id&state=MI&city_id=3",
}
],
"truncated": False,
},
},
),
(
"/fixtures/facetable.json?_facet=planet_int",
{
"planet_int": {
"name": "planet_int",
"hideable": True,
"type": "column",
"toggle_url": "/fixtures/facetable.json",
"results": [
{
"value": 1,
"label": 1,
"count": 14,
"selected": False,
"toggle_url": "_facet=planet_int&planet_int=1",
},
{
"value": 2,
"label": 2,
"count": 1,
"selected": False,
"toggle_url": "_facet=planet_int&planet_int=2",
},
],
"truncated": False,
}
},
),
(
# planet_int is an integer field:
"/fixtures/facetable.json?_facet=planet_int&planet_int=1",
{
"planet_int": {
"name": "planet_int",
"hideable": True,
"type": "column",
"toggle_url": "/fixtures/facetable.json?planet_int=1",
"results": [
{
"value": 1,
"label": 1,
"count": 14,
"selected": True,
"toggle_url": "_facet=planet_int",
}
],
"truncated": False,
}
},
),
],
)
def test_facets(app_client, path, expected_facet_results):
response = app_client.get(path)
facet_results = response.json["facet_results"]
    # We only compare the querystring portion of the toggle_url
for facet_name, facet_info in facet_results.items():
assert facet_name == facet_info["name"]
assert False is facet_info["truncated"]
for facet_value in facet_info["results"]:
facet_value["toggle_url"] = facet_value["toggle_url"].split("?")[1]
assert expected_facet_results == facet_results
def test_suggested_facets(app_client):
suggestions = [
{
"name": suggestion["name"],
"querystring": suggestion["toggle_url"].split("?")[-1],
}
for suggestion in app_client.get("/fixtures/facetable.json").json[
"suggested_facets"
]
]
expected = [
{"name": "created", "querystring": "_facet=created"},
{"name": "planet_int", "querystring": "_facet=planet_int"},
{"name": "on_earth", "querystring": "_facet=on_earth"},
{"name": "state", "querystring": "_facet=state"},
{"name": "city_id", "querystring": "_facet=city_id"},
{"name": "neighborhood", "querystring": "_facet=neighborhood"},
{"name": "tags", "querystring": "_facet=tags"},
{"name": "complex_array", "querystring": "_facet=complex_array"},
{"name": "created", "querystring": "_facet_date=created"},
]
if detect_json1():
expected.append({"name": "tags", "querystring": "_facet_array=tags"})
assert expected == suggestions
def test_allow_facet_off():
with make_app_client(config={"allow_facet": False}) as client:
assert 400 == client.get("/fixtures/facetable.json?_facet=planet_int").status
# Should not suggest any facets either:
assert [] == client.get("/fixtures/facetable.json").json["suggested_facets"]
def test_suggest_facets_off():
with make_app_client(config={"suggest_facets": False}) as client:
# Now suggested_facets should be []
assert [] == client.get("/fixtures/facetable.json").json["suggested_facets"]
def test_expand_labels(app_client):
response = app_client.get(
"/fixtures/facetable.json?_shape=object&_labels=1&_size=2"
"&neighborhood__contains=c"
)
assert {
"2": {
"pk": 2,
"created": "2019-01-14 08:00:00",
"planet_int": 1,
"on_earth": 1,
"state": "CA",
"city_id": {"value": 1, "label": "San Francisco"},
"neighborhood": "Dogpatch",
"tags": '["tag1", "tag3"]',
"complex_array": "[]",
"distinct_some_null": "two",
},
"13": {
"pk": 13,
"created": "2019-01-17 08:00:00",
"planet_int": 1,
"on_earth": 1,
"state": "MI",
"city_id": {"value": 3, "label": "Detroit"},
"neighborhood": "Corktown",
"tags": "[]",
"complex_array": "[]",
"distinct_some_null": None,
},
} == response.json
def test_expand_label(app_client):
response = app_client.get(
"/fixtures/foreign_key_references.json?_shape=object"
"&_label=foreign_key_with_label&_size=1"
)
assert {
"1": {
"pk": "1",
"foreign_key_with_label": {"value": "1", "label": "hello"},
"foreign_key_with_no_label": "1",
}
} == response.json
@pytest.mark.parametrize(
"path,expected_cache_control",
[
("/fixtures/facetable.json", "max-age=5"),
("/fixtures/facetable.json?_ttl=invalid", "max-age=5"),
("/fixtures/facetable.json?_ttl=10", "max-age=10"),
("/fixtures/facetable.json?_ttl=0", "no-cache"),
],
)
def test_ttl_parameter(app_client, path, expected_cache_control):
response = app_client.get(path)
assert expected_cache_control == response.headers["Cache-Control"]
@pytest.mark.parametrize(
"path,expected_redirect",
[
("/fixtures/facetable.json?_hash=1", "/fixtures-HASH/facetable.json"),
(
"/fixtures/facetable.json?city_id=1&_hash=1",
"/fixtures-HASH/facetable.json?city_id=1",
),
],
)
def test_hash_parameter(
app_client_two_attached_databases_one_immutable, path, expected_redirect
):
# First get the current hash for the fixtures database
current_hash = app_client_two_attached_databases_one_immutable.ds.databases[
"fixtures"
].hash[:7]
response = app_client_two_attached_databases_one_immutable.get(
path, allow_redirects=False
)
assert response.status == 302
location = response.headers["Location"]
assert expected_redirect.replace("HASH", current_hash) == location
def test_hash_parameter_ignored_for_mutable_databases(app_client):
path = "/fixtures/facetable.json?_hash=1"
response = app_client.get(path, allow_redirects=False)
assert response.status == 200
test_json_columns_default_expected = [
{"intval": 1, "strval": "s", "floatval": 0.5, "jsonval": '{"foo": "bar"}'}
]
@pytest.mark.parametrize(
"extra_args,expected",
[
("", test_json_columns_default_expected),
("&_json=intval", test_json_columns_default_expected),
("&_json=strval", test_json_columns_default_expected),
("&_json=floatval", test_json_columns_default_expected),
(
"&_json=jsonval",
[{"intval": 1, "strval": "s", "floatval": 0.5, "jsonval": {"foo": "bar"}}],
),
],
)
def test_json_columns(app_client, extra_args, expected):
sql = """
select 1 as intval, "s" as strval, 0.5 as floatval,
'{"foo": "bar"}' as jsonval
"""
path = "/fixtures.json?" + urllib.parse.urlencode({"sql": sql, "_shape": "array"})
path += extra_args
response = app_client.get(path)
assert expected == response.json
def test_config_cache_size(app_client_larger_cache_size):
response = app_client_larger_cache_size.get("/fixtures/pragma_cache_size.json")
assert [[-2500]] == response.json["rows"]
def test_config_force_https_urls():
with make_app_client(config={"force_https_urls": True}) as client:
response = client.get("/fixtures/facetable.json?_size=3&_facet=state")
assert response.json["next_url"].startswith("https://")
assert response.json["facet_results"]["state"]["results"][0][
"toggle_url"
].startswith("https://")
assert response.json["suggested_facets"][0]["toggle_url"].startswith("https://")
# Also confirm that request.url and request.scheme are set correctly
response = client.get("/")
assert client.ds._last_request.url.startswith("https://")
assert client.ds._last_request.scheme == "https"
def test_infinity_returned_as_null(app_client):
response = app_client.get("/fixtures/infinity.json?_shape=array")
assert [
{"rowid": 1, "value": None},
{"rowid": 2, "value": None},
{"rowid": 3, "value": 1.5},
] == response.json
def test_infinity_returned_as_invalid_json_if_requested(app_client):
response = app_client.get("/fixtures/infinity.json?_shape=array&_json_infinity=1")
assert [
{"rowid": 1, "value": float("inf")},
{"rowid": 2, "value": float("-inf")},
{"rowid": 3, "value": 1.5},
] == response.json
def test_custom_query_with_unicode_characters(app_client):
response = app_client.get("/fixtures/𝐜𝐢𝐭𝐢𝐞𝐬.json?_shape=array")
assert [{"id": 1, "name": "San Francisco"}] == response.json
def test_trace(app_client):
response = app_client.get("/fixtures/simple_primary_key.json?_trace=1")
data = response.json
assert "_trace" in data
trace_info = data["_trace"]
assert isinstance(trace_info["request_duration_ms"], float)
assert isinstance(trace_info["sum_trace_duration_ms"], float)
assert isinstance(trace_info["num_traces"], int)
assert isinstance(trace_info["traces"], list)
assert len(trace_info["traces"]) == trace_info["num_traces"]
for trace in trace_info["traces"]:
assert isinstance(trace["type"], str)
assert isinstance(trace["start"], float)
assert isinstance(trace["end"], float)
assert trace["duration_ms"] == (trace["end"] - trace["start"]) * 1000
assert isinstance(trace["traceback"], list)
assert isinstance(trace["database"], str)
assert isinstance(trace["sql"], str)
assert isinstance(trace["params"], (list, dict, None.__class__))
@pytest.mark.parametrize(
"path,status_code",
[
("/fixtures.db", 200),
("/fixtures.json", 200),
("/fixtures/no_primary_key.json", 200),
# A 400 invalid SQL query should still have the header:
("/fixtures.json?sql=select+blah", 400),
],
)
def test_cors(app_client_with_cors, path, status_code):
response = app_client_with_cors.get(path)
assert response.status == status_code
assert "*" == response.headers["Access-Control-Allow-Origin"]
@pytest.mark.parametrize(
"path",
(
"/",
".json",
"/searchable",
"/searchable.json",
"/searchable_view",
"/searchable_view.json",
),
)
def test_database_with_space_in_name(app_client_two_attached_databases, path):
response = app_client_two_attached_databases.get("/extra database" + path)
assert response.status == 200
def test_common_prefix_database_names(app_client_conflicting_database_names):
# https://github.com/simonw/datasette/issues/597
assert ["fixtures", "foo", "foo-bar"] == [
d["name"]
for d in app_client_conflicting_database_names.get("/-/databases.json").json
]
for db_name, path in (("foo", "/foo.json"), ("foo-bar", "/foo-bar.json")):
data = app_client_conflicting_database_names.get(path).json
assert db_name == data["database"]
def test_null_foreign_keys_are_not_expanded(app_client):
response = app_client.get(
"/fixtures/foreign_key_references.json?_shape=array&_labels=on"
)
assert [
{
"pk": "1",
"foreign_key_with_label": {"value": "1", "label": "hello"},
"foreign_key_with_no_label": {"value": "1", "label": "1"},
},
{
"pk": "2",
"foreign_key_with_label": None,
"foreign_key_with_no_label": None,
},
] == response.json
def test_inspect_file_used_for_count(app_client_immutable_and_inspect_file):
response = app_client_immutable_and_inspect_file.get("/fixtures/sortable.json")
assert response.json["filtered_table_rows_count"] == 100
@pytest.mark.parametrize(
"path,expected_json,expected_text",
[
(
"/fixtures/binary_data.json?_shape=array",
[
{"rowid": 1, "data": {"$base64": True, "encoded": "FRwCx60F/g=="}},
{"rowid": 2, "data": {"$base64": True, "encoded": "FRwDx60F/g=="}},
{"rowid": 3, "data": None},
],
None,
),
(
"/fixtures/binary_data.json?_shape=array&_nl=on",
None,
(
'{"rowid": 1, "data": {"$base64": true, "encoded": "FRwCx60F/g=="}}\n'
'{"rowid": 2, "data": {"$base64": true, "encoded": "FRwDx60F/g=="}}\n'
'{"rowid": 3, "data": null}'
),
),
],
)
def test_binary_data_in_json(app_client, path, expected_json, expected_text):
response = app_client.get(path)
if expected_json:
assert response.json == expected_json
else:
assert response.text == expected_text
@pytest.mark.parametrize(
"qs",
[
"",
"?_shape=arrays",
"?_shape=arrayfirst",
"?_shape=object",
"?_shape=objects",
"?_shape=array",
"?_shape=array&_nl=on",
],
)
def test_paginate_using_link_header(app_client, qs):
path = "/fixtures/compound_three_primary_keys.json{}".format(qs)
num_pages = 0
while path:
response = app_client.get(path)
assert response.status == 200
num_pages += 1
link = response.headers.get("link")
if link:
assert link.startswith("<")
assert link.endswith('>; rel="next"')
path = link[1:].split(">")[0]
path = path.replace("http://localhost", "")
else:
path = None
assert num_pages == 21
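# Illustrative sketch (not part of the test suite): the same link-header
# pagination pattern followed with a plain HTTP client outside of pytest.
# The base URL and table path are assumptions for the example only.
def _follow_link_header_pagination(base_url="http://localhost:8001"):
    import json
    import urllib.request
    rows = []
    path = "/fixtures/compound_three_primary_keys.json?_shape=array"
    while path:
        with urllib.request.urlopen(base_url + path) as resp:
            rows.extend(json.loads(resp.read()))
            link = resp.headers.get("link")
        if link:
            # Header looks like: <http://host/db/table.json?...>; rel="next"
            path = link[1:].split(">")[0].replace(base_url, "")
        else:
            path = None
    return rows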
|
py | 1a43ea5880d515a637edf8b1dc9836821a3e033c | # Generated by Django 2.2 on 2020-09-10 19:13
from django.conf import settings
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
dependencies = [
('profile_api', '0001_initial'),
]
operations = [
migrations.CreateModel(
name='ProfileFeedItem',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('status_text', models.CharField(max_length=255)),
('created_on', models.DateTimeField(auto_now_add=True)),
('user_profile', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to=settings.AUTH_USER_MODEL)),
],
),
]
|
py | 1a43ea8bb191015487a3839cd2868d4234283811 | """Additional in-template functions for the lattedb module
"""
from django import template
register = template.Library() # pylint: disable=C0103
@register.inclusion_tag("progress-bar.html")
def render_progress_bar(danger, warning, info, success, total):
if total > 0:
context = {
"danger": danger / total * 100,
"warning": warning / total * 100,
"info": info / total * 100,
"success": success / total * 100,
"total": total,
}
else:
context = {
"danger": 0,
"warning": 0,
"info": 0,
"success": 0,
"total": 0,
}
return context
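# Illustrative usage from a Django template (the tag library name
# "progress_bar" is an assumption; it depends on this file's name, which is
# not shown here):
#     {% load progress_bar %}
#     {% render_progress_bar danger warning info success total %}
# The included "progress-bar.html" receives the percentages computed above.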
|
py | 1a43eb7c61892b2f9f4c0205aae169d5fa58e36b | from fastapi import APIRouter, Request, HTTPException, Depends, Query
from fastapi.responses import StreamingResponse
import aiohttp
import csv
import io
router = APIRouter()
@router.get("/battlefy/{tournament_id}")
async def battlefy_seed_csv(request: Request, tournament_id: str):
"""Returns a CSV of teams and players for seeding use"""
async with aiohttp.ClientSession() as session:
async with session.get(f"https://dtmwra1jsgyb0.cloudfront.net/tournaments/{tournament_id}/teams") as resp:
data = await resp.json()
if resp.status != 200:
raise HTTPException(status_code=resp.status, detail=f"{data['error']}")
# If status is 200
# Create in-memory store for csv writer
csv_file = io.StringIO()
csv_writer = csv.writer(csv_file)
csv_writer.writerow(
["team", "player1", "player2", "player3", "player4", "player5", "player6", "player7", "player8"])
for team in data:
team_row = [team['name']]
for p in team.get('players'):
name = p['inGameName']
                # Prefix names starting with "=" so spreadsheet software does
                # not interpret them as formulas when the CSV is opened
                if name[0] == "=":
                    name = f".{name}"
team_row.append(name)
csv_writer.writerow(team_row)
# Return CSV
response = StreamingResponse(iter([csv_file.getvalue()]), media_type="text/csv")
response.headers["Content-Disposition"] = "attachment; filename=teams.csv"
return response
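# Illustrative sketch (not part of this module): exercising the route with
# FastAPI's TestClient. Calling it would still hit the real Battlefy API, so
# this stays an uninvoked helper; the tournament id is a placeholder.
def _example_seed_csv_request(tournament_id: str):
    from fastapi import FastAPI
    from fastapi.testclient import TestClient
    app = FastAPI()
    app.include_router(router)
    client = TestClient(app)
    resp = client.get(f"/battlefy/{tournament_id}")
    assert resp.headers["Content-Disposition"] == "attachment; filename=teams.csv"
    return resp.text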
|
py | 1a43ebe6d2d9faa1a4c847f4e6a87a302a06d858 | # -*- coding: utf-8 -*-
from __future__ import unicode_literals
import sphinx_py3doc_enhanced_theme
extensions = [
'sphinx.ext.autodoc',
'sphinx.ext.autosummary',
'sphinx.ext.coverage',
'sphinx.ext.doctest',
'sphinx.ext.extlinks',
'sphinx.ext.ifconfig',
'sphinx.ext.napoleon',
'sphinx.ext.todo',
'sphinx.ext.viewcode',
]
source_suffix = '.rst'
master_doc = 'index'
project = 'disentangle'
year = '2020'
author = 'Yukun Chen'
copyright = '{0}, {1}'.format(year, author)
version = release = '0.0.4'
pygments_style = 'trac'
templates_path = ['.']
extlinks = {
'issue': ('https://github.com/yukunchen113/disentangle/issues/%s', '#'),
'pr': ('https://github.com/yukunchen113/disentangle/pull/%s', 'PR #'),
}
html_theme = "sphinx_py3doc_enhanced_theme"
html_theme_path = [sphinx_py3doc_enhanced_theme.get_html_theme_path()]
html_theme_options = {
'githuburl': 'https://github.com/yukunchen113/disentangle/'
}
html_use_smartypants = True
html_last_updated_fmt = '%b %d, %Y'
html_split_index = False
html_sidebars = {
'**': ['searchbox.html', 'globaltoc.html', 'sourcelink.html'],
}
html_short_title = '%s-%s' % (project, version)
napoleon_use_ivar = True
napoleon_use_rtype = False
napoleon_use_param = False
|
py | 1a43ec5599bf2cc81b26178c55dedff1fa64da2a | """
WSGI config for lark project.
It exposes the WSGI callable as a module-level variable named ``application``.
For more information on this file, see
https://docs.djangoproject.com/en/3.0/howto/deployment/wsgi/
"""
import os
from django.core.wsgi import get_wsgi_application
os.environ.setdefault('DJANGO_SETTINGS_MODULE', 'lark.settings')
application = get_wsgi_application()
|
py | 1a43ee5e8829d38a488742ff6b97de1fe3fe8a54 | # coding: utf-8
from __future__ import division
import tensorflow as tf
from tensorflow.keras import layers
import numpy as np
import data
def _get_shape(i, o, keepdims):
if (i == 1 or o == 1) and not keepdims:
return [max(i,o),]
else:
return [i, o]
def _slice(tensor, size, i):
"""Gets slice of columns of the tensor"""
return tensor[:, i*size:(i+1)*size]
def weights_Glorot(i, o, name, rng, is_logistic_sigmoid=False, keepdims=False):
#http://jmlr.org/proceedings/papers/v9/glorot10a/glorot10a.pdf
d = np.sqrt(6. / (i + o))
if is_logistic_sigmoid:
d *= 4.
return tf.Variable(tf.random.uniform(_get_shape(i, o, keepdims), -d, d))
def load(file_path, x, p=None):
import models
import pickle
import numpy as np
with open(file_path, 'rb') as f:
state = pickle.load(f)
Model = getattr(models, state["type"])
rng = np.random
rng.set_state(state["random_state"])
net = Model(
rng=rng,
x=x,
n_hidden=state["n_hidden"]
)
for net_param, state_param in zip(net.params, state["params"]):
net_param.assign(state_param)
return net, (state["learning_rate"], state["validation_ppl_history"], state["epoch"], rng)
class GRUCell(layers.Layer):
def __init__(self, rng, n_in, n_out, minibatch_size):
super(GRUCell, self).__init__()
# Notation from: An Empirical Exploration of Recurrent Network Architectures
self.n_in = n_in
self.n_out = n_out
# Initial hidden state
self.h0 = tf.zeros([minibatch_size, n_out])
# Gate parameters:
self.W_x = weights_Glorot(n_in, n_out*2, 'W_x', rng)
self.W_h = weights_Glorot(n_out, n_out*2, 'W_h', rng)
self.b = tf.Variable(tf.zeros([1, n_out*2]))
# Input parameters
self.W_x_h = weights_Glorot(n_in, n_out, 'W_x_h', rng)
self.W_h_h = weights_Glorot(n_out, n_out, 'W_h_h', rng)
self.b_h = tf.Variable(tf.zeros([1, n_out]))
self.params = [self.W_x, self.W_h, self.b, self.W_x_h, self.W_h_h, self.b_h]
# inputs = x_t, h_tm1
def call(self, inputs):
rz = tf.nn.sigmoid(tf.matmul(inputs[0], self.W_x) + tf.matmul(inputs[1], self.W_h) + self.b)
r = _slice(rz, self.n_out, 0)
z = _slice(rz, self.n_out, 1)
h = tf.nn.tanh(tf.matmul(inputs[0], self.W_x_h) + tf.matmul(inputs[1] * r, self.W_h_h) + self.b_h)
h_t = z * inputs[1] + (1. - z) * h
return h_t
class GRU(tf.keras.Model):
def __init__(self, rng, x, n_hidden):
super(GRU, self).__init__()
self.minibatch_size = tf.shape(x)[1]
self.n_hidden = n_hidden
self.x_vocabulary = data.read_vocabulary(data.WORD_VOCAB_FILE)
self.y_vocabulary = data.read_vocabulary(data.PUNCT_VOCAB_FILE)
self.x_vocabulary_size = len(self.x_vocabulary)
self.y_vocabulary_size = len(self.y_vocabulary)
# input model
self.We = weights_Glorot(self.x_vocabulary_size, n_hidden, 'We', rng) # Share embeddings between forward and backward model
self.GRU_f = GRUCell(rng=rng, n_in=n_hidden, n_out=n_hidden, minibatch_size=self.minibatch_size)
self.GRU_b = GRUCell(rng=rng, n_in=n_hidden, n_out=n_hidden, minibatch_size=self.minibatch_size)
# output model
self.GRU = GRUCell(rng=rng, n_in=n_hidden*2, n_out=n_hidden, minibatch_size=self.minibatch_size)
self.Wy = tf.Variable(tf.zeros([n_hidden, self.y_vocabulary_size]))
self.by = tf.Variable(tf.zeros([1, self.y_vocabulary_size]))
# attention model
n_attention = n_hidden * 2 # to match concatenated forward and reverse model states
self.Wa_h = weights_Glorot(n_hidden, n_attention, 'Wa_h', rng) # output model previous hidden state to attention model weights
self.Wa_c = weights_Glorot(n_attention, n_attention, 'Wa_c', rng) # contexts to attention model weights
self.ba = tf.Variable(tf.zeros([1, n_attention]))
self.Wa_y = weights_Glorot(n_attention, 1, 'Wa_y', rng) # gives weights to contexts
# Late fusion parameters
self.Wf_h = tf.Variable(tf.zeros([n_hidden, n_hidden]))
self.Wf_c = tf.Variable(tf.zeros([n_attention, n_hidden]))
self.Wf_f = tf.Variable(tf.zeros([n_hidden, n_hidden]))
self.bf = tf.Variable(tf.zeros([1, n_hidden]))
self.params = [self.We,
self.Wy, self.by,
self.Wa_h, self.Wa_c, self.ba, self.Wa_y,
self.Wf_h, self.Wf_c, self.Wf_f, self.bf]
self.params += self.GRU.params + self.GRU_f.params + self.GRU_b.params
print([x.shape for x in self.params])
def call(self, inputs, training=None):
# bi-directional recurrence
def input_recurrence(initializer, elems):
x_f_t, x_b_t = elems
h_f_tm1, h_b_tm1 = initializer
h_f_t = self.GRU_f(inputs=(tf.nn.embedding_lookup(self.We, x_f_t), h_f_tm1))
h_b_t = self.GRU_b(inputs=(tf.nn.embedding_lookup(self.We, x_b_t), h_b_tm1))
return [h_f_t, h_b_t]
[h_f_t, h_b_t] = tf.scan(
fn=input_recurrence,
elems=[inputs, inputs[::-1]], # forward and backward sequences
initializer=[self.GRU_f.h0, self.GRU_b.h0]
)
# 0-axis is time steps, 1-axis is batch size and 2-axis is hidden layer size
context = tf.concat([h_f_t, h_b_t[::-1]], axis=2)
#projected_context = tf.matmul(context, self.Wa_c) + self.ba for each tensor slice
projected_context = tf.matmul(context, tf.tile(tf.expand_dims(self.Wa_c, 0), tf.stack([tf.shape(context)[0], 1, 1]))) + self.ba
def output_recurrence(initializer, elems):
x_t = elems
h_tm1, _, _ = initializer
# Attention model
h_a = tf.nn.tanh(projected_context + tf.matmul(h_tm1, self.Wa_h))
#alphas = tf.exp(tf.matmul(h_a, self.Wa_y))
#alphas = tf.reshape(alphas, [tf.shape(alphas)[0], tf.shape(alphas)[1]]) # drop 2-axis (sized 1) is replaced by:
#sess.run(tf.reshape(tf.matmul(tf.reshape(x, [-1, tf.shape(x)[-1]]), tf.expand_dims(z,-1)), tf.shape(x)[:2]))
alphas = tf.exp(tf.reshape(tf.matmul(tf.reshape(h_a, [-1, tf.shape(h_a)[-1]]), tf.expand_dims(self.Wa_y, -1)), tf.shape(h_a)[:2]))
alphas = alphas / tf.reduce_sum(alphas, axis=0, keepdims=True)
weighted_context = tf.reduce_sum(context * alphas[:,:,None], axis=0)
h_t = self.GRU(inputs=(x_t, h_tm1))
# Late fusion
lfc = tf.matmul(weighted_context, self.Wf_c) # late fused context
fw = tf.nn.sigmoid(tf.matmul(lfc, self.Wf_f) + tf.matmul(h_t, self.Wf_h) + self.bf) # fusion weights
hf_t = lfc * fw + h_t # weighted fused context + hidden state
z = tf.matmul(hf_t, self.Wy) + self.by
            y_t = z  # logits; tf.nn.softmax is deferred to the loss (sparse_softmax_cross_entropy_with_logits)
return [h_t, hf_t, y_t]
[_, self.last_hidden_states, self.y] = tf.scan(
fn=output_recurrence,
elems=context[1:], # ignore the 1st word in context, because there's no punctuation before that
initializer=[self.GRU.h0, self.GRU.h0, tf.zeros([self.minibatch_size, self.y_vocabulary_size])]
)
return self.y
def cost(y_pred, y_true):
return tf.reduce_sum(tf.nn.sparse_softmax_cross_entropy_with_logits(logits=y_pred, labels=y_true))
def save(model, file_path, learning_rate=None, validation_ppl_history=None, best_validation_ppl=None, epoch=None, random_state=None):
import pickle
state = {
"type": model.__class__.__name__,
"n_hidden": model.n_hidden,
"params": [p for p in model.params],
"learning_rate": learning_rate,
"validation_ppl_history": validation_ppl_history,
"epoch": epoch,
"random_state": random_state
}
print([x.shape for x in state["params"]])
with open(file_path, 'wb') as f:
pickle.dump(state, f, protocol=pickle.HIGHEST_PROTOCOL)
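# Illustrative sketch (not part of the original module): how save() and load()
# are typically paired around training. The checkpoint path, hyperparameters
# and the placeholder batch tensor `x` are assumptions for the example only.
def _checkpoint_roundtrip_example(model, x, rng, path="Model_example.pcl"):
    # Persist current parameters together with optimiser bookkeeping
    save(model, path, learning_rate=0.02, validation_ppl_history=[],
         best_validation_ppl=None, epoch=0, random_state=rng.get_state())
    # Restore an equivalent network plus the saved training state
    net, (learning_rate, ppl_history, epoch, rng) = load(path, x)
    return net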
|
py | 1a43ef1e4097ad8e68dd4d6e52a7f63d636c90f2 | # #!/usr/bin/env python
# -*- coding: utf-8 -*-
#
# <HTTPretty - HTTP client mock for Python>
# Copyright (C) <2011-2020> Gabriel Falcão <[email protected]>
#
# Permission is hereby granted, free of charge, to any person
# obtaining a copy of this software and associated documentation
# files (the "Software"), to deal in the Software without
# restriction, including without limitation the rights to use,
# copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the
# Software is furnished to do so, subject to the following
# conditions:
#
# The above copyright notice and this permission notice shall be
# included in all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
# EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
# OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
# HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
# WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
# FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR
# OTHER DEALINGS IN THE SOFTWARE.
from __future__ import unicode_literals
import os
import time
import socket
from tornado.web import Application
from tornado.web import RequestHandler
from tornado.httpserver import HTTPServer
from tornado.ioloop import IOLoop
from httpretty import HTTPretty
from httpretty.core import old_socket as true_socket
from multiprocessing import Process
def utf8(s):
if isinstance(s, str):
s = s.encode('utf-8')
return bytes(s)
class BubblesHandler(RequestHandler):
def get(self):
self.write(". o O 0 O o . o O 0 O o . o O 0 O o . o O 0 O o . o O 0 O o .")
class ComeHandler(RequestHandler):
def get(self):
self.write("<- HELLO WORLD ->")
class TornadoServer(object):
is_running = False
def __init__(self, port):
self.port = int(port)
self.process = None
@classmethod
def get_handlers(cls):
return Application([
(r"/go-for-bubbles/?", BubblesHandler),
(r"/come-again/?", ComeHandler),
])
def start(self):
def go(app, port, data={}):
from httpretty import HTTPretty
HTTPretty.disable()
http = HTTPServer(app)
HTTPretty.disable()
http.listen(int(port))
IOLoop.instance().start()
app = self.get_handlers()
data = {}
args = (app, self.port, data)
HTTPretty.disable()
self.process = Process(target=go, args=args)
self.process.start()
time.sleep(1)
def stop(self):
try:
os.kill(self.process.pid, 9)
except OSError:
self.process.terminate()
finally:
self.is_running = False
class TCPServer(object):
def __init__(self, port):
self.port = int(port)
def start(self):
HTTPretty.disable()
def go(port):
from httpretty import HTTPretty
HTTPretty.disable()
import socket
s = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
s.bind(('localhost', port))
s.listen(True)
conn, addr = s.accept()
while True:
data = conn.recv(1024)
conn.send(b"RECEIVED: " + bytes(data))
conn.close()
args = [self.port]
self.process = Process(target=go, args=args)
self.process.start()
time.sleep(1)
def stop(self):
try:
os.kill(self.process.pid, 9)
except OSError:
self.process.terminate()
finally:
self.is_running = False
class TCPClient(object):
def __init__(self, port):
self.port = int(port)
self.sock = true_socket(socket.AF_INET, socket.SOCK_STREAM)
self.sock.connect(('localhost', self.port))
def send(self, data):
if isinstance(data, str):
data = data.encode('utf-8')
self.sock.sendall(data)
return self.sock.recv(len(data) + 11)
def close(self):
try:
self.sock.close()
except socket.error:
pass # already closed
def __del__(self):
self.close()
|
py | 1a43efc9231a8e1356b0c8188ba623175800141c | # Copyright 2012 OpenStack LLC.
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
"""
Base utilities to build API operation managers and objects on top of.
"""
from yunionclient.common import utils
class Manager(object):
"""
Managers interact with a particular type of API (servers, flavors, images,
etc.) and provide CRUD operations for them.
"""
resource_class = None
service_type = 'compute_v2'
is_admin_api = False
_columns = None
_admin_columns = None
_version = ''
def __init__(self, api):
self.api = api
@property
def columns(self):
cols = []
cols.extend(self._columns)
if self.api.is_system_admin() and self._admin_columns is not None:
cols.extend(self._admin_columns)
return cols
def tenant_id(self):
return self.api.get_default_tenant().get_id()
def tenant_name(self):
return self.api.get_default_tenant().get_name()
def _get_versioned_url(self, url):
if self._version is not None and len(self._version) > 0:
while url.startswith('/'):
url = url[1:]
url = r'/%s/%s' % (self._version, url)
return url
def json_request(self, method, url, **kwargs):
return self.api.json_request(self.service_type, self.is_admin_api,
method, self._get_versioned_url(url), **kwargs)
def raw_request(self, method, url, **kwargs):
return self.api.raw_request(self.service_type, self.is_admin_api,
method, self._get_versioned_url(url), **kwargs)
def get_urllib2_raw_request(self, url, **kwargs):
return self.api.get_urllib2_raw_request(self.service_type,
self.is_admin_api, self._get_versioned_url(url), **kwargs)
def _dict_to_object(self, dict_obj, obj_class):
cls = obj_class
if cls is None:
cls = self.resource_class
if cls is not None:
if isinstance(dict_obj, dict):
return cls(self.api, dict_obj)
elif isinstance(dict_obj, list):
rets = []
for o in dict_obj:
rets.append(self._dict_to_object(o, obj_class))
return rets
else:
return dict_obj
def _get(self, url, response_key, obj_class=None):
resp, body = self.json_request('GET', url)
if len(response_key) == 0:
return body
data = body[response_key]
return self._dict_to_object(data, obj_class)
def _list(self, url, response_key, obj_class=None):
resp, body = self.json_request('GET', url)
if 'total' in body:
total = body['total']
if 'limit' in body:
limit = body['limit']
else:
limit = 0
if 'offset' in body:
offset = body['offset']
else:
offset = 0
else:
total = 0
limit = 0
offset = 0
data = body[response_key]
return (self._dict_to_object(data, obj_class), total, limit, offset)
def _create(self, url, body, response_key, obj_class=None):
resp, body = self.json_request('POST', url, body=body)
return self._dict_to_object(body[response_key], obj_class)
def _delete(self, url, response_key, obj_class=None):
resp, body = self.json_request('DELETE', url)
# DELETE requests may not return a body
if body is not None and response_key in body:
return self._dict_to_object(body[response_key], obj_class)
else:
return None
def _update(self, url, body, response_key, obj_class=None):
resp, body = self.json_request('PUT', url, body=body)
# PUT requests may not return a body
if body is not None and response_key in body:
return self._dict_to_object(body[response_key], obj_class)
else:
return None
def clean_kwargs(kwargs):
newkw = {}
for k in list(kwargs.keys()):
if kwargs[k] is not None:
newkw[k] = kwargs[k]
return newkw
class StandaloneManager(Manager):
@classmethod
def keyword_url(cls):
return cls.keyword.replace(':', '/')
@classmethod
def keyword_plural_url(cls):
return cls.keyword_plural.replace(':', '/')
def get(self, idstr, **kwargs):
url = r'/%s/%s' % (self.keyword_plural_url(), idstr)
newkw = clean_kwargs(kwargs)
if len(newkw) > 0:
url += '?' + utils.urlencode(newkw)
return self._get(url, self.keyword)
def get_specific(self, idstr, spec, **kwargs):
url = r'/%s/%s/%s' % (self.keyword_plural_url(), idstr, spec)
newkw = clean_kwargs(kwargs)
if len(newkw) > 0:
url += '?' + utils.urlencode(newkw)
return self._get(url, self.keyword)
def get_metadata(self, idstr, **kwargs):
return self.get_specific(idstr, 'metadata', **kwargs)
def set_metadata(self, idstr, **kwargs):
return self.perform_action(idstr, 'metadata', **kwargs)
def set_user_metadata(self, idstr, **kwargs):
return self.perform_action(idstr, 'user-metadata', **kwargs)
def get_descendent(self, idstr, desc_cls, desc_idstr, **kwargs):
if desc_idstr is None:
desc_idstr = '_'
url = r'/%s/%s/%s/%s' % (self.keyword_plural_url(), idstr,
desc_cls.keyword_plural_url(),
desc_idstr)
kwargs = clean_kwargs(kwargs)
if len(kwargs) > 0:
url += '?' + utils.urlencode(kwargs)
return self._get(url, desc_cls.keyword)
def get_descendent_specific(self, idstr, desc_cls, desc_idstr,
spec, **kwargs):
if desc_idstr is None:
desc_idstr = '_'
url = r'/%s/%s/%s/%s/%s' % (self.keyword_plural_url(), idstr,
desc_cls.keyword_plural_url(),
desc_idstr, spec)
kwargs = clean_kwargs(kwargs)
if len(kwargs) > 0:
url += '?' + utils.urlencode(kwargs)
return self._get(url, desc_cls.keyword)
def list(self, **kwargs):
url = r'/%s' % (self.keyword_plural_url())
kwargs = clean_kwargs(kwargs)
if len(kwargs) > 0:
url += '?' + utils.urlencode(kwargs)
return self._list(url, self.keyword_plural)
def list_descendent(self, idstr, *args, **kwargs):
url = r'/%s/%s' % (self.keyword_plural_url(), idstr)
if len(args) > 1:
for i in range(0, len(args)-1, 2):
url += r'/%s/%s' % (args[i].keyword_plural_url(), args[i+1])
desc_cls = args[-1]
url += '/' + desc_cls.keyword_plural_url()
kwargs = clean_kwargs(kwargs)
if len(kwargs) > 0:
url += '?' + utils.urlencode(kwargs)
return self._list(url, desc_cls.keyword_plural)
def delete(self, idstr, **kwargs):
url = r'/%s/%s' % (self.keyword_plural_url(), idstr)
kwargs = clean_kwargs(kwargs)
if len(kwargs) > 0:
url += '?' + utils.urlencode(kwargs)
return self._delete(url, self.keyword)
def delete_descendent(self, idstr, desc_cls, desc_idstr, *args, **kwargs):
if desc_idstr is None:
desc_idstr = '_'
url = r'/%s/%s/%s/%s' % (self.keyword_plural_url(), idstr,
desc_cls.keyword_plural_url(),
desc_idstr)
if len(args) > 0:
for i in range(0, len(args), 2):
url += r'/%s/%s' % (args[i].keyword_plural_url(), args[i+1])
kwargs = clean_kwargs(kwargs)
if len(kwargs) > 0:
url += '?' + utils.urlencode(kwargs)
return self._delete(url, desc_cls.keyword)
def create(self, **kwargs):
return self.batch_create(1, **kwargs)
def batch_create(self, count_, **kwargs):
resp_key = self.keyword
body = {}
body[self.keyword] = kwargs
if count_ > 1:
resp_key = self.keyword_plural
body['count'] = count_
url = r'/%s' % (self.keyword_plural_url())
return self._create(url, body, resp_key)
def create_descendent(self, idstr, desc_cls, **kwargs):
return self.batch_create_descendent(idstr, desc_cls, 1, **kwargs)
def batch_create_descendent(self, idstr, desc_cls, count_, **kwargs):
resp_key = self.keyword
body = {}
if count_ > 1:
resp_key = self.keyword_plural
body['count'] = count_
body[desc_cls.keyword] = kwargs
url = r'/%s/%s/%s' % (self.keyword_plural_url(), idstr,
desc_cls.keyword_plural_url())
return self._create(url, body, resp_key)
def update(self, idstr, **kwargs):
body = {}
body[self.keyword] = kwargs
if idstr is None:
url = r'/%s' % self.keyword_plural_url()
else:
url = r'/%s/%s' % (self.keyword_plural_url(), idstr)
return self._update(url, body, self.keyword)
def update_descendent(self, idstr, desc_cls, desc_idstr, *args, **kwargs):
if desc_idstr is None:
desc_idstr = '_'
url = r'/%s/%s/%s/%s' % (self.keyword_plural_url(), idstr,
desc_cls.keyword_plural_url(), desc_idstr)
if len(args) > 0:
for i in range(0, len(args), 2):
url += r'/%s/%s' % (args[i].keyword_plural_url(), args[i+1])
body = {}
body[desc_cls.keyword] = kwargs
return self._update(url, body, desc_cls.keyword)
def perform_action(self, idstr, action, **kwargs):
url = r'/%s/%s/%s' % (self.keyword_plural_url(), idstr, action)
body = {}
body[self.keyword] = kwargs
resp, body = self.json_request('POST', url, body=body)
return body[self.keyword]
def perform_class_action(self, action, **kwargs):
url = r'/%s/%s' % (self.keyword_plural_url(), action)
body = {}
body[self.keyword] = kwargs
resp, body = self.json_request('POST', url, body=body)
return body[self.keyword]
def perform_action_descendent(self, idstr, desc_cls, desc_idstr,
action, **kwargs):
if desc_idstr is None:
desc_idstr = '_'
url = r'/%s/%s/%s/%s/%s' % (self.keyword_plural_url(), idstr,
desc_cls.keyword_plural_url(),
desc_idstr, action)
body = {}
body[desc_cls.keyword] = kwargs
resp, body = self.json_request('POST', url, body=body)
return body[desc_cls.keyword]
class ImageManager(StandaloneManager):
service_type = 'image'
_version = 'v1'
class JointManager(Manager):
@classmethod
def keyword_url(cls):
return cls.keyword.replace(':', '/')
@classmethod
def keyword_plural_url(cls):
return cls.keyword_plural.replace(':', '/')
def get(self, mid, sid):
url = r'/%s/%s/%s/%s' % (self.master_class().keyword_plural_url(),
mid, self.slave_class().keyword_plural_url(), sid)
return self._get(url, self.keyword)
def list(self, **kwargs):
url = r'/%s' % (self.keyword_plural_url())
kwargs = clean_kwargs(kwargs)
if len(kwargs) > 0:
url += '?%s' % utils.urlencode(kwargs)
return self._list(url, self.keyword_plural)
def list_descendent(self, mid, **kwargs):
url = r'/%s/%s/%s' % (self.master_class().keyword_plural_url(),
mid, self.slave_class().keyword_plural_url())
kwargs = clean_kwargs(kwargs)
if len(kwargs) > 0:
url += '?%s' % utils.urlencode(kwargs)
return self._list(url, self.keyword_plural)
def attach(self, mid, sid, **kwargs):
body = {}
body[self.keyword] = kwargs
url = r'/%s/%s/%s/%s' % (self.master_class().keyword_plural_url(),
mid, self.slave_class().keyword_plural_url(), sid)
return self._create(url, body, self.keyword)
def detach(self, mid, sid):
url = r'/%s/%s/%s/%s' % (self.master_class().keyword_plural_url(),
mid, self.slave_class().keyword_plural_url(), sid)
return self._delete(url, self.keyword)
def update(self, mid, sid, **kwargs):
body = {}
body[self.keyword] = kwargs
url = r'/%s/%s/%s/%s' % (self.master_class().keyword_plural_url(),
mid, self.slave_class().keyword_plural_url(), sid)
return self._update(url, body, self.keyword)
class IdentityManager(StandaloneManager):
service_type = 'identity'
_version = 'v3'
class IdentityJointManager(JointManager):
service_type = 'identity'
_version = 'v3'
class ResourceBase(object):
def __init__(self, api, attr_dict):
self._client_api = api
attr_dict = self._normalize_attribute_dict(attr_dict)
for (k, v) in attr_dict.items():
attr_name = k.replace('-', '_')
setattr(self, attr_name, v)
def _normalize_attribute_dict(self, attr_dict):
return attr_dict
def __repr__(self):
reprkeys = sorted(k for k in list(self.__dict__.keys()) if k[0] != '_')
info = ", ".join("%s=%s" % (k, getattr(self, k)) for k in reprkeys)
return "<%s %s>" % (self.__class__.__name__, info)
def __getitem__(self, key):
if len(key) > 0 and key[0] != '_':
return getattr(self, key, None)
return None
def get(self, key):
return self[key]
def to_dict(self):
d = {}
for k in dir(self):
if k[0] != '_':
v = getattr(self, k, None)
if v is not None:
if not callable(v):
d[k] = v
return d
class MeterManager(StandaloneManager):
service_type = 'meter'
class LoggerManager(StandaloneManager):
service_type = 'log'
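# Illustrative sketch (not part of the original module): a minimal concrete
# manager in the style described by the Manager docstring above. The keyword
# values and column names are assumptions, not taken from this file.
class ExampleServerManager(StandaloneManager):
    service_type = 'compute_v2'
    keyword = 'server'
    keyword_plural = 'servers'
    _columns = ['ID', 'Name', 'Status']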
|
py | 1a43f01330eebbbeaf76a6d48bb99476e8b5f6ee | """HTTP/1.1 client library
A backport of the Python 3.3 http/client.py module for python-future.
<intro stuff goes here>
<other stuff, too>
HTTPConnection goes through a number of "states", which define when a client
may legally make another request or fetch the response for a particular
request. This diagram details these state transitions:
(null)
|
| HTTPConnection()
v
Idle
|
| putrequest()
v
Request-started
|
| ( putheader() )* endheaders()
v
Request-sent
|
| response = getresponse()
v
Unread-response [Response-headers-read]
|\____________________
| |
| response.read() | putrequest()
v v
Idle Req-started-unread-response
______/|
/ |
response.read() | | ( putheader() )* endheaders()
v v
Request-started Req-sent-unread-response
|
| response.read()
v
Request-sent
This diagram presents the following rules:
-- a second request may not be started until {response-headers-read}
-- a response [object] cannot be retrieved until {request-sent}
-- there is no differentiation between an unread response body and a
partially read response body
Note: this enforcement is applied by the HTTPConnection class. The
HTTPResponse class does not enforce this state machine, which
implies sophisticated clients may accelerate the request/response
pipeline. Caution should be taken, though: accelerating the states
beyond the above pattern may imply knowledge of the server's
connection-close behavior for certain requests. For example, it
is impossible to tell whether the server will close the connection
UNTIL the response headers have been read; this means that further
requests cannot be placed into the pipeline until it is known that
the server will NOT be closing the connection.
Logical State __state __response
------------- ------- ----------
Idle _CS_IDLE None
Request-started _CS_REQ_STARTED None
Request-sent _CS_REQ_SENT None
Unread-response _CS_IDLE <response_class>
Req-started-unread-response _CS_REQ_STARTED <response_class>
Req-sent-unread-response _CS_REQ_SENT <response_class>
"""
from __future__ import (absolute_import, division,
print_function, unicode_literals)
from future.builtins import bytes, int, str, super
from future.utils import PY2
from future.backports.email import parser as email_parser
from future.backports.email import message as email_message
from future.backports.misc import create_connection as socket_create_connection
import io
import os
import socket
import collections
from future.backports.urllib.parse import urlsplit
import warnings
from array import array
__all__ = ["HTTPResponse", "HTTPConnection",
"HTTPException", "NotConnected", "UnknownProtocol",
"UnknownTransferEncoding", "UnimplementedFileMode",
"IncompleteRead", "InvalidURL", "ImproperConnectionState",
"CannotSendRequest", "CannotSendHeader", "ResponseNotReady",
"BadStatusLine", "error", "responses"]
HTTP_PORT = 80
HTTPS_PORT = 443
_UNKNOWN = 'UNKNOWN'
# connection states
_CS_IDLE = 'Idle'
_CS_REQ_STARTED = 'Request-started'
_CS_REQ_SENT = 'Request-sent'
# status codes
# informational
CONTINUE = 100
SWITCHING_PROTOCOLS = 101
PROCESSING = 102
# successful
OK = 200
CREATED = 201
ACCEPTED = 202
NON_AUTHORITATIVE_INFORMATION = 203
NO_CONTENT = 204
RESET_CONTENT = 205
PARTIAL_CONTENT = 206
MULTI_STATUS = 207
IM_USED = 226
# redirection
MULTIPLE_CHOICES = 300
MOVED_PERMANENTLY = 301
FOUND = 302
SEE_OTHER = 303
NOT_MODIFIED = 304
USE_PROXY = 305
TEMPORARY_REDIRECT = 307
# client error
BAD_REQUEST = 400
UNAUTHORIZED = 401
PAYMENT_REQUIRED = 402
FORBIDDEN = 403
NOT_FOUND = 404
METHOD_NOT_ALLOWED = 405
NOT_ACCEPTABLE = 406
PROXY_AUTHENTICATION_REQUIRED = 407
REQUEST_TIMEOUT = 408
CONFLICT = 409
GONE = 410
LENGTH_REQUIRED = 411
PRECONDITION_FAILED = 412
REQUEST_ENTITY_TOO_LARGE = 413
REQUEST_URI_TOO_LONG = 414
UNSUPPORTED_MEDIA_TYPE = 415
REQUESTED_RANGE_NOT_SATISFIABLE = 416
EXPECTATION_FAILED = 417
UNPROCESSABLE_ENTITY = 422
LOCKED = 423
FAILED_DEPENDENCY = 424
UPGRADE_REQUIRED = 426
PRECONDITION_REQUIRED = 428
TOO_MANY_REQUESTS = 429
REQUEST_HEADER_FIELDS_TOO_LARGE = 431
# server error
INTERNAL_SERVER_ERROR = 500
NOT_IMPLEMENTED = 501
BAD_GATEWAY = 502
SERVICE_UNAVAILABLE = 503
GATEWAY_TIMEOUT = 504
HTTP_VERSION_NOT_SUPPORTED = 505
INSUFFICIENT_STORAGE = 507
NOT_EXTENDED = 510
NETWORK_AUTHENTICATION_REQUIRED = 511
# Mapping status codes to official W3C names
responses = {
100: 'Continue',
101: 'Switching Protocols',
200: 'OK',
201: 'Created',
202: 'Accepted',
203: 'Non-Authoritative Information',
204: 'No Content',
205: 'Reset Content',
206: 'Partial Content',
300: 'Multiple Choices',
301: 'Moved Permanently',
302: 'Found',
303: 'See Other',
304: 'Not Modified',
305: 'Use Proxy',
306: '(Unused)',
307: 'Temporary Redirect',
400: 'Bad Request',
401: 'Unauthorized',
402: 'Payment Required',
403: 'Forbidden',
404: 'Not Found',
405: 'Method Not Allowed',
406: 'Not Acceptable',
407: 'Proxy Authentication Required',
408: 'Request Timeout',
409: 'Conflict',
410: 'Gone',
411: 'Length Required',
412: 'Precondition Failed',
413: 'Request Entity Too Large',
414: 'Request-URI Too Long',
415: 'Unsupported Media Type',
416: 'Requested Range Not Satisfiable',
417: 'Expectation Failed',
428: 'Precondition Required',
429: 'Too Many Requests',
431: 'Request Header Fields Too Large',
500: 'Internal Server Error',
501: 'Not Implemented',
502: 'Bad Gateway',
503: 'Service Unavailable',
504: 'Gateway Timeout',
505: 'HTTP Version Not Supported',
511: 'Network Authentication Required',
}
# maximal amount of data to read at one time in _safe_read
MAXAMOUNT = 1048576
# maximal line length when calling readline().
_MAXLINE = 65536
_MAXHEADERS = 100
class HTTPMessage(email_message.Message):
# XXX The only usage of this method is in
# http.server.CGIHTTPRequestHandler. Maybe move the code there so
# that it doesn't need to be part of the public API. The API has
# never been defined so this could cause backwards compatibility
# issues.
def getallmatchingheaders(self, name):
"""Find all header lines matching a given header name.
Look through the list of headers and find all lines matching a given
header name (and their continuation lines). A list of the lines is
returned, without interpretation. If the header does not occur, an
empty list is returned. If the header occurs multiple times, all
occurrences are returned. Case is not important in the header name.
"""
name = name.lower() + ':'
n = len(name)
lst = []
hit = 0
for line in self.keys():
if line[:n].lower() == name:
hit = 1
elif not line[:1].isspace():
hit = 0
if hit:
lst.append(line)
return lst
def parse_headers(fp, _class=HTTPMessage):
"""Parses only RFC2822 headers from a file pointer.
email Parser wants to see strings rather than bytes.
But a TextIOWrapper around self.rfile would buffer too many bytes
from the stream, bytes which we later need to read as bytes.
So we read the correct bytes here, as bytes, for email Parser
to parse.
"""
headers = []
while True:
line = fp.readline(_MAXLINE + 1)
if len(line) > _MAXLINE:
raise LineTooLong("header line")
headers.append(line)
if len(headers) > _MAXHEADERS:
raise HTTPException("got more than %d headers" % _MAXHEADERS)
if line in (b'\r\n', b'\n', b''):
break
hstring = bytes(b'').join(headers).decode('iso-8859-1')
return email_parser.Parser(_class=_class).parsestr(hstring)
_strict_sentinel = object()
class HTTPResponse(io.RawIOBase):
# See RFC 2616 sec 19.6 and RFC 1945 sec 6 for details.
# The bytes from the socket object are iso-8859-1 strings.
# See RFC 2616 sec 2.2 which notes an exception for MIME-encoded
# text following RFC 2047. The basic status line parsing only
# accepts iso-8859-1.
def __init__(self, sock, debuglevel=0, strict=_strict_sentinel, method=None, url=None):
# If the response includes a content-length header, we need to
# make sure that the client doesn't read more than the
# specified number of bytes. If it does, it will block until
# the server times out and closes the connection. This will
# happen if a self.fp.read() is done (without a size) whether
# self.fp is buffered or not. So, no self.fp.read() by
# clients unless they know what they are doing.
self.fp = sock.makefile("rb")
self.debuglevel = debuglevel
if strict is not _strict_sentinel:
warnings.warn("the 'strict' argument isn't supported anymore; "
"http.client now always assumes HTTP/1.x compliant servers.",
DeprecationWarning, 2)
self._method = method
# The HTTPResponse object is returned via urllib. The clients
# of http and urllib expect different attributes for the
# headers. headers is used here and supports urllib. msg is
# provided as a backwards compatibility layer for http
# clients.
self.headers = self.msg = None
# from the Status-Line of the response
self.version = _UNKNOWN # HTTP-Version
self.status = _UNKNOWN # Status-Code
self.reason = _UNKNOWN # Reason-Phrase
self.chunked = _UNKNOWN # is "chunked" being used?
self.chunk_left = _UNKNOWN # bytes left to read in current chunk
self.length = _UNKNOWN # number of bytes left in response
self.will_close = _UNKNOWN # conn will close at end of response
def _read_status(self):
line = str(self.fp.readline(_MAXLINE + 1), "iso-8859-1")
if len(line) > _MAXLINE:
raise LineTooLong("status line")
if self.debuglevel > 0:
print("reply:", repr(line))
if not line:
# Presumably, the server closed the connection before
# sending a valid response.
raise BadStatusLine(line)
try:
version, status, reason = line.split(None, 2)
except ValueError:
try:
version, status = line.split(None, 1)
reason = ""
except ValueError:
# empty version will cause next test to fail.
version = ""
if not version.startswith("HTTP/"):
self._close_conn()
raise BadStatusLine(line)
# The status code is a three-digit number
try:
status = int(status)
if status < 100 or status > 999:
raise BadStatusLine(line)
except ValueError:
raise BadStatusLine(line)
return version, status, reason
def begin(self):
if self.headers is not None:
# we've already started reading the response
return
# read until we get a non-100 response
while True:
version, status, reason = self._read_status()
if status != CONTINUE:
break
# skip the header from the 100 response
while True:
skip = self.fp.readline(_MAXLINE + 1)
if len(skip) > _MAXLINE:
raise LineTooLong("header line")
skip = skip.strip()
if not skip:
break
if self.debuglevel > 0:
print("header:", skip)
self.code = self.status = status
self.reason = reason.strip()
if version in ("HTTP/1.0", "HTTP/0.9"):
# Some servers might still return "0.9", treat it as 1.0 anyway
self.version = 10
elif version.startswith("HTTP/1."):
self.version = 11 # use HTTP/1.1 code for HTTP/1.x where x>=1
else:
raise UnknownProtocol(version)
self.headers = self.msg = parse_headers(self.fp)
if self.debuglevel > 0:
for hdr in self.headers:
print("header:", hdr, end=" ")
# are we using the chunked-style of transfer encoding?
tr_enc = self.headers.get("transfer-encoding")
if tr_enc and tr_enc.lower() == "chunked":
self.chunked = True
self.chunk_left = None
else:
self.chunked = False
# will the connection close at the end of the response?
self.will_close = self._check_close()
# do we have a Content-Length?
# NOTE: RFC 2616, S4.4, #3 says we ignore this if tr_enc is "chunked"
self.length = None
length = self.headers.get("content-length")
# are we using the chunked-style of transfer encoding?
tr_enc = self.headers.get("transfer-encoding")
if length and not self.chunked:
try:
self.length = int(length)
except ValueError:
self.length = None
else:
if self.length < 0: # ignore nonsensical negative lengths
self.length = None
else:
self.length = None
# does the body have a fixed length? (of zero)
if (status == NO_CONTENT or status == NOT_MODIFIED or
100 <= status < 200 or # 1xx codes
self._method == "HEAD"):
self.length = 0
# if the connection remains open, and we aren't using chunked, and
# a content-length was not provided, then assume that the connection
# WILL close.
if (not self.will_close and
not self.chunked and
self.length is None):
self.will_close = True
def _check_close(self):
conn = self.headers.get("connection")
if self.version == 11:
# An HTTP/1.1 proxy is assumed to stay open unless
# explicitly closed.
conn = self.headers.get("connection")
if conn and "close" in conn.lower():
return True
return False
# Some HTTP/1.0 implementations have support for persistent
# connections, using rules different than HTTP/1.1.
# For older HTTP, Keep-Alive indicates persistent connection.
if self.headers.get("keep-alive"):
return False
# At least Akamai returns a "Connection: Keep-Alive" header,
# which was supposed to be sent by the client.
if conn and "keep-alive" in conn.lower():
return False
# Proxy-Connection is a netscape hack.
pconn = self.headers.get("proxy-connection")
if pconn and "keep-alive" in pconn.lower():
return False
# otherwise, assume it will close
return True
def _close_conn(self):
fp = self.fp
self.fp = None
fp.close()
def close(self):
super().close() # set "closed" flag
if self.fp:
self._close_conn()
# These implementations are for the benefit of io.BufferedReader.
# XXX This class should probably be revised to act more like
# the "raw stream" that BufferedReader expects.
def flush(self):
super().flush()
if self.fp:
self.fp.flush()
def readable(self):
return True
# End of "raw stream" methods
def isclosed(self):
"""True if the connection is closed."""
# NOTE: it is possible that we will not ever call self.close(). This
# case occurs when will_close is TRUE, length is None, and we
# read up to the last byte, but NOT past it.
#
# IMPLIES: if will_close is FALSE, then self.close() will ALWAYS be
# called, meaning self.isclosed() is meaningful.
return self.fp is None
def read(self, amt=None):
if self.fp is None:
return bytes(b"")
if self._method == "HEAD":
self._close_conn()
return bytes(b"")
if amt is not None:
# Amount is given, so call base class version
# (which is implemented in terms of self.readinto)
return bytes(super(HTTPResponse, self).read(amt))
else:
# Amount is not given (unbounded read) so we must check self.length
# and self.chunked
if self.chunked:
return self._readall_chunked()
if self.length is None:
s = self.fp.read()
else:
try:
s = self._safe_read(self.length)
except IncompleteRead:
self._close_conn()
raise
self.length = 0
self._close_conn() # we read everything
return bytes(s)
def readinto(self, b):
if self.fp is None:
return 0
if self._method == "HEAD":
self._close_conn()
return 0
if self.chunked:
return self._readinto_chunked(b)
if self.length is not None:
if len(b) > self.length:
# clip the read to the "end of response"
b = memoryview(b)[0:self.length]
# we do not use _safe_read() here because this may be a .will_close
# connection, and the user is reading more bytes than will be provided
# (for example, reading in 1k chunks)
if PY2:
data = self.fp.read(len(b))
n = len(data)
b[:n] = data
else:
n = self.fp.readinto(b)
if not n and b:
# Ideally, we would raise IncompleteRead if the content-length
# wasn't satisfied, but it might break compatibility.
self._close_conn()
elif self.length is not None:
self.length -= n
if not self.length:
self._close_conn()
return n
def _read_next_chunk_size(self):
# Read the next chunk size from the file
line = self.fp.readline(_MAXLINE + 1)
if len(line) > _MAXLINE:
raise LineTooLong("chunk size")
i = line.find(b";")
if i >= 0:
line = line[:i] # strip chunk-extensions
try:
return int(line, 16)
except ValueError:
# close the connection as protocol synchronisation is
# probably lost
self._close_conn()
raise
def _read_and_discard_trailer(self):
# read and discard trailer up to the CRLF terminator
### note: we shouldn't have any trailers!
while True:
line = self.fp.readline(_MAXLINE + 1)
if len(line) > _MAXLINE:
raise LineTooLong("trailer line")
if not line:
# a vanishingly small number of sites EOF without
# sending the trailer
break
if line in (b'\r\n', b'\n', b''):
break
def _readall_chunked(self):
assert self.chunked != _UNKNOWN
chunk_left = self.chunk_left
value = []
while True:
if chunk_left is None:
try:
chunk_left = self._read_next_chunk_size()
if chunk_left == 0:
break
except ValueError:
raise IncompleteRead(bytes(b'').join(value))
value.append(self._safe_read(chunk_left))
# we read the whole chunk, get another
self._safe_read(2) # toss the CRLF at the end of the chunk
chunk_left = None
self._read_and_discard_trailer()
# we read everything; close the "file"
self._close_conn()
return bytes(b'').join(value)
def _readinto_chunked(self, b):
assert self.chunked != _UNKNOWN
chunk_left = self.chunk_left
total_bytes = 0
mvb = memoryview(b)
while True:
if chunk_left is None:
try:
chunk_left = self._read_next_chunk_size()
if chunk_left == 0:
break
except ValueError:
raise IncompleteRead(bytes(b[0:total_bytes]))
if len(mvb) < chunk_left:
n = self._safe_readinto(mvb)
self.chunk_left = chunk_left - n
return total_bytes + n
elif len(mvb) == chunk_left:
n = self._safe_readinto(mvb)
self._safe_read(2) # toss the CRLF at the end of the chunk
self.chunk_left = None
return total_bytes + n
else:
temp_mvb = mvb[0:chunk_left]
n = self._safe_readinto(temp_mvb)
mvb = mvb[n:]
total_bytes += n
# we read the whole chunk, get another
self._safe_read(2) # toss the CRLF at the end of the chunk
chunk_left = None
self._read_and_discard_trailer()
# we read everything; close the "file"
self._close_conn()
return total_bytes
def _safe_read(self, amt):
"""Read the number of bytes requested, compensating for partial reads.
Normally, we have a blocking socket, but a read() can be interrupted
by a signal (resulting in a partial read).
Note that we cannot distinguish between EOF and an interrupt when zero
bytes have been read. IncompleteRead() will be raised in this
situation.
This function should be used when <amt> bytes "should" be present for
reading. If the bytes are truly not available (due to EOF), then the
IncompleteRead exception can be used to detect the problem.
"""
s = []
while amt > 0:
chunk = self.fp.read(min(amt, MAXAMOUNT))
if not chunk:
raise IncompleteRead(bytes(b'').join(s), amt)
s.append(chunk)
amt -= len(chunk)
return bytes(b"").join(s)
def _safe_readinto(self, b):
"""Same as _safe_read, but for reading into a buffer."""
total_bytes = 0
mvb = memoryview(b)
while total_bytes < len(b):
if MAXAMOUNT < len(mvb):
temp_mvb = mvb[0:MAXAMOUNT]
if PY2:
data = self.fp.read(len(temp_mvb))
n = len(data)
temp_mvb[:n] = data
else:
n = self.fp.readinto(temp_mvb)
else:
if PY2:
data = self.fp.read(len(mvb))
n = len(data)
mvb[:n] = data
else:
n = self.fp.readinto(mvb)
if not n:
raise IncompleteRead(bytes(mvb[0:total_bytes]), len(b))
mvb = mvb[n:]
total_bytes += n
return total_bytes
def fileno(self):
return self.fp.fileno()
def getheader(self, name, default=None):
if self.headers is None:
raise ResponseNotReady()
headers = self.headers.get_all(name) or default
if isinstance(headers, str) or not hasattr(headers, '__iter__'):
return headers
else:
return ', '.join(headers)
def getheaders(self):
"""Return list of (header, value) tuples."""
if self.headers is None:
raise ResponseNotReady()
return list(self.headers.items())
# We override IOBase.__iter__ so that it doesn't check for closed-ness
def __iter__(self):
return self
# For compatibility with old-style urllib responses.
def info(self):
return self.headers
def geturl(self):
return self.url
def getcode(self):
return self.status
class HTTPConnection(object):
_http_vsn = 11
_http_vsn_str = 'HTTP/1.1'
response_class = HTTPResponse
default_port = HTTP_PORT
auto_open = 1
debuglevel = 0
def __init__(self, host, port=None, strict=_strict_sentinel,
timeout=socket._GLOBAL_DEFAULT_TIMEOUT, source_address=None):
if strict is not _strict_sentinel:
warnings.warn("the 'strict' argument isn't supported anymore; "
"http.client now always assumes HTTP/1.x compliant servers.",
DeprecationWarning, 2)
self.timeout = timeout
self.source_address = source_address
self.sock = None
self._buffer = []
self.__response = None
self.__state = _CS_IDLE
self._method = None
self._tunnel_host = None
self._tunnel_port = None
self._tunnel_headers = {}
self._set_hostport(host, port)
def set_tunnel(self, host, port=None, headers=None):
""" Sets up the host and the port for the HTTP CONNECT Tunnelling.
The headers argument should be a mapping of extra HTTP headers
to send with the CONNECT request.
"""
self._tunnel_host = host
self._tunnel_port = port
if headers:
self._tunnel_headers = headers
else:
self._tunnel_headers.clear()
def _set_hostport(self, host, port):
if port is None:
i = host.rfind(':')
j = host.rfind(']') # ipv6 addresses have [...]
if i > j:
try:
port = int(host[i+1:])
except ValueError:
if host[i+1:] == "": # http://foo.com:/ == http://foo.com/
port = self.default_port
else:
raise InvalidURL("nonnumeric port: '%s'" % host[i+1:])
host = host[:i]
else:
port = self.default_port
if host and host[0] == '[' and host[-1] == ']':
host = host[1:-1]
self.host = host
self.port = port
def set_debuglevel(self, level):
self.debuglevel = level
def _tunnel(self):
self._set_hostport(self._tunnel_host, self._tunnel_port)
connect_str = "CONNECT %s:%d HTTP/1.0\r\n" % (self.host, self.port)
connect_bytes = connect_str.encode("ascii")
self.send(connect_bytes)
for header, value in self._tunnel_headers.items():
header_str = "%s: %s\r\n" % (header, value)
header_bytes = header_str.encode("latin-1")
self.send(header_bytes)
self.send(bytes(b'\r\n'))
response = self.response_class(self.sock, method=self._method)
(version, code, message) = response._read_status()
if code != 200:
self.close()
raise socket.error("Tunnel connection failed: %d %s" % (code,
message.strip()))
while True:
line = response.fp.readline(_MAXLINE + 1)
if len(line) > _MAXLINE:
raise LineTooLong("header line")
if not line:
# for sites which EOF without sending a trailer
break
if line in (b'\r\n', b'\n', b''):
break
def connect(self):
"""Connect to the host and port specified in __init__."""
self.sock = socket_create_connection((self.host,self.port),
self.timeout, self.source_address)
if self._tunnel_host:
self._tunnel()
def close(self):
"""Close the connection to the HTTP server."""
if self.sock:
self.sock.close() # close it manually... there may be other refs
self.sock = None
if self.__response:
self.__response.close()
self.__response = None
self.__state = _CS_IDLE
def send(self, data):
"""Send `data' to the server.
``data`` can be a string object, a bytes object, an array object, a
file-like object that supports a .read() method, or an iterable object.
"""
if self.sock is None:
if self.auto_open:
self.connect()
else:
raise NotConnected()
if self.debuglevel > 0:
print("send:", repr(data))
blocksize = 8192
# Python 2.7 array objects have a read method which is incompatible
# with the 2-arg calling syntax below.
if hasattr(data, "read") and not isinstance(data, array):
if self.debuglevel > 0:
print("sendIng a read()able")
encode = False
try:
mode = data.mode
except AttributeError:
# io.BytesIO and other file-like objects don't have a `mode`
# attribute.
pass
else:
if "b" not in mode:
encode = True
if self.debuglevel > 0:
print("encoding file using iso-8859-1")
while 1:
datablock = data.read(blocksize)
if not datablock:
break
if encode:
datablock = datablock.encode("iso-8859-1")
self.sock.sendall(datablock)
return
try:
self.sock.sendall(data)
except TypeError:
if isinstance(data, collections.Iterable):
for d in data:
self.sock.sendall(d)
else:
raise TypeError("data should be a bytes-like object "
"or an iterable, got %r" % type(data))
def _output(self, s):
"""Add a line of output to the current request buffer.
Assumes that the line does *not* end with \\r\\n.
"""
self._buffer.append(s)
def _send_output(self, message_body=None):
"""Send the currently buffered request and clear the buffer.
Appends an extra \\r\\n to the buffer.
A message_body may be specified, to be appended to the request.
"""
self._buffer.extend((bytes(b""), bytes(b"")))
msg = bytes(b"\r\n").join(self._buffer)
del self._buffer[:]
# If msg and message_body are sent in a single send() call,
# it will avoid performance problems caused by the interaction
# between delayed ack and the Nagle algorithm.
if isinstance(message_body, bytes):
msg += message_body
message_body = None
self.send(msg)
if message_body is not None:
# message_body was not a string (i.e. it is a file), and
# we must run the risk of Nagle.
self.send(message_body)
def putrequest(self, method, url, skip_host=0, skip_accept_encoding=0):
"""Send a request to the server.
`method' specifies an HTTP request method, e.g. 'GET'.
`url' specifies the object being requested, e.g. '/index.html'.
`skip_host' if True does not add automatically a 'Host:' header
`skip_accept_encoding' if True does not add automatically an
'Accept-Encoding:' header
"""
# if a prior response has been completed, then forget about it.
if self.__response and self.__response.isclosed():
self.__response = None
# in certain cases, we cannot issue another request on this connection.
# this occurs when:
# 1) we are in the process of sending a request. (_CS_REQ_STARTED)
# 2) a response to a previous request has signalled that it is going
# to close the connection upon completion.
# 3) the headers for the previous response have not been read, thus
# we cannot determine whether point (2) is true. (_CS_REQ_SENT)
#
# if there is no prior response, then we can request at will.
#
# if point (2) is true, then we will have passed the socket to the
# response (effectively meaning, "there is no prior response"), and
# will open a new one when a new request is made.
#
# Note: if a prior response exists, then we *can* start a new request.
# We are not allowed to begin fetching the response to this new
# request, however, until that prior response is complete.
#
if self.__state == _CS_IDLE:
self.__state = _CS_REQ_STARTED
else:
raise CannotSendRequest(self.__state)
# Save the method we use, we need it later in the response phase
self._method = method
if not url:
url = '/'
request = '%s %s %s' % (method, url, self._http_vsn_str)
# Non-ASCII characters should have been eliminated earlier
self._output(request.encode('ascii'))
if self._http_vsn == 11:
# Issue some standard headers for better HTTP/1.1 compliance
if not skip_host:
# this header is issued *only* for HTTP/1.1
# connections. more specifically, this means it is
# only issued when the client uses the new
# HTTPConnection() class. backwards-compat clients
# will be using HTTP/1.0 and those clients may be
# issuing this header themselves. we should NOT issue
# it twice; some web servers (such as Apache) barf
# when they see two Host: headers
                # If we need a non-standard port, include it in the
                # header.  If the request is going through a proxy,
                # use the host of the actual URL, not the host of the
                # proxy.
netloc = ''
if url.startswith('http'):
nil, netloc, nil, nil, nil = urlsplit(url)
if netloc:
try:
netloc_enc = netloc.encode("ascii")
except UnicodeEncodeError:
netloc_enc = netloc.encode("idna")
self.putheader('Host', netloc_enc)
else:
try:
host_enc = self.host.encode("ascii")
except UnicodeEncodeError:
host_enc = self.host.encode("idna")
                    # As per RFC 2732, IPv6 addresses should be wrapped with []
                    # when used as Host header
if self.host.find(':') >= 0:
host_enc = bytes(b'[' + host_enc + b']')
if self.port == self.default_port:
self.putheader('Host', host_enc)
else:
host_enc = host_enc.decode("ascii")
self.putheader('Host', "%s:%s" % (host_enc, self.port))
# note: we are assuming that clients will not attempt to set these
# headers since *this* library must deal with the
# consequences. this also means that when the supporting
# libraries are updated to recognize other forms, then this
# code should be changed (removed or updated).
# we only want a Content-Encoding of "identity" since we don't
# support encodings such as x-gzip or x-deflate.
if not skip_accept_encoding:
self.putheader('Accept-Encoding', 'identity')
# we can accept "chunked" Transfer-Encodings, but no others
# NOTE: no TE header implies *only* "chunked"
#self.putheader('TE', 'chunked')
# if TE is supplied in the header, then it must appear in a
# Connection header.
#self.putheader('Connection', 'TE')
else:
# For HTTP/1.0, the server will assume "not chunked"
pass
def putheader(self, header, *values):
"""Send a request header line to the server.
For example: h.putheader('Accept', 'text/html')
"""
if self.__state != _CS_REQ_STARTED:
raise CannotSendHeader()
if hasattr(header, 'encode'):
header = header.encode('ascii')
values = list(values)
for i, one_value in enumerate(values):
if hasattr(one_value, 'encode'):
values[i] = one_value.encode('latin-1')
elif isinstance(one_value, int):
values[i] = str(one_value).encode('ascii')
value = bytes(b'\r\n\t').join(values)
header = header + bytes(b': ') + value
self._output(header)
def endheaders(self, message_body=None):
"""Indicate that the last header line has been sent to the server.
This method sends the request to the server. The optional message_body
argument can be used to pass a message body associated with the
request. The message body will be sent in the same packet as the
message headers if it is a string, otherwise it is sent as a separate
packet.
"""
if self.__state == _CS_REQ_STARTED:
self.__state = _CS_REQ_SENT
else:
raise CannotSendHeader()
self._send_output(message_body)
def request(self, method, url, body=None, headers={}):
"""Send a complete request to the server."""
self._send_request(method, url, body, headers)
def _set_content_length(self, body):
# Set the content-length based on the body.
thelen = None
try:
thelen = str(len(body))
except TypeError as te:
# If this is a file-like object, try to
# fstat its file descriptor
try:
thelen = str(os.fstat(body.fileno()).st_size)
except (AttributeError, OSError):
# Don't send a length if this failed
if self.debuglevel > 0: print("Cannot stat!!")
if thelen is not None:
self.putheader('Content-Length', thelen)
def _send_request(self, method, url, body, headers):
# Honor explicitly requested Host: and Accept-Encoding: headers.
header_names = dict.fromkeys([k.lower() for k in headers])
skips = {}
if 'host' in header_names:
skips['skip_host'] = 1
if 'accept-encoding' in header_names:
skips['skip_accept_encoding'] = 1
self.putrequest(method, url, **skips)
if body is not None and ('content-length' not in header_names):
self._set_content_length(body)
for hdr, value in headers.items():
self.putheader(hdr, value)
if isinstance(body, str):
            # RFC 2616 Section 3.7.1 says that text media types have a
            # default charset of iso-8859-1.
body = body.encode('iso-8859-1')
self.endheaders(body)
def getresponse(self):
"""Get the response from the server.
If the HTTPConnection is in the correct state, returns an
instance of HTTPResponse or of whatever object is returned by
        the response_class attribute.
If a request has not been sent or if a previous response has
        not been handled, ResponseNotReady is raised. If the HTTP
response indicates that the connection should be closed, then
it will be closed before the response is returned. When the
connection is closed, the underlying socket is closed.
"""
# if a prior response has been completed, then forget about it.
if self.__response and self.__response.isclosed():
self.__response = None
# if a prior response exists, then it must be completed (otherwise, we
# cannot read this response's header to determine the connection-close
# behavior)
#
# note: if a prior response existed, but was connection-close, then the
# socket and response were made independent of this HTTPConnection
# object since a new request requires that we open a whole new
# connection
#
# this means the prior response had one of two states:
# 1) will_close: this connection was reset and the prior socket and
# response operate independently
# 2) persistent: the response was retained and we await its
# isclosed() status to become true.
#
if self.__state != _CS_REQ_SENT or self.__response:
raise ResponseNotReady(self.__state)
if self.debuglevel > 0:
response = self.response_class(self.sock, self.debuglevel,
method=self._method)
else:
response = self.response_class(self.sock, method=self._method)
response.begin()
assert response.will_close != _UNKNOWN
self.__state = _CS_IDLE
if response.will_close:
# this effectively passes the connection to the response
self.close()
else:
# remember this, so we can tell when it is complete
self.__response = response
return response
try:
import ssl
from ssl import SSLContext
except ImportError:
pass
else:
class HTTPSConnection(HTTPConnection):
"This class allows communication via SSL."
default_port = HTTPS_PORT
# XXX Should key_file and cert_file be deprecated in favour of context?
def __init__(self, host, port=None, key_file=None, cert_file=None,
strict=_strict_sentinel, timeout=socket._GLOBAL_DEFAULT_TIMEOUT,
source_address=None, **_3to2kwargs):
if 'check_hostname' in _3to2kwargs: check_hostname = _3to2kwargs['check_hostname']; del _3to2kwargs['check_hostname']
else: check_hostname = None
if 'context' in _3to2kwargs: context = _3to2kwargs['context']; del _3to2kwargs['context']
else: context = None
super(HTTPSConnection, self).__init__(host, port, strict, timeout,
source_address)
self.key_file = key_file
self.cert_file = cert_file
if context is None:
# Some reasonable defaults
context = ssl.SSLContext(ssl.PROTOCOL_SSLv23)
context.options |= ssl.OP_NO_SSLv2
will_verify = context.verify_mode != ssl.CERT_NONE
if check_hostname is None:
check_hostname = will_verify
elif check_hostname and not will_verify:
raise ValueError("check_hostname needs a SSL context with "
"either CERT_OPTIONAL or CERT_REQUIRED")
if key_file or cert_file:
context.load_cert_chain(cert_file, key_file)
self._context = context
self._check_hostname = check_hostname
def connect(self):
"Connect to a host on a given (SSL) port."
sock = socket_create_connection((self.host, self.port),
self.timeout, self.source_address)
if self._tunnel_host:
self.sock = sock
self._tunnel()
server_hostname = self.host if ssl.HAS_SNI else None
self.sock = self._context.wrap_socket(sock,
server_hostname=server_hostname)
try:
if self._check_hostname:
ssl.match_hostname(self.sock.getpeercert(), self.host)
except Exception:
self.sock.shutdown(socket.SHUT_RDWR)
self.sock.close()
raise
__all__.append("HTTPSConnection")
# ######################################
# # We use the old HTTPSConnection class from Py2.7, because ssl.SSLContext
# # doesn't exist in the Py2.7 stdlib
# class HTTPSConnection(HTTPConnection):
# "This class allows communication via SSL."
# default_port = HTTPS_PORT
# def __init__(self, host, port=None, key_file=None, cert_file=None,
# strict=None, timeout=socket._GLOBAL_DEFAULT_TIMEOUT,
# source_address=None):
# HTTPConnection.__init__(self, host, port, strict, timeout,
# source_address)
# self.key_file = key_file
# self.cert_file = cert_file
# def connect(self):
# "Connect to a host on a given (SSL) port."
# sock = socket_create_connection((self.host, self.port),
# self.timeout, self.source_address)
# if self._tunnel_host:
# self.sock = sock
# self._tunnel()
# self.sock = ssl.wrap_socket(sock, self.key_file, self.cert_file)
# __all__.append("HTTPSConnection")
# ######################################
class HTTPException(Exception):
# Subclasses that define an __init__ must call Exception.__init__
# or define self.args. Otherwise, str() will fail.
pass
class NotConnected(HTTPException):
pass
class InvalidURL(HTTPException):
pass
class UnknownProtocol(HTTPException):
def __init__(self, version):
self.args = version,
self.version = version
class UnknownTransferEncoding(HTTPException):
pass
class UnimplementedFileMode(HTTPException):
pass
class IncompleteRead(HTTPException):
def __init__(self, partial, expected=None):
self.args = partial,
self.partial = partial
self.expected = expected
def __repr__(self):
if self.expected is not None:
e = ', %i more expected' % self.expected
else:
e = ''
return 'IncompleteRead(%i bytes read%s)' % (len(self.partial), e)
def __str__(self):
return repr(self)
class ImproperConnectionState(HTTPException):
pass
class CannotSendRequest(ImproperConnectionState):
pass
class CannotSendHeader(ImproperConnectionState):
pass
class ResponseNotReady(ImproperConnectionState):
pass
class BadStatusLine(HTTPException):
def __init__(self, line):
if not line:
line = repr(line)
self.args = line,
self.line = line
class LineTooLong(HTTPException):
def __init__(self, line_type):
HTTPException.__init__(self, "got more than %d bytes when reading %s"
% (_MAXLINE, line_type))
# for backwards compatibility
error = HTTPException
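# ---------------------------------------------------------------------------
# Usage sketch (added for illustration, not part of the original backport):
# the request/getresponse cycle described in the docstrings above looks
# roughly like this.  The host name is only a placeholder and the call needs
# network access, so it runs only when the module is executed directly.
if __name__ == "__main__":
    conn = HTTPConnection("example.com", 80, timeout=10)
    conn.request("GET", "/", headers={"Accept": "text/html"})
    resp = conn.getresponse()
    print(resp.status, resp.reason)
    body = resp.read()  # the response must be read before reusing the connection
    conn.close()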
|
py | 1a43f14b16c382b4c9700c6c646472d17d60bc67 | # Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
#
# http://www.apache.org/licenses/LICENSE-2.0
#
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
from aliyunsdkcore.request import RpcRequest
from aliyunsdkdataworks_public.endpoint import endpoint_data
class SetSuccessInstanceRequest(RpcRequest):
def __init__(self):
RpcRequest.__init__(self, 'dataworks-public', '2020-05-18', 'SetSuccessInstance')
self.set_method('POST')
if hasattr(self, "endpoint_map"):
setattr(self, "endpoint_map", endpoint_data.getEndpointMap())
if hasattr(self, "endpoint_regional"):
setattr(self, "endpoint_regional", endpoint_data.getEndpointRegional())
def get_ProjectEnv(self):
return self.get_body_params().get('ProjectEnv')
def set_ProjectEnv(self,ProjectEnv):
self.add_body_params('ProjectEnv', ProjectEnv)
def get_InstanceId(self):
return self.get_body_params().get('InstanceId')
def set_InstanceId(self,InstanceId):
self.add_body_params('InstanceId', InstanceId) |
py | 1a43f18ee5a799538e2ce8771bad789aaa6c5941 | from scipy.spatial.transform.rotation import Rotation
from alitra import Euler, Quaternion
def quaternion_to_euler(
quaternion: Quaternion, sequence: str = "ZYX", degrees: bool = False
) -> Euler:
"""
Transform a quaternion into Euler angles.
:param quaternion: A Quaternion object.
:param sequence: Rotation sequence for the Euler angles.
:param degrees: Set to true if the resulting Euler angles should be in degrees. Default is radians.
:return: Euler object.
"""
rotation_object: Rotation = Rotation.from_quat(quaternion.as_np_array())
euler: Euler = Euler.from_array(
rotation_object.as_euler(sequence, degrees=degrees), frame="robot"
)
return euler
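# ---------------------------------------------------------------------------
# Minimal usage sketch (added for illustration): convert the identity
# quaternion to Euler angles.  The keyword arguments passed to Quaternion
# (x, y, z, w, frame) are an assumption about alitra's dataclass fields,
# mirroring the frame="robot" convention used above.
if __name__ == "__main__":
    identity = Quaternion(x=0.0, y=0.0, z=0.0, w=1.0, frame="robot")
    print(quaternion_to_euler(identity, sequence="ZYX", degrees=True))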
|
py | 1a43f22468e8e93068f9e2480fa3b902f8aaa998 |
## Here we use logistic regression on scanned images of handwritten digits to predict which digit each image shows.
from sklearn import datasets
from sklearn.linear_model import LogisticRegression
## Load the data
digits = datasets.load_digits()
X_digits = digits.data
y_digits = digits.target
n_samples = len(X_digits)
## Split into training and test sets (slice indices must be integers)
split = int(0.9 * n_samples)
X_train = X_digits[:split]
y_train = y_digits[:split]
X_test = X_digits[split:]
y_test = y_digits[split:]
model = LogisticRegression()
## Train the model
model.fit(X_train, y_train)
## Make predictions
prediction = model.predict(X_test)
score = model.score(X_test, y_test)
print(score)
|
py | 1a43f23a26299d626e34ba63db069c6dd7476444 | # Copyright 2009 Armin Hornung, University of Freiburg
# http://www.ros.org/wiki/nao
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are met:
#
# # Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
# # Redistributions in binary form must reproduce the above copyright
# notice, this list of conditions and the following disclaimer in the
# documentation and/or other materials provided with the distribution.
# # Neither the name of the University of Freiburg nor the names of its
# contributors may be used to endorse or promote products derived from
# this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
# AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
# IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
# ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE
# LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
# CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
# SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
# INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
# CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
# ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
# POSSIBILITY OF SUCH DAMAGE.
#
from .naoqi_node import *
|
py | 1a43f29febb295d2fb32057e7d2c3de3add0f86d | from __future__ import division
import chainer
import chainer.functions as F
from chainercv.links import Conv2DBNActiv
from chainercv.links import SeparableConv2DBNActiv
class SeparableASPP(chainer.Chain):
"""Atrous Spatial Pyramid Pooling with Separable Convolution.
average pooling with FC layer
1x1 Convolution
in --> Separable Convolution(k=12) --> concat --> 1x1 Convolution
Separable Convolution(k=24)
Separable Convolution(k=36)
Args:
in_channels (int): Number of channels of input arrays.
out_channels (int): Number of channels of output arrays.
dilate_list (tuple of ints): Tuple of Dilation factors.
the length of this tuple must be 3.
bn_kwargs (dict): Keywod arguments passed to initialize the batch
normalization layers of :class:`chainercv.links.Conv2DBNActiv` and
:class:`chainercv.links.SeparableConv2DBNActiv`.
"""
def __init__(self, in_channels, out_channels,
dilate_list=(12, 24, 36), bn_kwargs={}):
super(SeparableASPP, self).__init__()
with self.init_scope():
self.image_pooling_conv = Conv2DBNActiv(
in_channels, out_channels, 1, bn_kwargs=bn_kwargs)
self.conv1x1 = Conv2DBNActiv(
in_channels, out_channels, 1, bn_kwargs=bn_kwargs)
self.atrous1 = SeparableConv2DBNActiv(
in_channels, out_channels, 3, 1,
dilate_list[0], dilate_list[0], nobias=True,
dw_activ=F.relu, pw_activ=F.relu, bn_kwargs=bn_kwargs)
self.atrous2 = SeparableConv2DBNActiv(
in_channels, out_channels, 3, 1,
dilate_list[1], dilate_list[1], nobias=True,
dw_activ=F.relu, pw_activ=F.relu, bn_kwargs=bn_kwargs)
self.atrous3 = SeparableConv2DBNActiv(
in_channels, out_channels, 3, 1,
dilate_list[2], dilate_list[2], nobias=True,
dw_activ=F.relu, pw_activ=F.relu, bn_kwargs=bn_kwargs)
self.proj = Conv2DBNActiv(
out_channels * 5, out_channels, 1, bn_kwargs=bn_kwargs)
def image_pooling(self, x):
_, _, H, W = x.shape
x = F.average(x, axis=(2, 3), keepdims=True)
x = self.image_pooling_conv(x)
B, C, _, _ = x.shape
x = F.broadcast_to(x, (B, C, H, W))
return x
def forward(self, x):
h = []
h.append(self.image_pooling(x))
h.append(self.conv1x1(x))
h.append(self.atrous1(x))
h.append(self.atrous2(x))
h.append(self.atrous3(x))
h = F.concat(h, axis=1)
h = self.proj(h)
h = F.dropout(h)
return h
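# ---------------------------------------------------------------------------
# Shape-check sketch (added for illustration): run the block on a dummy
# feature map.  The channel sizes (2048 -> 256) follow the usual DeepLab v3+
# encoder output and are an assumption, not something defined in this file.
if __name__ == "__main__":
    import numpy as np
    aspp = SeparableASPP(in_channels=2048, out_channels=256)
    x = np.zeros((1, 2048, 33, 33), dtype=np.float32)
    with chainer.using_config('train', False):
        y = aspp(x)
    print(y.shape)  # expected: (1, 256, 33, 33)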
|
py | 1a43f2deb57e4ef8ae8ac89882a633a04e910ab3 | from setuptools import setup
import sys
if sys.version_info < (3, 5):
sys.exit('Python < 3.5 is not supported')
version = '0.74'
setup(
name='steampy',
packages=['steampy', 'test', 'examples', ],
version=version,
description='A Steam lib for trade automation',
author='Michał Bukowski',
author_email='[email protected]',
license='MIT',
url='https://github.com/bukson/steampy',
download_url='https://github.com/bukson/steampy/tarball/' + version,
keywords=['steam', 'trade', ],
classifiers=[],
install_requires=[
"requests",
"beautifulsoup4",
"rsa"
],
package_data = {
'steampy': ['py.typed'],
},
)
|
py | 1a43f2f39945fe05eac959deb64e6e15273966bf | """Moses tests."""
from typing import ClassVar, Type
import pytest
from gt4sd.algorithms.conditional_generation.guacamol import (
AaeGenerator,
MosesGenerator,
OrganGenerator,
VaeGenerator,
)
from gt4sd.algorithms.core import AlgorithmConfiguration
from gt4sd.algorithms.registry import ApplicationsRegistry
def get_classvar_type(class_var):
"""Extract type from ClassVar type annotation: `ClassVar[T]] -> T`."""
return class_var.__args__[0]
@pytest.mark.parametrize(
"config_class, algorithm_type, domain, algorithm_name",
[
(
AaeGenerator,
"conditional_generation",
"materials",
MosesGenerator.__name__,
),
(
VaeGenerator,
"conditional_generation",
"materials",
MosesGenerator.__name__,
),
(
OrganGenerator,
"conditional_generation",
"materials",
MosesGenerator.__name__,
),
],
)
def test_config_class(
config_class: Type[AlgorithmConfiguration],
algorithm_type: str,
domain: str,
algorithm_name: str,
):
assert config_class.algorithm_type == algorithm_type
assert config_class.domain == domain
assert config_class.algorithm_name == algorithm_name
for keyword, type_annotation in config_class.__annotations__.items():
if keyword in ("algorithm_type", "domain", "algorithm_name"):
assert type_annotation.__origin__ is ClassVar # type: ignore
assert str == get_classvar_type(type_annotation)
@pytest.mark.parametrize(
"config_class",
[(AaeGenerator), (VaeGenerator), (OrganGenerator)],
)
def test_config_instance(config_class: Type[AlgorithmConfiguration]):
config = config_class() # type:ignore
assert config.algorithm_application == config_class.__name__
@pytest.mark.parametrize(
"config_class",
[(AaeGenerator), (VaeGenerator), (OrganGenerator)],
)
def test_available_versions(config_class: Type[AlgorithmConfiguration]):
versions = config_class.list_versions()
assert "v0" in versions
@pytest.mark.parametrize(
"config, algorithm",
[
(AaeGenerator, MosesGenerator),
(VaeGenerator, MosesGenerator),
(OrganGenerator, MosesGenerator),
],
)
def test_generation_via_import(config, algorithm):
config = config()
algorithm = algorithm(configuration=config, target="")
items = list(algorithm.sample(2))
assert len(items) == 2
@pytest.mark.parametrize(
"algorithm_application, algorithm_type, domain, algorithm_name",
[
(
AaeGenerator.__name__,
"conditional_generation",
"materials",
MosesGenerator.__name__,
),
(
VaeGenerator.__name__,
"conditional_generation",
"materials",
MosesGenerator.__name__,
),
(
OrganGenerator.__name__,
"conditional_generation",
"materials",
MosesGenerator.__name__,
),
],
)
def test_generation_via_registry(
algorithm_type, domain, algorithm_name, algorithm_application
):
algorithm = ApplicationsRegistry.get_application_instance(
algorithm_type=algorithm_type,
domain=domain,
algorithm_name=algorithm_name,
algorithm_application=algorithm_application,
)
items = list(algorithm.sample(5))
assert len(items) == 5
|
py | 1a43f46fccc183e38f1c5ab8c182ef7b70ddde74 | """
Copyright (c) 2019 Imperial College London.
This source code is licensed under the MIT license found in the
LICENSE file in the root directory of this source tree.
"""
import torch
import torch.nn as nn
from . import net_utils
class _Residual_Block(nn.Module):
def __init__(self, num_chans=64):
super(_Residual_Block, self).__init__()
bias = True
#res1
self.conv1 = nn.Conv2d(num_chans, num_chans, kernel_size=3, stride=1, padding=1, bias=bias)
self.relu2 = nn.PReLU()
self.conv3 = nn.Conv2d(num_chans, num_chans, kernel_size=3, stride=1, padding=1, bias=bias)
self.relu4 = nn.PReLU()
#res1
#concat1
self.conv5 = nn.Conv2d(num_chans, num_chans * 2, kernel_size=3, stride=2, padding=1, bias=bias)
self.relu6 = nn.PReLU()
#res2
self.conv7 = nn.Conv2d(num_chans * 2, num_chans * 2, kernel_size=3, stride=1, padding=1, bias=bias)
self.relu8 = nn.PReLU()
#res2
#concat2
self.conv9 = nn.Conv2d(num_chans * 2, num_chans * 4, kernel_size=3, stride=2, padding=1, bias=bias)
self.relu10 = nn.PReLU()
#res3
self.conv11 = nn.Conv2d(num_chans * 4, num_chans * 4, kernel_size=3, stride=1, padding=1, bias=bias)
self.relu12 = nn.PReLU()
#res3
self.conv13 = nn.Conv2d(num_chans * 4, num_chans * 8, kernel_size=1, stride=1, padding=0, bias=bias)
self.up14 = nn.PixelShuffle(2)
#concat2
self.conv15 = nn.Conv2d(num_chans * 4, num_chans * 2, kernel_size=1, stride=1, padding=0, bias=bias)
#res4
self.conv16 = nn.Conv2d(num_chans * 2, num_chans * 2, kernel_size=3, stride=1, padding=1, bias=bias)
self.relu17 = nn.PReLU()
#res4
self.conv18 = nn.Conv2d(num_chans * 2, num_chans * 4, kernel_size=1, stride=1, padding=0, bias=bias)
self.up19 = nn.PixelShuffle(2)
#concat1
self.conv20 = nn.Conv2d(num_chans * 2, num_chans, kernel_size=1, stride=1, padding=0, bias=bias)
#res5
self.conv21 = nn.Conv2d(num_chans, num_chans, kernel_size=3, stride=1, padding=1, bias=bias)
self.relu22 = nn.PReLU()
self.conv23 = nn.Conv2d(num_chans, num_chans, kernel_size=3, stride=1, padding=1, bias=bias)
self.relu24 = nn.PReLU()
#res5
self.conv25 = nn.Conv2d(num_chans, num_chans, kernel_size=3, stride=1, padding=1, bias=bias)
def forward(self, x):
res1 = x
out = self.relu4(self.conv3(self.relu2(self.conv1(x))))
out = torch.add(res1, out)
cat1 = out
out = self.relu6(self.conv5(out))
res2 = out
out = self.relu8(self.conv7(out))
out = torch.add(res2, out)
cat2 = out
out = self.relu10(self.conv9(out))
res3 = out
out = self.relu12(self.conv11(out))
out = torch.add(res3, out)
out = self.up14(self.conv13(out))
out = torch.cat([out, cat2], 1)
out = self.conv15(out)
res4 = out
out = self.relu17(self.conv16(out))
out = torch.add(res4, out)
out = self.up19(self.conv18(out))
out = torch.cat([out, cat1], 1)
out = self.conv20(out)
res5 = out
out = self.relu24(self.conv23(self.relu22(self.conv21(out))))
out = torch.add(res5, out)
out = self.conv25(out)
out = torch.add(out, res1)
return out
class Recon_Block(nn.Module):
def __init__(self, num_chans=64):
super(Recon_Block, self).__init__()
bias=True
self.conv1 = nn.Conv2d(num_chans, num_chans, kernel_size=3, stride=1, padding=1, bias=bias)
self.relu2 = nn.PReLU()
self.conv3 = nn.Conv2d(num_chans, num_chans, kernel_size=3, stride=1, padding=1, bias=bias)
self.relu4 = nn.PReLU()
self.conv5 = nn.Conv2d(num_chans, num_chans, kernel_size=3, stride=1, padding=1, bias=bias)
self.relu6= nn.PReLU()
self.conv7 = nn.Conv2d(num_chans, num_chans, kernel_size=3, stride=1, padding=1, bias=bias)
self.relu8 = nn.PReLU()
self.conv9 = nn.Conv2d(num_chans, num_chans, kernel_size=3, stride=1, padding=1, bias=bias)
self.relu10 = nn.PReLU()
self.conv11 = nn.Conv2d(num_chans, num_chans, kernel_size=3, stride=1, padding=1, bias=bias)
self.relu12 = nn.PReLU()
self.conv13 = nn.Conv2d(num_chans, num_chans, kernel_size=3, stride=1, padding=1, bias=bias)
self.relu14 = nn.PReLU()
self.conv15 = nn.Conv2d(num_chans, num_chans, kernel_size=3, stride=1, padding=1, bias=bias)
self.relu16 = nn.PReLU()
self.conv17 = nn.Conv2d(num_chans, num_chans, kernel_size=3, stride=1, padding=1, bias=bias)
def forward(self, x):
res1 = x
output = self.relu4(self.conv3(self.relu2(self.conv1(x))))
output = torch.add(output, res1)
res2 = output
output = self.relu8(self.conv7(self.relu6(self.conv5(output))))
output = torch.add(output, res2)
res3 = output
output = self.relu12(self.conv11(self.relu10(self.conv9(output))))
output = torch.add(output, res3)
res4 = output
output = self.relu16(self.conv15(self.relu14(self.conv13(output))))
output = torch.add(output, res4)
output = self.conv17(output)
output = torch.add(output, res1)
return output
class DIDN(nn.Module):
"""
Deep Iterative Down-Up Network, NTIRE denoising challenge winning entry
    Source: http://openaccess.thecvf.com/content_CVPRW_2019/papers/NTIRE/Yu_Deep_Iterative_Down-Up_CNN_for_Image_Denoising_CVPRW_2019_paper.pdf
"""
def __init__(self, in_chans, out_chans, num_chans=64,
pad_data=True, global_residual=True, n_res_blocks=6):
super().__init__()
self.pad_data = pad_data
self.global_residual = global_residual
bias=True
self.conv_input = nn.Conv2d(in_chans, num_chans, kernel_size=3, stride=1, padding=1, bias=bias)
self.relu1 = nn.PReLU()
self.conv_down = nn.Conv2d(num_chans, num_chans, kernel_size=3, stride=2, padding=1, bias=bias)
self.relu2 = nn.PReLU()
self.n_res_blocks = n_res_blocks
recursive = []
for i in range(self.n_res_blocks):
recursive.append(_Residual_Block(num_chans))
self.recursive = torch.nn.ModuleList(recursive)
self.conv_mid = nn.Conv2d(num_chans * self.n_res_blocks, num_chans, kernel_size=1, stride=1, padding=0, bias=bias)
self.relu3 = nn.PReLU()
self.conv_mid2 = nn.Conv2d(num_chans, num_chans, kernel_size=3, stride=1, padding=1, bias=bias)
self.relu4 = nn.PReLU()
self.subpixel = nn.PixelShuffle(2)
self.conv_output = nn.Conv2d(num_chans // 4, out_chans, kernel_size=3, stride=1, padding=1, bias=bias)
def forward(self, x):
if self.pad_data:
orig_shape2d = x.shape[-2:]
p2d = net_utils.calculate_downsampling_padding2d(x, 3)
x = net_utils.pad2d(x, p2d)
residual = x
out = self.relu1(self.conv_input(x))
out = self.relu2(self.conv_down(out))
recons = []
for i in range(self.n_res_blocks):
out = self.recursive[i](out)
recons.append(out)
out = torch.cat(recons, 1)
out = self.relu3(self.conv_mid(out))
residual2 = out
out = self.relu4(self.conv_mid2(out))
out = torch.add(out, residual2)
out= self.subpixel(out)
out = self.conv_output(out)
if self.global_residual:
out = torch.add(out, residual)
if self.pad_data:
out = net_utils.unpad2d(out, orig_shape2d)
return out
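# ---------------------------------------------------------------------------
# Shape-check sketch (added for illustration): run a dummy 2-channel image
# (e.g. a complex-valued MRI slice) through the network.  The channel count
# and image size are assumptions for the example; execute it as part of the
# package (python -m ...) since this module uses a relative import.
if __name__ == "__main__":
    model = DIDN(in_chans=2, out_chans=2, num_chans=64)
    x = torch.randn(1, 2, 128, 128)
    with torch.no_grad():
        y = model(x)
    print(y.shape)  # expected: torch.Size([1, 2, 128, 128])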
|
py | 1a43f49614df1f1c92908f1b529c9e934b925ab2 | # Copyright 2017 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Contains a factory for building various models."""
from __future__ import absolute_import, division, print_function
import functools
import tensorflow as tf
from nets import (alexnet, cifarnet, inception, lenet, mobilenet_v1, overfeat,
resnet_v1, resnet_v2, vgg)
from nets.mobilenet import mobilenet_v2
from nets.nasnet import nasnet, pnasnet
slim = tf.contrib.slim
networks_map = {'alexnet_v2': alexnet.alexnet_v2,
'cifarnet': cifarnet.cifarnet,
'overfeat': overfeat.overfeat,
'vgg_a': vgg.vgg_a,
'vgg_16': vgg.vgg_16,
'vgg_19': vgg.vgg_19,
'inception_v1': inception.inception_v1,
'inception_v2': inception.inception_v2,
'inception_v3': inception.inception_v3,
'inception_v4': inception.inception_v4,
'inception_resnet_v2': inception.inception_resnet_v2,
'lenet': lenet.lenet,
'resnet_v1_50': resnet_v1.resnet_v1_50,
'resnet_v1_101': resnet_v1.resnet_v1_101,
'resnet_v1_152': resnet_v1.resnet_v1_152,
'resnet_v1_200': resnet_v1.resnet_v1_200,
'resnet_v2_50': resnet_v2.resnet_v2_50,
'resnet_v2_101': resnet_v2.resnet_v2_101,
'resnet_v2_152': resnet_v2.resnet_v2_152,
'resnet_v2_200': resnet_v2.resnet_v2_200,
'mobilenet_v1': mobilenet_v1.mobilenet_v1,
'mobilenet_v1_075': mobilenet_v1.mobilenet_v1_075,
'mobilenet_v1_050': mobilenet_v1.mobilenet_v1_050,
'mobilenet_v1_025': mobilenet_v1.mobilenet_v1_025,
'mobilenet_v2': mobilenet_v2.mobilenet,
'nasnet_cifar': nasnet.build_nasnet_cifar,
'nasnet_mobile': nasnet.build_nasnet_mobile,
'nasnet_large': nasnet.build_nasnet_large,
'pnasnet_large': pnasnet.build_pnasnet_large,
}
arg_scopes_map = {'alexnet_v2': alexnet.alexnet_v2_arg_scope,
'cifarnet': cifarnet.cifarnet_arg_scope,
'overfeat': overfeat.overfeat_arg_scope,
'vgg_a': vgg.vgg_arg_scope,
'vgg_16': vgg.vgg_arg_scope,
'vgg_19': vgg.vgg_arg_scope,
'inception_v1': inception.inception_v3_arg_scope,
'inception_v2': inception.inception_v3_arg_scope,
'inception_v3': inception.inception_v3_arg_scope,
'inception_v4': inception.inception_v4_arg_scope,
'inception_resnet_v2':
inception.inception_resnet_v2_arg_scope,
'lenet': lenet.lenet_arg_scope,
'resnet_v1_50': resnet_v1.resnet_arg_scope,
'resnet_v1_101': resnet_v1.resnet_arg_scope,
'resnet_v1_152': resnet_v1.resnet_arg_scope,
'resnet_v1_200': resnet_v1.resnet_arg_scope,
'resnet_v2_50': resnet_v2.resnet_arg_scope,
'resnet_v2_101': resnet_v2.resnet_arg_scope,
'resnet_v2_152': resnet_v2.resnet_arg_scope,
'resnet_v2_200': resnet_v2.resnet_arg_scope,
'mobilenet_v1': mobilenet_v1.mobilenet_v1_arg_scope,
'mobilenet_v1_075': mobilenet_v1.mobilenet_v1_arg_scope,
'mobilenet_v1_050': mobilenet_v1.mobilenet_v1_arg_scope,
'mobilenet_v1_025': mobilenet_v1.mobilenet_v1_arg_scope,
'mobilenet_v2': mobilenet_v2.training_scope,
'nasnet_cifar': nasnet.nasnet_cifar_arg_scope,
'nasnet_mobile': nasnet.nasnet_mobile_arg_scope,
'nasnet_large': nasnet.nasnet_large_arg_scope,
'pnasnet_large': pnasnet.pnasnet_large_arg_scope,
}
def get_network_fn(name, num_classes, weight_decay=0.0, is_training=False):
"""Returns a network_fn such as `logits, end_points = network_fn(images)`.
Args:
name: The name of the network.
num_classes: The number of classes to use for classification. If 0 or None,
the logits layer is omitted and its input features are returned instead.
weight_decay: The l2 coefficient for the model weights.
is_training: `True` if the model is being used for training and `False`
otherwise.
Returns:
network_fn: A function that applies the model to a batch of images. It has
the following signature:
net, end_points = network_fn(images)
The `images` input is a tensor of shape [batch_size, height, width, 3]
with height = width = network_fn.default_image_size. (The permissibility
and treatment of other sizes depends on the network_fn.)
The returned `end_points` are a dictionary of intermediate activations.
The returned `net` is the topmost layer, depending on `num_classes`:
If `num_classes` was a non-zero integer, `net` is a logits tensor
of shape [batch_size, num_classes].
If `num_classes` was 0 or `None`, `net` is a tensor with the input
to the logits layer of shape [batch_size, 1, 1, num_features] or
[batch_size, num_features]. Dropout has not been applied to this
(even if the network's original classification does); it remains for
the caller to do this or not.
Raises:
ValueError: If network `name` is not recognized.
"""
if name not in networks_map:
raise ValueError('Name of network unknown %s' % name)
func = networks_map[name]
@functools.wraps(func)
def network_fn(images, **kwargs):
arg_scope = arg_scopes_map[name](weight_decay=weight_decay)
with slim.arg_scope(arg_scope):
return func(images, num_classes, is_training=is_training, **kwargs)
if hasattr(func, 'default_image_size'):
network_fn.default_image_size = func.default_image_size
return network_fn
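# ---------------------------------------------------------------------------
# Usage sketch (added for illustration): build a classifier function and apply
# it to a batch of images, as described in the get_network_fn docstring.  The
# network name and class count are placeholders; guarded so nothing runs on
# import.
if __name__ == '__main__':
    network_fn = get_network_fn('resnet_v1_50', num_classes=1000,
                                is_training=False)
    size = network_fn.default_image_size
    images = tf.placeholder(tf.float32, shape=[None, size, size, 3])
    logits, end_points = network_fn(images)
    print(logits, sorted(end_points.keys())[:5])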
|
py | 1a43f523438bebf69afe91f6511886c437a9e96c | import datetime
from django.test import TestCase
from django.utils import timezone
from django.urls import reverse
from .models import Question
class QuestionModelTests(TestCase):
def test_was_published_recently_with_future_question(self):
"""
was_published_recently() returns False for questions whose pub_date
is in the future.
"""
time = timezone.now() + datetime.timedelta(days=30)
future_question = Question(pub_date=time)
self.assertIs(future_question.was_published_recently(), False)
def test_was_published_recently_with_old_question(self):
"""
was_published_recently() returns False for questions whose pub_date
is older than 1 day.
"""
time = timezone.now() - datetime.timedelta(days=1, seconds=1)
old_question = Question(pub_date=time)
self.assertIs(old_question.was_published_recently(), False)
def test_was_published_recently_with_recent_question(self):
"""
was_published_recently() returns True for questions whose pub_date
is within the last day.
"""
time = timezone.now() - datetime.timedelta(hours=23, minutes=59, seconds=59)
recent_question = Question(pub_date=time)
self.assertIs(recent_question.was_published_recently(), True)
def create_question(question_text, days):
"""
Create a question with the given `question_text` and published the
given number of `days` offset to now (negative for questions published
in the past, positive for questions that have yet to be published).
"""
time = timezone.now() + datetime.timedelta(days=days)
return Question.objects.create(question_text=question_text, pub_date=time)
class QuestionIndexViewTests(TestCase):
def test_no_questions(self):
"""
If no questions exist, an appropriate message is displayed.
"""
response = self.client.get(reverse('polls:index'))
self.assertEqual(response.status_code, 200)
self.assertContains(response, "No polls are available. 할당된 설문이 없습니다. /admin에서 설문을 생성하세요.")
self.assertQuerysetEqual(response.context['latest_question_list'], [])
def test_past_question(self):
"""
Questions with a pub_date in the past are displayed on the
index page.
"""
create_question(question_text="Past question.", days=-30)
response = self.client.get(reverse('polls:index'))
self.assertQuerysetEqual(
response.context['latest_question_list'],
['<Question: Past question.>']
)
def test_future_question(self):
"""
Questions with a pub_date in the future aren't displayed on
the index page.
"""
create_question(question_text="Future question.", days=30)
response = self.client.get(reverse('polls:index'))
self.assertContains(response, "No polls are available.")
self.assertQuerysetEqual(response.context['latest_question_list'], [])
def test_future_question_and_past_question(self):
"""
Even if both past and future questions exist, only past questions
are displayed.
"""
create_question(question_text="Past question.", days=-30)
create_question(question_text="Future question.", days=30)
response = self.client.get(reverse('polls:index'))
self.assertQuerysetEqual(
response.context['latest_question_list'],
['<Question: Past question.>']
)
def test_two_past_questions(self):
"""
The questions index page may display multiple questions.
"""
create_question(question_text="Past question 1.", days=-30)
create_question(question_text="Past question 2.", days=-5)
response = self.client.get(reverse('polls:index'))
self.assertQuerysetEqual(
response.context['latest_question_list'],
['<Question: Past question 2.>', '<Question: Past question 1.>']
)
class QuestionDetailViewTests(TestCase):
def test_future_question(self):
"""
The detail view of a question with a pub_date in the future
returns a 404 not found.
"""
future_question = create_question(question_text='Future question.', days=5)
url = reverse('polls:detail', args=(future_question.id,))
response = self.client.get(url)
self.assertEqual(response.status_code, 404)
def test_past_question(self):
"""
The detail view of a question with a pub_date in the past
displays the question's text.
"""
past_question = create_question(question_text='Past Question.', days=-5)
url = reverse('polls:detail', args=(past_question.id,))
response = self.client.get(url)
self.assertContains(response, past_question.question_text)
|
py | 1a43f66eb7d6e380e7387ac48b4379996c8fa737 | import os
import re
import shutil
import sys
import ctypes
from pathlib import Path
from colorama import Fore, Back, Style
from .settings import *
if sys.version_info[0] < 3 or sys.version_info[1] <= 5:
print("\nPlease restart with Python 3.6+\n")
print("Current Python version:", sys.version_info)
exit(-1)
ti_core = None
def in_docker():
if os.environ.get("TI_IN_DOCKER", "") == "":
return False
else:
return True
def import_ti_core(tmp_dir=None):
global ti_core
if get_os_name() != 'win':
old_flags = sys.getdlopenflags()
sys.setdlopenflags(2 | 8) # RTLD_NOW | RTLD_DEEPBIND
else:
pyddir = os.path.join(package_root(), 'lib')
os.environ['PATH'] += ';' + pyddir
try:
import taichi_core as core
except Exception as e:
if isinstance(e, ImportError):
print(
Fore.YELLOW + "Share object taichi_core import failed, "
"check this page for possible solutions:\n"
"https://taichi.readthedocs.io/en/stable/install.html#troubleshooting"
+ Fore.RESET)
raise e
ti_core = core
if get_os_name() != 'win':
sys.setdlopenflags(old_flags)
lib_dir = os.path.join(package_root(), 'lib')
core.set_lib_dir(locale_encode(lib_dir))
if tmp_dir is not None:
core.set_tmp_dir(locale_encode(tmp_dir))
def locale_encode(path):
try:
import locale
return path.encode(locale.getdefaultlocale()[1])
except:
try:
import sys
return path.encode(sys.getfilesystemencoding())
except:
try:
return path.encode()
except:
return path
def is_ci():
return os.environ.get('TI_CI', '') == '1'
def package_root():
return os.path.join(os.path.dirname(os.path.realpath(__file__)), '../')
def is_release():
return os.environ.get('TAICHI_REPO_DIR', '') == ''
def get_core_shared_object():
if is_release():
directory = os.path.join(package_root(), 'lib')
else:
directory = get_bin_directory()
return os.path.join(directory, 'libtaichi_core.so')
def get_repo():
from git import Repo
repo = Repo(get_repo_directory())
return repo
def print_red_bold(*args, **kwargs):
print(Fore.RED + Style.BRIGHT, end='')
print(*args, **kwargs)
print(Style.RESET_ALL, end='')
create_sand_box_on_windows = True
def build():
tmp_cwd = os.getcwd()
bin_dir = get_build_directory()
try:
os.mkdir(bin_dir)
except:
pass
os.chdir(bin_dir)
import multiprocessing
print('Building taichi...')
num_make_threads = min(20, multiprocessing.cpu_count())
if get_os_name() == 'win':
make_ret = os.system(
"msbuild /p:Configuration=Release /p:Platform=x64 /m taichi.sln")
else:
make_ret = os.system('make -j {}'.format(num_make_threads))
if make_ret != 0:
print(' Error: Build failed.')
exit(-1)
os.chdir(tmp_cwd)
def check_exists(src):
if not os.path.exists(src):
raise FileNotFoundError(
f'File "{src}" not exist. Installation corrupted or build incomplete?'
)
def prepare_sandbox():
'''
Returns a temporary directory, which will be automatically deleted on exit.
It may contain the taichi_core shared object or some misc. files.
'''
import atexit
import shutil
from tempfile import mkdtemp
tmp_dir = mkdtemp(prefix='taichi-')
atexit.register(shutil.rmtree, tmp_dir)
print(f'[Taichi] preparing sandbox at {tmp_dir}')
os.mkdir(os.path.join(tmp_dir, 'runtime/'))
return tmp_dir
def get_unique_task_id():
import datetime
import random
return datetime.datetime.now().strftime('task-%Y-%m-%d-%H-%M-%S-r') + (
'%05d' % random.randint(0, 10000))
if is_release():
print("[Taichi] mode=release")
sys.path.append(os.path.join(package_root(), 'lib'))
if get_os_name() != 'win':
link_src = os.path.join(package_root(), 'lib', 'taichi_core.so')
link_dst = os.path.join(package_root(), 'lib', 'libtaichi_core.so')
# For llvm jit to find the runtime symbols
if not os.path.exists(link_dst):
os.symlink(link_src, link_dst)
import_ti_core()
if get_os_name() != 'win':
dll = ctypes.CDLL(get_core_shared_object(), mode=ctypes.RTLD_LOCAL)
# The C backend needs a temporary directory for the generated .c and compiled .so files:
ti_core.set_tmp_dir(locale_encode(prepare_sandbox(
))) # TODO: always allocate a tmp_dir for all situations
ti_core.set_python_package_dir(package_root())
os.makedirs(ti_core.get_repo_dir(), exist_ok=True)
else:
print("[Taichi] mode=development")
if get_os_name() == 'osx':
bin_dir = get_bin_directory()
os.environ['DYLD_FALLBACK_LIBRARY_PATH'] = get_runtime_directory()
lib_path = os.path.join(bin_dir, 'libtaichi_core.dylib')
tmp_cwd = os.getcwd()
tmp_dir = prepare_sandbox()
check_exists(lib_path)
shutil.copy(lib_path, os.path.join(tmp_dir, 'taichi_core.so'))
os.chdir(tmp_dir)
sys.path.append(tmp_dir)
import taichi_core as ti_core
os.chdir(tmp_cwd)
# TODO: unify importing infrastructure:
elif get_os_name() == 'linux':
bin_dir = get_bin_directory()
if 'LD_LIBRARY_PATH' in os.environ:
os.environ['LD_LIBRARY_PATH'] += ':/usr/lib64/'
else:
os.environ['LD_LIBRARY_PATH'] = '/usr/lib64/'
lib_path = os.path.join(bin_dir, 'libtaichi_core.so')
check_exists(lib_path)
tmp_cwd = os.getcwd()
tmp_dir = prepare_sandbox()
check_exists(lib_path)
shutil.copy(lib_path, os.path.join(tmp_dir, 'taichi_core.so'))
os.chdir(tmp_dir)
sys.path.append(tmp_dir)
try:
import_ti_core(tmp_dir)
except Exception as e:
from colorama import Fore, Back, Style
print_red_bold("Taichi core import failed: ", end='')
print(e)
print(
Fore.YELLOW + "check this page for possible solutions:\n"
"https://taichi.readthedocs.io/en/stable/install.html#troubleshooting"
+ Fore.RESET)
exit(-1)
os.chdir(tmp_cwd)
elif get_os_name() == 'win':
bin_dir = get_bin_directory()
dll_path_invalid = os.path.join(bin_dir, 'libtaichi_core.dll')
assert not os.path.exists(dll_path_invalid)
possible_folders = ['Debug', 'RelWithDebInfo', 'Release']
detected_dlls = []
for folder in possible_folders:
dll_path = os.path.join(bin_dir, folder, 'taichi_core.dll')
if os.path.exists(dll_path):
detected_dlls.append(dll_path)
if len(detected_dlls) == 0:
raise FileNotFoundError(
f'Cannot find Taichi core dll under {get_bin_directory()}/{possible_folders}'
)
elif len(detected_dlls) != 1:
print('Warning: multiple Taichi core dlls found:')
for dll in detected_dlls:
print(' ', dll)
print(f'Using {detected_dlls[0]}')
dll_path = detected_dlls[0]
# On windows when an dll/pyd is loaded, we cannot write to it any more
old_wd = os.getcwd()
os.chdir(bin_dir)
if create_sand_box_on_windows:
# Create a sandbox for separated core lib development and loading
folder = os.path.join(get_output_directory(), 'tmp',
get_unique_task_id())
lib_dir = os.path.join(get_repo_directory(), 'external', 'lib')
os.environ['PATH'] += ';' + lib_dir
os.makedirs(folder)
shutil.copy(dll_path, os.path.join(folder, 'taichi_core.pyd'))
os.environ['PATH'] += ';' + folder
sys.path.append(folder)
else:
shutil.copy(dll_path, os.path.join(bin_dir, 'taichi_core.pyd'))
sys.path.append(bin_dir)
try:
import taichi_core as ti_core
except Exception as e:
print(e)
print()
print(
'Hint: please make sure the major and minor versions of the Python executable is correct.'
)
print()
raise e
os.chdir(old_wd)
log_level = os.environ.get('TI_LOG_LEVEL', '')
if log_level:
ti_core.set_logging_level(log_level)
def get_dll_name(name):
if get_os_name() == 'linux':
return 'libtaichi_%s.so' % name
elif get_os_name() == 'osx':
return 'libtaichi_%s.dylib' % name
elif get_os_name() == 'win':
return 'taichi_%s.dll' % name
else:
raise Exception(f"Unknown OS: {get_os_name()}")
def load_module(name, verbose=True):
if verbose:
print('Loading module', name)
try:
if get_os_name() == 'osx':
mode = ctypes.RTLD_LOCAL
else:
mode = ctypes.RTLD_GLOBAL
if '.so' in name:
ctypes.PyDLL(name, mode=mode)
else:
ctypes.PyDLL(os.path.join(get_repo_directory(), 'build',
get_dll_name(name)),
mode=mode)
except Exception as e:
print(Fore.YELLOW +
"Warning: module [{}] loading failed: {}".format(name, e) +
Style.RESET_ALL)
def at_startup():
if not is_release():
output_dir = get_output_directory()
if not os.path.exists(output_dir):
print('Making output directory')
os.mkdir(output_dir)
ti_core.set_core_state_python_imported(True)
def start_memory_monitoring(output_fn, pid=-1, interval=1):
# removing dependency on psutil
return
import os, psutil, time
if pid == -1:
pid = os.getpid()
import multiprocessing
def task():
with open(output_fn, 'w') as f:
process = psutil.Process(pid)
while True:
try:
mem = process.memory_info().rss
except:
mem = -1
time.sleep(interval)
print(time.time(), mem, file=f)
f.flush()
proc = multiprocessing.Process(target=task, daemon=True)
proc.start()
def require_version(major, minor=None, patch=None):
versions = [
int(ti_core.get_version_major()),
int(ti_core.get_version_minor()),
int(ti_core.get_version_patch()),
]
match = major == versions[0] and (
minor < versions[1] or minor == versions[1] and patch <= versions[2])
if match:
return
else:
print("Taichi version mismatch. required >= {}.{}.{}".format(
major, minor, patch))
print("Installed =", ti_core.get_version_string())
raise Exception("Taichi version mismatch")
at_startup()
def _print_taichi_header():
dev_mode = not is_release()
header = '[Taichi] '
if dev_mode:
header += '<dev mode>, '
else:
header += f'version {ti_core.get_version_string()}, '
llvm_version = ti_core.get_llvm_version_string()
header += f'llvm {llvm_version}, '
commit_hash = ti_core.get_commit_hash()
commit_hash = commit_hash[:8]
header += f'commit {commit_hash}, '
header += f'{get_os_name()}, '
py_ver = '.'.join(str(x) for x in sys.version_info[:3])
header += f'python {py_ver}'
print(header)
_print_taichi_header()
__all__ = [
'ti_core',
'build',
'load_module',
'start_memory_monitoring',
'is_release',
'package_root',
'require_version',
]
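# ---------------------------------------------------------------------------
# Illustrative note (added, not part of the original module): require_version()
# lets downstream code fail fast on an incompatible core build, e.g.
#
#     require_version(0, 6, 22)   # raises if the loaded taichi core is older
#
# The version numbers above are placeholders only.  Kept as a comment because
# this module already performs import-time side effects (core loading,
# at_startup, header printing).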
|
py | 1a43f7bf42d121bd5ed44c064732cc7bd94bb4d9 | import copy
import numpy
import opt_einsum
from .single_amplitude_sim import SingleAmplitudeSimulator
from .tensor_network import Node, TensorNetwork |
py | 1a43f8cbdc613b75178bf975a05f96d477fe2fb5 | import json
import botsdk.Bot
import botsdk.BotRequest
from botsdk.tool.MessageChain import MessageChain
from botsdk.tool.BotPlugin import BotPlugin
from botsdk.tool.Cookie import *
class plugin(BotPlugin):
def __init__(self):
super().__init__()
self.listenType = []
#[["type1",func],["type2",func],...,["typen",func]]
self.listenTarget = [["GroupMessage", "空调", self.kongtiao]]
#[["type1","target",func],["type2","target",func],...,["typen","target",func]]
self.name = "空调"
#"插件名称"
self.info = "好热哦"
#"插件信息"
self.help = "/空调"
#"插件帮助"
self.permissionSet = {"OWNER","ADMINISTRATOR","MEMBER"}
self.canDetach = True
async def kongtiao(self, request):
bot = request.bot
groupid = request.groupId
await bot.sendGroupMessage(request.groupId, MessageChain().text("https://ac.yunyoujun.cn/#/").getData())
def handle(*args, **kwargs):
return plugin(*args, **kwargs)
|
py | 1a43f8d775e1da25985921a1b67e7c8a9f9b4e61 | """Custom annotations"""
from abc import ABCMeta
from inspect import Signature
from typing import Any, List, Tuple, Type, TypeVar
import jetblack_serialization.typing_inspect_ex as typing_inspect
from .types import Annotation
T = TypeVar('T')
class SerializationAnnotation(metaclass=ABCMeta):
"""The base serialization annotation class"""
class DefaultValue:
def __init__(self, value: Any) -> None:
self.value = value
def is_any_annotation_of_type(annotation: Annotation, tp: Type[Any]) -> bool:
if not typing_inspect.is_annotated_type(annotation):
return False
for item in typing_inspect.get_metadata(annotation):
if issubclass(type(item), tp):
return True
return False
def get_all_annotations_of_type(
annotation: Annotation,
tp: Type[T]
) -> Tuple[Annotation, List[T]]:
type_annotation = typing_inspect.get_origin(annotation)
serialization_annotations = [
serialization_annotation
for serialization_annotation in typing_inspect.get_metadata(annotation) or []
if issubclass(type(serialization_annotation), tp)
]
return type_annotation, serialization_annotations
def is_any_serialization_annotation(annotation: Annotation) -> bool:
"""Determine if the annotation is of type Annotation[T, SerializationAnnotation]
Args:
annotation (Any): The annotation
Returns:
bool: True if the annotation is of type
Annotation[T, SerializationAnnotation], otherwise False
"""
return is_any_annotation_of_type(annotation, SerializationAnnotation)
def get_all_serialization_annotations(
annotation: Annotation
) -> Tuple[Annotation, List[SerializationAnnotation]]:
"""Gets the type T of Annotation[T, SerializationAnnotation]
Args:
annotation (Any): The annotation
Returns:
Tuple[Annotation, List[SerializationAnnotation]]: The type and the
serialization annotationa
"""
return get_all_annotations_of_type(annotation, SerializationAnnotation)
def is_any_default_annotation(annotation: Annotation) -> bool:
return is_any_annotation_of_type(annotation, DefaultValue)
def get_default_annotation(
annotation: Annotation
) -> Tuple[Annotation, DefaultValue]:
typ, annotations = get_all_annotations_of_type(
annotation, DefaultValue)
assert len(annotations) == 1, "There can be only one"
return typ, annotations[0]
def get_typed_dict_key_default(td):
if is_any_default_annotation(td):
_, annotation = get_default_annotation(td)
return annotation.value
return Signature.empty
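# ---------------------------------------------------------------------------
# Usage sketch (added for illustration): attach a DefaultValue to an annotated
# type and read it back.  Assumes Python 3.9+ typing.Annotated (or
# typing_extensions.Annotated) and that typing_inspect_ex recognises it; run it
# as part of the package since this module uses a relative import.
if __name__ == "__main__":
    from typing import Annotated
    PortWithDefault = Annotated[int, DefaultValue(8080)]
    assert is_any_default_annotation(PortWithDefault)
    _, default = get_default_annotation(PortWithDefault)
    print(default.value)                                # 8080
    print(get_typed_dict_key_default(PortWithDefault))  # 8080 as well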
|
py | 1a43f9e6a57f9a3d1634725dccc03f7f60bc66da | # -*- coding: utf-8 -*-
#
# Copyright 2017 Open Targets
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
"""
Tests for Docker container wrapper for Luigi.
Requires:
- docker: ``pip install docker``
Written and maintained by Andrea Pierleoni (@apierleoni).
Contributions by Eliseo Papa (@elipapa)
"""
import tempfile
from helpers import unittest
from tempfile import NamedTemporaryFile
import luigi
import logging
from luigi.contrib.docker_runner import DockerTask
logger = logging.getLogger('luigi-interface')
try:
import docker
from docker.errors import ContainerError, ImageNotFound
client = docker.from_env()
client.version()
except ImportError:
raise unittest.SkipTest('Unable to load docker module')
except Exception:
raise unittest.SkipTest('Unable to connect to docker daemon')
tempfile.tempdir = '/tmp'  # set it explicitly to make it work out of the box on macOS
local_file = NamedTemporaryFile()
local_file.write(b'this is a test file\n')
local_file.flush()
class SuccessJob(DockerTask):
image = "busybox:latest"
name = "SuccessJob"
class FailJobImageNotFound(DockerTask):
image = "image-does-not-exists"
name = "FailJobImageNotFound"
class FailJobContainer(DockerTask):
image = "busybox"
name = "FailJobContainer"
command = 'cat this-file-does-not-exist'
class WriteToTmpDir(DockerTask):
image = "busybox"
name = "WriteToTmpDir"
container_tmp_dir = '/tmp/luigi-test'
command = 'test -d /tmp/luigi-test'
# command = 'test -d $LUIGI_TMP_DIR'# && echo ok >$LUIGI_TMP_DIR/test'
class MountLocalFileAsVolume(DockerTask):
image = "busybox"
name = "MountLocalFileAsVolume"
# volumes= {'/tmp/local_file_test': {'bind': local_file.name, 'mode': 'rw'}}
binds = [local_file.name + ':/tmp/local_file_test']
command = 'test -f /tmp/local_file_test'
class MountLocalFileAsVolumeWithParam(DockerTask):
dummyopt = luigi.Parameter()
image = "busybox"
name = "MountLocalFileAsVolumeWithParam"
binds = [local_file.name + ':/tmp/local_file_test']
command = 'test -f /tmp/local_file_test'
class MountLocalFileAsVolumeWithParamRedefProperties(DockerTask):
dummyopt = luigi.Parameter()
image = "busybox"
name = "MountLocalFileAsVolumeWithParamRedef"
@property
def binds(self):
return [local_file.name + ':/tmp/local_file_test' + self.dummyopt]
@property
def command(self):
return 'test -f /tmp/local_file_test' + self.dummyopt
def complete(self):
return True
class MultipleDockerTask(luigi.WrapperTask):
'''because the volumes property is defined as a list, spinning multiple
containers led to conflict in the volume binds definition, with multiple
host directories pointing to the same container directory'''
def requires(self):
return [MountLocalFileAsVolumeWithParam(dummyopt=opt)
for opt in ['one', 'two', 'three']]
class MultipleDockerTaskRedefProperties(luigi.WrapperTask):
def requires(self):
return [MountLocalFileAsVolumeWithParamRedefProperties(dummyopt=opt)
for opt in ['one', 'two', 'three']]
class TestDockerTask(unittest.TestCase):
# def tearDown(self):
# local_file.close()
def test_success_job(self):
success = SuccessJob()
luigi.build([success], local_scheduler=True)
self.assertTrue(success)
def test_temp_dir_creation(self):
writedir = WriteToTmpDir()
writedir.run()
def test_local_file_mount(self):
localfile = MountLocalFileAsVolume()
localfile.run()
def test_fail_job_image_not_found(self):
fail = FailJobImageNotFound()
self.assertRaises(ImageNotFound, fail.run)
def test_fail_job_container(self):
fail = FailJobContainer()
self.assertRaises(ContainerError, fail.run)
def test_multiple_jobs(self):
worked = MultipleDockerTask()
luigi.build([worked], local_scheduler=True)
self.assertTrue(worked)
def test_multiple_jobs2(self):
worked = MultipleDockerTaskRedefProperties()
luigi.build([worked], local_scheduler=True)
self.assertTrue(worked)
|
py | 1a43fa2af64ee26dc2d694c8a8d7873b242df017 | import numpy as np
import cv2
import time
import os
import psutil
import gc
from grid_game import GridGame
from util.mem_convert import bytes2human
class Environment:
"""docstring for Environment"""
BUFFER_LEN = 1
EPISODE_STEPS = 18000
EPOCH_COUNT = 10
EPOCH_STEPS = 10000
EVAL_EPS = 0.001
FRAMES_SKIP = 1
FRAME_HEIGHT = 4
FRAME_WIDTH = 4
MAX_NO_OP = 0
MAX_REWARD = 0
def __init__(self, rng, one_state = False, display_screen = False):
self.height = Environment.FRAME_HEIGHT
self.width = Environment.FRAME_WIDTH
self.api = GridGame(self.height, self.width, rng)
self.rng = rng
self.display_screen = display_screen
self.minimal_actions = self.api.getMinimalActionSet()
self.repeat = Environment.FRAMES_SKIP
self.buffer_len = Environment.BUFFER_LEN
self.eval_eps = Environment.EVAL_EPS
self.merge_frame = np.zeros((self.buffer_len
, self.height
, self.width)
, dtype = np.uint8)
self.merge_id = 0
self.max_reward = Environment.MAX_REWARD
self.log_dir = ''
self.network_dir = ''
print self.minimal_actions
def get_action_count(self):
return len(self.minimal_actions)
def train(self, agent, store_freq, folder = None, start_epoch = 0
, ask_for_more = False):
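        # train() runs `epoch_count` epochs of EPOCH_STEPS environment steps each,
        # computes validation values and an evaluation reward after every epoch,
        # logs the results, and optionally prompts for extra epochs when done.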
self._open_log_files(agent, folder)
obs = np.zeros((self.height, self.width), dtype = np.uint8)
epoch_count = Environment.EPOCH_COUNT
self.need_reset = True
epoch = start_epoch
epoch_count = Environment.EPOCH_COUNT
while epoch < epoch_count:
steps_left = Environment.EPOCH_STEPS
print "\n" + "=" * 50
print "Epoch #%d" % (epoch + 1)
episode = 0
train_start = time.time()
while steps_left > 0:
num_step, _ = self._run_episode(agent, steps_left, obs)
steps_left -= num_step
episode += 1
if steps_left == 0 or episode % 100 == 0:
print "Finished episode #%d, steps_left = %d" \
% (episode, steps_left)
train_end = time.time()
valid_values = agent.get_validate_values()
eval_values = self.evaluate(agent)
test_end = time.time()
train_time = train_end - train_start
test_time = test_end - train_end
step_per_sec = Environment.EPOCH_STEPS * 1. / max(1, train_time)
print "\tFinished epoch #%d, episode trained = %d\n" \
"\tValidate values = %.3f, evaluate reward = %.3f\n"\
"\tTrain time = %.0fs, test time = %.0fs, steps/sec = %.4f" \
% (epoch + 1, episode, valid_values, eval_values\
, train_time, test_time, step_per_sec)
self._update_log_files(agent, epoch + 1, episode
, valid_values, eval_values
, train_time, test_time
, step_per_sec, store_freq)
gc.collect()
epoch += 1
if ask_for_more and epoch >= epoch_count:
st = raw_input("\n***Enter number of epoch to continue training: ")
more_epoch = 0
try:
more_epoch = int(st)
except Exception, e:
more_epoch = 0
epoch_count += more_epoch
def evaluate(self, agent, episodes = 30, obs = None):
print "\n***Start evaluating"
if obs is None:
obs = np.zeros((self.height, self.width), dtype = np.uint8)
sum_reward = 0.0
sum_step = 0.0
self.need_reset = True
for episode in xrange(episodes):
step, reward = self._run_episode(agent,
Environment.EPISODE_STEPS, obs, self.eval_eps, evaluating = True
, print_Q = self.display_screen)
sum_reward += reward
sum_step += step
print "Finished episode %d, reward = %d, step = %d" \
% (episode + 1, reward, step)
self.need_reset = True
print "Average reward per episode = %.4f" \
% (sum_reward / episodes)
print "Average step per episode = %.4f" % (sum_step / episodes)
return sum_reward / episodes
def _prepare_game(self):
if self.need_reset or self.api.game_over():
self.api.reset_game()
self.need_reset = False
if Environment.MAX_NO_OP > 0:
num_no_op = self.rng.randint(Environment.MAX_NO_OP + 1) \
+ self.buffer_len
for _ in xrange(num_no_op):
self.api.act(0)
for _ in xrange(self.buffer_len):
self._update_buffer()
def _run_episode(self, agent, steps_left, obs
, eps = 0.0, evaluating = False, print_Q = False):
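        # Plays a single episode: read the current observation, query the agent for
        # an (epsilon-greedy) action, repeat it FRAMES_SKIP times, optionally clip
        # the reward, hand the transition to the agent, and stop on game over, life
        # loss (during training), or when the step budget is exhausted.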
self._prepare_game()
start_lives = self.api.lives()
step_count = 0
sum_reward = 0
is_terminal = False
while step_count < steps_left and not is_terminal:
self._get_screen(obs)
action_id, is_random = agent.get_action(obs, eps, evaluating)
reward = self._repeat_action(self.minimal_actions[action_id])
reward_clip = reward
if self.max_reward > 0:
reward_clip = np.clip(reward, -self.max_reward, self.max_reward)
if print_Q:
print "Observation = \n", np.int32(obs) - self.api.translate
print "Action%s = %d" % (" (random)" if is_random else ""
, self.minimal_actions[action_id])
print "Reward = %d" % (reward)
raw_input()
life_lost = not evaluating and self.api.lives() < start_lives
is_terminal = self.api.game_over() or life_lost \
or step_count + 1 >= steps_left
agent.add_experience(obs, is_terminal, action_id, reward_clip
, evaluating)
sum_reward += reward
step_count += 1
return step_count, sum_reward
def _update_buffer(self):
self.api.getScreenGrayscale(self.merge_frame[self.merge_id, ...])
self.merge_id = (self.merge_id + 1) % self.buffer_len
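    # _repeat_action() applies one action for FRAMES_SKIP consecutive steps and
    # records the last BUFFER_LEN frames in the ring buffer; the observation
    # returned by _get_screen() is the pixel-wise max over that buffer, resized
    # to FRAME_HEIGHT x FRAME_WIDTH.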
def _repeat_action(self, action):
reward = 0
for i in xrange(self.repeat):
reward += self.api.act(action)
if i + self.buffer_len >= self.repeat:
self._update_buffer()
return reward
def _get_screen(self, resized_frame):
self._resize_frame(self.merge_frame.max(axis = 0), resized_frame)
def _resize_frame(self, src_frame, dst_frame):
cv2.resize(src = src_frame, dst = dst_frame,
dsize = (self.width, self.height),
interpolation = cv2.INTER_LINEAR)
def _open_log_files(self, agent, folder):
time_str = time.strftime("_%m-%d-%H-%M", time.localtime())
base_rom_name = 'grid'
if folder is not None:
self.log_dir = folder
self.network_dir = self.log_dir + '/network'
return
self.log_dir = '../run_results/grid/' + base_rom_name + time_str
self.network_dir = self.log_dir + '/network'
try:
os.stat(self.log_dir)
except OSError:
os.makedirs(self.log_dir)
try:
os.stat(self.network_dir)
except OSError:
os.makedirs(self.network_dir)
with open(self.log_dir + '/info.txt', 'w') as f:
f.write(agent.get_info())
f.write(self.api.game_info() + '\n\n')
self._write_info(f, Environment)
self._write_info(f, agent.__class__)
self._write_info(f, agent.network.__class__)
with open(self.log_dir + '/results.csv', 'w') as f:
f.write("epoch,episode_train,validate_values,evaluate_reward"\
",train_time,test_time,steps_per_second\n")
mem = psutil.virtual_memory()
with open(self.log_dir + '/memory.csv', 'w') as f:
f.write("epoch,available,free,buffers,cached"\
",available_readable,used_percent\n")
f.write("%d,%d,%d,%d,%d,%s,%.1f\n" % \
(0, mem.available, mem.free, mem.buffers, mem.cached
, bytes2human(mem.available), mem.percent))
def _update_log_files(self, agent, epoch, episode, valid_values
, eval_values, train_time, test_time, step_per_sec
, store_freq):
print "Updating log files"
with open(self.log_dir + '/results.csv', 'a') as f:
f.write("%d,%d,%.4f,%.4f,%d,%d,%.4f\n" % \
(epoch, episode, valid_values, eval_values
, train_time, test_time, step_per_sec))
mem = psutil.virtual_memory()
with open(self.log_dir + '/memory.csv', 'a') as f:
f.write("%d,%d,%d,%d,%d,%s,%.1f\n" % \
(epoch, mem.available, mem.free, mem.buffers, mem.cached
, bytes2human(mem.available), mem.percent))
agent.dump_network(self.network_dir + ('/%03d' % (epoch)) + '.npz')
if (store_freq >= 0 and epoch >= Environment.EPOCH_COUNT) or \
(store_freq > 0 and (epoch % store_freq == 0)):
agent.dump_exp(self.network_dir + '/exp.npz')
def _write_info(self, f, c):
hyper_params = [attr for attr in dir(c) \
if not attr.startswith("__") and not callable(getattr(c, attr))]
for param in hyper_params:
f.write(str(c.__name__) + '.' + param + ' = ' + \
str(getattr(c, param)) + '\n')
f.write('\n')
|
py | 1a43fb33bb4f7b3dbaf548f3449fbb44d7041f7d | import copy
site = {
    'html': {
        'head': {
            'title': 'Buy/sell phones cheap'
        },
        'body': {
            'h2': 'We have the lowest price for iPhone',
            'div': 'Buy',
            'p': 'Sell'
        }
    }
}
def f(n, data=site, new_list=list()):
    # The mutable default list is shared across the recursive calls, so it
    # accumulates every generated site and prints all of them on each pass.
    if n == 0:
        return
    name = input('Enter the site name: ')
    data['html']['head']['title'] = f'Buy/sell {name} cheap'
    data['html']['body']['h2'] = f'We have the lowest price for {name}'
    new_list.append(str(data))
    for i in new_list:
        print(i)
    f(n - 1)
f(2)
|
py | 1a43fd2b252e1a19fd80cae2b530f2012e62f8f3 | from setuptools import setup
setup(
name="mani_skill",
version="1.0",
author="SU Lab at UC San Diego",
zip_safe=False,
install_requires=[
"gym",
"open3d",
"pyyaml",
"rtree",
"sapien",
"scipy",
"shapely",
"transforms3d",
"trimesh",
]
)
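# Illustrative usage (assumption, not stated in this file): with this setup.py
# in place the package can be installed for development in the usual
# setuptools way, e.g. `pip install -e .` from the repository root.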
|
py | 1a43fdc1530c1de8ac5f40b649bd0b7ab9e37102 | # -*- coding: utf-8 -*-
"""
flask.session
~~~~~~~~~~~~~
    This module used to contain the session implementation, which has since
    moved over to flask.sessions
:copyright: (c) 2011 by Armin Ronacher.
:license: BSD, see LICENSE for more details.
"""
from warnings import warn
warn(DeprecationWarning('please use flask.sessions instead'))
from .sessions import SecureCookieSession, NullSession
Session = SecureCookieSession
_NullSession = NullSession
|
py | 1a43fe028c8ad4b123d80f4d549072d736543bd4 | #! /usr/bin/env python
# -*- coding: utf-8 -*-
from setuptools import setup, find_packages
ver_dic = {}
version_file_name = "gmsh_interop/version.py"
with open(version_file_name) as version_file:
version_file_contents = version_file.read()
exec(compile(version_file_contents, version_file_name, 'exec'), ver_dic)
setup(name="gmsh_interop",
version=ver_dic["VERSION_TEXT"],
description="A parser for GMSH's .msh format",
long_description=open("README.rst", "r").read(),
classifiers=[
'Development Status :: 4 - Beta',
'Intended Audience :: Developers',
'Intended Audience :: Other Audience',
'Intended Audience :: Science/Research',
'License :: OSI Approved :: MIT License',
'Natural Language :: English',
'Programming Language :: Python',
'Programming Language :: Python :: 2',
'Programming Language :: Python :: 2.6',
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3',
'Programming Language :: Python :: 3.3',
'Programming Language :: Python :: 3.4',
'Topic :: Scientific/Engineering',
'Topic :: Scientific/Engineering :: Information Analysis',
'Topic :: Scientific/Engineering :: Mathematics',
'Topic :: Scientific/Engineering :: Visualization',
'Topic :: Software Development :: Libraries',
'Topic :: Utilities',
],
install_requires=[
"six>=1.8.0",
"numpy>=1.6.0",
"pytools",
],
author="Andreas Kloeckner",
url="http://github.com/inducer/gmsh_interop",
author_email="[email protected]",
license="MIT",
packages=find_packages())
|
py | 1a43fe08ffd3f0d1326b31da7d66c3c674eba23d | from unittest.mock import Mock, patch
from urllib.parse import urlencode
from django.test import TestCase
from django.test.utils import override_settings
from django.urls import reverse
from django.utils import translation
from phonenumber_field.phonenumber import PhoneNumber
from two_factor.gateways.fake import Fake
from two_factor.gateways.twilio.gateway import Twilio
class TwilioGatewayTest(TestCase):
def test_call_app(self):
url = reverse('two_factor_twilio:call_app', args=['123456'])
response = self.client.get(url)
self.assertEqual(response.content,
b'<?xml version="1.0" encoding="UTF-8" ?>'
b'<Response>'
b' <Gather timeout="15" numDigits="1" finishOnKey="">'
b' <Say language="en">Hi, this is testserver calling. '
b'Press any key to continue.</Say>'
b' </Gather>'
b' <Say language="en">You didn\'t press any keys. Good bye.</Say>'
b'</Response>')
url = reverse('two_factor_twilio:call_app', args=['123456'])
response = self.client.post(url)
self.assertEqual(response.content,
b'<?xml version="1.0" encoding="UTF-8" ?>'
b'<Response>'
b' <Say language="en">Your token is 1. 2. 3. 4. 5. 6. '
b'Repeat: 1. 2. 3. 4. 5. 6. Good bye.</Say>'
b'</Response>')
# there is a en-gb voice
response = self.client.get('%s?%s' % (url, urlencode({'locale': 'en-gb'})))
self.assertContains(response, '<Say language="en-gb">')
# there is no Frysian voice
response = self.client.get('%s?%s' % (url, urlencode({'locale': 'fy-nl'})))
self.assertContains(response, '<Say language="en">')
@override_settings(
TWILIO_ACCOUNT_SID='SID',
TWILIO_AUTH_TOKEN='TOKEN',
TWILIO_CALLER_ID='+456',
)
@patch('two_factor.gateways.twilio.gateway.Client')
def test_gateway(self, client):
twilio = Twilio()
client.assert_called_with('SID', 'TOKEN')
for code in ['654321', '054321', '87654321', '07654321']:
twilio.make_call(device=Mock(number=PhoneNumber.from_string('+123')), token=code)
client.return_value.calls.create.assert_called_with(
from_='+456', to='+123', method='GET', timeout=15,
url='http://testserver/twilio/inbound/two_factor/%s/?locale=en-us' % code)
twilio.send_sms(device=Mock(number=PhoneNumber.from_string('+123')), token=code)
client.return_value.messages.create.assert_called_with(
to='+123', body='Your authentication token is %s' % code, from_='+456')
client.return_value.calls.create.reset_mock()
with translation.override('en-gb'):
twilio.make_call(device=Mock(number=PhoneNumber.from_string('+123')), token=code)
client.return_value.calls.create.assert_called_with(
from_='+456', to='+123', method='GET', timeout=15,
url='http://testserver/twilio/inbound/two_factor/%s/?locale=en-gb' % code)
@override_settings(
TWILIO_ACCOUNT_SID='SID',
TWILIO_AUTH_TOKEN='TOKEN',
TWILIO_CALLER_ID='+456',
)
@patch('two_factor.gateways.twilio.gateway.Client')
def test_invalid_twilio_language(self, client):
# This test assumes an invalid twilio voice language being present in
# the Arabic translation. Might need to create a faux translation when
# the translation is fixed.
url = reverse('two_factor_twilio:call_app', args=['123456'])
with self.assertRaises(NotImplementedError):
self.client.get('%s?%s' % (url, urlencode({'locale': 'ar'})))
# make_call doesn't use the voice_language, but it should raise early
# to ease debugging.
with self.assertRaises(NotImplementedError):
twilio = Twilio()
with translation.override('ar'):
twilio.make_call(device=Mock(number='+123'), token='654321')
class FakeGatewayTest(TestCase):
@patch('two_factor.gateways.fake.logger')
def test_gateway(self, logger):
fake = Fake()
for code in ['654321', '87654321']:
fake.make_call(device=Mock(number=PhoneNumber.from_string('+123')), token=code)
logger.info.assert_called_with(
'Fake call to %s: "Your token is: %s"', '+123', code)
fake.send_sms(device=Mock(number=PhoneNumber.from_string('+123')), token=code)
logger.info.assert_called_with(
'Fake SMS to %s: "Your token is: %s"', '+123', code)
|
py | 1a43ff174a36533e8e9d5d777c32e138a2562677 | """This module contains the general information for LsbootUsbFlashStorageImage ManagedObject."""
from ...ucsmo import ManagedObject
from ...ucscoremeta import MoPropertyMeta, MoMeta
from ...ucsmeta import VersionMeta
class LsbootUsbFlashStorageImageConsts:
TYPE_EMBEDDED_LOCAL_JBOD = "embedded-local-jbod"
TYPE_EMBEDDED_LOCAL_LUN = "embedded-local-lun"
TYPE_LOCAL_ANY = "local-any"
TYPE_LOCAL_JBOD = "local-jbod"
TYPE_LOCAL_LUN = "local-lun"
TYPE_NVME = "nvme"
TYPE_SD_CARD = "sd-card"
TYPE_USB_EXTERN = "usb-extern"
TYPE_USB_INTERN = "usb-intern"
class LsbootUsbFlashStorageImage(ManagedObject):
"""This is LsbootUsbFlashStorageImage class."""
consts = LsbootUsbFlashStorageImageConsts()
naming_props = set([])
mo_meta = MoMeta("LsbootUsbFlashStorageImage", "lsbootUsbFlashStorageImage", "sd-card", VersionMeta.Version221b, "InputOutput", 0x3f, [], ["admin", "ls-compute", "ls-config", "ls-config-policy", "ls-server", "ls-server-policy", "ls-storage", "ls-storage-policy"], [u'lsbootLocalStorage'], [u'lsbootUEFIBootParam'], ["Add", "Get", "Remove", "Set"])
prop_meta = {
"child_action": MoPropertyMeta("child_action", "childAction", "string", VersionMeta.Version221b, MoPropertyMeta.INTERNAL, 0x2, None, None, r"""((deleteAll|ignore|deleteNonPresent),){0,2}(deleteAll|ignore|deleteNonPresent){0,1}""", [], []),
"dn": MoPropertyMeta("dn", "dn", "string", VersionMeta.Version221b, MoPropertyMeta.READ_ONLY, 0x4, 0, 256, None, [], []),
"order": MoPropertyMeta("order", "order", "ushort", VersionMeta.Version221b, MoPropertyMeta.READ_WRITE, 0x8, None, None, None, [], ["1-16"]),
"rn": MoPropertyMeta("rn", "rn", "string", VersionMeta.Version221b, MoPropertyMeta.READ_ONLY, 0x10, 0, 256, None, [], []),
"sacl": MoPropertyMeta("sacl", "sacl", "string", VersionMeta.Version302c, MoPropertyMeta.READ_ONLY, None, None, None, r"""((none|del|mod|addchild|cascade),){0,4}(none|del|mod|addchild|cascade){0,1}""", [], []),
"status": MoPropertyMeta("status", "status", "string", VersionMeta.Version221b, MoPropertyMeta.READ_WRITE, 0x20, None, None, r"""((removed|created|modified|deleted),){0,3}(removed|created|modified|deleted){0,1}""", [], []),
"type": MoPropertyMeta("type", "type", "string", VersionMeta.Version221b, MoPropertyMeta.READ_ONLY, None, None, None, None, ["embedded-local-jbod", "embedded-local-lun", "local-any", "local-jbod", "local-lun", "nvme", "sd-card", "usb-extern", "usb-intern"], []),
}
prop_map = {
"childAction": "child_action",
"dn": "dn",
"order": "order",
"rn": "rn",
"sacl": "sacl",
"status": "status",
"type": "type",
}
def __init__(self, parent_mo_or_dn, **kwargs):
self._dirty_mask = 0
self.child_action = None
self.order = None
self.sacl = None
self.status = None
self.type = None
ManagedObject.__init__(self, "LsbootUsbFlashStorageImage", parent_mo_or_dn, **kwargs)
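    # Illustrative usage sketch (hypothetical names; only the constructor
    # signature and the writable `order` property come from this class):
    #
    #     img = LsbootUsbFlashStorageImage(parent_mo_or_dn=boot_local_storage_mo,
    #                                      order="1")
    #
    # where `boot_local_storage_mo` would be an existing lsbootLocalStorage
    # managed object, the parent type listed in this class's mo_meta.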
|
py | 1a43ff2547ed8b7023ca750d74393ca68dc05ec4 | from typing import Dict, List
from allennlp.common.checks import ConfigurationError
# from allennlp.common.params import Params
from allennlp.common.util import pad_sequence_to_length
from allennlp.data.tokenizers.token import Token
from allennlp.data.token_indexers.token_indexer import TokenIndexer
from allennlp.data.vocabulary import Vocabulary
def _make_bos_eos(
character: int,
padding_character: int,
beginning_of_word_character: int,
end_of_word_character: int,
max_word_length: int
):
char_ids = [padding_character] * max_word_length
char_ids[0] = beginning_of_word_character
char_ids[1] = character
char_ids[2] = end_of_word_character
return char_ids
class ELMoCharacterMapper:
"""
Maps individual tokens to sequences of character ids, compatible with ELMo.
    To be consistent with previously trained models, we include it here as a
    special case of the existing character indexers.
"""
max_word_length = 50
# char ids 0-255 come from utf-8 encoding bytes
# assign 256-300 to special chars
beginning_of_sentence_character = 256 # <begin sentence>
end_of_sentence_character = 257 # <end sentence>
beginning_of_word_character = 258 # <begin word>
end_of_word_character = 259 # <end word>
padding_character = 260 # <padding>
beginning_of_sentence_characters = _make_bos_eos(
beginning_of_sentence_character,
padding_character,
beginning_of_word_character,
end_of_word_character,
max_word_length
)
end_of_sentence_characters = _make_bos_eos(
end_of_sentence_character,
padding_character,
beginning_of_word_character,
end_of_word_character,
max_word_length
)
bos_token = '<S>'
eos_token = '</S>'
@staticmethod
def convert_word_to_char_ids(word: str) -> List[int]:
if word == ELMoCharacterMapper.bos_token:
char_ids = ELMoCharacterMapper.beginning_of_sentence_characters
elif word == ELMoCharacterMapper.eos_token:
char_ids = ELMoCharacterMapper.end_of_sentence_characters
else:
word_encoded = word.encode('utf-8', 'ignore')[:(ELMoCharacterMapper.max_word_length-2)]
char_ids = [ELMoCharacterMapper.padding_character] * ELMoCharacterMapper.max_word_length
char_ids[0] = ELMoCharacterMapper.beginning_of_word_character
for k, chr_id in enumerate(word_encoded, start=1):
char_ids[k] = chr_id
char_ids[len(word_encoded) + 1] = ELMoCharacterMapper.end_of_word_character
# +1 one for masking
return [c + 1 for c in char_ids]
@TokenIndexer.register("elmo_characters")
class ELMoTokenCharactersIndexer(TokenIndexer[List[int]]):
"""
Convert a token to an array of character ids to compute ELMo representations.
Parameters
----------
namespace : ``str``, optional (default=``elmo_characters``)
"""
# pylint: disable=no-self-use
def __init__(self,
namespace: str = 'elmo_characters') -> None:
self._namespace = namespace
def count_vocab_items(self, token: Token, counter: Dict[str, Dict[str, int]]):
pass
def token_to_indices(self, token: Token, vocabulary: Vocabulary) -> List[int]:
# pylint: disable=unused-argument
if token.text is None:
raise ConfigurationError('ELMoTokenCharactersIndexer needs a tokenizer '
'that retains text')
return ELMoCharacterMapper.convert_word_to_char_ids(token.text)
def get_padding_lengths(self, token: List[int]) -> Dict[str, int]:
# pylint: disable=unused-argument
return {}
def get_padding_token(self) -> List[int]:
return []
@staticmethod
def _default_value_for_padding():
return [0] * ELMoCharacterMapper.max_word_length
def pad_token_sequence(self,
tokens: List[List[int]],
desired_num_tokens: int,
padding_lengths: Dict[str, int]) -> List[List[int]]:
# pylint: disable=unused-argument
return pad_sequence_to_length(tokens, desired_num_tokens,
default_value=self._default_value_for_padding)
# @classmethod
# def from_params(cls, params: Params) -> 'ELMoTokenCharactersIndexer':
# """
# Parameters
# ----------
# namespace : ``str``, optional (default=``elmo_characters``)
# """
# namespace = params.pop('namespace', 'elmo_characters')
# params.assert_empty(cls.__name__)
# return cls(namespace=namespace)
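# Illustrative demo: shows the padded character-id sequence produced by
# ELMoCharacterMapper.convert_word_to_char_ids for an ordinary word. The ids
# follow from the constants above, each offset by +1 for masking.
if __name__ == "__main__":
    ids = ELMoCharacterMapper.convert_word_to_char_ids("cat")
    print(len(ids))   # 50 (max_word_length)
    print(ids[:5])    # [259, 100, 98, 117, 260]: <begin word>, 'c', 'a', 't', <end word>
    print(ids[5])     # 261: padding fills the remaining positions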
|
py | 1a43ffd80e6f0f88f6c902379d2bb15ea924a5d2 |
import os
from sqlalchemy import *
from sqlalchemy import exc
from sqlalchemy.pool import NullPool
from flask import Flask, request, render_template, g, redirect, Response,session, abort, flash, redirect, url_for
from flask_wtf import FlaskForm
from wtforms import StringField, SubmitField, PasswordField, TextField
from wtforms import validators, ValidationError
from flask_login import login_user,logout_user,current_user
from forms import SignInForm, SignUpForm, CreateMoveForm, CitySearchForm, CommentForm
import datetime
import random
import sys
import requests
app = Flask(__name__)
app.secret_key = 'super secret key'
app.config["DEBUG"] = True # Only include this while you are testing your app
#
# The following is a dummy URI that does not connect to a valid database. You will need to modify it to connect to your Part 2 database in order to use the data.
#
# XXX: The URI should be in the format of:
#
# postgresql://USER:[email protected]/proj1part2
#
# For example, if you had username gravano and password foobar, then the following line would be:
#
# DATABASEURI = "postgresql://tcweiqzaibybtr:0fa949e74472ae8a71a9c1fe09ba5237fa82339e376f43929a54b402b016fa7d@ec2-107-20-255-96.compute-1.amazonaws.com/db9p4jqu1kncao"
#
DATABASEURI = "postgresql://tcweiqzaibybtr:0fa949e74472ae8a71a9c1fe09ba5237fa82339e376f43929a54b402b016fa7d@ec2-107-20-255-96.compute-1.amazonaws.com/db9p4jqu1kncao"
#
# This line creates a database engine that knows how to connect to the URI above.
#
engine = create_engine(DATABASEURI)
@app.before_request
def before_request():
"""
This function is run at the beginning of every web request
(every time you enter an address in the web browser).
We use it to setup a database connection that can be used throughout the request.
The variable g is globally accessible.
"""
try:
g.conn = engine.connect()
except:
print("uh oh, problem connecting to database")
import traceback; traceback.print_exc()
g.conn = None
@app.teardown_request
def teardown_request(exception):
"""
At the end of the web request, this makes sure to close the database connection.
If you don't, the database could run out of memory!
"""
try:
g.conn.close()
except Exception as e:
pass
#
# @app.route is a decorator around index() that means:
# run index() whenever the user tries to access the "/" path using a GET request
#
# If you wanted the user to go to, for example, localhost:8111/foobar/ with POST or GET then you could use:
#
# @app.route("/foobar/", methods=["POST", "GET"])
#
# PROTIP: (the trailing / in the path is important)
#
# see for routing: http://flask.pocoo.org/docs/0.10/quickstart/#routing
# see for decorators: http://simeonfranklin.com/blog/2012/jul/1/python-decorators-in-12-steps/
#
@app.route("/")
def index():
return render_template("index.html")
@app.route("/signin", methods=["GET", "POST"])
def sign_in():
form = SignInForm(csrf_enabled=False)
if request.method == "GET":
return render_template("signin.html", form=form)
elif request.method == "POST":
if not form.validate():
return render_template("signin.html", form=form)
else:
result = g.conn.execute('''SELECT EXISTS (SELECT * FROM people
WHERE email = '%s' AND password = '%s')''' % (form.email.data, form.password.data))
row = result.fetchone()
if row[0]:
person = g.conn.execute('''(SELECT * FROM people WHERE email = '%s' AND password = '%s' LIMIT 1)''' % (form.email.data, form.password.data))
person_id = (person.fetchone()[0])
peeps = g.conn.execute('''(SELECT * FROM people WHERE email = '%s' AND password = '%s' LIMIT 1)''' % (form.email.data, form.password.data))
person_name = (peeps.fetchone()[9])
session['email'] = form.email.data
session['person_id'] = person_id
session['person_name'] = person_name
return render_template("dashboard.html", form=form)
else:
return render_template("signin.html", form=form,session=session)
@app.route("/signup", methods=["GET", "POST"])
def sign_up():
form = SignUpForm(csrf_enabled=False)
if request.method == "GET":
return render_template("signup.html", form=form)
elif request.method == "POST":
if not form.validate():
return render_template("signup.html", form=form)
else:
num = g.conn.execute('''SELECT COUNT(person_id)
FROM people''')
p_id = num.fetchone()[0]
p_id = p_id + 1
g.conn.execute('''INSERT INTO people
(person_id, name, email, race, current_city, pronouns, password)
VALUES ( (%s),(%s),(%s),(%s),(%s),(%s),(%s))''',
p_id, form.name.data, form.email.data, form.race.data,
form.city.data,form.pronouns.data, form.password.data)
return render_template("dashboard.html", form=form)
@app.route("/dashboard/<type>")
def dashboard(type):
moves = g.conn.execute('''(SELECT * FROM moves WHERE type = '%s')''' % type)
return render_template("feed.html", feed = moves)
@app.route("/new", methods=["GET", "POST"])
def moves():
form = CreateMoveForm(csrf_enabled=False)
if request.method == "GET":
return render_template("new.html", form=form)
elif request.method == "POST":
if not form.validate():
return render_template("new.html", form=form)
else:
num = g.conn.execute('''SELECT COUNT(move_id) FROM moves''')
m_id = num.fetchone()[0]
m_id = m_id + 1
today = datetime.date.today()
g.conn.execute('''INSERT INTO moves
(move_id, type, city, date, person_asked, move_text)
VALUES ( (%s),(%s),(%s),(%s),(%s),(%s))''',
m_id, form.move_type.data, form.city.data, today,
session['person_name'], form.text.data)
return redirect(url_for('feed'))
@app.route("/feed", methods=["GET", "POST"])
def feed():
current_city = g.conn.execute('''SELECT current_city from people WHERE people.person_id =(%s)''', session['person_id'])
city = current_city.fetchone()[0]
feed_data = g.conn.execute('''SELECT * from moves WHERE moves.city = (%s) OR moves.person_asked= (%s)''', city, session['person_name'])
return render_template("feed.html", feed=feed_data)
@app.route("/post/<move_id>", methods=["GET", "POST"])
def post(move_id):
form = CommentForm(csrf_enabled=False)
if request.method == "GET":
move = g.conn.execute('''SELECT * from moves WHERE moves.move_id = (%s)''', move_id)
fetch = move.fetchone()
person = fetch[4]
move = fetch[5]
comments_data = g.conn.execute('''SELECT * from comments WHERE comments.move_id = (%s)''', move_id)
return render_template("post.html", move=move, comments=comments_data, form=form, person=person)
elif request.method == "POST":
if not form.validate():
return render_template("post.html", move=move, form=form)
else:
num = g.conn.execute('''SELECT COUNT(comment_id) FROM comments''')
c_id = num.fetchone()[0]
c_id = c_id + 201
today = datetime.date.today()
g.conn.execute('''INSERT INTO comments
(comment_id, comment, comment_date, move_id, person)
VALUES ( (%s),(%s),(%s),(%s),(%s))''',
c_id, form.text.data, today, move_id, session['person_name'])
return redirect(url_for('feed'))
@app.route("/template")
def template():
return render_template("template.html")
if __name__ == "__main__":
app.run(host="0.0.0.0")
|