prompt (large_string, lengths 70–991k) | completion (large_string, lengths 0–1.02k) |
---|---|
<|file_name|>qgsalgorithmlayouttopdf.cpp<|end_file_name|><|fim▁begin|>/***************************************************************************
qgsalgorithmlayouttopdf.cpp
---------------------
begin : June 2020
copyright : (C) 2020 by Nyall Dawson
email : nyall dot dawson at gmail dot com
***************************************************************************/
/***************************************************************************
* *
* This program is free software; you can redistribute it and/or modify *
* it under the terms of the GNU General Public License as published by *
* the Free Software Foundation; either version 2 of the License, or *
* (at your option) any later version. *
* *
***************************************************************************/
#include "qgsalgorithmlayouttopdf.h"
#include "qgslayout.h"
#include "qgslayoutitemmap.h"
#include "qgsprintlayout.h"
#include "qgsprocessingoutputs.h"
#include "qgslayoutexporter.h"
///@cond PRIVATE
QString QgsLayoutToPdfAlgorithm::name() const
{
return QStringLiteral( "printlayouttopdf" );
}
QString QgsLayoutToPdfAlgorithm::displayName() const
{
return QObject::tr( "Export print layout as PDF" );
}
QStringList QgsLayoutToPdfAlgorithm::tags() const
{
return QObject::tr( "layout,composer,composition,save" ).split( ',' );
}
QString QgsLayoutToPdfAlgorithm::group() const
{
return QObject::tr( "Cartography" );
}
QString QgsLayoutToPdfAlgorithm::groupId() const
{
return QStringLiteral( "cartography" );
}
QString QgsLayoutToPdfAlgorithm::shortDescription() const
{
return QObject::tr( "Exports a print layout as a PDF." );
}
QString QgsLayoutToPdfAlgorithm::shortHelpString() const
{
return QObject::tr( "This algorithm outputs a print layout as a PDF file." );
}
void QgsLayoutToPdfAlgorithm::initAlgorithm( const QVariantMap & )
{
addParameter( new QgsProcessingParameterLayout( QStringLiteral( "LAYOUT" ), QObject::tr( "Print layout" ) ) );
std::unique_ptr< QgsProcessingParameterMultipleLayers > layersParam = std::make_unique< QgsProcessingParameterMultipleLayers>( QStringLiteral( "LAYERS" ), QObject::tr( "Map layers to assign to unlocked map item(s)" ), QgsProcessing::TypeMapLayer, QVariant(), true );
layersParam->setFlags( layersParam->flags() | QgsProcessingParameterDefinition::FlagAdvanced );
addParameter( layersParam.release() );
std::unique_ptr< QgsProcessingParameterNumber > dpiParam = std::make_unique< QgsProcessingParameterNumber >( QStringLiteral( "DPI" ), QObject::tr( "DPI (leave blank for default layout DPI)" ), QgsProcessingParameterNumber::Double, QVariant(), true, 0 );
dpiParam->setFlags( dpiParam->flags() | QgsProcessingParameterDefinition::FlagAdvanced );
addParameter( dpiParam.release() );
std::unique_ptr< QgsProcessingParameterBoolean > forceVectorParam = std::make_unique< QgsProcessingParameterBoolean >( QStringLiteral( "FORCE_VECTOR" ), QObject::tr( "Always export as vectors" ), false );
forceVectorParam->setFlags( forceVectorParam->flags() | QgsProcessingParameterDefinition::FlagAdvanced );
addParameter( forceVectorParam.release() );
std::unique_ptr< QgsProcessingParameterBoolean > appendGeorefParam = std::make_unique< QgsProcessingParameterBoolean >( QStringLiteral( "GEOREFERENCE" ), QObject::tr( "Append georeference information" ), true );
appendGeorefParam->setFlags( appendGeorefParam->flags() | QgsProcessingParameterDefinition::FlagAdvanced );
addParameter( appendGeorefParam.release() );
std::unique_ptr< QgsProcessingParameterBoolean > exportRDFParam = std::make_unique< QgsProcessingParameterBoolean >( QStringLiteral( "INCLUDE_METADATA" ), QObject::tr( "Export RDF metadata (title, author, etc.)" ), true );
exportRDFParam->setFlags( exportRDFParam->flags() | QgsProcessingParameterDefinition::FlagAdvanced );
addParameter( exportRDFParam.release() );
std::unique_ptr< QgsProcessingParameterBoolean > disableTiled = std::make_unique< QgsProcessingParameterBoolean >( QStringLiteral( "DISABLE_TILED" ), QObject::tr( "Disable tiled raster layer exports" ), false );
disableTiled->setFlags( disableTiled->flags() | QgsProcessingParameterDefinition::FlagAdvanced );
addParameter( disableTiled.release() );
std::unique_ptr< QgsProcessingParameterBoolean > simplify = std::make_unique< QgsProcessingParameterBoolean >( QStringLiteral( "SIMPLIFY" ), QObject::tr( "Simplify geometries to reduce output file size" ), true );
simplify->setFlags( simplify->flags() | QgsProcessingParameterDefinition::FlagAdvanced );
addParameter( simplify.release() );
QStringList textExportOptions
{
QObject::tr( "Always Export Text as Paths (Recommended)" ),
QObject::tr( "Always Export Text as Text Objects" )
};
std::unique_ptr< QgsProcessingParameterEnum > textFormat = std::make_unique< QgsProcessingParameterEnum >( QStringLiteral( "TEXT_FORMAT" ), QObject::tr( "Text export" ), textExportOptions, false, 0 );
textFormat->setFlags( textFormat->flags() | QgsProcessingParameterDefinition::FlagAdvanced );
addParameter( textFormat.release() );
std::unique_ptr< QgsProcessingParameterBoolean > layeredExport = std::make_unique< QgsProcessingParameterBoolean >( QStringLiteral( "SEPARATE_LAYERS" ), QObject::tr( "Export layers as separate PDF files" ), false );
layeredExport->setFlags( layeredExport->flags() | QgsProcessingParameterDefinition::FlagAdvanced );
addParameter( layeredExport.release() );
addParameter( new QgsProcessingParameterFileDestination( QStringLiteral( "OUTPUT" ), QObject::tr( "PDF file" ), QObject::tr( "PDF Format" ) + " (*.pdf *.PDF)" ) );
}
QgsProcessingAlgorithm::Flags QgsLayoutToPdfAlgorithm::flags() const
{
return QgsProcessingAlgorithm::flags() | FlagNoThreading;
}
QgsLayoutToPdfAlgorithm *QgsLayoutToPdfAlgorithm::createInstance() const
{
return new QgsLayoutToPdfAlgorithm();
}
QVariantMap QgsLayoutToPdfAlgorithm::processAlgorithm( const QVariantMap &parameters, QgsProcessingContext &context, QgsProcessingFeedback *feedback )
{
// this needs to be done in main thread, layouts are not thread safe
QgsPrintLayout *l = parameterAsLayout( parameters, QStringLiteral( "LAYOUT" ), context );
if ( !l )
throw QgsProcessingException( QObject::tr( "Cannot find layout with name \"%1\"" ).arg( parameters.value( QStringLiteral( "LAYOUT" ) ).toString() ) );
std::unique_ptr< QgsPrintLayout > layout( l->clone() );
const QList< QgsMapLayer * > layers = parameterAsLayerList( parameters, QStringLiteral( "LAYERS" ), context );
if ( layers.size() > 0 )
{
const QList<QGraphicsItem *> items = layout->items();
for ( QGraphicsItem *graphicsItem : items )
{
QgsLayoutItem *item = dynamic_cast<QgsLayoutItem *>( graphicsItem );
QgsLayoutItemMap *map = dynamic_cast<QgsLayoutItemMap *>( item );
if ( map && !map->followVisibilityPreset() && !map->keepLayerSet() )
{
map->setKeepLayerSet( true );
map->setLayers( layers );
}
}
}
const QString dest = parameterAsFileOutput( parameters, QStringLiteral( "OUTPUT" ), context );
QgsLayoutExporter exporter( layout.get() );
QgsLayoutExporter::PdfExportSettings settings;
if ( parameters.value( QStringLiteral( "DPI" ) ).isValid() )
{
settings.dpi = parameterAsDouble( parameters, QStringLiteral( "DPI" ), context );
}
settings.forceVectorOutput = parameterAsBool( parameters, QStringLiteral( "FORCE_VECTOR" ), context );
settings.appendGeoreference = parameterAsBool( parameters, QStringLiteral( "GEOREFERENCE" ), context );
settings.exportMetadata = parameterAsBool( parameters, QStringLiteral( "INCLUDE_METADATA" ), context );
settings.simplifyGeometries = parameterAsBool( parameters, QStringLiteral( "SIMPLIFY" ), context );
settings.textRenderFormat = parameterAsEnum( parameters, QStringLiteral( "TEXT_FORMAT" ), context ) == 0 ? QgsRenderContext::TextFormatAlwaysOutlines : QgsRenderContext::TextFormatAlwaysText;
settings.exportLayersAsSeperateFiles = parameterAsBool( parameters, QStringLiteral( "SEPARATE_LAYERS" ), context ); //#spellok
if ( parameterAsBool( parameters, QStringLiteral( "DISABLE_TILED" ), context ) )
settings.flags = settings.flags | QgsLayoutRenderContext::FlagDisableTiledRasterLayerRenders;
else
settings.flags = settings.flags & ~QgsLayoutRenderContext::FlagDisableTiledRasterLayerRenders;
switch ( exporter.exportToPdf( dest, settings ) )
{
case QgsLayoutExporter::Success:
{
feedback->pushInfo( QObject::tr( "Successfully exported layout to %1" ).arg( QDir::toNativeSeparators( dest ) ) );
break;
}
case QgsLayoutExporter::FileError:
throw QgsProcessingException( QObject::tr( "Cannot write to %1.\n\nThis file may be open in another application." ).arg( QDir::toNativeSeparators( dest ) ) );
case QgsLayoutExporter::PrintError:
throw QgsProcessingException( QObject::tr( "Could not create print device." ) );
case QgsLayoutExporter::MemoryError:
throw QgsProcessingException( QObject::tr( "Exporting the PDF "
"resulted in a memory overflow.\n\n"
"Please try a lower resolution or a smaller paper size." ) );
case QgsLayoutExporter::SvgLayerError:
case QgsLayoutExporter::IteratorError:
case QgsLayoutExporter::Canceled:<|fim▁hole|>
feedback->setProgress( 100 );
QVariantMap outputs;
outputs.insert( QStringLiteral( "OUTPUT" ), dest );
return outputs;
}
///@endcond<|fim▁end|>
|
// no meaning for PDF exports, will not be encountered
break;
}
|
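Each row above pairs a fill-in-the-middle (FIM) prompt with its completion: the prompt wraps a source file in <|fim▁begin|>/<|fim▁end|> sentinels with <|fim▁hole|> marking the excised span, and the completion column holds the excised text. The following minimal sketch stitches a row back into the original file; only the sentinel strings are taken from the rows themselves, the helper name is illustrative and not part of the dataset.

# Minimal sketch: rebuild the original source file from one (prompt, completion)
# row of this dataset. Only the sentinel strings come from the data; the
# function itself is illustrative.
def reconstruct(prompt: str, completion: str) -> str:
    # Drop the file-name header and the begin sentinel.
    body = prompt.split("<|fim▁begin|>", 1)[1]
    # Strip the end sentinel and splice the completion into the hole.
    return body.replace("<|fim▁end|>", "").replace("<|fim▁hole|>", completion)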
<|file_name|>urls.py<|end_file_name|><|fim▁begin|>from django.conf.urls import url
from api import views
urlpatterns = [
url(r'stations/$', views.get_stations, name='api_stations'),
url(r'entry/(?P<station_id>\d+)/$', views.make_entry, name='api_entry'),
url(r'new/$', views.add_station, name='api_add_station'),
# Booking api
url(r'booking/(?P<resident_id>\d+)/$', views.booking, name='api_booking'),
url(r'book_profile/$', views.book_profile, name='api_book_profile'),
url(r'book_phone/$', views.book_phone, name='api_book_phone'),
url(r'book_code/$', views.book_code, name='api_book_code'),
# Insure api
url(r'insure/$', views.insure, name='api_insure'),
# Drugshare api
url(r'register_pharm/$', views.register_pharm, name='api_register_pharm'),
url(r'make_token/(?P<device_id>\d+)/$',
views.make_token, name='api_make_token'),
url(r'add_device/$', views.add_device, name='api_add_device'),
url(r'get_profile/$', views.get_profile, name='api_get_profile'),
url(r'update_pharm/(?P<device_id>\d+)/$',
views.update_pharm, name='api_update_pharm'),
url(r'add_outlet/(?P<device_id>\d+)/$',
views.add_outlet, name='api_add_outlet'),
url(r'delete_outlet/(?P<id>\d+)/$',
views.delete_outlet, name='api_delete_outlet'),
url(r'add_drug/$', views.add_drug, name='api_add_drug'),
url(r'edit_drug/(?P<id>\d+)/$', views.edit_drug, name='api_edit_drug'),
url(r'search_drug/(?P<device_id>\d+)/$',
views.search_drug, name='api_search_drug'),
url(r'wish_drug/(?P<device_id>\d+)/$',
views.wishlist_drug, name='api_wishlist_drug'),
url(r'stock_drug/(?P<device_id>\d+)/$',
views.stock_drug, name='api_stock_drug'),
url(r'remove_drug/(?P<id>\d+)/$',
views.remove_drug, name='api_remove_drug'),
url(r'recent_drugs/(?P<count>\d+)/$',
views.recent_drugs, name='api_recent_drugs'),
url(r'request_drug/(?P<drug_id>\d+)/$',
views.request_drug, name='api_request_drug'),
url(r'pending/(?P<device_id>\d+)/$',<|fim▁hole|> views.pending_requests, name='api_pending_requests'),
url(r'accept/(?P<request_id>\d+)/$', views.accept, name='api_accept'),
url(r'reject/(?P<request_id>\d+)/$', views.reject, name='api_reject'),
url(r'drug_list/$', views.list_generic_drugs, name='api_drugs_list'),
url(r'feedback/(?P<id>\d+)/$', views.feedback, name='api_feedback'),
]<|fim▁end|>
| |
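The routes above are all named, so callers can resolve them with Django's reverse() instead of hard-coding paths. A hypothetical lookup follows; it assumes this urls.py is reachable from the project's root URLconf, and on the Django 1.x that these url() patterns target, the import lives in django.core.urlresolvers.

# Hypothetical usage of the named routes defined above.
from django.core.urlresolvers import reverse  # django.urls.reverse on Django >= 1.10

token_url = reverse('api_make_token', kwargs={'device_id': 42})
# -> '/make_token/42/' plus whatever prefix the including URLconf adds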
<|file_name|>resolved_type_def_function.rs<|end_file_name|><|fim▁begin|>#![allow(
dead_code,
non_snake_case,
non_camel_case_types,
non_upper_case_globals
)]
pub type FuncType = ::std::option::Option<unsafe extern "C" fn()>;
extern "C" {
pub fn Func();<|fim▁hole|><|fim▁end|>
|
}
|
<|file_name|>mrtrix.py<|end_file_name|><|fim▁begin|>##
# Copyright 2009-2020 Ghent University
#
# This file is part of EasyBuild,
# originally created by the HPC team of Ghent University (http://ugent.be/hpc/en),
# with support of Ghent University (http://ugent.be/hpc),
# the Flemish Supercomputer Centre (VSC) (https://www.vscentrum.be),
# Flemish Research Foundation (FWO) (http://www.fwo.be/en)
# and the Department of Economy, Science and Innovation (EWI) (http://www.ewi-vlaanderen.be/en).
#
# https://github.com/easybuilders/easybuild
#
# EasyBuild is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation v2.
#
# EasyBuild is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with EasyBuild. If not, see <http://www.gnu.org/licenses/>.
##
"""
EasyBuild support for building and installing MRtrix, implemented as an easyblock
"""
import glob
import os
from distutils.version import LooseVersion
import easybuild.tools.environment as env
from easybuild.framework.easyblock import EasyBlock
from easybuild.tools.filetools import copy, symlink
from easybuild.tools.run import run_cmd
from easybuild.tools.systemtools import get_shared_lib_ext
class EB_MRtrix(EasyBlock):
"""Support for building/installing MRtrix."""
def __init__(self, *args, **kwargs):
"""Initialize easyblock, enable build-in-installdir based on version."""
super(EB_MRtrix, self).__init__(*args, **kwargs)
if LooseVersion(self.version) >= LooseVersion('0.3') and LooseVersion(self.version) < LooseVersion('0.3.14'):
self.build_in_installdir = True
self.log.debug("Enabled build-in-installdir for version %s", self.version)
def extract_step(self):
"""Extract MRtrix sources."""
# strip off 'mrtrix*' part to avoid having everything in a 'mrtrix*' subdirectory
if LooseVersion(self.version) >= LooseVersion('0.3'):
self.cfg.update('unpack_options', '--strip-components=1')
super(EB_MRtrix, self).extract_step()
def configure_step(self):
"""No configuration step for MRtrix."""
if LooseVersion(self.version) >= LooseVersion('0.3'):
if LooseVersion(self.version) < LooseVersion('0.3.13'):
env.setvar('LD', "%s LDFLAGS OBJECTS -o EXECUTABLE" % os.getenv('CXX'))
env.setvar('LDLIB', "%s -shared LDLIB_FLAGS OBJECTS -o LIB" % os.getenv('CXX'))
env.setvar('QMAKE_CXX', os.getenv('CXX'))
cmd = "python configure -verbose"
run_cmd(cmd, log_all=True, simple=True, log_ok=True)
def build_step(self):
"""Custom build procedure for MRtrix."""
cmd = "python build -verbose"
run_cmd(cmd, log_all=True, simple=True, log_ok=True)
def install_step(self):
"""Custom install procedure for MRtrix."""
if LooseVersion(self.version) < LooseVersion('0.3'):
cmd = "python build -verbose install=%s linkto=" % self.installdir
run_cmd(cmd, log_all=True, simple=True, log_ok=True)
elif LooseVersion(self.version) >= LooseVersion('3.0'):
copy(os.path.join(self.builddir, 'bin'), self.installdir)
copy(os.path.join(self.builddir, 'lib'), self.installdir)
elif LooseVersion(self.version) >= LooseVersion('0.3.14'):
copy(glob.glob(os.path.join(self.builddir, 'release', '*')), self.installdir)
copy(os.path.join(self.builddir, 'scripts'), self.installdir)
# some scripts expect 'release/bin' to be there, so we put a symlink in place
symlink(self.installdir, os.path.join(self.installdir, 'release'))
def make_module_req_guess(self):
"""
Return list of subdirectories to consider to update environment variables;
also consider 'scripts' subdirectory for $PATH
"""
guesses = super(EB_MRtrix, self).make_module_req_guess()
guesses['PATH'].append('scripts')
if LooseVersion(self.version) >= LooseVersion('3.0'):
guesses.setdefault('PYTHONPATH', []).append('lib')
return guesses
def sanity_check_step(self):
"""Custom sanity check for MRtrix."""
shlib_ext = get_shared_lib_ext()
if LooseVersion(self.version) >= LooseVersion('0.3'):
libso = 'libmrtrix.%s' % shlib_ext
else:
libso = 'libmrtrix-%s.%s' % ('_'.join(self.version.split('.')), shlib_ext)
custom_paths = {
'files': [os.path.join('lib', libso)],
'dirs': ['bin'],
}
custom_commands = []
if LooseVersion(self.version) >= LooseVersion('3.0'):
custom_commands.append("python -c 'import mrtrix3'")
<|fim▁hole|><|fim▁end|>
|
super(EB_MRtrix, self).sanity_check_step(custom_paths=custom_paths, custom_commands=custom_commands)
|
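Easyblocks like EB_MRtrix are matched by package name, so a hypothetical minimal easyconfig such as the one below would be driven through the extract/configure/build/install steps above. Every value here is illustrative, not taken from a real easyconfig.

# Hypothetical minimal easyconfig handled by the EB_MRtrix easyblock above.
# All values are illustrative.
name = 'MRtrix'
version = '0.3.15'   # >= 0.3.14, so install_step copies from the 'release' dir
homepage = 'http://www.mrtrix.org'
description = "MRtrix provides a set of tools to perform diffusion MRI analyses"
toolchain = {'name': 'foss', 'version': '2020a'}
source_urls = ['https://github.com/MRtrix3/mrtrix3/archive/']
sources = ['%(version)s.tar.gz']
moduleclass = 'bio'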
<|file_name|>events.js<|end_file_name|><|fim▁begin|>/* --------------------------------------------------------------------------------------------
* Copyright (c) Microsoft Corporation. All rights reserved.
* Licensed under the MIT License. See License.txt in the project root for license information.
* ------------------------------------------------------------------------------------------ */
'use strict';
/**
* Represents a type which can release resources, such
* as event listening or a timer.
*/
var Disposable = (function () {
function Disposable(callOnDispose) {
this._callOnDispose = callOnDispose;
}
/**
* Combine many disposable-likes into one. Use this method
* when having objects with a dispose function which are not
* instances of Disposable.
*
* @return Returns a new disposable which, upon dispose, will
* dispose all provides disposable-likes.
*/
Disposable.from = function () {
var disposables = [];
for (var _i = 0; _i < arguments.length; _i++) {
disposables[_i - 0] = arguments[_i];
}
return new Disposable(function () {
if (disposables) {
for (var _i = 0; _i < disposables.length; _i++) {
var disposable = disposables[_i];
disposable.dispose();
}
disposables = undefined;
}
});
};
/**
* Dispose this object.
*/
Disposable.prototype.dispose = function () {
if (typeof this._callOnDispose === 'function') {
this._callOnDispose();
this._callOnDispose = undefined;
}
};
return Disposable;
})();
exports.Disposable = Disposable;
var CallbackList = (function () {
function CallbackList() {
}
CallbackList.prototype.add = function (callback, context, bucket) {
var _this = this;
if (context === void 0) { context = null; }
if (!this._callbacks) {
this._callbacks = [];
this._contexts = [];
}
this._callbacks.push(callback);
this._contexts.push(context);
if (Array.isArray(bucket)) {
bucket.push({ dispose: function () { return _this.remove(callback, context); } });
}
};
CallbackList.prototype.remove = function (callback, context) {
if (context === void 0) { context = null; }
if (!this._callbacks) {
return;
}
var foundCallbackWithDifferentContext = false;
for (var i = 0, len = this._callbacks.length; i < len; i++) {
if (this._callbacks[i] === callback) {
if (this._contexts[i] === context) {
// callback & context match => remove it
this._callbacks.splice(i, 1);
this._contexts.splice(i, 1);
return;
}
else {
foundCallbackWithDifferentContext = true;
}
}
}
if (foundCallbackWithDifferentContext) {
throw new Error('When adding a listener with a context, you should remove it with the same context');
}
};
CallbackList.prototype.invoke = function () {
var args = [];
for (var _i = 0; _i < arguments.length; _i++) {
args[_i - 0] = arguments[_i];
}
if (!this._callbacks) {
return;
}
var ret = [], callbacks = this._callbacks.slice(0), contexts = this._contexts.slice(0);
for (var i = 0, len = callbacks.length; i < len; i++) {
try {
ret.push(callbacks[i].apply(contexts[i], args));
}
catch (e) {
console.error(e);
}
}
return ret;
};
CallbackList.prototype.isEmpty = function () {
return !this._callbacks || this._callbacks.length === 0;
};
CallbackList.prototype.dispose = function () {
this._callbacks = undefined;
this._contexts = undefined;
};
return CallbackList;
})();
exports.CallbackList = CallbackList;
var Emitter = (function () {
function Emitter(_options) {
this._options = _options;
}
Object.defineProperty(Emitter.prototype, "event", {
/**
* For the public to allow to subscribe
* to events from this Emitter<|fim▁hole|> var _this = this;
if (!this._event) {
this._event = function (listener, thisArgs, disposables) {
if (!_this._callbacks) {
_this._callbacks = new CallbackList();
}
if (_this._options && _this._options.onFirstListenerAdd && _this._callbacks.isEmpty()) {
_this._options.onFirstListenerAdd(_this);
}
_this._callbacks.add(listener, thisArgs);
var result;
result = {
dispose: function () {
_this._callbacks.remove(listener, thisArgs);
result.dispose = Emitter._noop;
if (_this._options && _this._options.onLastListenerRemove && _this._callbacks.isEmpty()) {
_this._options.onLastListenerRemove(_this);
}
}
};
if (Array.isArray(disposables)) {
disposables.push(result);
}
return result;
};
}
return this._event;
},
enumerable: true,
configurable: true
});
/**
* To be kept private to fire an event to
* subscribers
*/
Emitter.prototype.fire = function (event) {
if (this._callbacks) {
this._callbacks.invoke.call(this._callbacks, event);
}
};
Emitter.prototype.dispose = function () {
if (this._callbacks) {
this._callbacks.dispose();
this._callbacks = undefined;
}
};
Emitter._noop = function () { };
return Emitter;
})();
exports.Emitter = Emitter;<|fim▁end|>
|
*/
get: function () {
|
<|file_name|>ec2_asg.py<|end_file_name|><|fim▁begin|>#!/usr/bin/python
# This file is part of Ansible
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
ANSIBLE_METADATA = {'status': ['stableinterface'],
'supported_by': 'committer',
'version': '1.0'}
DOCUMENTATION = """
---
module: ec2_asg
short_description: Create or delete AWS Autoscaling Groups
description:
- Can create or delete AWS Autoscaling Groups
- Works with the ec2_lc module to manage Launch Configurations
version_added: "1.6"
author: "Gareth Rushgrove (@garethr)"
options:
state:
description:
- register or deregister the instance
required: false
choices: ['present', 'absent']
default: present
name:
description:
- Unique name for group to be created or deleted
required: true
load_balancers:
description:
- List of ELB names to use for the group
required: false
availability_zones:
description:
- List of availability zone names in which to create the group. Defaults to all the availability zones in the region if vpc_zone_identifier is not set.
required: false
launch_config_name:
description:
- Name of the Launch configuration to use for the group. See the ec2_lc module for managing these.
required: true
min_size:
description:
- Minimum number of instances in group, if unspecified then the current group value will be used.
required: false
max_size:
description:
- Maximum number of instances in group, if unspecified then the current group value will be used.
required: false
placement_group:
description:
- Physical location of your cluster placement group created in Amazon EC2.
required: false
version_added: "2.3"
default: None
desired_capacity:
description:
- Desired number of instances in group, if unspecified then the current group value will be used.
required: false
replace_all_instances:
description:
- In a rolling fashion, replace all instances with an old launch configuration with one from the current launch configuration.
required: false
version_added: "1.8"
default: False
replace_batch_size:
description:
- Number of instances you'd like to replace at a time. Used with replace_all_instances.
required: false
version_added: "1.8"
default: 1
replace_instances:
description:
- List of instance_ids belonging to the named ASG that you would like to terminate and be replaced with instances matching the current launch configuration.
required: false
version_added: "1.8"
default: None
lc_check:
description:
- Check to make sure instances that are being replaced with replace_instances do not already have the current launch_config.
required: false
version_added: "1.8"
default: True
vpc_zone_identifier:
description:
- List of VPC subnets to use
required: false
default: None
tags:
description:
- A list of tags to add to the Auto Scale Group. Optional key is 'propagate_at_launch', which defaults to true.
required: false
default: None
version_added: "1.7"
health_check_period:
description:
- Length of time in seconds after a new EC2 instance comes into service that Auto Scaling starts checking its health.
required: false
    default: 300 seconds
version_added: "1.7"
health_check_type:
description:
- The service you want the health status from, Amazon EC2 or Elastic Load Balancer.
required: false
default: EC2
version_added: "1.7"
choices: ['EC2', 'ELB']
default_cooldown:
description:
- The number of seconds after a scaling activity completes before another can begin.
required: false
default: 300 seconds
version_added: "2.0"
wait_timeout:
description:
      - How long to wait for instances to become viable when replaced. Used in conjunction with the replace_instances option.
default: 300
version_added: "1.8"
wait_for_instances:
description:
- Wait for the ASG instances to be in a ready state before exiting. If instances are behind an ELB, it will wait until the ELB determines all instances have a lifecycle_state of "InService" and a health_status of "Healthy".
version_added: "1.9"
default: yes
required: False
termination_policies:
description:
- An ordered list of criteria used for selecting instances to be removed from the Auto Scaling group when reducing capacity.
      - For 'Default', when used to create a new autoscaling group, the "Default" value is used. When used to change an existing autoscaling group, the current termination policies are maintained.
required: false
default: Default
choices: ['OldestInstance', 'NewestInstance', 'OldestLaunchConfiguration', 'ClosestToNextInstanceHour', 'Default']
version_added: "2.0"
notification_topic:
description:
- A SNS topic ARN to send auto scaling notifications to.
default: None
required: false
version_added: "2.2"
notification_types:
description:
- A list of auto scaling events to trigger notifications on.
default: ['autoscaling:EC2_INSTANCE_LAUNCH', 'autoscaling:EC2_INSTANCE_LAUNCH_ERROR', 'autoscaling:EC2_INSTANCE_TERMINATE', 'autoscaling:EC2_INSTANCE_TERMINATE_ERROR']
required: false
version_added: "2.2"
suspend_processes:
description:
- A list of scaling processes to suspend.
required: False
default: []
choices: ['Launch', 'Terminate', 'HealthCheck', 'ReplaceUnhealthy', 'AZRebalance', 'AlarmNotification', 'ScheduledActions', 'AddToLoadBalancer']
version_added: "2.3"
extends_documentation_fragment:
- aws
- ec2
"""
EXAMPLES = '''
# Basic configuration
- ec2_asg:
name: special
load_balancers: [ 'lb1', 'lb2' ]
availability_zones: [ 'eu-west-1a', 'eu-west-1b' ]
launch_config_name: 'lc-1'
min_size: 1
max_size: 10
desired_capacity: 5
vpc_zone_identifier: [ 'subnet-abcd1234', 'subnet-1a2b3c4d' ]
tags:
- environment: production
propagate_at_launch: no
# Rolling ASG Updates
Below is an example of how to assign a new launch config to an ASG and terminate old instances.
All instances in "myasg" that do not have the launch configuration named "my_new_lc" will be terminated in
a rolling fashion with instances using the current launch configuration, "my_new_lc".
This could also be considered a rolling deploy of a pre-baked AMI.
If this is a newly created group, the instances will not be replaced since all instances
will have the current launch configuration.
- name: create launch config
ec2_lc:
name: my_new_lc
image_id: ami-lkajsf
key_name: mykey
region: us-east-1
security_groups: sg-23423
instance_type: m1.small
assign_public_ip: yes
- ec2_asg:
name: myasg
launch_config_name: my_new_lc
health_check_period: 60
health_check_type: ELB
replace_all_instances: yes
min_size: 5
max_size: 5
desired_capacity: 5
region: us-east-1
To only replace a couple of instances instead of all of them, supply a list
to "replace_instances":
- ec2_asg:
name: myasg
launch_config_name: my_new_lc
health_check_period: 60
health_check_type: ELB
replace_instances:
- i-b345231
- i-24c2931
min_size: 5
max_size: 5
desired_capacity: 5
region: us-east-1
'''
import time
import logging as log
import traceback
from ansible.module_utils.basic import *
from ansible.module_utils.ec2 import *
log.getLogger('boto').setLevel(log.CRITICAL)
#log.basicConfig(filename='/tmp/ansible_ec2_asg.log',level=log.DEBUG, format='%(asctime)s: %(message)s', datefmt='%m/%d/%Y %I:%M:%S %p')
try:
import boto.ec2.autoscale
from boto.ec2.autoscale import AutoScaleConnection, AutoScalingGroup, Tag
from boto.exception import BotoServerError
HAS_BOTO = True
except ImportError:
HAS_BOTO = False
ASG_ATTRIBUTES = ('availability_zones', 'default_cooldown', 'desired_capacity',
'health_check_period', 'health_check_type', 'launch_config_name',
'load_balancers', 'max_size', 'min_size', 'name', 'placement_group',
'termination_policies', 'vpc_zone_identifier')
INSTANCE_ATTRIBUTES = ('instance_id', 'health_status', 'lifecycle_state', 'launch_config_name')
def enforce_required_arguments(module):
''' As many arguments are not required for autoscale group deletion
they cannot be mandatory arguments for the module, so we enforce
them here '''
missing_args = []
for arg in ('min_size', 'max_size', 'launch_config_name'):
if module.params[arg] is None:
missing_args.append(arg)
if missing_args:
module.fail_json(msg="Missing required arguments for autoscaling group create/update: %s" % ",".join(missing_args))
def get_properties(autoscaling_group):
properties = dict((attr, getattr(autoscaling_group, attr)) for attr in ASG_ATTRIBUTES)
# Ugly hack to make this JSON-serializable. We take a list of boto Tag
# objects and replace them with a dict-representation. Needed because the
# tags are included in ansible's return value (which is jsonified)
if 'tags' in properties and isinstance(properties['tags'], list):
serializable_tags = {}
for tag in properties['tags']:
serializable_tags[tag.key] = [tag.value, tag.propagate_at_launch]
properties['tags'] = serializable_tags
properties['healthy_instances'] = 0
properties['in_service_instances'] = 0
properties['unhealthy_instances'] = 0
properties['pending_instances'] = 0
properties['viable_instances'] = 0
properties['terminating_instances'] = 0
instance_facts = {}
if autoscaling_group.instances:
properties['instances'] = [i.instance_id for i in autoscaling_group.instances]
for i in autoscaling_group.instances:
instance_facts[i.instance_id] = {'health_status': i.health_status,
'lifecycle_state': i.lifecycle_state,
'launch_config_name': i.launch_config_name }
if i.health_status == 'Healthy' and i.lifecycle_state == 'InService':
properties['viable_instances'] += 1
if i.health_status == 'Healthy':
properties['healthy_instances'] += 1
else:
properties['unhealthy_instances'] += 1
if i.lifecycle_state == 'InService':
properties['in_service_instances'] += 1
if i.lifecycle_state == 'Terminating':
properties['terminating_instances'] += 1
if i.lifecycle_state == 'Pending':
properties['pending_instances'] += 1
properties['instance_facts'] = instance_facts
properties['load_balancers'] = autoscaling_group.load_balancers
if getattr(autoscaling_group, "tags", None):
properties['tags'] = dict((t.key, t.value) for t in autoscaling_group.tags)
return properties
def elb_dreg(asg_connection, module, group_name, instance_id):
region, ec2_url, aws_connect_params = get_aws_connection_info(module)
as_group = asg_connection.get_all_groups(names=[group_name])[0]
wait_timeout = module.params.get('wait_timeout')
props = get_properties(as_group)
count = 1
if as_group.load_balancers and as_group.health_check_type == 'ELB':
try:
elb_connection = connect_to_aws(boto.ec2.elb, region, **aws_connect_params)
except boto.exception.NoAuthHandlerFound as e:
module.fail_json(msg=str(e))
else:
return
for lb in as_group.load_balancers:
elb_connection.deregister_instances(lb, instance_id)
log.debug("De-registering {0} from ELB {1}".format(instance_id, lb))
wait_timeout = time.time() + wait_timeout
while wait_timeout > time.time() and count > 0:
count = 0
for lb in as_group.load_balancers:
lb_instances = elb_connection.describe_instance_health(lb)
for i in lb_instances:
if i.instance_id == instance_id and i.state == "InService":
count += 1
log.debug("{0}: {1}, {2}".format(i.instance_id, i.state, i.description))
time.sleep(10)
if wait_timeout <= time.time():
# waiting took too long
module.fail_json(msg = "Waited too long for instance to deregister. {0}".format(time.asctime()))
def elb_healthy(asg_connection, elb_connection, module, group_name):
healthy_instances = set()
as_group = asg_connection.get_all_groups(names=[group_name])[0]
props = get_properties(as_group)
# get healthy, inservice instances from ASG
instances = []
for instance, settings in props['instance_facts'].items():
if settings['lifecycle_state'] == 'InService' and settings['health_status'] == 'Healthy':
instances.append(instance)
log.debug("ASG considers the following instances InService and Healthy: {0}".format(instances))
log.debug("ELB instance status:")
for lb in as_group.load_balancers:
# we catch a race condition that sometimes happens if the instance exists in the ASG
        # but has not yet shown up in the ELB
try:
lb_instances = elb_connection.describe_instance_health(lb, instances=instances)
except boto.exception.BotoServerError as e:
if e.error_code == 'InvalidInstance':
return None
module.fail_json(msg=str(e))
for i in lb_instances:
if i.state == "InService":
healthy_instances.add(i.instance_id)
log.debug("{0}: {1}".format(i.instance_id, i.state))
return len(healthy_instances)
def wait_for_elb(asg_connection, module, group_name):
region, ec2_url, aws_connect_params = get_aws_connection_info(module)
wait_timeout = module.params.get('wait_timeout')
# if the health_check_type is ELB, we want to query the ELBs directly for instance
    # status, so as to avoid the health check grace period that is granted to ASG instances
as_group = asg_connection.get_all_groups(names=[group_name])[0]
if as_group.load_balancers and as_group.health_check_type == 'ELB':
log.debug("Waiting for ELB to consider instances healthy.")
try:
elb_connection = connect_to_aws(boto.ec2.elb, region, **aws_connect_params)
except boto.exception.NoAuthHandlerFound as e:
module.fail_json(msg=str(e))
wait_timeout = time.time() + wait_timeout
healthy_instances = elb_healthy(asg_connection, elb_connection, module, group_name)
while healthy_instances < as_group.min_size and wait_timeout > time.time():
healthy_instances = elb_healthy(asg_connection, elb_connection, module, group_name)
log.debug("ELB thinks {0} instances are healthy.".format(healthy_instances))
time.sleep(10)
if wait_timeout <= time.time():
# waiting took too long
module.fail_json(msg = "Waited too long for ELB instances to be healthy. %s" % time.asctime())
log.debug("Waiting complete. ELB thinks {0} instances are healthy.".format(healthy_instances))
def suspend_processes(as_group, module):
suspend_processes = set(module.params.get('suspend_processes'))
try:
suspended_processes = set([p.process_name for p in as_group.suspended_processes])
except AttributeError:
# New ASG being created, no suspended_processes defined yet
suspended_processes = set()
if suspend_processes == suspended_processes:
return False
resume_processes = list(suspended_processes - suspend_processes)
if resume_processes:
as_group.resume_processes(resume_processes)
if suspend_processes:
as_group.suspend_processes(list(suspend_processes))
return True
def create_autoscaling_group(connection, module):
group_name = module.params.get('name')
load_balancers = module.params['load_balancers']
availability_zones = module.params['availability_zones']
launch_config_name = module.params.get('launch_config_name')
min_size = module.params['min_size']
max_size = module.params['max_size']
placement_group = module.params.get('placement_group')
desired_capacity = module.params.get('desired_capacity')
vpc_zone_identifier = module.params.get('vpc_zone_identifier')
set_tags = module.params.get('tags')
health_check_period = module.params.get('health_check_period')
health_check_type = module.params.get('health_check_type')
default_cooldown = module.params.get('default_cooldown')
wait_for_instances = module.params.get('wait_for_instances')
as_groups = connection.get_all_groups(names=[group_name])
wait_timeout = module.params.get('wait_timeout')
termination_policies = module.params.get('termination_policies')
notification_topic = module.params.get('notification_topic')
notification_types = module.params.get('notification_types')
if not vpc_zone_identifier and not availability_zones:
region, ec2_url, aws_connect_params = get_aws_connection_info(module)
try:
ec2_connection = connect_to_aws(boto.ec2, region, **aws_connect_params)
except (boto.exception.NoAuthHandlerFound, AnsibleAWSError) as e:
module.fail_json(msg=str(e))
elif vpc_zone_identifier:
vpc_zone_identifier = ','.join(vpc_zone_identifier)
asg_tags = []
for tag in set_tags:
for k,v in tag.items():
if k !='propagate_at_launch':
asg_tags.append(Tag(key=k,
value=v,
propagate_at_launch=bool(tag.get('propagate_at_launch', True)),
resource_id=group_name))
if not as_groups:
if not vpc_zone_identifier and not availability_zones:
availability_zones = module.params['availability_zones'] = [zone.name for zone in ec2_connection.get_all_zones()]
enforce_required_arguments(module)
launch_configs = connection.get_all_launch_configurations(names=[launch_config_name])
if len(launch_configs) == 0:
module.fail_json(msg="No launch config found with name %s" % launch_config_name)
ag = AutoScalingGroup(
group_name=group_name,
load_balancers=load_balancers,
availability_zones=availability_zones,
launch_config=launch_configs[0],
min_size=min_size,
max_size=max_size,
placement_group=placement_group,
desired_capacity=desired_capacity,
vpc_zone_identifier=vpc_zone_identifier,
connection=connection,
tags=asg_tags,
health_check_period=health_check_period,
health_check_type=health_check_type,
default_cooldown=default_cooldown,
termination_policies=termination_policies)
try:
connection.create_auto_scaling_group(ag)
suspend_processes(ag, module)
if wait_for_instances:
wait_for_new_inst(module, connection, group_name, wait_timeout, desired_capacity, 'viable_instances')
wait_for_elb(connection, module, group_name)
if notification_topic:
ag.put_notification_configuration(notification_topic, notification_types)
as_group = connection.get_all_groups(names=[group_name])[0]
asg_properties = get_properties(as_group)
changed = True
return(changed, asg_properties)
except BotoServerError as e:
module.fail_json(msg="Failed to create Autoscaling Group: %s" % str(e), exception=traceback.format_exc(e))
else:
as_group = as_groups[0]
changed = False
if suspend_processes(as_group, module):
changed = True
for attr in ASG_ATTRIBUTES:
if module.params.get(attr, None) is not None:
module_attr = module.params.get(attr)
if attr == 'vpc_zone_identifier':
module_attr = ','.join(module_attr)
group_attr = getattr(as_group, attr)
# we do this because AWS and the module may return the same list
# sorted differently
if attr != 'termination_policies':
try:
module_attr.sort()
except:
pass
try:
group_attr.sort()
except:
pass
if group_attr != module_attr:
changed = True
setattr(as_group, attr, module_attr)
if len(set_tags) > 0:
have_tags = {}
want_tags = {}
for tag in asg_tags:
want_tags[tag.key] = [tag.value, tag.propagate_at_launch]
dead_tags = []
for tag in as_group.tags:
have_tags[tag.key] = [tag.value, tag.propagate_at_launch]
if tag.key not in want_tags:
changed = True
dead_tags.append(tag)
if dead_tags != []:
connection.delete_tags(dead_tags)
if have_tags != want_tags:
changed = True
connection.create_or_update_tags(asg_tags)
# handle loadbalancers separately because None != []
load_balancers = module.params.get('load_balancers') or []
if load_balancers and as_group.load_balancers != load_balancers:
changed = True
as_group.load_balancers = module.params.get('load_balancers')
if changed:
try:
as_group.update()
except BotoServerError as e:
module.fail_json(msg="Failed to update Autoscaling Group: %s" % str(e), exception=traceback.format_exc(e))
if notification_topic:
try:
as_group.put_notification_configuration(notification_topic, notification_types)
except BotoServerError as e:
module.fail_json(msg="Failed to update Autoscaling Group notifications: %s" % str(e), exception=traceback.format_exc(e))
if wait_for_instances:
wait_for_new_inst(module, connection, group_name, wait_timeout, desired_capacity, 'viable_instances')
wait_for_elb(connection, module, group_name)
try:
as_group = connection.get_all_groups(names=[group_name])[0]
asg_properties = get_properties(as_group)
except BotoServerError as e:
module.fail_json(msg="Failed to read existing Autoscaling Groups: %s" % str(e), exception=traceback.format_exc(e))
return(changed, asg_properties)<|fim▁hole|>def delete_autoscaling_group(connection, module):
group_name = module.params.get('name')
notification_topic = module.params.get('notification_topic')
    groups = connection.get_all_groups(names=[group_name])
    if groups:
        group = groups[0]
        if notification_topic:
            # remove the notification configuration from the group being deleted
            group.delete_notification_configuration(notification_topic)
group.max_size = 0
group.min_size = 0
group.desired_capacity = 0
group.update()
instances = True
while instances:
tmp_groups = connection.get_all_groups(names=[group_name])
if tmp_groups:
tmp_group = tmp_groups[0]
if not tmp_group.instances:
instances = False
time.sleep(10)
group.delete()
while len(connection.get_all_groups(names=[group_name])):
time.sleep(5)
changed=True
return changed
else:
changed=False
return changed
def get_chunks(l, n):
for i in xrange(0, len(l), n):
yield l[i:i+n]
def update_size(group, max_size, min_size, dc):
log.debug("setting ASG sizes")
log.debug("minimum size: {0}, desired_capacity: {1}, max size: {2}".format(min_size, dc, max_size ))
group.max_size = max_size
group.min_size = min_size
group.desired_capacity = dc
group.update()
def replace(connection, module):
batch_size = module.params.get('replace_batch_size')
wait_timeout = module.params.get('wait_timeout')
group_name = module.params.get('name')
max_size = module.params.get('max_size')
min_size = module.params.get('min_size')
desired_capacity = module.params.get('desired_capacity')
lc_check = module.params.get('lc_check')
replace_instances = module.params.get('replace_instances')
as_group = connection.get_all_groups(names=[group_name])[0]
wait_for_new_inst(module, connection, group_name, wait_timeout, as_group.min_size, 'viable_instances')
props = get_properties(as_group)
instances = props['instances']
if replace_instances:
instances = replace_instances
#check if min_size/max_size/desired capacity have been specified and if not use ASG values
if min_size is None:
min_size = as_group.min_size
if max_size is None:
max_size = as_group.max_size
if desired_capacity is None:
desired_capacity = as_group.desired_capacity
# check to see if instances are replaceable if checking launch configs
new_instances, old_instances = get_instances_by_lc(props, lc_check, instances)
num_new_inst_needed = desired_capacity - len(new_instances)
if lc_check:
if num_new_inst_needed == 0 and old_instances:
log.debug("No new instances needed, but old instances are present. Removing old instances")
terminate_batch(connection, module, old_instances, instances, True)
as_group = connection.get_all_groups(names=[group_name])[0]
props = get_properties(as_group)
changed = True
return(changed, props)
# we don't want to spin up extra instances if not necessary
if num_new_inst_needed < batch_size:
log.debug("Overriding batch size to {0}".format(num_new_inst_needed))
batch_size = num_new_inst_needed
if not old_instances:
changed = False
return(changed, props)
# set temporary settings and wait for them to be reached
# This should get overwritten if the number of instances left is less than the batch size.
as_group = connection.get_all_groups(names=[group_name])[0]
update_size(as_group, max_size + batch_size, min_size + batch_size, desired_capacity + batch_size)
wait_for_new_inst(module, connection, group_name, wait_timeout, as_group.min_size, 'viable_instances')
wait_for_elb(connection, module, group_name)
as_group = connection.get_all_groups(names=[group_name])[0]
props = get_properties(as_group)
instances = props['instances']
if replace_instances:
instances = replace_instances
log.debug("beginning main loop")
for i in get_chunks(instances, batch_size):
# break out of this loop if we have enough new instances
break_early, desired_size, term_instances = terminate_batch(connection, module, i, instances, False)
wait_for_term_inst(connection, module, term_instances)
wait_for_new_inst(module, connection, group_name, wait_timeout, desired_size, 'viable_instances')
wait_for_elb(connection, module, group_name)
as_group = connection.get_all_groups(names=[group_name])[0]
if break_early:
log.debug("breaking loop")
break
update_size(as_group, max_size, min_size, desired_capacity)
as_group = connection.get_all_groups(names=[group_name])[0]
asg_properties = get_properties(as_group)
log.debug("Rolling update complete.")
changed=True
return(changed, asg_properties)
def get_instances_by_lc(props, lc_check, initial_instances):
new_instances = []
old_instances = []
# old instances are those that have the old launch config
if lc_check:
for i in props['instances']:
if props['instance_facts'][i]['launch_config_name'] == props['launch_config_name']:
new_instances.append(i)
else:
old_instances.append(i)
else:
log.debug("Comparing initial instances with current: {0}".format(initial_instances))
for i in props['instances']:
if i not in initial_instances:
new_instances.append(i)
else:
old_instances.append(i)
log.debug("New instances: {0}, {1}".format(len(new_instances), new_instances))
log.debug("Old instances: {0}, {1}".format(len(old_instances), old_instances))
return new_instances, old_instances
def list_purgeable_instances(props, lc_check, replace_instances, initial_instances):
instances_to_terminate = []
instances = ( inst_id for inst_id in replace_instances if inst_id in props['instances'])
# check to make sure instances given are actually in the given ASG
# and they have a non-current launch config
if lc_check:
for i in instances:
if props['instance_facts'][i]['launch_config_name'] != props['launch_config_name']:
instances_to_terminate.append(i)
else:
for i in instances:
if i in initial_instances:
instances_to_terminate.append(i)
return instances_to_terminate
def terminate_batch(connection, module, replace_instances, initial_instances, leftovers=False):
batch_size = module.params.get('replace_batch_size')
min_size = module.params.get('min_size')
desired_capacity = module.params.get('desired_capacity')
group_name = module.params.get('name')
wait_timeout = int(module.params.get('wait_timeout'))
lc_check = module.params.get('lc_check')
decrement_capacity = False
break_loop = False
as_group = connection.get_all_groups(names=[group_name])[0]
props = get_properties(as_group)
desired_size = as_group.min_size
new_instances, old_instances = get_instances_by_lc(props, lc_check, initial_instances)
num_new_inst_needed = desired_capacity - len(new_instances)
# check to make sure instances given are actually in the given ASG
# and they have a non-current launch config
instances_to_terminate = list_purgeable_instances(props, lc_check, replace_instances, initial_instances)
log.debug("new instances needed: {0}".format(num_new_inst_needed))
log.debug("new instances: {0}".format(new_instances))
log.debug("old instances: {0}".format(old_instances))
log.debug("batch instances: {0}".format(",".join(instances_to_terminate)))
if num_new_inst_needed == 0:
decrement_capacity = True
if as_group.min_size != min_size:
as_group.min_size = min_size
as_group.update()
log.debug("Updating minimum size back to original of {0}".format(min_size))
        # if there are some leftover old instances, but we are already at capacity with new ones
# we don't want to decrement capacity
if leftovers:
decrement_capacity = False
break_loop = True
instances_to_terminate = old_instances
desired_size = min_size
log.debug("No new instances needed")
    if num_new_inst_needed < batch_size and num_new_inst_needed != 0:
instances_to_terminate = instances_to_terminate[:num_new_inst_needed]
decrement_capacity = False
break_loop = False
log.debug("{0} new instances needed".format(num_new_inst_needed))
log.debug("decrementing capacity: {0}".format(decrement_capacity))
for instance_id in instances_to_terminate:
elb_dreg(connection, module, group_name, instance_id)
log.debug("terminating instance: {0}".format(instance_id))
connection.terminate_instance(instance_id, decrement_capacity=decrement_capacity)
# we wait to make sure the machines we marked as Unhealthy are
# no longer in the list
return break_loop, desired_size, instances_to_terminate
def wait_for_term_inst(connection, module, term_instances):
batch_size = module.params.get('replace_batch_size')
wait_timeout = module.params.get('wait_timeout')
group_name = module.params.get('name')
lc_check = module.params.get('lc_check')
as_group = connection.get_all_groups(names=[group_name])[0]
props = get_properties(as_group)
count = 1
wait_timeout = time.time() + wait_timeout
while wait_timeout > time.time() and count > 0:
log.debug("waiting for instances to terminate")
count = 0
as_group = connection.get_all_groups(names=[group_name])[0]
props = get_properties(as_group)
instance_facts = props['instance_facts']
instances = ( i for i in instance_facts if i in term_instances)
for i in instances:
lifecycle = instance_facts[i]['lifecycle_state']
health = instance_facts[i]['health_status']
log.debug("Instance {0} has state of {1},{2}".format(i,lifecycle,health ))
if lifecycle == 'Terminating' or health == 'Unhealthy':
count += 1
time.sleep(10)
if wait_timeout <= time.time():
# waiting took too long
module.fail_json(msg = "Waited too long for old instances to terminate. %s" % time.asctime())
def wait_for_new_inst(module, connection, group_name, wait_timeout, desired_size, prop):
# make sure we have the latest stats after that last loop.
as_group = connection.get_all_groups(names=[group_name])[0]
props = get_properties(as_group)
log.debug("Waiting for {0} = {1}, currently {2}".format(prop, desired_size, props[prop]))
# now we make sure that we have enough instances in a viable state
wait_timeout = time.time() + wait_timeout
while wait_timeout > time.time() and desired_size > props[prop]:
log.debug("Waiting for {0} = {1}, currently {2}".format(prop, desired_size, props[prop]))
time.sleep(10)
as_group = connection.get_all_groups(names=[group_name])[0]
props = get_properties(as_group)
if wait_timeout <= time.time():
# waiting took too long
module.fail_json(msg = "Waited too long for new instances to become viable. %s" % time.asctime())
log.debug("Reached {0}: {1}".format(prop, desired_size))
return props
def main():
argument_spec = ec2_argument_spec()
argument_spec.update(
dict(
name=dict(required=True, type='str'),
load_balancers=dict(type='list'),
availability_zones=dict(type='list'),
launch_config_name=dict(type='str'),
min_size=dict(type='int'),
max_size=dict(type='int'),
placement_group=dict(type='str'),
desired_capacity=dict(type='int'),
vpc_zone_identifier=dict(type='list'),
replace_batch_size=dict(type='int', default=1),
replace_all_instances=dict(type='bool', default=False),
replace_instances=dict(type='list', default=[]),
lc_check=dict(type='bool', default=True),
wait_timeout=dict(type='int', default=300),
state=dict(default='present', choices=['present', 'absent']),
tags=dict(type='list', default=[]),
health_check_period=dict(type='int', default=300),
health_check_type=dict(default='EC2', choices=['EC2', 'ELB']),
default_cooldown=dict(type='int', default=300),
wait_for_instances=dict(type='bool', default=True),
termination_policies=dict(type='list', default='Default'),
notification_topic=dict(type='str', default=None),
notification_types=dict(type='list', default=[
'autoscaling:EC2_INSTANCE_LAUNCH',
'autoscaling:EC2_INSTANCE_LAUNCH_ERROR',
'autoscaling:EC2_INSTANCE_TERMINATE',
'autoscaling:EC2_INSTANCE_TERMINATE_ERROR'
]),
suspend_processes=dict(type='list', default=[])
),
)
module = AnsibleModule(
argument_spec=argument_spec,
mutually_exclusive = [['replace_all_instances', 'replace_instances']]
)
if not HAS_BOTO:
module.fail_json(msg='boto required for this module')
state = module.params.get('state')
replace_instances = module.params.get('replace_instances')
replace_all_instances = module.params.get('replace_all_instances')
region, ec2_url, aws_connect_params = get_aws_connection_info(module)
try:
connection = connect_to_aws(boto.ec2.autoscale, region, **aws_connect_params)
if not connection:
module.fail_json(msg="failed to connect to AWS for the given region: %s" % str(region))
except boto.exception.NoAuthHandlerFound as e:
module.fail_json(msg=str(e))
changed = create_changed = replace_changed = False
if state == 'present':
create_changed, asg_properties=create_autoscaling_group(connection, module)
elif state == 'absent':
changed = delete_autoscaling_group(connection, module)
module.exit_json( changed = changed )
if replace_all_instances or replace_instances:
replace_changed, asg_properties=replace(connection, module)
if create_changed or replace_changed:
changed = True
module.exit_json( changed = changed, **asg_properties )
if __name__ == '__main__':
main()<|fim▁end|>
| |
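One small helper from the module above worth illustrating is get_chunks(), which drives the rolling-replace loop by yielding successive batches of at most n instance IDs. The IDs below are invented.

# Illustration of the get_chunks() generator defined in the module above.
instances = ['i-b345231', 'i-24c2931', 'i-0aa11bb2', 'i-3cc44dd5', 'i-5ee66ff7']
for batch in get_chunks(instances, 2):
    print(batch)
# ['i-b345231', 'i-24c2931']
# ['i-0aa11bb2', 'i-3cc44dd5']
# ['i-5ee66ff7']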
<|file_name|>slide4.py<|end_file_name|><|fim▁begin|>__author__ = 'ramuta'
a = 1
b = 2
if a < b:
a = b
print a
print b
"""
Java equivalent
if (a < b) {
a = b;
}
If you delete the parentheses, braces and semicolons you get Python.
<|fim▁hole|><|fim▁end|>
|
"""
|
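The snippet on the slide is Python 2, where print is a statement. The same example under Python 3, where print is a function:

# Python 3 version of the slide's example.
a = 1
b = 2
if a < b:
    a = b
print(a)  # 2
print(b)  # 2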
<|file_name|>mpstest7.py<|end_file_name|><|fim▁begin|>"""
mpstest7.py
A test of manipulating matrix product states with numpy.
2014-08-25
"""<|fim▁hole|>import matplotlib.pyplot as plt
from cmath import *
from mpl_toolkits.mplot3d import Axes3D
from matplotlib import cm
def main():
test3()
def test3():
""" Test MPS conversion functions by computing fidelity between
generated MPS and orginal, with new and old bond dimensions
chi0 and chi1 varied.
"""
print("*** Started testing MPS ***")
N = 5
d = 2
# Points to plot on 3d graph
(X,Y,Z) = ([],[],[])
for chi0 in xrange(1,8):
for chi1 in xrange(1,8):
F = 0
# Run random test for 20 points and take average fidelity
for i in xrange(20):
mps0 = randomMPS(N,chi0,d) # Make random MPS
state0 = getState(mps0) # Convert to state
mps1 = getMPS(state0,chi1) # Convert back to MPS with new bond dimension
state1 = getState(mps1) # Convert back to state
F += fidelityMPS(mps0,mps1) # Compute fidelity and add to sum
# F += fidelity(state0,state1) # Uncomment this to try with vectors
X.append(chi0)
Y.append(chi1)
Z.append(F/20)
X = np.array(X)
Y = np.array(Y)
Z = np.array(Z)
# Plot the surface
fig = plt.figure()
ax = fig.gca(projection='3d')
ax.plot_trisurf(X, Y, Z, cmap=cm.jet, linewidth=0.2)
ax.set_xlabel('chi0')
ax.set_ylabel('chi1')
ax.set_zlabel('fidelity')
plt.show()
print("*** Finished testing MPS ***")
def fidelityMPS(A,B):
""" Fidelity of two MPS representations
f = <A|B><B|A>/(<A|A><B|B>).
"""
return innerProduct(A,B)*innerProduct(B,A)\
/innerProduct(A,A)/innerProduct(B,B)
def fidelity(a,b):
""" Fidelity of two state vectors
f = <a|b><b|a>/(<a|a><b|b>).
"""
return np.inner(np.conj(a),b)*np.inner(np.conj(b),a)\
/np.inner(np.conj(a),a)/np.inner(np.conj(b),b)
def randomMPS(N,chi,d):
""" Returns a random MPS given parameters N, chi, d."""
A = []
for i in xrange(N):
# Each real part of each value varies between -0.5 and 0.5.
A.append((np.random.rand(chi,d,chi)-.5)+1j*(np.random.rand(chi,d,chi)-.5))
return np.array(A)
def getState(A):
""" State vector of a MPS by contracting MPS."""
N = len(A) # Number of spins
chi = A[0].shape[0] # Bond dimension
d = A[0].shape[1] # d = 2 for qubits
c = A[0]
for i in xrange(1,N):
c = np.tensordot(c,A[i],axes=(-1,0))
c = np.trace(c,axis1=0,axis2=-1)
return np.reshape(c,d**N)
def getMPS(state,chi):
""" MPS of a state."""
d = 2 # Qubits have 2 states each
N = int(np.log2(len(state))) # Number of qubits
c = np.reshape(state,cShape(d,N)) # State amplitudes tensor c.
A = [] # List of N matrices of MPS, each of shape (chi,d,chi)
# Start left end with a vector of size (d,chi)
c = np.reshape(c,(d,d**(N-1))) # Reshape c
(ap,sv,c) = np.linalg.svd(c) # Apply SVD
s = np.zeros((d,chi),dtype=complex) # Construct singular value matrix shape
s[:d,:d] = np.diag(sv[:chi]) # Fill s with singular values
# Trim c or fill rest of c with zeros
newc = np.zeros((chi,d**(N-1)),dtype=complex)
newc[:min(chi,d**(N-1)),:] = c[:chi,:]
c = newc
A.append(np.tensordot(ap,s,axes=(-1,0))) # Contract and append to A
# Sweep through the middle, creating matrix products each with
# shape (chi,d,chi)
for i in xrange(1,N-2):
c = np.reshape(c,(d*chi,d**(N-i-1)))
(ap,sv,c) = np.linalg.svd(c)
s = np.zeros((d*chi,chi),dtype=complex)
s[:min(chi,len(sv)),:min(chi,len(sv))] = np.diag(sv[:chi])
A.append(np.reshape(np.dot(ap,s),(chi,d,chi)))
newc = np.zeros((chi,d**(N-i-1)),dtype=complex)
newc[:min(chi,len(sv)),:] = c[:chi,:]
c = newc
# Finish right end with the remaining vector
c = np.reshape(c,(d*chi,d))
(ap,sv,c) = np.linalg.svd(c)
s = np.zeros((chi,d),dtype=complex)
s[:d,:d] = np.diag(sv[:chi])
A.append(np.reshape(ap[:chi,:],(chi,d,chi)))
c = np.dot(s,c)
A.append(c)
# Fix up ends by filling first row of correctly shaped zeros with
# end vectors such that the trace is preserved.
start = np.zeros((chi,d,chi),dtype=complex)
start[0,:,:] = A[0]
A[0] = start
finish = np.zeros((chi,d,chi),dtype=complex)
finish[:,:,0] = A[-1]
A[-1] = finish
# Return MPS as numpy array with shape (N,chi,d,chi)
return np.array(A)
def innerProduct(A,B):
""" Inner product <A|B> using transfer matrices
    where A and B are MPS representations of |A> and |B>.
"""
N = len(A) # Number of qubits
chiA = A.shape[1] # bond dimension of MPS in A
chiB = B.shape[1] # bond dimension of MPS in B
d = A.shape[2] # d = 2 for qubits
# Take adjoint of |A> to get <A|
A = np.conj(A)
# Construct list of transfer matrices by contracting pairs of
# tensors from A and B.
transfer = []
for i in xrange(N):
t = np.tensordot(A[i],B[i],axes=(1,1))
t = np.transpose(t,axes=(0,2,1,3))
t = np.reshape(t,(chiA*chiB,chiA*chiB))
transfer.append(t)
# Contract the transfer matrices.
prod = transfer[0]
for i in xrange(1,len(transfer)):
prod = np.tensordot(prod,transfer[i],axes=(-1,0))
return np.trace(prod)
def randomState(d,N):
state = (np.random.rand(d**N)-.5) + (np.random.rand(d**N)-.5)*1j
state = state/np.linalg.norm(state)
return state
def cShape(d,N):
""" Returns the shape of c tensor representation.
I.e. simply just (d,d,...,d) N times.
"""
return tuple([d for i in xrange(N)])
if __name__ == "__main__":
main()<|fim▁end|>
|
import numpy as np
|
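A self-contained sketch of the fidelity check the test above relies on, written for Python 3 with plain numpy vectors (the original file is Python 2, hence xrange):

import numpy as np

def fidelity(a, b):
    # f = <a|b><b|a> / (<a|a><b|b>); 1.0 when a and b are parallel.
    return (np.vdot(a, b) * np.vdot(b, a) /
            (np.vdot(a, a) * np.vdot(b, b))).real

rng = np.random.default_rng(0)
a = rng.normal(size=8) + 1j * rng.normal(size=8)
print(fidelity(a, a))      # ~1.0
print(fidelity(a, 2 * a))  # ~1.0 (independent of normalization)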
<|file_name|>ExtendedListFragment.java<|end_file_name|><|fim▁begin|>/**
* ownCloud Android client application
*
* @author Mario Danic
* Copyright (C) 2017 Mario Danic
* Copyright (C) 2012 Bartek Przybylski
* Copyright (C) 2012-2016 ownCloud Inc.
*
* This program is free software: you can redistribute it and/or modify
* it under the terms of the GNU General Public License version 2,
* as published by the Free Software Foundation.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
* You should have received a copy of the GNU General Public License
* along with this program. If not, see <http://www.gnu.org/licenses/>.
*/
package com.owncloud.android.ui.fragment;
import android.animation.LayoutTransition;
import android.app.Activity;
import android.os.Bundle;
import android.os.Handler;
import android.os.Looper;
import android.support.annotation.DrawableRes;
import android.support.annotation.StringRes;
import android.support.design.widget.BottomNavigationView;
import android.support.v4.app.Fragment;
import android.support.v4.view.MenuItemCompat;
import android.support.v4.widget.SwipeRefreshLayout;
import android.support.v7.widget.SearchView;
import android.text.TextUtils;
import android.util.DisplayMetrics;
import android.view.LayoutInflater;
import android.view.Menu;
import android.view.MenuInflater;
import android.view.MenuItem;
import android.view.View;
import android.view.ViewGroup;
import android.view.ViewTreeObserver;
import android.widget.AbsListView;
import android.widget.AdapterView;
import android.widget.AdapterView.OnItemClickListener;
import android.widget.BaseAdapter;
import android.widget.GridView;
import android.widget.ImageView;
import android.widget.LinearLayout;
import android.widget.ProgressBar;
import android.widget.RelativeLayout;
import android.widget.TextView;
import com.getbase.floatingactionbutton.FloatingActionButton;
import com.getbase.floatingactionbutton.FloatingActionsMenu;
import com.owncloud.android.MainApp;
import com.owncloud.android.R;
import com.owncloud.android.authentication.AccountUtils;
import com.owncloud.android.lib.common.utils.Log_OC;
import com.owncloud.android.lib.resources.files.SearchOperation;
import com.owncloud.android.ui.ExtendedListView;
import com.owncloud.android.ui.activity.FileDisplayActivity;
import com.owncloud.android.ui.activity.FolderPickerActivity;
import com.owncloud.android.ui.activity.OnEnforceableRefreshListener;
import com.owncloud.android.ui.activity.UploadFilesActivity;
import com.owncloud.android.ui.adapter.FileListListAdapter;
import com.owncloud.android.ui.adapter.LocalFileListAdapter;
import com.owncloud.android.ui.events.SearchEvent;
import org.greenrobot.eventbus.EventBus;
import org.parceler.Parcel;
import java.util.ArrayList;
import third_parties.in.srain.cube.GridViewWithHeaderAndFooter;
import static android.content.res.Configuration.ORIENTATION_LANDSCAPE;
public class ExtendedListFragment extends Fragment
implements OnItemClickListener, OnEnforceableRefreshListener, SearchView.OnQueryTextListener {
protected static final String TAG = ExtendedListFragment.class.getSimpleName();
protected static final String KEY_SAVED_LIST_POSITION = "SAVED_LIST_POSITION";
private static final String KEY_INDEXES = "INDEXES";
private static final String KEY_FIRST_POSITIONS = "FIRST_POSITIONS";
private static final String KEY_TOPS = "TOPS";
private static final String KEY_HEIGHT_CELL = "HEIGHT_CELL";
private static final String KEY_EMPTY_LIST_MESSAGE = "EMPTY_LIST_MESSAGE";
private static final String KEY_IS_GRID_VISIBLE = "IS_GRID_VISIBLE";
protected SwipeRefreshLayout mRefreshListLayout;
private SwipeRefreshLayout mRefreshGridLayout;
protected SwipeRefreshLayout mRefreshEmptyLayout;
protected LinearLayout mEmptyListContainer;
protected TextView mEmptyListMessage;
protected TextView mEmptyListHeadline;
protected ImageView mEmptyListIcon;
protected ProgressBar mEmptyListProgress;
private FloatingActionsMenu mFabMain;
private FloatingActionButton mFabUpload;
private FloatingActionButton mFabMkdir;
private FloatingActionButton mFabUploadFromApp;
// Save the state of the scroll in browsing
private ArrayList<Integer> mIndexes;
private ArrayList<Integer> mFirstPositions;
private ArrayList<Integer> mTops;
private int mHeightCell = 0;
private SwipeRefreshLayout.OnRefreshListener mOnRefreshListener = null;
protected AbsListView mCurrentListView;
private ExtendedListView mListView;
private View mListFooterView;
private GridViewWithHeaderAndFooter mGridView;
private View mGridFooterView;
private BaseAdapter mAdapter;
protected SearchView searchView;
private Handler handler = new Handler();
@Parcel
public enum SearchType {
NO_SEARCH,
REGULAR_FILTER,
FILE_SEARCH,
FAVORITE_SEARCH,
FAVORITE_SEARCH_FILTER,
VIDEO_SEARCH,
VIDEO_SEARCH_FILTER,
PHOTO_SEARCH,
PHOTOS_SEARCH_FILTER,
RECENTLY_MODIFIED_SEARCH,
RECENTLY_MODIFIED_SEARCH_FILTER,
RECENTLY_ADDED_SEARCH,
RECENTLY_ADDED_SEARCH_FILTER,
// not a real filter, but treated like one here
SHARED_FILTER
}
protected void setListAdapter(BaseAdapter listAdapter) {
mAdapter = listAdapter;
mCurrentListView.setAdapter(listAdapter);
mCurrentListView.invalidateViews();
}
protected AbsListView getListView() {
return mCurrentListView;
}
public FloatingActionButton getFabUpload() {
return mFabUpload;
}
public FloatingActionButton getFabUploadFromApp() {
return mFabUploadFromApp;
}
public FloatingActionButton getFabMkdir() {
return mFabMkdir;
}
public FloatingActionsMenu getFabMain() {
return mFabMain;
}
public void switchToGridView() {
if (!isGridEnabled()) {
mListView.setAdapter(null);
mRefreshListLayout.setVisibility(View.GONE);
mRefreshGridLayout.setVisibility(View.VISIBLE);
mCurrentListView = mGridView;
setListAdapter(mAdapter);
}
}
public void switchToListView() {
if (isGridEnabled()) {
mGridView.setAdapter(null);
mRefreshGridLayout.setVisibility(View.GONE);
mRefreshListLayout.setVisibility(View.VISIBLE);
mCurrentListView = mListView;
setListAdapter(mAdapter);
}
}
public boolean isGridEnabled() {
return (mCurrentListView != null && mCurrentListView.equals(mGridView));
}
@Override
public void onCreateOptionsMenu(Menu menu, MenuInflater inflater) {
final MenuItem item = menu.findItem(R.id.action_search);
searchView = (SearchView) MenuItemCompat.getActionView(item);
searchView.setOnQueryTextListener(this);
final Handler handler = new Handler();
DisplayMetrics displaymetrics = new DisplayMetrics();
Activity activity;
if ((activity = getActivity()) != null) {
activity.getWindowManager().getDefaultDisplay().getMetrics(displaymetrics);
int width = displaymetrics.widthPixels;
if (getResources().getConfiguration().orientation == ORIENTATION_LANDSCAPE) {
searchView.setMaxWidth((int) (width * 0.4));
} else {
if (activity instanceof FolderPickerActivity) {
searchView.setMaxWidth((int) (width * 0.8));
} else {
searchView.setMaxWidth((int) (width * 0.7));
}
}
}
searchView.setOnQueryTextFocusChangeListener(new View.OnFocusChangeListener() {
@Override
public void onFocusChange(View v, final boolean hasFocus) {
if (hasFocus) {
mFabMain.collapse();
}
handler.postDelayed(new Runnable() {
@Override
public void run() {
if (getActivity() != null && !(getActivity() instanceof FolderPickerActivity)) {
setFabEnabled(!hasFocus);
boolean searchSupported = AccountUtils.hasSearchSupport(AccountUtils.
getCurrentOwnCloudAccount(MainApp.getAppContext()));
if (getResources().getBoolean(R.bool.bottom_toolbar_enabled) && searchSupported) {
BottomNavigationView bottomNavigationView = (BottomNavigationView) getActivity().
findViewById(R.id.bottom_navigation_view);
if (hasFocus) {
bottomNavigationView.setVisibility(View.GONE);
} else {
bottomNavigationView.setVisibility(View.VISIBLE);
}
}
}
}
}, 100);
}
});
final View mSearchEditFrame = searchView
.findViewById(android.support.v7.appcompat.R.id.search_edit_frame);
ViewTreeObserver vto = mSearchEditFrame.getViewTreeObserver();
vto.addOnGlobalLayoutListener(new ViewTreeObserver.OnGlobalLayoutListener() {
int oldVisibility = -1;
@Override
public void onGlobalLayout() {
int currentVisibility = mSearchEditFrame.getVisibility();
if (currentVisibility != oldVisibility) {
if (currentVisibility == View.VISIBLE) {
setEmptyListMessage(SearchType.REGULAR_FILTER);
} else {
setEmptyListMessage(SearchType.NO_SEARCH);
}
oldVisibility = currentVisibility;
}
}
});
LinearLayout searchBar = (LinearLayout) searchView.findViewById(R.id.search_bar);
searchBar.setLayoutTransition(new LayoutTransition());
}
public boolean onQueryTextChange(final String query) {
if (getFragmentManager().findFragmentByTag(FileDisplayActivity.TAG_SECOND_FRAGMENT)
instanceof ExtendedListFragment){
performSearch(query, false);
return true;
} else {
return false;
}
}
@Override
public boolean onQueryTextSubmit(String query) {
if (getFragmentManager().findFragmentByTag(FileDisplayActivity.TAG_SECOND_FRAGMENT)
instanceof ExtendedListFragment){
performSearch(query, true);
return true;
} else {
return false;
}
}
private void performSearch(final String query, boolean isSubmit) {
handler.removeCallbacksAndMessages(null);
if (!TextUtils.isEmpty(query)) {
int delay = 500;
if (isSubmit) {
delay = 0;
}
if (mAdapter != null && mAdapter instanceof FileListListAdapter) {
handler.postDelayed(new Runnable() {
@Override
public void run() {
if (AccountUtils.hasSearchSupport(AccountUtils.
getCurrentOwnCloudAccount(MainApp.getAppContext()))) {
EventBus.getDefault().post(new SearchEvent(query, SearchOperation.SearchType.FILE_SEARCH,
SearchEvent.UnsetType.NO_UNSET));
} else {
FileListListAdapter fileListListAdapter = (FileListListAdapter) mAdapter;
fileListListAdapter.getFilter().filter(query);
}
}
}, delay);
} else if (mAdapter != null && mAdapter instanceof LocalFileListAdapter) {
handler.postDelayed(new Runnable() {
@Override
public void run() {
LocalFileListAdapter localFileListAdapter = (LocalFileListAdapter) mAdapter;
localFileListAdapter.filter(query);
}
}, delay);
}
if (searchView != null && delay == 0) {
searchView.clearFocus();
}
} else {
Activity activity;
if ((activity = getActivity()) != null) {
if (activity instanceof FileDisplayActivity) {
((FileDisplayActivity) activity).refreshListOfFilesFragment(true);
} else if (activity instanceof UploadFilesActivity) {<|fim▁hole|> LocalFileListAdapter localFileListAdapter = (LocalFileListAdapter) mAdapter;
localFileListAdapter.filter(query);
} else if (activity instanceof FolderPickerActivity) {
((FolderPickerActivity) activity).refreshListOfFilesFragment(true);
}
}
}
}
@Override
public View onCreateView(LayoutInflater inflater, ViewGroup container,
Bundle savedInstanceState) {
Log_OC.d(TAG, "onCreateView");
View v = inflater.inflate(R.layout.list_fragment, null);
setupEmptyList(v);
mListView = (ExtendedListView) (v.findViewById(R.id.list_root));
mListView.setOnItemClickListener(this);
mListFooterView = inflater.inflate(R.layout.list_footer, null, false);
mGridView = (GridViewWithHeaderAndFooter) (v.findViewById(R.id.grid_root));
mGridView.setNumColumns(GridView.AUTO_FIT);
mGridView.setOnItemClickListener(this);
mGridFooterView = inflater.inflate(R.layout.list_footer, null, false);
// Pull-down to refresh layout
mRefreshListLayout = (SwipeRefreshLayout) v.findViewById(R.id.swipe_containing_list);
mRefreshGridLayout = (SwipeRefreshLayout) v.findViewById(R.id.swipe_containing_grid);
mRefreshEmptyLayout = (SwipeRefreshLayout) v.findViewById(R.id.swipe_containing_empty);
onCreateSwipeToRefresh(mRefreshListLayout);
onCreateSwipeToRefresh(mRefreshGridLayout);
onCreateSwipeToRefresh(mRefreshEmptyLayout);
mListView.setEmptyView(mRefreshEmptyLayout);
mGridView.setEmptyView(mRefreshEmptyLayout);
mFabMain = (FloatingActionsMenu) v.findViewById(R.id.fab_main);
mFabUpload = (FloatingActionButton) v.findViewById(R.id.fab_upload);
mFabMkdir = (FloatingActionButton) v.findViewById(R.id.fab_mkdir);
mFabUploadFromApp = (FloatingActionButton) v.findViewById(R.id.fab_upload_from_app);
boolean searchSupported = AccountUtils.hasSearchSupport(AccountUtils.
getCurrentOwnCloudAccount(MainApp.getAppContext()));
if (getResources().getBoolean(R.bool.bottom_toolbar_enabled) && searchSupported) {
RelativeLayout.LayoutParams layoutParams = (RelativeLayout.LayoutParams) mFabMain.getLayoutParams();
final float scale = v.getResources().getDisplayMetrics().density;
BottomNavigationView bottomNavigationView = (BottomNavigationView)
v.findViewById(R.id.bottom_navigation_view);
// convert the DP into pixel
int pixel = (int) (32 * scale + 0.5f);
layoutParams.setMargins(0, 0, pixel / 2, bottomNavigationView.getMeasuredHeight() + pixel * 2);
}
mCurrentListView = mListView; // list by default
if (savedInstanceState != null) {
if (savedInstanceState.getBoolean(KEY_IS_GRID_VISIBLE, false)) {
switchToGridView();
}
int referencePosition = savedInstanceState.getInt(KEY_SAVED_LIST_POSITION);
if (isGridEnabled()) {
Log_OC.v(TAG, "Setting grid position " + referencePosition);
mGridView.setSelection(referencePosition);
} else {
Log_OC.v(TAG, "Setting and centering around list position " + referencePosition);
mListView.setAndCenterSelection(referencePosition);
}
}
return v;
}
protected void setupEmptyList(View view) {
mEmptyListContainer = (LinearLayout) view.findViewById(R.id.empty_list_view);
mEmptyListMessage = (TextView) view.findViewById(R.id.empty_list_view_text);
mEmptyListHeadline = (TextView) view.findViewById(R.id.empty_list_view_headline);
mEmptyListIcon = (ImageView) view.findViewById(R.id.empty_list_icon);
mEmptyListProgress = (ProgressBar) view.findViewById(R.id.empty_list_progress);
}
/**
* {@inheritDoc}
*/
@Override
public void onActivityCreated(Bundle savedInstanceState) {
super.onActivityCreated(savedInstanceState);
if (savedInstanceState != null) {
mIndexes = savedInstanceState.getIntegerArrayList(KEY_INDEXES);
mFirstPositions = savedInstanceState.getIntegerArrayList(KEY_FIRST_POSITIONS);
mTops = savedInstanceState.getIntegerArrayList(KEY_TOPS);
mHeightCell = savedInstanceState.getInt(KEY_HEIGHT_CELL);
setMessageForEmptyList(savedInstanceState.getString(KEY_EMPTY_LIST_MESSAGE));
} else {
mIndexes = new ArrayList<>();
mFirstPositions = new ArrayList<>();
mTops = new ArrayList<>();
mHeightCell = 0;
}
}
@Override
public void onSaveInstanceState(Bundle savedInstanceState) {
super.onSaveInstanceState(savedInstanceState);
Log_OC.d(TAG, "onSaveInstanceState()");
savedInstanceState.putBoolean(KEY_IS_GRID_VISIBLE, isGridEnabled());
savedInstanceState.putInt(KEY_SAVED_LIST_POSITION, getReferencePosition());
savedInstanceState.putIntegerArrayList(KEY_INDEXES, mIndexes);
savedInstanceState.putIntegerArrayList(KEY_FIRST_POSITIONS, mFirstPositions);
savedInstanceState.putIntegerArrayList(KEY_TOPS, mTops);
savedInstanceState.putInt(KEY_HEIGHT_CELL, mHeightCell);
savedInstanceState.putString(KEY_EMPTY_LIST_MESSAGE, getEmptyViewText());
}
/**
* Calculates the position of the item that will be used as a reference to
* reposition the visible items in the list when the device is turned to
* other position.
*
 * The current policy is to take as a reference the visible item in the center
* of the screen.
*
* @return The position in the list of the visible item in the center of the
* screen.
*/
protected int getReferencePosition() {
if (mCurrentListView != null) {
return (mCurrentListView.getFirstVisiblePosition() +
mCurrentListView.getLastVisiblePosition()) / 2;
} else {
return 0;
}
}
/*
* Restore index and position
*/
protected void restoreIndexAndTopPosition() {
if (mIndexes.size() > 0) {
// needs to be checked; not every browse-up had a browse-down before
int index = mIndexes.remove(mIndexes.size() - 1);
final int firstPosition = mFirstPositions.remove(mFirstPositions.size() - 1);
int top = mTops.remove(mTops.size() - 1);
Log_OC.v(TAG, "Setting selection to position: " + firstPosition + "; top: "
+ top + "; index: " + index);
if (mCurrentListView != null && mCurrentListView.equals(mListView)) {
if (mHeightCell * index <= mListView.getHeight()) {
mListView.setSelectionFromTop(firstPosition, top);
} else {
mListView.setSelectionFromTop(index, 0);
}
} else {
if (mHeightCell * index <= mGridView.getHeight()) {
mGridView.setSelection(firstPosition);
//mGridView.smoothScrollToPosition(firstPosition);
} else {
mGridView.setSelection(index);
//mGridView.smoothScrollToPosition(index);
}
}
}
}
/*
* Save index and top position
*/
protected void saveIndexAndTopPosition(int index) {
mIndexes.add(index);
int firstPosition = mCurrentListView.getFirstVisiblePosition();
mFirstPositions.add(firstPosition);
View view = mCurrentListView.getChildAt(0);
int top = (view == null) ? 0 : view.getTop();
mTops.add(top);
// Save the height of a cell
mHeightCell = (view == null || mHeightCell != 0) ? mHeightCell : view.getHeight();
}
@Override
public void onItemClick(AdapterView<?> parent, View view, int position, long id) {
        // to be overridden
}
@Override
public void onRefresh() {
if (searchView != null) {
searchView.onActionViewCollapsed();
Activity activity;
if ((activity = getActivity()) != null && activity instanceof FileDisplayActivity) {
FileDisplayActivity fileDisplayActivity = (FileDisplayActivity) activity;
fileDisplayActivity.setDrawerIndicatorEnabled(fileDisplayActivity.isDrawerIndicatorAvailable());
}
}
mRefreshListLayout.setRefreshing(false);
mRefreshGridLayout.setRefreshing(false);
mRefreshEmptyLayout.setRefreshing(false);
if (mOnRefreshListener != null) {
mOnRefreshListener.onRefresh();
}
}
public void setOnRefreshListener(OnEnforceableRefreshListener listener) {
mOnRefreshListener = listener;
}
/**
     * Enables or disables the swipe-to-refresh gesture.
*
* Sets the 'enabled' state of the refresh layouts contained in the fragment.
*
     * When 'false' is set, prevents user gestures but keeps the option to refresh programmatically.
*
* @param enabled Desired state for capturing swipe gesture.
*/
public void setSwipeEnabled(boolean enabled) {
mRefreshListLayout.setEnabled(enabled);
mRefreshGridLayout.setEnabled(enabled);
mRefreshEmptyLayout.setEnabled(enabled);
}
/**
* Sets the 'visibility' state of the FAB contained in the fragment.
*
     * When 'false' is set, FAB visibility is set to View.GONE programmatically.
*
* @param enabled Desired visibility for the FAB.
*/
public void setFabEnabled(boolean enabled) {
if (enabled) {
mFabMain.setVisibility(View.VISIBLE);
} else {
mFabMain.setVisibility(View.GONE);
}
}
/**
* Set message for empty list view.
*/
public void setMessageForEmptyList(String message) {
if (mEmptyListContainer != null && mEmptyListMessage != null) {
mEmptyListMessage.setText(message);
}
}
/**
     * Displays empty-list information with a headline, a message and an icon.
*
* @param headline the headline
* @param message the message
* @param icon the icon to be shown
*/
public void setMessageForEmptyList(@StringRes final int headline, @StringRes final int message, @DrawableRes final int icon) {
new Handler(Looper.getMainLooper()).post(new Runnable() {
@Override
public void run() {
if (mEmptyListContainer != null && mEmptyListMessage != null) {
mEmptyListHeadline.setText(headline);
mEmptyListMessage.setText(message);
mEmptyListIcon.setImageResource(icon);
mEmptyListIcon.setVisibility(View.VISIBLE);
mEmptyListProgress.setVisibility(View.GONE);
}
}
});
}
public void setEmptyListMessage(final SearchType searchType) {
new Handler(Looper.getMainLooper()).post(new Runnable() {
@Override
public void run() {
if (searchType == SearchType.NO_SEARCH) {
setMessageForEmptyList(
R.string.file_list_empty_headline,
R.string.file_list_empty,
R.drawable.ic_list_empty_folder
);
} else if (searchType == SearchType.FILE_SEARCH) {
setMessageForEmptyList(R.string.file_list_empty_headline_server_search,
R.string.file_list_empty, R.drawable.ic_search_light_grey);
} else if (searchType == SearchType.FAVORITE_SEARCH) {
setMessageForEmptyList(R.string.file_list_empty_favorite_headline,
R.string.file_list_empty_favorites_filter_list, R.drawable.ic_star_light_grey);
} else if (searchType == SearchType.VIDEO_SEARCH) {
setMessageForEmptyList(R.string.file_list_empty_headline_server_search_videos,
R.string.file_list_empty_text_videos, R.drawable.ic_list_empty_video);
} else if (searchType == SearchType.PHOTO_SEARCH) {
setMessageForEmptyList(R.string.file_list_empty_headline_server_search_photos,
R.string.file_list_empty_text_photos, R.drawable.ic_list_empty_image);
} else if (searchType == SearchType.RECENTLY_MODIFIED_SEARCH) {
setMessageForEmptyList(R.string.file_list_empty_headline_server_search,
R.string.file_list_empty_recently_modified, R.drawable.ic_list_empty_recent);
} else if (searchType == SearchType.RECENTLY_ADDED_SEARCH) {
setMessageForEmptyList(R.string.file_list_empty_headline_server_search,
R.string.file_list_empty_recently_added, R.drawable.ic_list_empty_recent);
} else if (searchType == SearchType.REGULAR_FILTER) {
setMessageForEmptyList(R.string.file_list_empty_headline_search,
R.string.file_list_empty_search, R.drawable.ic_search_light_grey);
} else if (searchType == SearchType.FAVORITE_SEARCH_FILTER) {
setMessageForEmptyList(R.string.file_list_empty_headline_server_search,
R.string.file_list_empty_favorites_filter, R.drawable.ic_star_light_grey);
} else if (searchType == SearchType.VIDEO_SEARCH_FILTER) {
setMessageForEmptyList(R.string.file_list_empty_headline_server_search_videos,
R.string.file_list_empty_text_videos_filter, R.drawable.ic_list_empty_video);
} else if (searchType == SearchType.PHOTOS_SEARCH_FILTER) {
setMessageForEmptyList(R.string.file_list_empty_headline_server_search_photos,
R.string.file_list_empty_text_photos_filter, R.drawable.ic_list_empty_image);
} else if (searchType == SearchType.RECENTLY_MODIFIED_SEARCH_FILTER) {
setMessageForEmptyList(R.string.file_list_empty_headline_server_search,
R.string.file_list_empty_recently_modified_filter, R.drawable.ic_list_empty_recent);
} else if (searchType == SearchType.RECENTLY_ADDED_SEARCH_FILTER) {
setMessageForEmptyList(R.string.file_list_empty_headline_server_search,
R.string.file_list_empty_recently_added_filter, R.drawable.ic_list_empty_recent);
} else if (searchType == SearchType.SHARED_FILTER) {
setMessageForEmptyList(R.string.file_list_empty_shared_headline,
R.string.file_list_empty_shared, R.drawable.ic_list_empty_shared);
}
}
});
}
/**
* Set message for empty list view.
*/
public void setEmptyListLoadingMessage() {
new Handler(Looper.getMainLooper()).post(new Runnable() {
@Override
public void run() {
if (mEmptyListContainer != null && mEmptyListMessage != null) {
mEmptyListHeadline.setText(R.string.file_list_loading);
mEmptyListMessage.setText("");
mEmptyListIcon.setVisibility(View.GONE);
mEmptyListProgress.setVisibility(View.VISIBLE);
}
}
});
}
/**
* Get the text of EmptyListMessage TextView.
*
* @return String empty text view text-value
*/
public String getEmptyViewText() {
return (mEmptyListContainer != null && mEmptyListMessage != null) ? mEmptyListMessage.getText().toString() : "";
}
protected void onCreateSwipeToRefresh(SwipeRefreshLayout refreshLayout) {
// Colors in animations
refreshLayout.setColorSchemeResources(R.color.color_accent, R.color.primary, R.color.primary_dark);
refreshLayout.setOnRefreshListener(this);
}
@Override
public void onRefresh(boolean ignoreETag) {
mRefreshListLayout.setRefreshing(false);
mRefreshGridLayout.setRefreshing(false);
mRefreshEmptyLayout.setRefreshing(false);
if (mOnRefreshListener != null) {
mOnRefreshListener.onRefresh();
}
}
protected void setChoiceMode(int choiceMode) {
mListView.setChoiceMode(choiceMode);
mGridView.setChoiceMode(choiceMode);
}
protected void setMultiChoiceModeListener(AbsListView.MultiChoiceModeListener listener) {
mListView.setMultiChoiceModeListener(listener);
mGridView.setMultiChoiceModeListener(listener);
}
/**
* TODO doc
* To be called before setAdapter, or GridViewWithHeaderAndFooter will throw an exception
*
* @param enabled flag if footer should be shown/calculated
*/
protected void setFooterEnabled(boolean enabled) {
if (enabled) {
if (mGridView.getFooterViewCount() == 0 && mGridView.isCorrectAdapter()) {
if (mGridFooterView.getParent() != null) {
((ViewGroup) mGridFooterView.getParent()).removeView(mGridFooterView);
}
mGridView.addFooterView(mGridFooterView, null, false);
}
mGridFooterView.invalidate();
if (mListView.getFooterViewsCount() == 0) {
if (mListFooterView.getParent() != null) {
((ViewGroup) mListFooterView.getParent()).removeView(mListFooterView);
}
mListView.addFooterView(mListFooterView, null, false);
}
mListFooterView.invalidate();
} else {
mGridView.removeFooterView(mGridFooterView);
mListView.removeFooterView(mListFooterView);
}
}
/**
* set the list/grid footer text.
*
* @param text the footer text
*/
protected void setFooterText(String text) {
if (text != null && text.length() > 0) {
((TextView) mListFooterView.findViewById(R.id.footerText)).setText(text);
((TextView) mGridFooterView.findViewById(R.id.footerText)).setText(text);
setFooterEnabled(true);
} else {
setFooterEnabled(false);
}
}
}<|fim▁end|>
| |
<|file_name|>build.rs<|end_file_name|><|fim▁begin|>#[macro_use]
extern crate clap;
#[allow(dead_code)]
#[path = "src/app.rs"]
mod app;
use clap::Shell;
<|fim▁hole|> let outdir = concat!(env!("CARGO_MANIFEST_DIR"), "/scripts/completion");
std::fs::create_dir_all(&outdir).unwrap();
let mut app = app::build();
app.gen_completions(crate_name!(), Shell::Bash, &outdir);
app.gen_completions(crate_name!(), Shell::Fish, &outdir);
app.gen_completions(crate_name!(), Shell::Zsh, &outdir);
app.gen_completions(crate_name!(), Shell::PowerShell, &outdir);
}<|fim▁end|>
|
fn main() {
|
<|file_name|>build.rs<|end_file_name|><|fim▁begin|>// Copyright © 2015, Peter Atashian
// Licensed under the MIT License <LICENSE.md><|fim▁hole|>}<|fim▁end|>
|
fn main() {
println!("cargo:rustc-flags=-l icm32");
|
<|file_name|>motiondetect.py<|end_file_name|><|fim▁begin|>import os #for OS program calls
import sys #For Clean sys.exit command
import time #for sleep/pause
import RPi.GPIO as io #read the GPIO pins
io.setmode(io.BCM)
pir_pin = 17<|fim▁hole|>screen_saver = False
io.setup(pir_pin, io.IN)
while True:
if screen_saver:
if io.input(pir_pin):
os.system("xscreensaver-command -deactivate")
screen_saver = False
else:
time.sleep(300)
os.system("xscreensaver-command -activate")
screen_saver = True<|fim▁end|>
| |
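The script above re-arms the saver with a fixed five-minute blocking sleep, so motion during that window is missed. A hedged sketch of the same idea with an explicit idle timer (the callables stand in for io.input(pir_pin) and the two xscreensaver-command calls from the script):

import time

IDLE_SECONDS = 300

def run(read_pir, activate, deactivate):
    # read_pir() -> bool: motion detected; activate/deactivate drive the saver.
    last_motion = time.time()
    saver_on = False
    while True:
        if read_pir():
            last_motion = time.time()
            if saver_on:
                deactivate()
                saver_on = False
        elif not saver_on and time.time() - last_motion > IDLE_SECONDS:
            activate()
            saver_on = True
        time.sleep(1)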
<|file_name|>main.rs<|end_file_name|><|fim▁begin|>extern crate FizzBuzz;
fn main() {<|fim▁hole|> println!("{}", r);
}
}<|fim▁end|>
|
for r in (1..101).map(FizzBuzz::fizz_buzz) {
|
<|file_name|>index.d.ts<|end_file_name|><|fim▁begin|>/**<|fim▁hole|> * It doesn't matter if you export `.ios` or `.android`, either one but only one.
*/
export * from "./local-notifications.ios";
// Export any shared classes, constants, etc.
export * from "./local-notifications-common";<|fim▁end|>
|
* iOS and Android apis should match.
|
<|file_name|>diff.py<|end_file_name|><|fim▁begin|>import logging
from .. import exceptions
from ..plan import COMPLETE, Plan
from ..status import NotSubmittedStatus, NotUpdatedStatus
from . import build
import difflib
import json
logger = logging.getLogger(__name__)
def diff_dictionaries(old_dict, new_dict):
"""Diffs two single dimension dictionaries
Returns the number of changes and an unordered list
expressing the common entries and changes.
Args:
old_dict(dict): old dictionary
new_dict(dict): new dictionary
    Returns:
        [changes, output]: changes (int) is the number of changed records;
        output is a list of [<change type>, <key>, <value>] entries,
        where <change type> is '+', '-' or ' '.
"""
old_set = set(old_dict)
new_set = set(new_dict)
added_set = new_set - old_set
removed_set = old_set - new_set
common_set = old_set & new_set
changes = 0
output = []
for key in added_set:
changes += 1
output.append(["+", key, new_dict[key]])
for key in removed_set:
changes += 1
output.append(["-", key, old_dict[key]])
for key in common_set:
if str(old_dict[key]) != str(new_dict[key]):
changes += 1
output.append(["-", key, old_dict[key]])
output.append(["+", key, new_dict[key]])
else:
output.append([" ", key, new_dict[key]])
return [changes, output]
def print_diff_parameters(parameter_diff):
"""Handles the printing of differences in parameters.
Args:
parameter_diff (list): A list dictionaries detailing the differences
between two parameters returned by
:func:`stacker.actions.diff.diff_dictionaries`
"""
print """--- Old Parameters
+++ New Parameters
******************"""
for line in parameter_diff:
print "%s%s = %s" % (line[0], line[1], line[2])
def diff_parameters(old_params, new_params):
"""Compares the old vs. new parameters and prints a "diff"
If there are no changes, we print nothing.
Args:
old_params(dict): old paramters
new_params(dict): new parameters
Returns:
list: A list of differences
"""
[changes, diff] = diff_dictionaries(old_params, new_params)
if changes == 0:
return []
return diff
def print_stack_changes(stack_name, new_stack, old_stack, new_params,
old_params):
"""Prints out the paramters (if changed) and stack diff"""
from_file = "old_%s" % (stack_name,)
to_file = "new_%s" % (stack_name,)
lines = difflib.context_diff(
old_stack, new_stack,
fromfile=from_file, tofile=to_file)
template_changes = list(lines)
if not template_changes:
print "*** No changes to template ***"
else:
param_diffs = diff_parameters(old_params, new_params)
print_diff_parameters(param_diffs)
print "".join(template_changes)
class Action(build.Action):
""" Responsible for diff'ing CF stacks in AWS and on disk
Generates the build plan based on stack dependencies (these dependencies
are determined automatically based on references to output values from
other stacks).
The plan is then used to pull the current CloudFormation template from
AWS and compare it to the generated templated based on the current
config.
"""
def _normalize_json(self, template):
"""Normalizes our template for diffing
Args:
template(str): json string representing the template
Returns:
list: json representation of the parameters
"""
obj = json.loads(template)
json_str = json.dumps(obj, sort_keys=True, indent=4)
result = []
lines = json_str.split("\n")
for line in lines:
result.append(line + "\n")
return result
def _print_new_stack(self, stack, parameters):
"""Prints out the parameters & stack contents of a new stack"""
print "New template parameters:"
for param in sorted(parameters,
key=lambda param: param['ParameterKey']):
print "%s = %s" % (param['ParameterKey'], param['ParameterValue'])
print "\nNew template contents:"
print "".join(stack)
def _diff_stack(self, stack, **kwargs):
"""Handles the diffing a stack in CloudFormation vs our config"""
if not build.should_submit(stack):
return NotSubmittedStatus()
if not build.should_update(stack):
return NotUpdatedStatus()
# get the current stack template & params from AWS
try:
[old_template, old_params] = self.provider.get_stack_info(
stack.fqn)
except exceptions.StackDoesNotExist:
old_template = None
old_params = {}
stack.resolve_variables(self.context, self.provider)
# generate our own template & params
new_template = stack.blueprint.rendered
parameters = self.build_parameters(stack)<|fim▁hole|> for p in parameters:
new_params[p['ParameterKey']] = p['ParameterValue']
new_stack = self._normalize_json(new_template)
print "============== Stack: %s ==============" % (stack.name,)
# If this is a completely new template dump our params & stack
if not old_template:
self._print_new_stack(new_stack, parameters)
else:
# Diff our old & new stack/parameters
old_stack = self._normalize_json(old_template)
print_stack_changes(stack.name, new_stack, old_stack, new_params,
old_params)
return COMPLETE
def _generate_plan(self):
plan = Plan(description="Diff stacks")
stacks = self.context.get_stacks_dict()
dependencies = self._get_dependencies()
for stack_name in self.get_stack_execution_order(dependencies):
plan.add(
stacks[stack_name],
run_func=self._diff_stack,
requires=dependencies.get(stack_name),
)
return plan
def run(self, *args, **kwargs):
plan = self._generate_plan()
debug_plan = self._generate_plan()
debug_plan.outline(logging.DEBUG)
logger.info("Diffing stacks: %s", ", ".join(plan.keys()))
plan.execute()
"""Don't ever do anything for pre_run or post_run"""
def pre_run(self, *args, **kwargs):
pass
def post_run(self, *args, **kwargs):
pass<|fim▁end|>
|
new_params = dict()
|
<|file_name|>convert.py<|end_file_name|><|fim▁begin|>from flatten import *
POS_SIZE = 2**23 - 1
NEG_SIZE = -2**23
OPTIMIZE = True
OPTIMIZERS = {
'set': 'SET',
'setglobal': 'SET_GLOBAL',
'local': 'SET_LOCAL',
'get': 'GET',
'getglobal': 'GET_GLOBAL',
'return': 'RETURN',
'recurse': 'RECURSE',
'drop': 'DROP',
'dup': 'DUP',
'[]': 'NEW_LIST',
'{}': 'NEW_DICT',
'swap': 'SWAP',
'rot': 'ROT',
'over': 'OVER',
'pop-from': 'POP_FROM',
'push-to': 'PUSH_TO',
'push-through': 'PUSH_THROUGH',
'has': 'HAS_DICT',
'get-from': 'GET_DICT',
'set-to': 'SET_DICT',
'raise': 'RAISE',
'reraise': 'RERAISE',
'call': 'CALL',
}
ARGED_OPT = set('SET SET_LOCAL SET_GLOBAL GET GET_GLOBAL'.split())
positional_instructions = set('JMP JMPZ LABDA JMPEQ JMPNE ENTER_ERRHAND'.split())
def convert(filename, flat):
bytecode = [SingleInstruction('SOURCE_FILE', String(None, '"' + filename))]
for k in flat:
if isinstance(k, SingleInstruction):
bytecode.append(k)
elif isinstance(k, Code):
for w in k.words:
if isinstance(w, ProperWord):
if OPTIMIZE and w.value in OPTIMIZERS:
if OPTIMIZERS[w.value] in ARGED_OPT:
if bytecode and bytecode[-1].opcode == 'PUSH_LITERAL' and isinstance(bytecode[-1].ref, Ident):
s = bytecode.pop().ref
else:
bytecode.append(SingleInstruction('PUSH_WORD', w))
continue
else:
s = 0
bytecode.append(SingleInstruction(OPTIMIZERS[w.value], s))
elif w.value == 'for':
mstart = Marker()
mend = Marker()
bytecode.extend([
mstart,
SingleInstruction('DUP', 0),
SingleInstruction('JMPZ', mend),
SingleInstruction('CALL', 0),
SingleInstruction('JMP', mstart),
mend,
SingleInstruction('DROP', 0)
])
elif w.value == '(:split:)':
mparent = Marker()
mchild = Marker()
bytecode.extend([
SingleInstruction('LABDA', mparent),
SingleInstruction('JMP', mchild),
mparent,
SingleInstruction('RETURN', 0),
mchild,
])
elif w.value == '\xce\xbb': #U+03BB GREEK SMALL LETTER LAMDA
for i in range(len(bytecode) - 1, -1, -1):
l = bytecode[i]
if isinstance(l, SingleInstruction) and l.opcode == 'PUSH_WORD' and l.ref.value == ';':
l.opcode = 'LABDA'
l.ref = Marker()
bytecode.extend([
SingleInstruction('RETURN', 0),
l.ref,
])
break
else:
raise DejaSyntaxError('Inline lambda without closing semi-colon.')
elif '!' in w.value:
if w.value.startswith('!'):
w.value = 'eva' + w.value
if w.value.endswith('!'):
w.value = w.value[:-1]
args = w.value.split('!')
base = args.pop(0)
bytecode.extend(SingleInstruction('PUSH_LITERAL', x) for x in reversed(args))
bytecode.extend([
SingleInstruction('PUSH_WORD', base),
SingleInstruction('GET_DICT', 0),
SingleInstruction('CALL', 0)
])
else:
bytecode.append(SingleInstruction('PUSH_WORD', w))
elif isinstance(w, Number) and w.value.is_integer() and w.value <= POS_SIZE and w.value >= NEG_SIZE:
bytecode.append(SingleInstruction('PUSH_INTEGER', int(w.value)))
else:
bytecode.append(SingleInstruction('PUSH_LITERAL', w))
elif isinstance(k, Marker):
bytecode.append(k)
elif isinstance(k, GoTo):
bytecode.append(SingleInstruction('JMP', k.index))
elif isinstance(k, Branch):
bytecode.append(SingleInstruction('JMPZ', k.index))
elif isinstance(k, LabdaNode):
bytecode.append(SingleInstruction('LABDA', k.index))
bytecode.append(SingleInstruction('RETURN', 0))
return bytecode
def is_return(node):
return isinstance(node, SingleInstruction) and node.opcode == 'RETURN'
def is_jump_to(node, marker):
return isinstance(node, SingleInstruction) and node.opcode == 'JMP' and node.ref is marker
def is_pass(node):
return isinstance(node, SingleInstruction) and node.opcode == 'PUSH_WORD' and (node.ref == 'pass' or (isinstance(node.ref, ProperWord) and node.ref.value == 'pass'))
def is_linenr(node):
return isinstance(node, SingleInstruction) and node.opcode == 'LINE_NUMBER'
def get(l, i):<|fim▁hole|> return l[i]
except IndexError:
return None
def optimize(flattened): #optimize away superfluous RETURN statements
for i, instruction in reversed(list(enumerate(flattened))):
if (is_return(instruction) and (is_return(get(flattened, i + 1)) or (isinstance(get(flattened, i + 1), Marker) and is_return(get(flattened, i + 2))))
or isinstance(get(flattened, i + 1), Marker) and is_jump_to(instruction, get(flattened, i + 1))
or isinstance(get(flattened, i + 2), Marker) and isinstance(get(flattened, i + 1), Marker) and is_jump_to(instruction, get(flattened, i + 2))
or is_pass(instruction)
or is_linenr(instruction) and is_linenr(get(flattened, i + 1))
):
flattened.pop(i)
return flattened
def refine(flattened): #removes all markers and replaces them by indices
#first pass: fill dictionary
memo = {}
i = 0
while i < len(flattened):
item = flattened[i]
if isinstance(item, Marker):
memo[item] = i
del flattened[i]
else:
i += 1
#second pass: change all goto and branches
for i, item in enumerate(flattened):
if item.opcode in positional_instructions:
item.ref = memo[item.ref] - i
return flattened<|fim▁end|>
|
try:
|
<|file_name|>keyboard-navigable-list-test.js<|end_file_name|><|fim▁begin|>import { moduleForComponent, test } from 'ember-qunit';
import hbs from 'htmlbars-inline-precompile';
moduleForComponent('keyboard-navigable-list', 'Integration | Component | keyboard navigable list', {
integration: true
});
test('if passed in an array it renders the items in a list.', function(assert) {
//this.set('theArray', [{ name: 'hello'}, {name: 'second item'}, {name: 'third item'}]);
this.set('theArray', [1, 2, 3]);
// Handle any actions with this.on('myAction', function(val) { ... });
this.render(hbs`{{keyboard-navigable-list contentArray=theArray}}`);
assert.equal(this.$('ul[data-parent-ul] > li').length, 3, 'it renders the proper number of items');
assert.equal(this.$('ul[data-parent-ul] > li').first().text().trim(), 1, 'the first item is 1');
assert.equal(this.$('ul[data-parent-ul] > li').last().text().trim(), 3, 'the last item is 3');
});
test('if passed in an array contains an object key it displays that property on the object.', function(assert) {
this.set('theArray', [{ name: 'hello'}, {name: 'second item'}, {name: 'third item'}]);
this.render(hbs`{{keyboard-navigable-list contentArray=theArray objectKey="name"}}`);
assert.equal(this.$('ul[data-parent-ul] > li').first().text().trim(), 'hello', 'the first item is hello');
assert.equal(this.$('ul[data-parent-ul] > li').last().text().trim(), 'third item', 'the last item is third item');
});
test('after the component loads no li item has the class of active', function(assert) {
this.set('theArray', [{ name: 'hello'}, {name: 'second item'}, {name: 'third item'}]);
this.render(hbs`{{keyboard-navigable-list contentArray=theArray objectKey="name"}}`);
assert.equal(this.$('li.active').length, 0, 'by default no one is active');
});
test('if linkDirection is set, hasLink is true and a link is present', function(assert) {
this.set('theArray', [{ name: 'hello'}, {name: 'second item'}, {name: 'third item'}]);
<|fim▁hole|> assert.equal(this.$('ul[data-parent-ul] > li:eq(0) > a').length, 1, 'if there is a linkDirection there is a link');
});
test('if used as a block level component it gives you access to the individual items in the array', function(assert) {
this.set('theArray', [{ name: 'hello'}, {name: 'second item'}, {name: 'third item'}]);
this.render(hbs`{{#keyboard-navigable-list contentArray=theArray as |person|}} {{person.name}} {{/keyboard-navigable-list}}`);
assert.equal(this.$('ul[data-parent-ul] > li:eq(0)').text().trim(), 'hello', 'we have access to the items from the yield');
});<|fim▁end|>
|
this.render(hbs`{{keyboard-navigable-list contentArray=theArray objectKey="name" linkDirection='people.show'}}`);
|
<|file_name|>mask_test.go<|end_file_name|><|fim▁begin|>package tests
import (
"log"
"reflect"
"time"
"github.com/deepglint/streamtools/st/blocks"
"github.com/deepglint/streamtools/test_utils"
. "launchpad.net/gocheck"
)
type MaskSuite struct{}
var maskSuite = Suite(&MaskSuite{})
func (s *MaskSuite) TestMask(c *C) {
log.Println("testing Mask")
b, ch := test_utils.NewBlock("testingMask", "mask")
go blocks.BlockRoutine(b)
ruleMsg := map[string]interface{}{
"Mask": map[string]interface{}{
".foo": "{}",
},
}
toRule := &blocks.Msg{Msg: ruleMsg, Route: "rule"}
ch.InChan <- toRule
outChan := make(chan *blocks.Msg)<|fim▁hole|> ch.QueryChan <- &blocks.QueryMsg{MsgChan: queryOutChan, Route: "rule"}
time.AfterFunc(time.Duration(5)*time.Second, func() {
ch.QuitChan <- true
})
for {
select {
case messageI := <-queryOutChan:
if !reflect.DeepEqual(messageI, ruleMsg) {
log.Println("Rule mismatch:", messageI, ruleMsg)
c.Fail()
}
case message := <-outChan:
log.Println(message)
case err := <-ch.ErrChan:
if err != nil {
c.Errorf(err.Error())
} else {
return
}
}
}
}<|fim▁end|>
|
ch.AddChan <- &blocks.AddChanMsg{Route: "1", Channel: outChan}
queryOutChan := make(blocks.MsgChan)
|
<|file_name|>InputAdornment.d.ts<|end_file_name|><|fim▁begin|>import * as React from 'react';
import { OverridableComponent, OverrideProps } from '../OverridableComponent';
export interface InputAdornmentTypeMap<P = {}, D extends React.ElementType = 'div'> {
props: P & {
/**
* The content of the component, normally an `IconButton` or string.
*/
children?: React.ReactNode;
/**
* Disable pointer events on the root.
* This allows for the content of the adornment to focus the input on click.
*/
disablePointerEvents?: boolean;
/**
* If children is a string then disable wrapping in a Typography component.<|fim▁hole|> */
position?: 'start' | 'end';
/**
* The variant to use.
* Note: If you are using the `TextField` component or the `FormControl` component
* you do not have to set this manually.
*/
variant?: 'standard' | 'outlined' | 'filled';
};
defaultComponent: D;
classKey: InputAdornmentClassKey;
}
/**
*
* Demos:
*
* - [Text Fields](https://material-ui.com/components/text-fields/)
*
* API:
*
* - [InputAdornment API](https://material-ui.com/api/input-adornment/)
*/
declare const InputAdornment: OverridableComponent<InputAdornmentTypeMap>;
export type InputAdornmentClassKey =
| 'root'
| 'filled'
| 'positionStart'
| 'positionEnd'
| 'disablePointerEvents'
| 'hiddenLabel'
| 'marginDense';
export type InputAdornmentProps<
D extends React.ElementType = InputAdornmentTypeMap['defaultComponent'],
P = {}
> = OverrideProps<InputAdornmentTypeMap<P, D>, D>;
export default InputAdornment;<|fim▁end|>
|
*/
disableTypography?: boolean;
/**
* The position this adornment should appear relative to the `Input`.
|
<|file_name|>functions_74.js<|end_file_name|><|fim▁begin|>var searchData=
[
['tester',['tester',['../classcontrol_vue.html#a92d898224293b741e5c6d3a3576a2193',1,'controlVue']]],
['testerappuyer',['testerAppuyer',['../class_vue.html#a7fb0d20950a6596a3eef78e244628682',1,'Vue']]],
['testerdossier',['testerDossier',['../classcontrol_vue.html#a630d60f73a0cdb77d2f7f92050983da7',1,'controlVue']]],
['testerfic',['testerFic',['../classcontrol_vue.html#a8139fd2a944a2fca901809edd2468a1e',1,'controlVue']]],
['teststruct',['testStruct',['../classurl_validator.html#a337a9edaa44e76bda5a7ed3a345b0b78',1,'urlValidator']]],
['testurls',['testUrls',['../classparseur_fic.html#ad2c99c1283f03ac105a2927aa9826021',1,'parseurFic']]],<|fim▁hole|> ['testvie',['testVie',['../classurl_validator.html#a9993e82ddcaf00c655e3ad9221a10232',1,'urlValidator']]]
];<|fim▁end|>
| |
<|file_name|>dialogs.py<|end_file_name|><|fim▁begin|>#!/usr/bin/env python
# -*- coding: utf-8 -*-
# Copyright (C) 2011 Chris Dekter
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
import logging, sys, os, re
#from PyKDE4.kdeui import KApplication, KXmlGuiWindow, KStandardAction, KIcon, KTextEdit, KAction, KStandardShortcut
from PyKDE4.kdeui import *
from PyKDE4.kdecore import i18n
from PyQt4.QtGui import *
from PyQt4.QtCore import SIGNAL, Qt, QRegExp
__all__ = ["validate", "EMPTY_FIELD_REGEX", "AbbrSettingsDialog", "HotkeySettingsDialog", "WindowFilterSettingsDialog", "RecordDialog"]
import abbrsettings, hotkeysettings, windowfiltersettings, recorddialog, detectdialog
from autokey import model, iomediator
WORD_CHAR_OPTIONS = {
"All non-word" : model.DEFAULT_WORDCHAR_REGEX,
"Space and Enter" : r"[^ \n]",
"Tab" : r"[^\t]"
}
WORD_CHAR_OPTIONS_ORDERED = ["All non-word", "Space and Enter", "Tab"]
EMPTY_FIELD_REGEX = re.compile(r"^ *$", re.UNICODE)
def validate(expression, message, widget, parent):
if not expression:
KMessageBox.error(parent, message)
if widget is not None:
widget.setFocus()
return expression
class AbbrListItem(QListWidgetItem):<|fim▁hole|> QListWidgetItem.__init__(self, text)
self.setFlags(self.flags() | Qt.ItemFlags(Qt.ItemIsEditable))
def setData(self, role, value):
if value.toString() == "":
self.listWidget().itemChanged.emit(self)
else:
QListWidgetItem.setData(self, role, value)
class AbbrSettings(QWidget, abbrsettings.Ui_Form):
def __init__(self, parent):
QWidget.__init__(self, parent)
abbrsettings.Ui_Form.__init__(self)
self.setupUi(self)
for item in WORD_CHAR_OPTIONS_ORDERED:
self.wordCharCombo.addItem(item)
self.addButton.setIcon(KIcon("list-add"))
self.removeButton.setIcon(KIcon("list-remove"))
def on_addButton_pressed(self):
item = AbbrListItem("")
self.abbrListWidget.addItem(item)
self.abbrListWidget.editItem(item)
self.removeButton.setEnabled(True)
def on_removeButton_pressed(self):
item = self.abbrListWidget.takeItem(self.abbrListWidget.currentRow())
if self.abbrListWidget.count() == 0:
self.removeButton.setEnabled(False)
def on_abbrListWidget_itemChanged(self, item):
if EMPTY_FIELD_REGEX.match(item.text()):
row = self.abbrListWidget.row(item)
self.abbrListWidget.takeItem(row)
del item
if self.abbrListWidget.count() == 0:
self.removeButton.setEnabled(False)
def on_abbrListWidget_itemDoubleClicked(self, item):
self.abbrListWidget.editItem(item)
def on_ignoreCaseCheckbox_stateChanged(self, state):
if not self.ignoreCaseCheckbox.isChecked():
self.matchCaseCheckbox.setChecked(False)
def on_matchCaseCheckbox_stateChanged(self, state):
if self.matchCaseCheckbox.isChecked():
self.ignoreCaseCheckbox.setChecked(True)
def on_immediateCheckbox_stateChanged(self, state):
if self.immediateCheckbox.isChecked():
self.omitTriggerCheckbox.setChecked(False)
self.omitTriggerCheckbox.setEnabled(False)
self.wordCharCombo.setEnabled(False)
else:
self.omitTriggerCheckbox.setEnabled(True)
self.wordCharCombo.setEnabled(True)
class AbbrSettingsDialog(KDialog):
def __init__(self, parent):
KDialog.__init__(self, parent)
self.widget = AbbrSettings(self)
self.setMainWidget(self.widget)
self.setButtons(KDialog.ButtonCodes(KDialog.ButtonCode(KDialog.Ok | KDialog.Cancel)))
self.setPlainCaption(i18n("Set Abbreviations"))
self.setModal(True)
#self.connect(self, SIGNAL("okClicked()"), self.on_okClicked)
def load(self, item):
self.targetItem = item
self.widget.abbrListWidget.clear()
if model.TriggerMode.ABBREVIATION in item.modes:
for abbr in item.abbreviations:
self.widget.abbrListWidget.addItem(AbbrListItem(abbr))
self.widget.removeButton.setEnabled(True)
self.widget.abbrListWidget.setCurrentRow(0)
else:
self.widget.removeButton.setEnabled(False)
self.widget.removeTypedCheckbox.setChecked(item.backspace)
self.__resetWordCharCombo()
wordCharRegex = item.get_word_chars()
if wordCharRegex in WORD_CHAR_OPTIONS.values():
# Default wordchar regex used
for desc, regex in WORD_CHAR_OPTIONS.iteritems():
if item.get_word_chars() == regex:
self.widget.wordCharCombo.setCurrentIndex(WORD_CHAR_OPTIONS_ORDERED.index(desc))
break
else:
# Custom wordchar regex used
self.widget.wordCharCombo.addItem(model.extract_wordchars(wordCharRegex))
self.widget.wordCharCombo.setCurrentIndex(len(WORD_CHAR_OPTIONS))
if isinstance(item, model.Folder):
self.widget.omitTriggerCheckbox.setVisible(False)
else:
self.widget.omitTriggerCheckbox.setVisible(True)
self.widget.omitTriggerCheckbox.setChecked(item.omitTrigger)
if isinstance(item, model.Phrase):
self.widget.matchCaseCheckbox.setVisible(True)
self.widget.matchCaseCheckbox.setChecked(item.matchCase)
else:
self.widget.matchCaseCheckbox.setVisible(False)
self.widget.ignoreCaseCheckbox.setChecked(item.ignoreCase)
self.widget.triggerInsideCheckbox.setChecked(item.triggerInside)
self.widget.immediateCheckbox.setChecked(item.immediate)
def save(self, item):
item.modes.append(model.TriggerMode.ABBREVIATION)
item.clear_abbreviations()
item.abbreviations = self.get_abbrs()
item.backspace = self.widget.removeTypedCheckbox.isChecked()
option = unicode(self.widget.wordCharCombo.currentText())
if option in WORD_CHAR_OPTIONS:
item.set_word_chars(WORD_CHAR_OPTIONS[option])
else:
item.set_word_chars(model.make_wordchar_re(option))
if not isinstance(item, model.Folder):
item.omitTrigger = self.widget.omitTriggerCheckbox.isChecked()
if isinstance(item, model.Phrase):
item.matchCase = self.widget.matchCaseCheckbox.isChecked()
item.ignoreCase = self.widget.ignoreCaseCheckbox.isChecked()
item.triggerInside = self.widget.triggerInsideCheckbox.isChecked()
item.immediate = self.widget.immediateCheckbox.isChecked()
def reset(self):
self.widget.removeButton.setEnabled(False)
self.widget.abbrListWidget.clear()
self.__resetWordCharCombo()
self.widget.omitTriggerCheckbox.setChecked(False)
self.widget.removeTypedCheckbox.setChecked(True)
self.widget.matchCaseCheckbox.setChecked(False)
self.widget.ignoreCaseCheckbox.setChecked(False)
self.widget.triggerInsideCheckbox.setChecked(False)
self.widget.immediateCheckbox.setChecked(False)
def __resetWordCharCombo(self):
self.widget.wordCharCombo.clear()
for item in WORD_CHAR_OPTIONS_ORDERED:
self.widget.wordCharCombo.addItem(item)
self.widget.wordCharCombo.setCurrentIndex(0)
def get_abbrs(self):
ret = []
for i in range(self.widget.abbrListWidget.count()):
text = self.widget.abbrListWidget.item(i).text()
ret.append(unicode(text))
return ret
def get_abbrs_readable(self):
abbrs = self.get_abbrs()
if len(abbrs) == 1:
return abbrs[0]
else:
return "[%s]" % ','.join(abbrs)
def reset_focus(self):
self.widget.addButton.setFocus()
def __valid(self):
if not validate(len(self.get_abbrs()) > 0, i18n("You must specify at least one abbreviation"),
self.widget.addButton, self): return False
return True
def slotButtonClicked(self, button):
if button == KDialog.Ok:
if self.__valid():
KDialog.slotButtonClicked(self, button)
else:
self.load(self.targetItem)
KDialog.slotButtonClicked(self, button)
class HotkeySettings(QWidget, hotkeysettings.Ui_Form):
def __init__(self, parent):
QWidget.__init__(self, parent)
hotkeysettings.Ui_Form.__init__(self)
self.setupUi(self)
# ---- Signal handlers
def on_setButton_pressed(self):
self.setButton.setEnabled(False)
self.keyLabel.setText(i18n("Press a key or combination..."))
self.grabber = iomediator.KeyGrabber(self.parentWidget())
self.grabber.start()
class HotkeySettingsDialog(KDialog):
KEY_MAP = {
' ' : "<space>",
}
REVERSE_KEY_MAP = {}
for key, value in KEY_MAP.iteritems():
REVERSE_KEY_MAP[value] = key
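# Equivalent one-liner (Python 3 sketch; this module targets Python 2,
# hence iteritems above): REVERSE_KEY_MAP = {v: k for k, v in KEY_MAP.items()}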
def __init__(self, parent):
KDialog.__init__(self, parent)
self.widget = HotkeySettings(self)
self.setMainWidget(self.widget)
self.setButtons(KDialog.ButtonCodes(KDialog.ButtonCode(KDialog.Ok | KDialog.Cancel)))
self.setPlainCaption(i18n("Set Hotkey"))
self.setModal(True)
self.key = None
def load(self, item):
self.targetItem = item
self.widget.setButton.setEnabled(True)
if model.TriggerMode.HOTKEY in item.modes:
self.widget.controlButton.setChecked(iomediator.Key.CONTROL in item.modifiers)
self.widget.altButton.setChecked(iomediator.Key.ALT in item.modifiers)
self.widget.shiftButton.setChecked(iomediator.Key.SHIFT in item.modifiers)
self.widget.superButton.setChecked(iomediator.Key.SUPER in item.modifiers)
self.widget.hyperButton.setChecked(iomediator.Key.HYPER in item.modifiers)
self.widget.metaButton.setChecked(iomediator.Key.META in item.modifiers)
key = item.hotKey
if key in self.KEY_MAP:
keyText = self.KEY_MAP[key]
else:
keyText = key
self._setKeyLabel(keyText)
self.key = keyText
else:
self.reset()
def save(self, item):
item.modes.append(model.TriggerMode.HOTKEY)
# Build modifier list
modifiers = self.build_modifiers()
keyText = self.key
if keyText in self.REVERSE_KEY_MAP:
key = self.REVERSE_KEY_MAP[keyText]
else:
key = keyText
assert key != None, "Attempt to set hotkey with no key"
item.set_hotkey(modifiers, key)
def reset(self):
self.widget.controlButton.setChecked(False)
self.widget.altButton.setChecked(False)
self.widget.shiftButton.setChecked(False)
self.widget.superButton.setChecked(False)
self.widget.hyperButton.setChecked(False)
self.widget.metaButton.setChecked(False)
self._setKeyLabel(i18n("(None)"))
self.key = None
self.widget.setButton.setEnabled(True)
def set_key(self, key, modifiers=[]):
        if key in self.KEY_MAP:
key = self.KEY_MAP[key]
self._setKeyLabel(key)
self.key = key
self.widget.controlButton.setChecked(iomediator.Key.CONTROL in modifiers)
self.widget.altButton.setChecked(iomediator.Key.ALT in modifiers)
self.widget.shiftButton.setChecked(iomediator.Key.SHIFT in modifiers)
self.widget.superButton.setChecked(iomediator.Key.SUPER in modifiers)
self.widget.hyperButton.setChecked(iomediator.Key.HYPER in modifiers)
self.widget.metaButton.setChecked(iomediator.Key.META in modifiers)
self.widget.setButton.setEnabled(True)
def cancel_grab(self):
self.widget.setButton.setEnabled(True)
self._setKeyLabel(self.key)
def build_modifiers(self):
modifiers = []
if self.widget.controlButton.isChecked():
modifiers.append(iomediator.Key.CONTROL)
if self.widget.altButton.isChecked():
modifiers.append(iomediator.Key.ALT)
if self.widget.shiftButton.isChecked():
modifiers.append(iomediator.Key.SHIFT)
if self.widget.superButton.isChecked():
modifiers.append(iomediator.Key.SUPER)
if self.widget.hyperButton.isChecked():
modifiers.append(iomediator.Key.HYPER)
if self.widget.metaButton.isChecked():
modifiers.append(iomediator.Key.META)
modifiers.sort()
return modifiers
def slotButtonClicked(self, button):
if button == KDialog.Ok:
if self.__valid():
KDialog.slotButtonClicked(self, button)
else:
self.load(self.targetItem)
KDialog.slotButtonClicked(self, button)
def _setKeyLabel(self, key):
self.widget.keyLabel.setText(i18n("Key: ") + key)
def __valid(self):
if not validate(self.key is not None, i18n("You must specify a key for the hotkey."),
None, self): return False
return True
class GlobalHotkeyDialog(HotkeySettingsDialog):
def load(self, item):
self.targetItem = item
if item.enabled:
self.widget.controlButton.setChecked(iomediator.Key.CONTROL in item.modifiers)
self.widget.altButton.setChecked(iomediator.Key.ALT in item.modifiers)
self.widget.shiftButton.setChecked(iomediator.Key.SHIFT in item.modifiers)
self.widget.superButton.setChecked(iomediator.Key.SUPER in item.modifiers)
self.widget.hyperButton.setChecked(iomediator.Key.HYPER in item.modifiers)
self.widget.metaButton.setChecked(iomediator.Key.META in item.modifiers)
key = item.hotKey
if key in self.KEY_MAP:
keyText = self.KEY_MAP[key]
else:
keyText = key
self._setKeyLabel(keyText)
self.key = keyText
else:
self.reset()
def save(self, item):
# Build modifier list
modifiers = self.build_modifiers()
keyText = self.key
if keyText in self.REVERSE_KEY_MAP:
key = self.REVERSE_KEY_MAP[keyText]
else:
key = keyText
assert key is not None, "Attempt to set hotkey with no key"
item.set_hotkey(modifiers, key)
class WindowFilterSettings(QWidget, windowfiltersettings.Ui_Form):
def __init__(self, parent):
QWidget.__init__(self, parent)
windowfiltersettings.Ui_Form.__init__(self)
self.setupUi(self)
m = QFontMetrics(QApplication.font())
self.triggerRegexLineEdit.setMinimumWidth(m.width("windowclass.WindowClass"))
# ---- Signal handlers
def on_detectButton_pressed(self):
self.detectButton.setEnabled(False)
self.grabber = iomediator.WindowGrabber(self.parentWidget())
self.grabber.start()
class WindowFilterSettingsDialog(KDialog):
def __init__(self, parent):
KDialog.__init__(self, parent)
self.widget = WindowFilterSettings(self)
self.setMainWidget(self.widget)
self.setButtons(KDialog.ButtonCodes(KDialog.ButtonCode(KDialog.Ok | KDialog.Cancel)))
self.setPlainCaption(i18n("Set Window Filter"))
self.setModal(True)
def load(self, item):
self.targetItem = item
if not isinstance(item, model.Folder):
self.widget.recursiveCheckBox.hide()
else:
self.widget.recursiveCheckBox.show()
if not item.has_filter():
self.reset()
else:
self.widget.triggerRegexLineEdit.setText(item.get_filter_regex())
self.widget.recursiveCheckBox.setChecked(item.isRecursive)
def save(self, item):
item.set_window_titles(self.get_filter_text())
item.set_filter_recursive(self.get_is_recursive())
def get_is_recursive(self):
return self.widget.recursiveCheckBox.isChecked()
def reset(self):
self.widget.triggerRegexLineEdit.setText("")
self.widget.recursiveCheckBox.setChecked(False)
def reset_focus(self):
self.widget.triggerRegexLineEdit.setFocus()
def get_filter_text(self):
return unicode(self.widget.triggerRegexLineEdit.text())
def receive_window_info(self, info):
self.parentWidget().topLevelWidget().app.exec_in_main(self.__receiveWindowInfo, info)
def __receiveWindowInfo(self, info):
dlg = DetectDialog(self)
dlg.populate(info)
dlg.exec_()
if dlg.result() == QDialog.Accepted:
self.widget.triggerRegexLineEdit.setText(dlg.get_choice())
self.widget.detectButton.setEnabled(True)
# --- event handlers ---
def slotButtonClicked(self, button):
if button == KDialog.Cancel:
self.load(self.targetItem)
KDialog.slotButtonClicked(self, button)
class DetectSettings(QWidget, detectdialog.Ui_Form):
def __init__(self, parent):
QWidget.__init__(self, parent)
detectdialog.Ui_Form.__init__(self)
self.setupUi(self)
self.kbuttongroup.setSelected(0)
class DetectDialog(KDialog):
def __init__(self, parent):
KDialog.__init__(self, parent)
self.widget = DetectSettings(self)
self.setMainWidget(self.widget)
self.setButtons(KDialog.ButtonCodes(KDialog.ButtonCode(KDialog.Ok | KDialog.Cancel)))
self.setPlainCaption(i18n("Window Information"))
self.setModal(True)
def populate(self, windowInfo):
self.widget.titleLabel.setText(i18n("Window title: %1", windowInfo[0]))
self.widget.classLabel.setText(i18n("Window class: %1", windowInfo[1]))
self.windowInfo = windowInfo
def get_choice(self):
index = self.widget.kbuttongroup.selected()
if index == 0:
return self.windowInfo[1]
else:
return self.windowInfo[0]
class RecordSettings(QWidget, recorddialog.Ui_Form):
def __init__(self, parent):
QWidget.__init__(self, parent)
recorddialog.Ui_Form.__init__(self)
self.setupUi(self)
class RecordDialog(KDialog):
def __init__(self, parent, closure):
KDialog.__init__(self, parent)
self.widget = RecordSettings(self)
self.setMainWidget(self.widget)
self.setButtons(KDialog.ButtonCodes(KDialog.ButtonCode(KDialog.Ok | KDialog.Cancel)))
self.setPlainCaption(i18n("Record Script"))
self.setModal(True)
self.closure = closure
def get_record_keyboard(self):
return self.widget.recKeyboardButton.isChecked()
def get_record_mouse(self):
return self.widget.recMouseButton.isChecked()
def get_delay(self):
return self.widget.secondsSpinBox.value()
def slotButtonClicked(self, button):
if button == KDialog.Ok:
KDialog.slotButtonClicked(self, button)
self.closure(True, self.get_record_keyboard(), self.get_record_mouse(), self.get_delay())
else:
self.closure(False, self.get_record_keyboard(), self.get_record_mouse(), self.get_delay())
KDialog.slotButtonClicked(self, button)<|fim▁end|>
|
def __init__(self, text):
|
<|file_name|>get_data.py<|end_file_name|><|fim▁begin|>import os
import urllib
from glob import glob
import dask.bag as db
import numpy as np
import zarr
from dask.diagnostics import ProgressBar
from netCDF4 import Dataset
def download(url):
    # urllib.URLopener exists only on Python 2; urlretrieve covers both
    # Python 2 (urllib.urlretrieve) and Python 3 (urllib.request.urlretrieve).
    try:
        from urllib.request import urlretrieve
    except ImportError:
        from urllib import urlretrieve
    filename = os.path.basename(url)
    path = os.path.join('data', filename)
    urlretrieve(url, path)
def download_weather():
# Create data directory
if not os.path.exists('data'):
os.mkdir('data')
template = ('http://www.esrl.noaa.gov/psd/thredds/fileServer/Datasets/'
'noaa.oisst.v2.highres/sst.day.mean.{year}.v2.nc')
urls = [template.format(year=year) for year in range(1981, 2016)]
b = db.from_sequence(urls, partition_size=1)
print("Downloading Weather Data")
print("------------------------")
with ProgressBar():
b.map(download).compute(num_workers=8)  # dask schedulers take num_workers, not n_workers
def transform_weather():
if os.path.exists('sst.day.mean.v2.zarr'):
return
datasets = [Dataset(path)['sst'] for path in sorted(glob('data/*.nc'))]
n = sum(d.shape[0] for d in datasets)
shape = (n, 720, 1440)
chunks = (72, 360, 360)
f = zarr.open_array('sst.day.mean.v2.zarr', shape=shape, chunks=chunks,
dtype='f4')
i = 0
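# Append each year's block along the time axis, converting the netCDF
# masked fill values to NaN as we go.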
for d in datasets:
m = d.shape[0]
f[i:i + m] = d[:].filled(np.nan)
i += m
<|fim▁hole|>
if __name__ == '__main__':
download_weather()
transform_weather()<|fim▁end|>
| |
<|file_name|>footerfolder.ts<|end_file_name|><|fim▁begin|>import Folder = require('../../folder');
import Message = require('../../message');
import React = require('react');
import Core = require('../common/core');
import Div = Core.Div;
import Span = Core.Span;
interface FooterFolderProps extends React.Props<any> {
activeFolder: Folder.Folder;
folders: string[];
}
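// Stateless footer component: renders the active folder's 1-based position
// out of the folder total, followed by the folder's display name.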
function renderFooterFolder(props: FooterFolderProps)<|fim▁hole|> const activeIndex = folders.indexOf(activeFolder.id) + 1;
const folderName = activeFolder.displayName;
return Div({},
Div({ className: 'infobar-major' },
Span({}, `${activeIndex}/${numFolders}`)),
Div({ className: 'infobar-major' }, Span({}, folderName))
);
}
const FooterFolder = React.createFactory(renderFooterFolder);
export = FooterFolder;<|fim▁end|>
|
{
const folders = props.folders;
const activeFolder = props.activeFolder;
const numFolders = folders.length;
|
<|file_name|>lib.rs<|end_file_name|><|fim▁begin|>extern crate regex;
extern crate fall_tree;
extern crate fall_parse;
mod rust;
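// Re-export the generated language constructor plus every token and
// syntax-node kind so consumers never reach into the generated module.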
pub use self::rust::language as lang_rust;
pub use self::rust::{
WHITESPACE,
LINE_COMMENT,
BLOCK_COMMENT,
UNION,
AS,
CRATE,
EXTERN,
FN,
LET,
PUB,
STRUCT,
USE,
MOD,
IF,
ELSE,
ENUM,
IMPL,
SELF,
SUPER,
TYPE,
CONST,
STATIC,
FOR,
LOOP,
WHILE,
MOVE,
MUT,
REF,
TRAIT,<|fim▁hole|> BREAK,
IN,
UNSAFE,
WHERE,
L_PAREN,
R_PAREN,
L_CURLY,
R_CURLY,
L_ANGLE,
R_ANGLE,
L_BRACK,
R_BRACK,
SHL,
SHL_EQ,
SHR,
SHR_EQ,
AND,
OR,
THIN_ARROW,
FAT_ARROW,
EQ,
EQEQ,
BANGEQ,
GTET,
LTEQ,
SEMI,
COLON,
COLONCOLON,
COMMA,
DOT,
DOTDOT,
DOTDOTDOT,
HASH,
DOLLAR,
STAR,
STAR_EQ,
SLASH,
SLASH_EQ,
PERCENT,
PERCENT_EQ,
PLUS,
PLUS_EQ,
MINUS,
MINUS_EQ,
AMPERSAND,
AMPERSAND_EQ,
PIPE,
PIPE_EQ,
UNDERSCORE,
BANG,
QUESTION,
CARET,
CARET_EQ,
CHAR,
LIFETIME,
BOOL,
NUMBER,
STRING,
RAW_STRING,
IDENT,
FILE,
USE_DECL,
USE_SPEC,
USE_SPEC_ENTRY,
EXTERN_CRATE_DECL,
FN_DEF,
LINKAGE,
VALUE_PARAM,
LAMBDA_VALUE_PARAM,
SELF_PARAMETER,
STRUCT_DEF,
STRUCT_FIELD,
TUPLE_FIELD,
ENUM_DEF,
ENUM_VARIANT,
MOD_DEF,
IMPL_DEF,
TRAIT_DEF,
MEMBERS,
TYPE_DEF,
CONST_DEF,
MACRO_ITEM,
EXTERN_BLOCK,
TYPE_PARAMETERS,
TYPE_PARAMETER,
TYPE_BOUND,
LIFETIME_PARAMETER,
VISIBILITY,
WHERE_CLAUSE,
PATH,
TRAIT_PROJECTION_PATH,
PATH_SEGMENT,
TYPE_ARGUMENTS,
FN_TRAIT_SUGAR,
ALIAS,
TYPE_REFERENCE,
PATH_TYPE,
REFERENCE_TYPE,
POINTER_TYPE,
PLACEHOLDER_TYPE,
UNIT_TYPE,
PAREN_TYPE,
TUPLE_TYPE,
NEVER_TYPE,
ARRAY_TYPE,
FN_POINTER_TYPE,
FOR_TYPE,
WILDCARD_PATTERN,
PATH_PATTERN,
TUPE_STRUCT_PATTERN,
STRUCT_PATTERN,
STRUCT_PATTERN_FIELD,
BINDING_PATTERN,
LITERAL_PATTERN,
UNIT_PATTERN,
PAREN_PATTERN,
TUPLE_PATTERN,
REFERENCE_PATTERN,
EXPR,
LITERAL,
PATH_EXPR,
STRUCT_LITERAL,
STRUCT_LITERAL_FIELD,
UNIT_EXPR,
PAREN_EXPR,
TUPLE_EXPR,
ARRAY_LITERAL,
LAMBDA_EXPR,
RETURN_EXPR,
LOOP_CF_EXPR,
BLOCK_EXPR,
LET_STMT,
TYPE_ASCRIPTION,
EMPTY_STMT,
EXPR_STMT,
IF_EXPR,
WHILE_EXPR,
LOOP_EXPR,
FOR_EXPR,
MATCH_EXPR,
MATCH_ARM,
GUARD,
BLOCK_MACRO_EXPR,
LINE_MACRO_EXPR,
METHOD_CALL_EXPR,
CALL_EXPR,
VALUE_ARGUMENT,
FIELD_EXPR,
INDEX_EXPR,
TRY_EXPR,
CAST_EXPR,
REFERENCE_EXPR,
DEREFERENCE_EXPR,
NEGATION_EXPR,
NOT_EXPR,
PRODUCT_EXPR,
SUM_EXPR,
BIT_SHIFT,
BIT_AND,
BIT_XOR,
BIT_OR,
COMPARISON,
LOGICAL_AND,
LOGICAL_OR,
RANGE_EXPR,
ASSIGNMENT_EXPR,
ATTRIBUTE,
INNER_ATTRIBUTE,
ATTR_VALUE,
BLOCK_MACRO,
LINE_MACRO,
TT,
};
pub use self::rust::{
NameOwner,
TypeParametersOwner,
FnDef,
StructDef,
EnumDef,
TraitDef,
TypeDef,
ModDef,
ImplDef,
UseDecl,
TypeParameter,
TypeReference,
LetStmt,
ExprStmt,
};<|fim▁end|>
|
MATCH,
RETURN,
CONTINUE,
|
<|file_name|>CircleShape.cpp<|end_file_name|><|fim▁begin|>#include "Extensions.hpp"
#include "Extensions.inl"
#include <Math/Rect.hpp>
#include <Math/Vector.hpp>
#include <Script/ScriptExtensions.hpp>
#include <SFML/Graphics/CircleShape.hpp>
#include <angelscript.h>
#include <cassert>
namespace
{
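// Factory helpers: AngelScript constructs value types in place, so these
// wrappers placement-new the sf::CircleShape into engine-provided memory.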
void create_CircleShape(void* memory)
{
new(memory)sf::CircleShape();
}
void create_CircleShape_rad(float radius, unsigned int count, void* memory)
{
new(memory)sf::CircleShape(radius, count);
}
bool Reg()
{
Script::ScriptExtensions::AddExtension([](asIScriptEngine* eng) {
int r = 0;
r = eng->SetDefaultNamespace("Shapes"); assert(r >= 0);
r = eng->RegisterObjectType("Circle", sizeof(sf::CircleShape), asOBJ_VALUE | asGetTypeTraits<sf::CircleShape>()); assert(r >= 0);
r = eng->RegisterObjectBehaviour("Circle", asBEHAVE_CONSTRUCT, "void f()", asFUNCTION(create_CircleShape), asCALL_CDECL_OBJLAST); assert(r >= 0);
r = eng->RegisterObjectBehaviour("Circle", asBEHAVE_CONSTRUCT, "void f(float,uint=30)", asFUNCTION(create_CircleShape_rad), asCALL_CDECL_OBJLAST); assert(r >= 0);
r = eng->RegisterObjectMethod("Circle", "void set_PointCount(uint)", asMETHOD(sf::CircleShape, setPointCount), asCALL_THISCALL); assert(r >= 0);
r = eng->RegisterObjectMethod("Circle", "float get_Radius()", asMETHOD(sf::CircleShape, getRadius), asCALL_THISCALL); assert(r >= 0);
r = eng->RegisterObjectMethod("Circle", "void set_Radius(float)", asMETHOD(sf::CircleShape, setRadius), asCALL_THISCALL); assert(r >= 0);
Script::SFML::registerShape<sf::CircleShape>("Circle", eng);<|fim▁hole|> r = eng->SetDefaultNamespace(""); assert(r >= 0);
}, 6);
return true;
}
}
bool Script::SFML::Extensions::CircleShape = Reg();<|fim▁end|>
| |
<|file_name|>compare_shuffled.py<|end_file_name|><|fim▁begin|>import sys
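# Compare per-query timings from two runs. Each input file is expected to hold
# tab-separated lines with the query in column 0 and a time as the first token
# of column 2; queries differing by more than a factor of 2 are reported.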
q_to_time = {}
i = 0
for line in open(sys.argv[1]):
try:
line = line.strip()
cols = line.split('\t')
q_to_time[cols[0]] = [(int(cols[2].split(' ')[0]), i)]
i += 1
except ValueError:
continue
i = 0
for line in open(sys.argv[2]):
try:
line = line.strip()
cols = line.split('\t')
q_to_time[cols[0]].append((int(cols[2].split(' ')[0]), i))
i += 1
except KeyError:
continue
except ValueError:
continue
for k,v in q_to_time.items():
if v[0][0] < v[1][0]:
smaller = float(v[0][0])<|fim▁hole|> larger = float(v[0][0])
try:
if (larger / smaller > 2):
print('SIGNIFICANT DIFFERENCE: ' + k + ': (' + str(v[0][0]) + ', ' +
str(v[0][1]) + ') vs (' + str(v[1][0]) + ', ' + str(v[1][1])
+ ').')
print(' -> FACTOR: ' + str(larger / smaller))
except ZeroDivisionError:  # guard against a zero time in either run
print('problem with : ' + k + ' ' + str(larger) + ' ' + str(smaller))<|fim▁end|>
|
larger = float(v[1][0])
else:
smaller = float(v[1][0])
|
<|file_name|>event-name.decorator.spec.ts<|end_file_name|><|fim▁begin|>// std
import { strictEqual, throws } from 'assert';
// 3p
import 'reflect-metadata';
// FoalTS
import { EventName } from './event-name.decorator';
describe('EventName', () => {
it('should define the metadata websocket-event-name=${eventName} on the method class.', () => {
class Foobar {
@EventName('foo')
barfoo() {}
}
const actual = Reflect.getOwnMetadata('websocket-event-name', Foobar.prototype, 'barfoo');
strictEqual(actual, 'foo');
});
it('should throw an error if the event name is empty.', () => {
throws(
() => {
// tslint:disable-next-line:no-unused-variable
class Foobar {
@EventName('')
barfoo() {}
}
},<|fim▁hole|> );
});
});<|fim▁end|>
|
new Error('@EventName does not support empty names.')
|
<|file_name|>ArtworkModel.java<|end_file_name|><|fim▁begin|>package com.simplecity.amp_library.model;
import android.content.Context;
import com.simplecity.amp_library.R;
import java.io.File;
<|fim▁hole|>public class ArtworkModel {
private static final String TAG = "ArtworkModel";
@ArtworkProvider.Type
public int type;
public File file;
public ArtworkModel(@ArtworkProvider.Type int type, File file) {
this.type = type;
this.file = file;
}
public static String getTypeString(Context context, @ArtworkProvider.Type int type) {
switch (type) {
case ArtworkProvider.Type.MEDIA_STORE:
return context.getString(R.string.artwork_type_media_store);
case ArtworkProvider.Type.TAG:
return context.getString(R.string.artwork_type_tag);
case ArtworkProvider.Type.FOLDER:
return "Folder";
case ArtworkProvider.Type.REMOTE:
return context.getString(R.string.artwork_type_internet);
}
return null;
}
@Override
public boolean equals(Object o) {
if (this == o) return true;
if (o == null || getClass() != o.getClass()) return false;
ArtworkModel that = (ArtworkModel) o;
if (type != that.type) return false;
return file != null ? file.equals(that.file) : that.file == null;
}
@Override
public int hashCode() {
int result = type;
result = 31 * result + (file != null ? file.hashCode() : 0);
return result;
}
}<|fim▁end|>
| |
<|file_name|>JobRestartForceTest.java<|end_file_name|><|fim▁begin|>/*
* Copyright 2015-2017 EMBL - European Bioinformatics Institute
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package uk.ac.ebi.eva.runner;
import org.junit.Assert;
import org.junit.Test;
import org.junit.runner.RunWith;
import org.springframework.batch.core.Job;
import org.springframework.batch.core.JobExecution;
import org.springframework.batch.core.JobParameters;
import org.springframework.batch.core.launch.JobOperator;
import org.springframework.batch.test.JobLauncherTestUtils;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.test.annotation.DirtiesContext;
import org.springframework.test.context.ContextConfiguration;
import org.springframework.test.context.junit4.SpringRunner;
import uk.ac.ebi.eva.pipeline.runner.ManageJobsUtils;
import uk.ac.ebi.eva.test.configuration.AsynchronousBatchTestConfiguration;
import uk.ac.ebi.eva.test.utils.AbstractJobRestartUtils;
/**
* Test to check that ManageJobsUtils.markLastJobAsFailed lets us restart a job, redoing all the steps.
*/
@RunWith(SpringRunner.class)
@ContextConfiguration(classes = {AsynchronousBatchTestConfiguration.class})
@DirtiesContext(classMode = DirtiesContext.ClassMode.AFTER_EACH_TEST_METHOD)
public class JobRestartForceTest extends AbstractJobRestartUtils {
// Wait until the job has been launched properly. The launch operation is not transactional, and other
// instances of the same job with the same parameters can throw exceptions in this interval.
public static final int INITIALIZE_JOB_SLEEP = 100;
public static final int STEP_TIME_DURATION = 1000;
public static final int WAIT_FOR_JOB_TO_END = 2000;
@Autowired
private JobOperator jobOperator;
@Test
public void forceJobFailureEnsuresCleanRunEvenIfStepsNotRestartables() throws Exception {
Job job = getTestJob(getQuickStep(false), getWaitingStep(false, STEP_TIME_DURATION));
JobLauncherTestUtils jobLauncherTestUtils = getJobLauncherTestUtils(job);
JobExecution jobExecution = launchJob(jobLauncherTestUtils);
Thread.sleep(INITIALIZE_JOB_SLEEP);
jobOperator.stop(jobExecution.getJobId());<|fim▁hole|> Assert.assertFalse(jobExecution.getStepExecutions().isEmpty());
}
}<|fim▁end|>
|
Thread.sleep(WAIT_FOR_JOB_TO_END);
ManageJobsUtils.markLastJobAsFailed(getJobRepository(), job.getName(), new JobParameters());
jobExecution = launchJob(jobLauncherTestUtils);
Thread.sleep(WAIT_FOR_JOB_TO_END);
|
<|file_name|>fortwrangler.py<|end_file_name|><|fim▁begin|>#!/usr/bin/env python3
# Fortwrangler is a tool that attempts to resolve issues with Fortran lines over standard length.
# Global libraries
import sys
# Global variables
# Strings inserted for continuation
CONTINUATION_ENDLINE = "&\n"
CONTINUATION_STARTLINE = " &"
# Line length settings
MIN_LENGTH = len(CONTINUATION_STARTLINE) + len(CONTINUATION_ENDLINE) + 1
FIXED_LINE_LENGTH = 80 # We don't actually do fixed format files, but I prefer 80 col anyway.
FREE_LINE_LENGTH = 132
DEFAULT_LINE_LENGTH = FREE_LINE_LENGTH
# I/O settings
STDERR = sys.stderr
STDOUT = sys.stdout
# We can't use Python's string splitter as we want to handle string literals properly.
def string_split(s, sep=" "):
inquotes=False
retlist = []
token = ""
for character in s.strip():
if character == sep and not inquotes:
if not (token == ""):
token = token + sep
retlist.append(token)
token = ""
else:
token = token + character
elif character == '"' and not inquotes:
inquotes = True
token = token + character
elif character == '"' and inquotes:
inquotes = False
token = token + character
else:
token = token + character
if not (token == ""):
retlist.append(token)
return retlist
# Fix a given file.
def force_fix_file(filename, maxlength=DEFAULT_LINE_LENGTH, output=STDOUT):
with open(filename) as infile:<|fim▁hole|>
tempstr=line[:(len(line) - (len(line.lstrip())-1)-1)]
tokens = string_split(line)
index = 0
for t in tokens:
if t == "!":
# Comments can be longer because the compiler just ignores them.
tempstr = tempstr + " ".join(tokens[index:len(tokens)])
break
else:
if (len(tempstr + t + " " + CONTINUATION_ENDLINE)) < maxlength + 1:
tempstr = tempstr + t + " "
else:
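# Quoted string literals must not be split on spaces, so append the whole
# literal and hard-wrap it at the column limit using continuation markers.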
if (t.startswith('"') and t.endswith('"')):
tempstr = tempstr + t + " "
while (len(tempstr) > maxlength + 1):
outstr = tempstr[:(maxlength-1)] + CONTINUATION_ENDLINE
output.write(outstr)
tempstr = CONTINUATION_STARTLINE + tempstr[(maxlength-1):]
output.write(tempstr)
tempstr=""
else:
output.write(tempstr + " " + CONTINUATION_ENDLINE)
tempstr=CONTINUATION_STARTLINE + " " + t + " "
index += 1
output.write(tempstr + "\n")
else:
output.write(line)
# Only fix files if they violate the length rules!
def fix_file(filename, maxlength=DEFAULT_LINE_LENGTH, output=STDOUT):
if not check_file(filename):
force_fix_file(filename, maxlength, output)
else:
STDERR.write(filename + " not over line length, not modifying\n")
# Check to see if a file has lines longer than allowed, optionally report.
def check_file(filename, maxlength=DEFAULT_LINE_LENGTH, report=None):
overlengthlines = {}
counter = 0
with open(filename) as f:
for line in f:
counter += 1
if (len(line)) > maxlength + 1: # New lines count in Python line length.
overlengthlines[counter] = len(line)
if report is not None:
report.write(filename + ": " + str(len(overlengthlines)) + "\n")
for a in sorted(overlengthlines.keys()):
report.write(str(a) + ": " + str(overlengthlines[a]) + "\n")
return len(overlengthlines) == 0
# Our main procedure.
# Arguments at the command-line:
# -o <file> - write out to file instead of stdout
# -i <extension> - do in place
# -c - check only
# -w <number> - set line length
def main():
import argparse
#check_file("example.f90", report=STDERR)
#fix_file("example.f")
maxlength = DEFAULT_LINE_LENGTH
output = STDOUT
parser = argparse.ArgumentParser(description="Fix free format Fortran files with invalid line lengths.")
parser.add_argument("-c", action="store_true", help="Check only.")
parser.add_argument("-i", metavar="ext", type=str, help="Do in place, back up copy with extension specified.")
parser.add_argument("-w", metavar="linelength", type=int, help="Custom line length.")
parser.add_argument("-o", metavar="outputfilename", type=str, help="Output to a file instead of STDOUT.")
parser.add_argument("files", metavar="file", type=str, nargs="+",help="Files to fix.")
args=parser.parse_args()
if args.w is not None:
if args.w >= MIN_LENGTH:
maxlength = args.w
else:
STDERR.write("Error - you have specified a length [" + str(args.w) + "] smaller than the minimum possible ["+ str(MIN_LENGTH) + "]\n")
sys.exit(2)
if args.o and args.i:
STDERR.write("Error - you cannot both write output to a separate file and write it in place.\n")
sys.exit(1)
else:
if args.o is not None:
outfile = open(args.o, 'w')
output = outfile
if args.c:
for a in args.files:
check_file(a, maxlength=maxlength, report=output)
elif args.i is not None:
import os
for a in args.files:
if not check_file(a):
STDERR.write("Fixing file: " + a + "\n")
os.rename(a, a + args.i)
inplacefile = open(a, 'w')
force_fix_file(a + args.i, maxlength=maxlength, output=inplacefile)
inplacefile.close()
else:
for a in args.files:
fix_file(a, maxlength=maxlength, output=output)
if args.o is not None:
outfile.close()
if __name__ == "__main__":
main()<|fim▁end|>
|
for line in infile:
if len(line) > maxlength + 1:
|
<|file_name|>position.mako.rs<|end_file_name|><|fim▁begin|>/* This Source Code Form is subject to the terms of the Mozilla Public
* License, v. 2.0. If a copy of the MPL was not distributed with this
* file, You can obtain one at http://mozilla.org/MPL/2.0/. */
<%! from data import to_rust_ident %>
<%namespace name="helpers" file="/helpers.mako.rs" />
<% from data import ALL_SIZES, PHYSICAL_SIDES, LOGICAL_SIDES %>
<% data.new_style_struct("Position", inherited=False) %>
// "top" / "left" / "bottom" / "right"
% for side in PHYSICAL_SIDES:
${helpers.predefined_type(side, "LengthOrPercentageOrAuto",
"computed::LengthOrPercentageOrAuto::Auto",
animatable=True)}
% endfor
// offset-* logical properties, map to "top" / "left" / "bottom" / "right"
% for side in LOGICAL_SIDES:
${helpers.predefined_type("offset-" + side, "LengthOrPercentageOrAuto",
"computed::LengthOrPercentageOrAuto::Auto",
animatable=True, logical=True)}
% endfor
<%helpers:longhand name="z-index" animatable="True">
use values::NoViewportPercentage;
use values::computed::ComputedValueAsSpecified;
impl ComputedValueAsSpecified for SpecifiedValue {}
impl NoViewportPercentage for SpecifiedValue {}
pub type SpecifiedValue = computed_value::T;
pub mod computed_value {
use std::fmt;
use style_traits::ToCss;
#[derive(PartialEq, Clone, Eq, Copy, Debug)]
#[cfg_attr(feature = "servo", derive(HeapSizeOf))]
pub enum T {
Auto,
Number(i32),
}
impl ToCss for T {
fn to_css<W>(&self, dest: &mut W) -> fmt::Result where W: fmt::Write {
match *self {
T::Auto => dest.write_str("auto"),
T::Number(number) => write!(dest, "{}", number),
}
}
}
impl T {
pub fn number_or_zero(self) -> i32 {
match self {
T::Auto => 0,
T::Number(value) => value,
}
}
}
}
#[inline]
pub fn get_initial_value() -> computed_value::T {
computed_value::T::Auto
}
fn parse(_context: &ParserContext, input: &mut Parser) -> Result<SpecifiedValue, ()> {
if input.try(|input| input.expect_ident_matching("auto")).is_ok() {
Ok(computed_value::T::Auto)
} else {
specified::parse_integer(input).map(computed_value::T::Number)
}
}
</%helpers:longhand>
// CSS Flexible Box Layout Module Level 1
// http://www.w3.org/TR/css3-flexbox/
// Flex container properties
${helpers.single_keyword("flex-direction", "row row-reverse column column-reverse",
animatable=False)}
${helpers.single_keyword("flex-wrap", "nowrap wrap wrap-reverse",
animatable=False)}
// FIXME(stshine): The type of 'justify-content' and 'align-content' is uint16_t in gecko
// FIXME(stshine): Its higher bytes are used to store fallback value. Disable them in geckolib for now
${helpers.single_keyword("justify-content", "flex-start flex-end center space-between space-around",
gecko_constant_prefix="NS_STYLE_JUSTIFY",
products="servo",
animatable=False)}
// https://drafts.csswg.org/css-flexbox/#propdef-align-items
// FIXME: This is a workaround for 'normal' value. We don't support the Gecko initial value 'normal' yet.
${helpers.single_keyword("align-items", "stretch flex-start flex-end center baseline" if product == "servo"
else "normal stretch flex-start flex-end center baseline",
need_clone=True,
gecko_constant_prefix="NS_STYLE_ALIGN",
animatable=False)}
${helpers.single_keyword("align-content", "stretch flex-start flex-end center space-between space-around",
gecko_constant_prefix="NS_STYLE_ALIGN",
products="servo",
animatable=False)}
// Flex item properties
${helpers.predefined_type("flex-grow", "Number",
"0.0", "parse_non_negative",
needs_context=False,
animatable=True)}
${helpers.predefined_type("flex-shrink", "Number",
"1.0", "parse_non_negative",
needs_context=False,
animatable=True)}
// https://drafts.csswg.org/css-align/#align-self-property
// FIXME: We don't support the Gecko value 'normal' yet.
${helpers.single_keyword("align-self", "auto stretch flex-start flex-end center baseline",
need_clone=True,<|fim▁hole|>// https://drafts.csswg.org/css-flexbox/#propdef-order
<%helpers:longhand name="order" animatable="True">
use values::computed::ComputedValueAsSpecified;
impl ComputedValueAsSpecified for SpecifiedValue {}
pub type SpecifiedValue = computed_value::T;
pub mod computed_value {
pub type T = i32;
}
#[inline]
pub fn get_initial_value() -> computed_value::T {
0
}
fn parse(_context: &ParserContext, input: &mut Parser) -> Result<SpecifiedValue, ()> {
specified::parse_integer(input)
}
</%helpers:longhand>
// FIXME: This property should be animatable.
${helpers.predefined_type("flex-basis",
"LengthOrPercentageOrAutoOrContent",
"computed::LengthOrPercentageOrAutoOrContent::Auto",
animatable=False)}
% for (size, logical) in ALL_SIZES:
// width, height, block-size, inline-size
${helpers.predefined_type("%s" % size,
"LengthOrPercentageOrAuto",
"computed::LengthOrPercentageOrAuto::Auto",
"parse_non_negative",
needs_context=False,
animatable=True, logical = logical)}
// min-width, min-height, min-block-size, min-inline-size
${helpers.predefined_type("min-%s" % size,
"LengthOrPercentage",
"computed::LengthOrPercentage::Length(Au(0))",
"parse_non_negative",
needs_context=False,
animatable=True, logical = logical)}
// max-width, max-height, max-block-size, max-inline-size
${helpers.predefined_type("max-%s" % size,
"LengthOrPercentageOrNone",
"computed::LengthOrPercentageOrNone::None",
"parse_non_negative",
needs_context=False,
animatable=True, logical = logical)}
% endfor
${helpers.single_keyword("box-sizing",
"content-box border-box",
animatable=False)}
// CSS Image Values and Replaced Content Module Level 3
// https://drafts.csswg.org/css-images-3/
${helpers.single_keyword("object-fit", "fill contain cover none scale-down",
products="gecko", animatable=False)}
// https://drafts.csswg.org/css-grid/#propdef-grid-row-start
<% grid_longhands = ["grid-row-start", "grid-row-end", "grid-column-start", "grid-column-end"] %>
% for longhand in grid_longhands:
${helpers.predefined_type("%s" % longhand,
"GridLine",
"Default::default()",
animatable=False,
products="gecko")}
% endfor<|fim▁end|>
|
extra_gecko_values="normal",
gecko_constant_prefix="NS_STYLE_ALIGN",
animatable=False)}
|
<|file_name|>Mapper_WinUPnP.cpp<|end_file_name|><|fim▁begin|>/*
* Copyright (C) 2001-2015 Jacek Sieka, arnetheduck on gmail point com
*
* This program is free software; you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
* the Free Software Foundation; either version 2 of the License, or
* (at your option) any later version.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
* You should have received a copy of the GNU General Public License
* along with this program; if not, write to the Free Software
* Foundation, Inc., 59 Temple Place - Suite 330, Boston, MA 02111-1307, USA.
*/
#include "stdinc.h"
#include "Mapper_WinUPnP.h"
#include "Util.h"
#include "Text.h"
#include "w.h"
#include "AirUtil.h"
#ifdef HAVE_WINUPNP_H
#include <ole2.h>
#include <natupnp.h>
#else // HAVE_WINUPNP_H
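// Stub definitions so this translation unit still compiles when the Windows
// UPnP headers are unavailable; the #else implementations below are no-ops.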
struct IUPnPNAT { };
struct IStaticPortMappingCollection { };
#endif // HAVE_WINUPNP_H
namespace dcpp {
const string Mapper_WinUPnP::name = "Windows UPnP";
Mapper_WinUPnP::Mapper_WinUPnP(const string& localIp, bool v6) :
Mapper(localIp, v6)
{
}
bool Mapper_WinUPnP::supportsProtocol(bool aV6) const {
return !aV6;
}
#ifdef HAVE_WINUPNP_H
bool Mapper_WinUPnP::init() {
HRESULT hr = ::CoInitializeEx(NULL, COINIT_APARTMENTTHREADED);
if(FAILED(hr))
return false;
if(pUN)
return true;
// Lacking the __uuidof in mingw...
CLSID upnp;
OLECHAR upnps[] = L"{AE1E00AA-3FD5-403C-8A27-2BBDC30CD0E1}";
CLSIDFromString(upnps, &upnp);
IID iupnp;
OLECHAR iupnps[] = L"{B171C812-CC76-485A-94D8-B6B3A2794E99}";
CLSIDFromString(iupnps, &iupnp);
pUN = 0;
hr = ::CoCreateInstance(upnp, 0, CLSCTX_INPROC_SERVER, iupnp, reinterpret_cast<LPVOID*>(&pUN));
if(FAILED(hr))
pUN = 0;
return pUN ? true : false;
}
void Mapper_WinUPnP::uninit() {
::CoUninitialize();
}
bool Mapper_WinUPnP::add(const string& port, const Protocol protocol, const string& description) {
IStaticPortMappingCollection* pSPMC = getStaticPortMappingCollection();
if(!pSPMC)
return false;
/// @todo use a BSTR wrapper
BSTR protocol_ = SysAllocString(Text::toT(protocols[protocol]).c_str());
BSTR description_ = SysAllocString(Text::toT(description).c_str());
BSTR localIP = !localIp.empty() ? SysAllocString(Text::toT(localIp).c_str()) : nullptr;
auto port_ = Util::toInt(port);
IStaticPortMapping* pSPM = 0;
HRESULT hr = pSPMC->Add(port_, protocol_, port_, localIP, VARIANT_TRUE, description_, &pSPM);
SysFreeString(protocol_);
SysFreeString(description_);
SysFreeString(localIP);
bool ret = SUCCEEDED(hr);
if(ret) {
pSPM->Release();
lastPort = port_;
lastProtocol = protocol;
}
pSPMC->Release();
return ret;
}
bool Mapper_WinUPnP::remove(const string& port, const Protocol protocol) {
IStaticPortMappingCollection* pSPMC = getStaticPortMappingCollection();
if(!pSPMC)
return false;
/// @todo use a BSTR wrapper
BSTR protocol_ = SysAllocString(Text::toT(protocols[protocol]).c_str());
auto port_ = Util::toInt(port);
HRESULT hr = pSPMC->Remove(port_, protocol_);
pSPMC->Release();
SysFreeString(protocol_);
bool ret = SUCCEEDED(hr);
if(ret && port_ == lastPort && protocol == lastProtocol) {
lastPort = 0;
}
return ret;
}
string Mapper_WinUPnP::getDeviceName() {
/// @todo use IUPnPDevice::ModelName <http://msdn.microsoft.com/en-us/library/aa381670(VS.85).aspx>?
return Util::emptyString;
}
string Mapper_WinUPnP::getExternalIP() {
// Get the External IP from the last added mapping
if(!lastPort)
return Util::emptyString;
IStaticPortMappingCollection* pSPMC = getStaticPortMappingCollection();
if(!pSPMC)
return Util::emptyString;
/// @todo use a BSTR wrapper
BSTR protocol_ = SysAllocString(Text::toT(protocols[lastProtocol]).c_str());
// Lets Query our mapping
IStaticPortMapping* pSPM;
HRESULT hr = pSPMC->get_Item(lastPort, protocol_, &pSPM);
SysFreeString(protocol_);
// Query failed!
if(FAILED(hr) || !pSPM) {
pSPMC->Release();
return Util::emptyString;
}
BSTR bstrExternal = 0;
hr = pSPM->get_ExternalIPAddress(&bstrExternal);
if(FAILED(hr) || !bstrExternal) {
pSPM->Release();
pSPMC->Release();
return Util::emptyString;
}
// convert the result
string ret = Text::wideToAcp(bstrExternal);
// no longer needed
SysFreeString(bstrExternal);
// no longer needed
pSPM->Release();
pSPMC->Release();
<|fim▁hole|> return ret;
}
IStaticPortMappingCollection* Mapper_WinUPnP::getStaticPortMappingCollection() {
if(!pUN)
return 0;
IStaticPortMappingCollection* ret = 0;
HRESULT hr = 0;
// some routers lag here
for(int i = 0; i < 3; i++) {
hr = pUN->get_StaticPortMappingCollection (&ret);
if(SUCCEEDED(hr) && ret) break;
Sleep(1500);
}
if(FAILED(hr))
return 0;
return ret;
}
#else // HAVE_WINUPNP_H
bool Mapper_WinUPnP::init() {
return false;
}
void Mapper_WinUPnP::uninit() {
}
bool Mapper_WinUPnP::add(const string& /*port*/, const Protocol /*protocol*/, const string& /*description*/) {
return false;
}
bool Mapper_WinUPnP::remove(const string& /*port*/, const Protocol /*protocol*/) {
return false;
}
string Mapper_WinUPnP::getDeviceName() {
return Util::emptyString;
}
string Mapper_WinUPnP::getExternalIP() {
return Util::emptyString;
}
IStaticPortMappingCollection* Mapper_WinUPnP::getStaticPortMappingCollection() {
return 0;
}
#endif // HAVE_WINUPNP_H
} // dcpp namespace<|fim▁end|>
| |
<|file_name|>test_spec.py<|end_file_name|><|fim▁begin|>import json
from tempfile import mkdtemp
from os.path import join, basename
from shutil import rmtree
from distutils.dir_util import copy_tree
from twisted.trial import unittest
from twisted.internet.defer import inlineCallbacks
from slyd.projectspec import create_project_resource
from slyd.projectspec import convert_template
from .utils import TestSite, test_spec_manager
from .settings import SPEC_DATA_DIR
class CrawlerSpecTest(unittest.TestCase):
spider = """<|fim▁hole|> ".+MobileHomePark.php?key=d+"
],
"links_to_follow": "patterns",
"respect_nofollow": true,
"start_urls": [
"http://www.mhvillage.com/"
],
"templates": []
}
"""
def setUp(self):
sm = test_spec_manager()
spec_resource = create_project_resource(sm)
self.temp_project_dir = mkdtemp(dir=SPEC_DATA_DIR,
prefix='test-run-')
self.project = basename(self.temp_project_dir)
self.specsite = TestSite(spec_resource, project=self.project)
test_project_dir = join(SPEC_DATA_DIR, 'test')
copy_tree(test_project_dir, self.temp_project_dir)
@inlineCallbacks
def _get_check_resource(self, resource, converter=None):
result = yield self.specsite.get(resource)
ffile = join(self.temp_project_dir, resource + ".json")
fdata = json.load(open(ffile))
if converter:
converter(fdata)
rdata = json.loads(result.value())
self.assertEqual(fdata, rdata)
def test_get_resource(self):
self._get_check_resource("project")
self._get_check_resource("spiders/pinterest.com",
convert_template)
@inlineCallbacks
def post_command(self, spider, cmd, *args, **kwargs):
obj = {'cmd': cmd, 'args': args}
result = yield self.specsite.post(spider, data=json.dumps(obj))
self.assertEqual(result.responseCode, kwargs.get('expect', 200))
@inlineCallbacks
def test_updating(self):
result = yield self.specsite.post('spiders/testpost', data=self.spider)
self.assertEqual(result.responseCode, 200)
result = yield self.specsite.get('spiders/testpost')
self.assertEqual(json.loads(result.value()), json.loads(self.spider))
# should fail - missing required fields
result = yield self.specsite.post('spiders/testpost', data='{}')
self.assertEqual(result.responseCode, 400)
@inlineCallbacks
def test_commands(self):
self.post_command('spiders', 'unknown', expect=400)
self.post_command('spiders', 'mv', expect=400)
self.post_command('spiders', 'mv', '../notallowed', 'whatever',
expect=400)
self.post_command('spiders', 'mv', 'notallowedexists', 'whatever',
expect=404)
self.post_command('spiders', 'rm', 'notexists', expect=404)
# TODO: mv to existing spider - 400
yield self.specsite.post('spiders/c', data=self.spider)
self._get_check_resource('spiders/c')
self.post_command('spiders', 'mv', 'c', 'c2')
result = yield self.specsite.get('spiders/c')
self.assertEqual(result.value(), '{}\n')
self._get_check_resource('spiders/c2')
yield self.specsite.post('spiders/c3', data=self.spider)
# overwrites
self.post_command('spiders', 'mv', 'c2', 'c3')
result = yield self.specsite.get('spiders/c2')
self.assertEqual(result.value(), '{}\n')
self.post_command('spiders', 'rm', 'c3')
result = yield self.specsite.get('spiders/c3')
self.assertEqual(result.value(), '{}\n')
def tearDown(self):
rmtree(self.temp_project_dir)<|fim▁end|>
|
{
"exclude_patterns": [],
"follow_patterns": [
|
<|file_name|>setup.py<|end_file_name|><|fim▁begin|>#!/usr/bin/env python
import os
from setuptools import setup
from distutils.cmd import Command
import django_auth_iam
def read(*rnames):
return open(os.path.join(os.path.dirname(__file__), *rnames)).read()
setup(
name='django-auth-iam',
version=django_auth_iam.__version__,
description='Django authentication backend using Amazon IAM',
long_description=read('README.rst'),
url='https://github.com/viewworld/django-auth-iam/',
author='Michael Budde',
author_email='[email protected]',
license='GPL v3',
packages=['django_auth_iam'],
classifiers=[
'Development Status :: 3 - Alpha',
'Framework :: Django',
'License :: OSI Approved :: GNU General Public License (GPL)',<|fim▁hole|> 'Topic :: System :: Systems Administration :: Authentication/Directory',
],
keywords=['django', 'amazon', 'authentication', 'auth'],
install_requires=['boto', 'PyCrypto', 'py_bcrypt'],
)<|fim▁end|>
|
'Programming Language :: Python',
'Topic :: Security',
|
<|file_name|>ChildWatch.spec.ts<|end_file_name|><|fim▁begin|>import {assert} from 'chai';
import {Job} from '../Job';
import {BlockIO} from '../BlockProperty';
describe('Block Child Watch', function () {
it('basic', function () {
let job = new Job();
<|fim▁hole|> let watchLog: any[] = [];
let watch = {
onChildChange(property: BlockIO, saved: boolean) {
watchLog.push([property._name, property._value != null, Boolean(saved)]);
},
};
job.watch(watch);
job.createBlock('a');
assert.deepEqual(watchLog, [['a', true, true]], 'new block');
watchLog = [];
job.createOutputBlock('a');
assert.deepEqual(watchLog, [['a', true, false]], 'replace with temp block');
watchLog = [];
job.createBlock('a');
assert.deepEqual(watchLog, [['a', true, true]], 'replace with normal block');
watchLog = [];
job.setValue('a', null);
assert.deepEqual(watchLog, [['a', false, true]], 'remove block');
watchLog = [];
job.createOutputBlock('a');
assert.deepEqual(watchLog, [['a', true, false]], 'new temp block');
watchLog = [];
job.setBinding('a', 'b');
assert.deepEqual(watchLog, [['a', false, false]], 'remove block with binding');
watchLog = [];
});
});<|fim▁end|>
| |
<|file_name|>dvp_demo.cpp<|end_file_name|><|fim▁begin|>/*
* Copyright (C) 2009-2011 Texas Instruments, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
/*! \file
* \brief This is a complex example of how to use DVP to do vision processing
* into a series of display buffers previewed on the dvp_display subsystem.
* \author Erik Rainey <[email protected]>
*/
#if defined(DVP_USE_IMGLIB)
#include <imglib/dvp_kl_imglib.h>
#endif
#if defined(DVP_USE_VLIB)
#include <vlib/dvp_kl_vlib.h>
#endif
#include <dvp/VisionCam.h>
#include <dvp/dvp_display.h>
#if defined(PC)
#define DVP_DEMO_MAX_TIMEOUTS (10000)
#else
#define DVP_DEMO_MAX_TIMEOUTS (10)
#endif
typedef struct _dvp_demo_t {
#ifdef VCAM_AS_SHARED
module_t mod;
#endif
uint32_t numDisplayImages;
VisionCamFactory_f factory;
VisionCam *pCam;
queue_t *frameq;
dvp_display_t *dvpd;
DVP_Image_t *displays; /**< Buffers from the DVP Display */
DVP_Image_t *subImages; /**< Subsections of the Display Image */
DVP_Image_t *camImages; /**< GCam Images */
DVP_Image_t *images; /**< DVP Graph Images */
DVP_Handle dvp;
DVP_KernelNode_t *nodes; /**< The Kernel Graph Nodes */
DVP_KernelGraph_t *graph;
uint32_t numNodes;
} DVP_Demo_t;
void DVPCallback(void *cookie, DVP_KernelGraph_t *graph, DVP_U32 sectionIndex, DVP_U32 numNodesExecuted)
{
cookie = cookie; // warnings
graph = graph; // warnings
sectionIndex = sectionIndex; // warnings
numNodesExecuted = numNodesExecuted; // warnings
DVP_PRINT(DVP_ZONE_ALWAYS, "Cookie %p Graph %p Graph Section %u Completed %u nodes\n", cookie, graph, sectionIndex, numNodesExecuted);
}
void VisionCamCallback(VisionCamFrame * cameraFrame)
{
DVP_Image_t *pImage = (DVP_Image_t *)cameraFrame->mFrameBuff;
queue_t *frameq = (queue_t *)cameraFrame->mCookie;
DVP_PRINT(DVP_ZONE_CAM, "Writing Frame %p into Queue %p\n", cameraFrame, frameq);
DVP_PrintImage(DVP_ZONE_CAM, pImage);
if (queue_write(frameq, true_e, &cameraFrame) == false_e)
{
DVP_PRINT(DVP_ZONE_ERROR, "Failed to write frame to queue\n");
}
}
bool_e VisionCamInit(DVP_Demo_t *demo, VisionCam_e camType, uint32_t width, uint32_t height, uint32_t fps, uint32_t rotation, uint32_t color)
{
int32_t ret = 0;
#ifdef VCAM_AS_SHARED
demo->mod = module_load(CAMERA_NAME);
if (demo->mod)
{
demo->factory = (VisionCamFactory_f)module_symbol(demo->mod, "VisionCamFactory");
if (demo->factory)
{
demo->pCam = demo->factory(VISIONCAM_OMX);
if (demo->pCam)
{
#else
demo->pCam = VisionCamFactory(camType);
if (demo->pCam)
{
#endif
VisionCamSensorSelection sensorIndex = VCAM_SENSOR_SECONDARY;
#if defined(DUCATI_1_5) || defined(DUCATI_2_0)
VisionCamCaptureMode capmode = VCAM_GESTURE_MODE;
#else
VisionCamCaptureMode capmode = VCAM_VIDEO_NORMAL;
#endif
VisionCamFlickerType flicker = FLICKER_60Hz;
VisionCamFocusMode focus = VCAM_FOCUS_CONTROL_AUTO;
VisionCamWhiteBalType white = VCAM_WHITE_BAL_CONTROL_AUTO;
int32_t brightness = 50;
// initialize the VisionCam
VCAM_COMPLAIN_IF_FAILED(ret, demo->pCam->init(demo->frameq));
// configure the parameters
VCAM_COMPLAIN_IF_FAILED(ret, demo->pCam->setParameter(VCAM_PARAM_WIDTH, &width, sizeof(width)));
VCAM_COMPLAIN_IF_FAILED(ret, demo->pCam->setParameter(VCAM_PARAM_HEIGHT, &height, sizeof(height)));
VCAM_COMPLAIN_IF_FAILED(ret, demo->pCam->setParameter(VCAM_PARAM_COLOR_SPACE_FOURCC, &color, sizeof(color)));
VCAM_COMPLAIN_IF_FAILED(ret, demo->pCam->setParameter(VCAM_PARAM_CAP_MODE, &capmode, sizeof(capmode)));
VCAM_COMPLAIN_IF_FAILED(ret, demo->pCam->setParameter(VCAM_PARAM_SENSOR_SELECT, &sensorIndex, sizeof(sensorIndex)));
VCAM_COMPLAIN_IF_FAILED(ret, demo->pCam->setParameter(VCAM_PARAM_FPS_FIXED, &fps, sizeof(fps)));
VCAM_COMPLAIN_IF_FAILED(ret, demo->pCam->setParameter(VCAM_PARAM_FLICKER, &flicker, sizeof(flicker)));
VCAM_COMPLAIN_IF_FAILED(ret, demo->pCam->setParameter(VCAM_PARAM_BRIGHTNESS, &brightness, sizeof(brightness)));
VCAM_COMPLAIN_IF_FAILED(ret, demo->pCam->setParameter(VCAM_PARAM_AWB_MODE, &white, sizeof(white)));
// configure the buffers (the first X images are for the camera)
VCAM_COMPLAIN_IF_FAILED(ret, demo->pCam->useBuffers(demo->camImages, demo->numDisplayImages));
// @todo BUG: Can't set rotation until after useBuffers
VCAM_COMPLAIN_IF_FAILED(ret, demo->pCam->setParameter(VCAM_PARAM_ROTATION, &rotation, sizeof(rotation)));
// register the callback
VCAM_COMPLAIN_IF_FAILED(ret, demo->pCam->enablePreviewCbk(VisionCamCallback));
// start the preview
VCAM_COMPLAIN_IF_FAILED(ret, demo->pCam->sendCommand(VCAM_CMD_PREVIEW_START));
// do the autofocus
VCAM_COMPLAIN_IF_FAILED(ret, demo->pCam->setParameter(VCAM_PARAM_DO_AUTOFOCUS, &focus, sizeof(focus)));
return true_e;
}
#ifdef VCAM_AS_SHARED
}
}
}
#endif
return false_e;
}
bool_e VisionCamDeinit(DVP_Demo_t *demo)
{
int32_t ret = 0;
if (demo->pCam)
{
// destroy the camera
VCAM_COMPLAIN_IF_FAILED(ret, demo->pCam->sendCommand(VCAM_CMD_PREVIEW_STOP));
VCAM_COMPLAIN_IF_FAILED(ret, demo->pCam->disablePreviewCbk(VisionCamCallback));
VCAM_COMPLAIN_IF_FAILED(ret, demo->pCam->releaseBuffers());
VCAM_COMPLAIN_IF_FAILED(ret, demo->pCam->deinit());
delete demo->pCam;
demo->pCam = NULL;
}
#ifdef VCAM_AS_SHARED
module_unload(demo->mod);
#endif
if (ret != 0)
return false_e;
else
return true_e;
}
bool_e SubsectionImage(DVP_Image_t *dispFrame, DVP_Image_t *image, uint32_t index)
{
if (dispFrame->planes == image->planes &&
dispFrame->color == image->color)
{
uint32_t limit_i = dispFrame->width / image->width;
uint32_t limit_j = dispFrame->height / image->height;
uint32_t i = index % limit_i;
uint32_t j = index / limit_i;
DVP_PRINT(DVP_ZONE_ALWAYS, "Requested Index %u in Image of {%u,%u} (%ux%u => %ux%u)\n",index, i,j, dispFrame->width, dispFrame->height, image->width, image->height);
DVP_PrintImage(DVP_ZONE_ALWAYS, dispFrame);
if (j >= limit_j) // j == limit_j would index past the last row of sub-image cells
return false_e;
else
{
uint32_t p = 0;
// make sure th strides are transfered.
image->y_stride = dispFrame->y_stride;
// create each subimage plane from the display frame.
for (p = 0; p < dispFrame->planes; p++)
{
uint32_t k = (j * (image->height * dispFrame->y_stride)) +
(i * (image->width * dispFrame->x_stride));
image->pData[p] = &dispFrame->pData[p][k];
image->pBuffer[p] = &dispFrame->pData[p][k];
DVP_PrintImage(DVP_ZONE_ALWAYS, image);
}
return true_e;
}
}
else
return false_e;
}
int main(int argc, char *argv[])
{
int ret = 0;
#if (defined(DVP_USE_VLIB) || defined(DVP_USE_YUV)) && defined(DVP_USE_IMGLIB)
DVP_Demo_t *demo = (DVP_Demo_t *)calloc(1, sizeof(DVP_Demo_t));
#if defined(SOSAL_RUNTIME_DEBUG)
debug_get_zone_mask("SOSAL_ZONE_MASK", &sosal_zone_mask);
#endif
#if defined(DVP_RUNTIME_DEBUG)
debug_get_zone_mask("DVP_ZONE_MASK", &dvp_zone_mask);
#endif
if (demo && argc >= 1)
{
uint32_t i,j,k,n;
uint32_t display_width = (argc > 1?atoi(argv[1]):640);
uint32_t display_height = (argc > 2?atoi(argv[2]):480);
uint32_t width = (argc > 3?atoi(argv[3]):160);
uint32_t height = (argc > 4?atoi(argv[4]):120);
uint32_t fps = 30;
uint32_t numFrames = (argc > 5?atoi(argv[5]):100); // how many frames to display before quitting
uint32_t numSubImages = (display_width/width) * (display_height/height);
uint32_t numGraphImages = 20; /// @note make sure this matches the numbers used below
int32_t focusDepth = 10;
int32_t frameLock = 100;
demo->numDisplayImages = DVP_DISPLAY_NUM_BUFFERS - 1;
uint32_t numImages = numSubImages * demo->numDisplayImages;
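// Each display frame is tiled into width x height cells: cell 0 carries the
// live camera preview, the remaining cells receive the kernel outputs.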
VisionCam_e camType = VISIONCAM_OMX;
#if defined(PC)
camType = VISIONCAM_USB;
#endif
demo->frameq = queue_create(demo->numDisplayImages * VCAM_PORT_MAX, sizeof(VisionCamFrame *));
demo->dvpd = DVP_Display_Create(display_width, display_height, display_width, display_height, DVP_DISPLAY_WIDTH, DVP_DISPLAY_HEIGHT, display_width, display_height, 0, 0, FOURCC_UYVY, 0, DVP_DISPLAY_NUM_BUFFERS);
demo->subImages = (DVP_Image_t *)calloc(numImages, sizeof(DVP_Image_t));
demo->displays = (DVP_Image_t *)calloc(demo->numDisplayImages, sizeof(DVP_Image_t));
demo->camImages = (DVP_Image_t *)calloc(demo->numDisplayImages, sizeof(DVP_Image_t));
demo->images = (DVP_Image_t *)calloc(numGraphImages, sizeof(DVP_Image_t));
demo->dvp = DVP_KernelGraph_Init();
if (demo->frameq && demo->dvpd && demo->subImages && demo->displays && demo->camImages && demo->dvp && demo->images)
{
// initialize the display buffers
for (n = 0; n < demo->numDisplayImages; n++)
{
DVP_Image_Init(&demo->displays[n], display_width, display_height, FOURCC_UYVY);
DVP_Display_Alloc(demo->dvpd, &demo->displays[n]);
DVP_Image_Alloc(demo->dvp, &demo->displays[n], (DVP_MemType_e)demo->displays[n].memType);
DVP_Image_Init(&demo->camImages[n], width, height, FOURCC_UYVY);
// Blank the Images
for (i = 0; i < demo->displays[n].planes; i++)
for (j = 0; j < demo->displays[n].height; j++)
memset(DVP_Image_Addressing(&demo->displays[n], 0, j, i),
0x80,
DVP_Image_LineSize(&demo->displays[n], i));
// initialize images which are the subsections of the display buffers
for (i = 0; i < numSubImages; i++)
{
uint32_t k = (n * numSubImages) + i;
DVP_Image_Init(&demo->subImages[k], width, height, FOURCC_UYVY);
SubsectionImage(&demo->displays[n], &demo->subImages[k], i);
if (i == 0)
{
// if this is the first index of the subsections,
// use this as the camera buffer
memcpy(&demo->camImages[n], &demo->subImages[k],sizeof(DVP_Image_t));
}
}
}
// initialize the DVP Nodes and Graphs
i = 0;
uint32_t idx_a9, len_a9;
uint32_t idx_dsp, len_dsp;
uint32_t idx_m3, len_m3;
uint32_t idx_conv, len_conv;
uint32_t idx_mask_x, idx_scratch, idx_mask_y, idx_scratch2;
// A9
idx_a9 = i;
DVP_Image_Init(&demo->images[i++], width, height, FOURCC_Y800); // LUMA
DVP_Image_Init(&demo->images[i++], width, height, FOURCC_Y800); // EDGE SOBEL
DVP_Image_Init(&demo->images[i++], width, height, FOURCC_Y800); // EDGE PREWITT
DVP_Image_Init(&demo->images[i++], width, height, FOURCC_Y800); // EDGE SCHARR
len_a9 = i - idx_a9;
for (j = 0; j < len_a9; j++) {
if (DVP_Image_Alloc(demo->dvp, &demo->images[idx_a9+j], DVP_MTYPE_MPUCACHED_VIRTUAL) == DVP_FALSE) {
DVP_PRINT(DVP_ZONE_ERROR, "ERROR: Failed to allocate A9 image\n");
}
}
// DSP
idx_dsp = i;
DVP_Image_Init(&demo->images[i++], width, height, FOURCC_Y800); // LUMA
DVP_Image_Init(&demo->images[i++], width, height, FOURCC_Y800); // CONV 3x3 Gx
DVP_Image_Init(&demo->images[i++], width, height, FOURCC_Y800); // CONV 3x3 Gy
DVP_Image_Init(&demo->images[i++], width, height, FOURCC_Y800); // DILATE
DVP_Image_Init(&demo->images[i++], width, height, FOURCC_Y800); // IIR H
DVP_Image_Init(&demo->images[i++], width, height, FOURCC_Y800); // IIR V
len_dsp = i - idx_dsp;
for (j = 0; j < len_dsp; j++) {
#if defined(DVP_USE_TILER)
if (DVP_Image_Alloc(demo->dvp, &demo->images[idx_dsp+j], DVP_MTYPE_MPUCACHED_1DTILED) == DVP_FALSE) {
DVP_PRINT(DVP_ZONE_ERROR, "ERROR: Failed to allocate DSP image\n");
}
#else
if (DVP_Image_Alloc(demo->dvp, &demo->images[idx_dsp+j], DVP_MTYPE_DEFAULT) == DVP_FALSE) {
DVP_PRINT(DVP_ZONE_ERROR, "ERROR: Failed to allocate DSP image\n");
}
#endif
}
// SIMCOP
idx_m3 = i;
DVP_Image_Init(&demo->images[i++], width, height, FOURCC_Y800); // LUMA
DVP_Image_Init(&demo->images[i++], width, height, FOURCC_Y800); // SOBEL Gx
DVP_Image_Init(&demo->images[i++], width, height, FOURCC_Y800); // SOBEL Gy
DVP_Image_Init(&demo->images[i++], width, height, FOURCC_Y800); // IIR H
DVP_Image_Init(&demo->images[i++], width, height, FOURCC_Y800); // IIR V
len_m3 = i - idx_m3;
for (j = 0; j < len_m3; j++) {
#if defined(DVP_USE_TILER)
if (DVP_Image_Alloc(demo->dvp, &demo->images[idx_m3+j], DVP_MTYPE_MPUNONCACHED_2DTILED) == DVP_FALSE) {
DVP_PRINT(DVP_ZONE_ERROR, "ERROR: Failed to allocate M3 image\n");
}
#else
if (DVP_Image_Alloc(demo->dvp, &demo->images[idx_m3+j], DVP_MTYPE_DEFAULT) == DVP_FALSE) {
DVP_PRINT(DVP_ZONE_ERROR, "ERROR: Failed to allocate M3 image\n");
}
#endif
}
idx_conv = i; // the display conversion images start here
len_conv = i; // we want to convert all these images;
// Mask & Scratch
idx_mask_x = i;
DVP_Image_Init(&demo->images[i++], 3, 3, FOURCC_Y800);
DVP_Image_Alloc(demo->dvp, &demo->images[idx_mask_x], DVP_MTYPE_MPUCACHED_VIRTUAL);
idx_mask_y = i;
DVP_Image_Init(&demo->images[i++], 3, 3, FOURCC_Y800);
DVP_Image_Alloc(demo->dvp, &demo->images[idx_mask_y], DVP_MTYPE_MPUCACHED_VIRTUAL);
idx_scratch = i;
DVP_Image_Init(&demo->images[i++], width, height, FOURCC_Y800);
DVP_Image_Alloc(demo->dvp, &demo->images[idx_scratch], DVP_MTYPE_MPUCACHED_VIRTUAL);
idx_scratch2 = i;
DVP_Image_Init(&demo->images[i++], width, height, FOURCC_Y800);
DVP_Image_Alloc(demo->dvp, &demo->images[idx_scratch2], DVP_MTYPE_MPUCACHED_VIRTUAL);
// fill in the mask with the SOBEL X Gradient edge filter
demo->images[idx_mask_x].pData[0][0] = (uint8_t)-1;
demo->images[idx_mask_x].pData[0][1] = (uint8_t) 0;
demo->images[idx_mask_x].pData[0][2] = (uint8_t) 1;
demo->images[idx_mask_x].pData[0][3] = (uint8_t)-2;
demo->images[idx_mask_x].pData[0][4] = (uint8_t) 0;
demo->images[idx_mask_x].pData[0][5] = (uint8_t) 2;
demo->images[idx_mask_x].pData[0][6] = (uint8_t)-1;
demo->images[idx_mask_x].pData[0][7] = (uint8_t) 0;
demo->images[idx_mask_x].pData[0][8] = (uint8_t) 1;
// fill in the mask with the SOBEL Y Gradient edge filter
demo->images[idx_mask_y].pData[0][0] = (uint8_t)-1;
demo->images[idx_mask_y].pData[0][1] = (uint8_t)-2;
demo->images[idx_mask_y].pData[0][2] = (uint8_t)-1;
demo->images[idx_mask_y].pData[0][3] = (uint8_t) 0;
demo->images[idx_mask_y].pData[0][4] = (uint8_t) 0;
demo->images[idx_mask_y].pData[0][5] = (uint8_t) 0;
demo->images[idx_mask_y].pData[0][6] = (uint8_t) 1;
demo->images[idx_mask_y].pData[0][7] = (uint8_t) 2;
demo->images[idx_mask_y].pData[0][8] = (uint8_t) 1;
demo->numNodes = len_a9 + len_dsp + len_m3 + len_conv;
DVP_PRINT(DVP_ZONE_ALWAYS, "Allocating %u %ux%u images\n", demo->numNodes, width, height);
// Allocate the Nodes
demo->nodes = DVP_KernelNode_Alloc(demo->dvp, demo->numNodes);
if (demo->nodes == NULL)
return STATUS_NOT_ENOUGH_MEMORY;
DVP_KernelGraphSection_t sections[] = {
{&demo->nodes[idx_a9], len_a9, DVP_PERF_INIT, DVP_CORE_LOAD_INIT, DVP_FALSE},
{&demo->nodes[idx_dsp], len_dsp, DVP_PERF_INIT, DVP_CORE_LOAD_INIT, DVP_FALSE},
{&demo->nodes[idx_m3], len_m3, DVP_PERF_INIT, DVP_CORE_LOAD_INIT, DVP_FALSE},
{&demo->nodes[idx_conv], len_conv, DVP_PERF_INIT, DVP_CORE_LOAD_INIT, DVP_FALSE},
};
DVP_U32 order[] = {0,0,0,1}; // 3 parallel then 1 series
DVP_KernelGraph_t graph = {
sections,
dimof(sections),
order,
DVP_PERF_INIT,
DVP_FALSE,
};
DVP_Transform_t *io = NULL;
DVP_Morphology_t *morph = NULL;
DVP_ImageConvolution_t *img = NULL;
DVP_IIR_t *iir = NULL;
demo->graph = &graph;
i = 0;
// Now initialize the node structures for exactly what we want
// A9 Section
demo->nodes[i].header.kernel = DVP_KN_XYXY_TO_Y800;
demo->nodes[i].header.affinity = DVP_CORE_CPU;
io = dvp_knode_to(&demo->nodes[i], DVP_Transform_t);
io->input = demo->camImages[0];
io->output = demo->images[idx_a9];
i++;
demo->nodes[i].header.kernel = DVP_KN_SOBEL_8;
demo->nodes[i].header.affinity = DVP_CORE_CPU;
io = dvp_knode_to(&demo->nodes[i], DVP_Transform_t);
io->input = demo->images[idx_a9];
io->output = demo->images[idx_a9+1];
i++;
demo->nodes[i].header.kernel = DVP_KN_PREWITT_8;
demo->nodes[i].header.affinity = DVP_CORE_CPU;
io = dvp_knode_to(&demo->nodes[i], DVP_Transform_t);
io->input = demo->images[idx_a9];
io->output = demo->images[idx_a9+2];<|fim▁hole|>
demo->nodes[i].header.kernel = DVP_KN_SCHARR_8;
demo->nodes[i].header.affinity = DVP_CORE_CPU;
io = dvp_knode_to(&demo->nodes[i], DVP_Transform_t);
io->input = demo->images[idx_a9];
io->output = demo->images[idx_a9+3];
i++;
// DSP Section
demo->nodes[i].header.kernel = DVP_KN_XYXY_TO_Y800;
demo->nodes[i].header.affinity = DVP_CORE_DSP;
io = dvp_knode_to(&demo->nodes[i], DVP_Transform_t);
io->input = demo->camImages[0];
io->output = demo->images[idx_dsp];
i++;
demo->nodes[i].header.kernel = DVP_KN_IMG_CONV_3x3;
demo->nodes[i].header.affinity = DVP_CORE_DSP;
img = dvp_knode_to(&demo->nodes[i], DVP_ImageConvolution_t);
img->input = demo->images[idx_dsp];
img->output = demo->images[idx_dsp+1];
img->mask = demo->images[idx_mask_x];
i++;
demo->nodes[i].header.kernel = DVP_KN_IMG_CONV_3x3;
demo->nodes[i].header.affinity = DVP_CORE_DSP;
img = dvp_knode_to(&demo->nodes[i], DVP_ImageConvolution_t);
img->input = demo->images[idx_dsp+1];
img->output = demo->images[idx_dsp+2];
img->mask = demo->images[idx_mask_y];
i++;
demo->nodes[i].header.kernel = DVP_KN_DILATE_CROSS;
demo->nodes[i].header.affinity = DVP_CORE_DSP;
morph = dvp_knode_to(&demo->nodes[i], DVP_Morphology_t);
morph->input = demo->images[idx_dsp+2];
morph->output = demo->images[idx_dsp+3];
i++;
demo->nodes[i].header.kernel = DVP_KN_IIR_HORZ;
demo->nodes[i].header.affinity = DVP_CORE_DSP;
iir = dvp_knode_to(&demo->nodes[i], DVP_IIR_t);
iir->input = demo->images[idx_dsp];
iir->output = demo->images[idx_dsp+4];
iir->scratch = demo->images[idx_scratch];
iir->weight = 2000;
i++;
demo->nodes[i].header.kernel = DVP_KN_IIR_VERT;
demo->nodes[i].header.affinity = DVP_CORE_DSP;
iir = dvp_knode_to(&demo->nodes[i], DVP_IIR_t);
iir->input = demo->images[idx_dsp];
iir->output = demo->images[idx_dsp+5];
iir->scratch = demo->images[idx_scratch];
iir->weight = 2000;
i++;
// SIMCOP Section
demo->nodes[i].header.kernel = DVP_KN_XYXY_TO_Y800;
demo->nodes[i].header.affinity = DVP_CORE_SIMCOP;
io = dvp_knode_to(&demo->nodes[i], DVP_Transform_t);
io->input = demo->camImages[0];
io->output = demo->images[idx_m3];
i++;
demo->nodes[i].header.kernel = DVP_KN_IMG_CONV_3x3;
demo->nodes[i].header.affinity = DVP_CORE_SIMCOP;
img = dvp_knode_to(&demo->nodes[i], DVP_ImageConvolution_t);
img->input = demo->images[idx_m3];
img->output = demo->images[idx_m3+1];
img->mask = demo->images[idx_mask_x];
i++;
demo->nodes[i].header.kernel = DVP_KN_IMG_CONV_3x3;
demo->nodes[i].header.affinity = DVP_CORE_SIMCOP;
img = dvp_knode_to(&demo->nodes[i], DVP_ImageConvolution_t);
img->input = demo->images[idx_m3+1];
img->output = demo->images[idx_m3+2];
img->mask = demo->images[idx_mask_x];
i++;
demo->nodes[i].header.kernel = DVP_KN_IIR_HORZ;
demo->nodes[i].header.affinity = DVP_CORE_SIMCOP;
iir = dvp_knode_to(&demo->nodes[i], DVP_IIR_t);
iir->input = demo->images[idx_m3];
iir->output = demo->images[idx_m3+3];
iir->scratch = demo->images[idx_scratch2];
iir->weight = 2000;
i++;
demo->nodes[i].header.kernel = DVP_KN_IIR_VERT;
demo->nodes[i].header.affinity = DVP_CORE_SIMCOP;
iir = dvp_knode_to(&demo->nodes[i], DVP_IIR_t);
iir->input = demo->images[idx_m3];
iir->output = demo->images[idx_m3+4];
iir->scratch = demo->images[idx_scratch2];
iir->weight = 2000;
i++;
// CONVERSION for Display Graph
for (j = i, k = 0; j < demo->numNodes; j++, k++) {
demo->nodes[j].header.kernel = DVP_KN_Y800_TO_XYXY;
demo->nodes[j].header.affinity = DVP_CORE_CPU;
        io = dvp_knode_to(&demo->nodes[j], DVP_Transform_t);
io->input = demo->images[k];
//io->output = demo->subImages[k]; // this will get replaced as soon as a buffer is returned
}
// initialize the camera
if (VisionCamInit(demo, camType, width, height, fps, 0, FOURCC_UYVY))
{
VisionCamFrame *cameraFrame = NULL;
DVP_Image_t *pImage = NULL;
uint32_t recvFrames = 0;
uint32_t timeouts = 0;
thread_msleep(1000/fps); // wait 1 frame period.
DVP_PRINT(DVP_ZONE_ALWAYS, "VisionCam is initialized, entering queue read loop!\n");
// read from the queue and display the images
do {
bool_e ret = queue_read(demo->frameq, false_e, &cameraFrame);
if (ret == true_e && cameraFrame != NULL)
{
uint32_t idx_disp = 0;
pImage = (DVP_Image_t *)cameraFrame->mFrameBuff;
timeouts = 0;
DVP_PRINT(DVP_ZONE_ALWAYS, "Received Frame %p (%p) from camera\n", pImage, pImage->pData[0]);
                // match the pImage with a display buffer
for (idx_disp = 0; idx_disp < demo->numDisplayImages; idx_disp++)
if (pImage->pData[0] == demo->camImages[idx_disp].pData[0])
break;
DVP_PRINT(DVP_ZONE_ALWAYS, "Image Correlates to Display Buffer %u (%p->%p)\n", idx_disp, &demo->displays[idx_disp], demo->displays[idx_disp].pData[0]);
// update the DVP Graphs with the new camera image
dvp_knode_to(&demo->nodes[idx_a9], DVP_Transform_t)->input = *pImage;
dvp_knode_to(&demo->nodes[idx_dsp], DVP_Transform_t)->input = *pImage;
dvp_knode_to(&demo->nodes[idx_m3], DVP_Transform_t)->input = *pImage;
// update the conversion array
for (i = 0; i < len_conv; i++) {
// add one to the subImages index to skip the camera preview in that
// frame.
dvp_knode_to(&demo->nodes[idx_conv+i], DVP_Transform_t)->output = demo->subImages[(idx_disp * numSubImages) + i + 1];
}
// run the DVP Kernel Graph
DVP_KernelGraph_Process(demo->dvp, demo->graph, demo, DVPCallback);
// update the display
DVP_Display_Render(demo->dvpd, &demo->displays[idx_disp]);
demo->pCam->returnFrame(cameraFrame);
recvFrames++;
if (recvFrames > numFrames)
break;
if (focusDepth >= 0) {
if (recvFrames == fps) { // after 1 second
demo->pCam->setParameter(VCAM_PARAM_DO_MANUALFOCUS, &focusDepth, sizeof(focusDepth));
}
}
if (frameLock > 0) {
if (recvFrames == (uint32_t)frameLock) {
bool_e lock = true_e;
demo->pCam->sendCommand(VCAM_CMD_LOCK_AE, &lock, sizeof(lock));
demo->pCam->sendCommand(VCAM_CMD_LOCK_AWB, &lock, sizeof(lock));
}
}
}
else
{
DVP_PRINT(DVP_ZONE_ERROR, "Timedout waiting for buffer from Camera!\n");
timeouts++;
thread_msleep(1000/fps);
}
} while (timeouts < DVP_DEMO_MAX_TIMEOUTS);
}
else
{
DVP_PRINT(DVP_ZONE_ERROR, "DVP_DEMO Failed during camera initialization\n");
ret = STATUS_NO_RESOURCES;
}
DVP_PrintPerformanceGraph(demo->dvp, demo->graph);
DVP_KernelNode_Free(demo->dvp, demo->nodes, demo->numNodes);
VisionCamDeinit(demo);
}
else
{
DVP_PRINT(DVP_ZONE_ERROR, "DVP_DEMO Failed during data structure initialization\n");
}
if (demo->dvp) {
DVP_KernelGraph_Deinit(demo->dvp);
}
if (demo->camImages)
free(demo->camImages);
if (demo->displays) {
for (n = 0; n < demo->numDisplayImages; n++) {
DVP_Display_Free(demo->dvpd, &demo->displays[n]);
}
free(demo->displays);
}
if (demo->subImages)
free(demo->subImages);
if (demo->images)
free(demo->images);
if (demo->dvpd)
DVP_Display_Destroy(&demo->dvpd);
if (demo->frameq)
queue_destroy(demo->frameq);
}
#else
DVP_PRINT(DVP_ZONE_ERROR, "Required libraries are not present!\n");
argc |= 1;
argv[0] = argv[0];
ret = -1;
#endif
return ret;
}
<|file_name|>settings.py<|end_file_name|>
# AWX settings file
import os
def get_secret():
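    """Read the Django SECRET_KEY from the file mounted into the container, if present."""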
if os.path.exists("/etc/tower/SECRET_KEY"):
return open('/etc/tower/SECRET_KEY', 'rb').read().strip()
ADMINS = ()
STATIC_ROOT = '/var/lib/awx/public/static'
PROJECTS_ROOT = '/var/lib/awx/projects'
AWX_ANSIBLE_COLLECTIONS_PATHS = '/var/lib/awx/vendor/awx_ansible_collections'
JOBOUTPUT_ROOT = '/var/lib/awx/job_status'
SECRET_KEY = get_secret()
ALLOWED_HOSTS = ['*']
# Container environments don't like chroots
AWX_PROOT_ENABLED = False
CLUSTER_HOST_ID = "awx"
SYSTEM_UUID = '00000000-0000-0000-0000-000000000000'
CSRF_COOKIE_SECURE = False
SESSION_COOKIE_SECURE = False
###############################################################################
# EMAIL SETTINGS
###############################################################################
SERVER_EMAIL = 'root@localhost'
DEFAULT_FROM_EMAIL = 'webmaster@localhost'
EMAIL_SUBJECT_PREFIX = '[AWX] '
EMAIL_HOST = 'localhost'
EMAIL_PORT = 25
EMAIL_HOST_USER = ''
EMAIL_HOST_PASSWORD = ''
EMAIL_USE_TLS = False
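# Route all AWX log output to the console so the container runtime captures it.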
LOGGING['handlers']['console'] = {
'()': 'logging.StreamHandler',
'level': 'DEBUG',
'formatter': 'simple',
}
LOGGING['loggers']['django.request']['handlers'] = ['console']
LOGGING['loggers']['rest_framework.request']['handlers'] = ['console']
LOGGING['loggers']['awx']['handlers'] = ['console', 'external_logger']
LOGGING['loggers']['awx.main.commands.run_callback_receiver']['handlers'] = ['console']
LOGGING['loggers']['awx.main.tasks']['handlers'] = ['console', 'external_logger']
LOGGING['loggers']['awx.main.scheduler']['handlers'] = ['console', 'external_logger']
LOGGING['loggers']['django_auth_ldap']['handlers'] = ['console']
LOGGING['loggers']['social']['handlers'] = ['console']
LOGGING['loggers']['system_tracking_migrations']['handlers'] = ['console']
LOGGING['loggers']['rbac_migrations']['handlers'] = ['console']
LOGGING['loggers']['awx.isolated.manager.playbooks']['handlers'] = ['console']
LOGGING['handlers']['callback_receiver'] = {'class': 'logging.NullHandler'}
LOGGING['handlers']['task_system'] = {'class': 'logging.NullHandler'}
LOGGING['handlers']['tower_warnings'] = {'class': 'logging.NullHandler'}
LOGGING['handlers']['rbac_migrations'] = {'class': 'logging.NullHandler'}
LOGGING['handlers']['system_tracking_migrations'] = {'class': 'logging.NullHandler'}
LOGGING['handlers']['management_playbooks'] = {'class': 'logging.NullHandler'}
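# Database connection settings are supplied through environment variables.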
DATABASES = {
'default': {
'ATOMIC_REQUESTS': True,
'ENGINE': 'awx.main.db.profiled_pg',
'NAME': os.getenv("DATABASE_NAME", None),
'USER': os.getenv("DATABASE_USER", None),
'PASSWORD': os.getenv("DATABASE_PASSWORD", None),
'HOST': os.getenv("DATABASE_HOST", None),
'PORT': os.getenv("DATABASE_PORT", None),
}
}
if os.getenv("DATABASE_SSLMODE", False):
DATABASES['default']['OPTIONS'] = {'sslmode': os.getenv("DATABASE_SSLMODE")}
USE_X_FORWARDED_HOST = True
USE_X_FORWARDED_PORT = True
<|file_name|>extend_corpora.rs<|end_file_name|>
// Copyright 2015-2018 Deyan Ginev. See the LICENSE
// file at the top-level directory of this distribution.
//
// Licensed under the MIT license <LICENSE-MIT or http://opensource.org/licenses/MIT>.
// This file may not be copied, modified, or distributed
// except according to those terms.
use cortex::backend::Backend;
use cortex::importer::Importer;
use cortex::models::Corpus;
use std::env;
/// Extends all corpora registered with the `CorTeX` backend, with any new available sources
/// (example usage: arXiv.org releases new source bundles every month, which warrant an update at
/// the same frequency.)
fn main() {
  // Note that we realize the initial import via a real cortex worker, but use a simple utility
  // script for extensions. This is the case since the goal here is to do a simple sysadmin
  // "maintenance update", rather than a full-blown "semantic" union operation.
let backend = Backend::default();
// If input is provided, only extend the corpus of the given name/path.
let mut input_args = env::args();
let _ = input_args.next();
let corpora = if let Some(path) = input_args.next() {
if let Ok(corpus) = Corpus::find_by_path(&path, &backend.connection) {
vec![corpus]
} else {
panic!(
"No corpus could be found at path {:?}. Make sure path matches DB registration.",
path
);
}
} else {
backend.corpora()
};
for corpus in corpora {
// First, build an importer, which will perform the extension
let importer = Importer {
corpus: corpus.clone(),
backend: Backend::default(),
cwd: Importer::cwd(),
};
// Extend the already imported corpus. I prefer that method name to "update", as we won't yet
// implement downsizing on deletion.
let extend_start = time::get_time();
println!("-- Extending: {:?}", corpus.name);
match importer.extend_corpus() {
Ok(_) => {},
Err(e) => println!("Corpus extension panicked: {:?}", e),
};
let extend_end = time::get_time();
let extend_duration = (extend_end - extend_start).num_milliseconds();
println!(
"-- Extending corpus {:?} took {:?}ms",
corpus.name, extend_duration
);
// Then re-register all services, so that they pick up on the tasks
let register_start = time::get_time();
match corpus.select_services(&backend.connection) {
Ok(services) => {
for service in services {
let service_id = service.id;
if service_id > 2 {
println!(
" Extending service {:?} on corpus {:?}",
service.name, corpus.name
);
backend.extend_service(&service, &corpus.path).unwrap();
}
}
},
Err(e) => println!("Services could not be fetched: {:?}", e),
};
let register_end = time::get_time();
    let register_duration = (register_end - register_start).num_milliseconds();
    println!(
      "-- Service registration on corpus {:?} took {:?}ms",
      corpus.name, register_duration
    );
  }
}
<|file_name|>11057.cpp<|end_file_name|>
#include <algorithm>
#include <cassert>
#include <cmath>
#include <iostream>
#include <set>
#include <sstream>
#include <vector>
using namespace std;
int main() {
while (true) {
int N, M;
    cin >> N;
    if (!cin)
      break;
multiset<int> prices;
for (int i = 0; i < N; ++i) {
int p;
cin >> p;
prices.insert(p);
}
cin >> M;
int best_i, best_j;
bool got_best = false;
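    // For each price i in the sorted multiset, look up the complementary
    // price j = M - i and keep the pair with the smallest difference.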
for (multiset<int>::const_iterator itr = prices.begin();
itr != prices.end();
++itr) {
int i = *itr, j = M - i;
if ( (j <= 0) ||
(prices.count(j) == 0) ||
((i == j) && (prices.count(i) < 2)) )
continue;
if (!got_best || (abs(i - j) < abs(best_i - best_j))) {
best_i = i;
best_j = j;
got_best = true;
}
}
if (best_i > best_j) {
swap(best_i, best_j);
}
cout << "Peter should buy books whose prices are " << best_i << " and " << best_j << "." << endl << endl;
}
return 0;
}
<|file_name|>resource_task.rs<|end_file_name|>
/* This Source Code Form is subject to the terms of the Mozilla Public
* License, v. 2.0. If a copy of the MPL was not distributed with this
* file, You can obtain one at http://mozilla.org/MPL/2.0/. */
//! A task that takes a URL and streams back the binary data.
use about_loader;
use data_loader;
use file_loader;
use http_loader;
use cookie_storage::CookieStorage;
use cookie;
use mime_classifier::MIMEClassifier;
use net_traits::{ControlMsg, LoadData, LoadResponse, LoadConsumer, CookieSource};
use net_traits::{Metadata, ProgressMsg, ResourceTask, AsyncResponseTarget, ResponseAction};
use net_traits::ProgressMsg::Done;
use util::opts;
use util::task::spawn_named;
use url::Url;
use hsts::{HSTSList, HSTSEntry, preload_hsts_domains};
use devtools_traits::{DevtoolsControlMsg};
use hyper::header::{ContentType, Header, SetCookie, UserAgent};
use hyper::mime::{Mime, TopLevel, SubLevel};
use ipc_channel::ipc::{self, IpcReceiver, IpcSender};
use std::borrow::ToOwned;
use std::boxed::FnBox;
use std::sync::Arc;
use std::sync::Mutex;
use std::sync::mpsc::{channel, Sender};
pub enum ProgressSender {
Channel(IpcSender<ProgressMsg>),
Listener(AsyncResponseTarget),
}
impl ProgressSender {
//XXXjdm return actual error
pub fn send(&self, msg: ProgressMsg) -> Result<(), ()> {
match *self {
ProgressSender::Channel(ref c) => c.send(msg).map_err(|_| ()),
ProgressSender::Listener(ref b) => {
let action = match msg {
ProgressMsg::Payload(buf) => ResponseAction::DataAvailable(buf),
ProgressMsg::Done(status) => ResponseAction::ResponseComplete(status),
};
b.invoke_with_listener(action);
Ok(())
}
}
}
}
/// For use by loaders in responding to a Load message.
pub fn start_sending(start_chan: LoadConsumer, metadata: Metadata) -> ProgressSender {
start_sending_opt(start_chan, metadata).ok().unwrap()
}
/// For use by loaders in responding to a Load message that allows content sniffing.
pub fn start_sending_sniffed(start_chan: LoadConsumer, metadata: Metadata,
classifier: Arc<MIMEClassifier>, partial_body: &Vec<u8>)
-> ProgressSender {
start_sending_sniffed_opt(start_chan, metadata, classifier, partial_body).ok().unwrap()
}
/// For use by loaders in responding to a Load message that allows content sniffing.
pub fn start_sending_sniffed_opt(start_chan: LoadConsumer, mut metadata: Metadata,
classifier: Arc<MIMEClassifier>, partial_body: &Vec<u8>)
-> Result<ProgressSender, ()> {
if opts::get().sniff_mime_types {
        // TODO: should be calculated in the resource loader, from pull request #4094
let mut nosniff = false;
let mut check_for_apache_bug = false;
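        // Apache serves some unknown types as text/plain with one of a few
        // default charsets; such responses are re-sniffed (the "Apache bug" case).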
if let Some(ref headers) = metadata.headers {
if let Some(ref raw_content_type) = headers.get_raw("content-type") {
if raw_content_type.len() > 0 {
let ref last_raw_content_type = raw_content_type[raw_content_type.len() - 1];
check_for_apache_bug = last_raw_content_type == b"text/plain"
|| last_raw_content_type == b"text/plain; charset=ISO-8859-1"
|| last_raw_content_type == b"text/plain; charset=iso-8859-1"
|| last_raw_content_type == b"text/plain; charset=UTF-8";
}
}
if let Some(ref raw_content_type_options) = headers.get_raw("X-content-type-options") {
nosniff = raw_content_type_options.iter().any(|ref opt| *opt == b"nosniff");
}
}
let supplied_type =
metadata.content_type.map(|ContentType(Mime(toplevel, sublevel, _))| {
(format!("{}", toplevel), format!("{}", sublevel))
});
metadata.content_type = classifier.classify(nosniff, check_for_apache_bug, &supplied_type,
&partial_body).map(|(toplevel, sublevel)| {
let mime_tp: TopLevel = toplevel.parse().unwrap();
let mime_sb: SubLevel = sublevel.parse().unwrap();
ContentType(Mime(mime_tp, mime_sb, vec!()))
});
}
start_sending_opt(start_chan, metadata)
}
/// For use by loaders in responding to a Load message.
pub fn start_sending_opt(start_chan: LoadConsumer, metadata: Metadata) -> Result<ProgressSender, ()> {
    match start_chan {
LoadConsumer::Channel(start_chan) => {
let (progress_chan, progress_port) = ipc::channel().unwrap();
let result = start_chan.send(LoadResponse {
metadata: metadata,
progress_port: progress_port,
});
match result {
Ok(_) => Ok(ProgressSender::Channel(progress_chan)),
Err(_) => Err(())
}
}
LoadConsumer::Listener(target) => {
target.invoke_with_listener(ResponseAction::HeadersAvailable(metadata));
Ok(ProgressSender::Listener(target))
}
}
}
/// Create a ResourceTask
pub fn new_resource_task(user_agent: String,
devtools_chan: Option<Sender<DevtoolsControlMsg>>) -> ResourceTask {
let hsts_preload = match preload_hsts_domains() {
Some(list) => list,
None => HSTSList::new()
};
let (setup_chan, setup_port) = ipc::channel().unwrap();
let setup_chan_clone = setup_chan.clone();
spawn_named("ResourceManager".to_owned(), move || {
let resource_manager = ResourceManager::new(
user_agent, setup_chan_clone, hsts_preload, devtools_chan
);
let mut channel_manager = ResourceChannelManager {
from_client: setup_port,
resource_manager: resource_manager
};
channel_manager.start();
});
setup_chan
}
struct ResourceChannelManager {
from_client: IpcReceiver<ControlMsg>,
resource_manager: ResourceManager
}
impl ResourceChannelManager {
fn start(&mut self) {
loop {
match self.from_client.recv().unwrap() {
ControlMsg::Load(load_data, consumer) => {
self.resource_manager.load(load_data, consumer)
}
ControlMsg::SetCookiesForUrl(request, cookie_list, source) => {
self.resource_manager.set_cookies_for_url(request, cookie_list, source)
}
ControlMsg::GetCookiesForUrl(url, consumer, source) => {
consumer.send(self.resource_manager.cookie_storage.cookies_for_url(&url, source)).unwrap();
}
ControlMsg::SetHSTSEntryForHost(host, include_subdomains, max_age) => {
if let Some(entry) = HSTSEntry::new(host, include_subdomains, Some(max_age)) {
self.resource_manager.add_hsts_entry(entry)
}
}
ControlMsg::Exit => {
break
}
}
}
}
}
pub struct ResourceManager {
user_agent: String,
cookie_storage: CookieStorage,
resource_task: IpcSender<ControlMsg>,
mime_classifier: Arc<MIMEClassifier>,
devtools_chan: Option<Sender<DevtoolsControlMsg>>,
hsts_list: Arc<Mutex<HSTSList>>
}
impl ResourceManager {
pub fn new(user_agent: String,
resource_task: IpcSender<ControlMsg>,
hsts_list: HSTSList,
devtools_channel: Option<Sender<DevtoolsControlMsg>>) -> ResourceManager {
ResourceManager {
user_agent: user_agent,
cookie_storage: CookieStorage::new(),
resource_task: resource_task,
mime_classifier: Arc::new(MIMEClassifier::new()),
devtools_chan: devtools_channel,
hsts_list: Arc::new(Mutex::new(hsts_list))
}
}
}
impl ResourceManager {
fn set_cookies_for_url(&mut self, request: Url, cookie_list: String, source: CookieSource) {
let header = Header::parse_header(&[cookie_list.into_bytes()]);
if let Ok(SetCookie(cookies)) = header {
for bare_cookie in cookies {
if let Some(cookie) = cookie::Cookie::new_wrapped(bare_cookie, &request, source) {
self.cookie_storage.push(cookie, source);
}
}
}
}
pub fn add_hsts_entry(&mut self, entry: HSTSEntry) {
self.hsts_list.lock().unwrap().push(entry);
}
pub fn is_host_sts(&self, host: &str) -> bool {
self.hsts_list.lock().unwrap().is_host_secure(host)
}
fn load(&mut self, mut load_data: LoadData, consumer: LoadConsumer) {
load_data.preserved_headers.set(UserAgent(self.user_agent.clone()));
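        // Wrap a plain loader function in a boxed closure so loaders for all
        // schemes can be dispatched through the same call_box interface.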
fn from_factory(factory: fn(LoadData, LoadConsumer, Arc<MIMEClassifier>))
-> Box<FnBox(LoadData, LoadConsumer, Arc<MIMEClassifier>) + Send> {
box move |load_data, senders, classifier| {
factory(load_data, senders, classifier)
}
}
let loader = match &*load_data.url.scheme {
"file" => from_factory(file_loader::factory),
"http" | "https" | "view-source" =>
http_loader::factory(self.resource_task.clone(), self.devtools_chan.clone(), self.hsts_list.clone()),
"data" => from_factory(data_loader::factory),
"about" => from_factory(about_loader::factory),
_ => {
debug!("resource_task: no loader for scheme {}", load_data.url.scheme);
start_sending(consumer, Metadata::default(load_data.url))
.send(ProgressMsg::Done(Err("no loader for scheme".to_string()))).unwrap();
return
}
};
debug!("resource_task: loading url: {}", load_data.url.serialize());
loader.call_box((load_data, consumer, self.mime_classifier.clone()));
}
}
<|file_name|>package-info.java<|end_file_name|>
/**
 * Michael's (the non-Asian one's) libraries.<br>
 * Thank you for the help!
* @author Michael [???]
*
*/
package com.shadow53.libs;
<|file_name|>util.rs<|end_file_name|>
use crate::types::*;
use ethereum_types::H256;
use hmac::{Hmac, Mac, NewMac};
use secp256k1::PublicKey;
use sha2::Sha256;
use sha3::{Digest, Keccak256};
use std::fmt::{self, Formatter};
pub fn keccak256(data: &[u8]) -> H256 {
H256::from(Keccak256::digest(data).as_ref())
}
pub fn sha256(data: &[u8]) -> H256 {
H256::from(Sha256::digest(data).as_ref())
}
pub fn hmac_sha256(key: &[u8], input: &[&[u8]], auth_data: &[u8]) -> H256 {
let mut hmac = Hmac::<Sha256>::new_varkey(key).unwrap();
for input in input {
hmac.update(input);
}
hmac.update(auth_data);
H256::from_slice(&*hmac.finalize().into_bytes())
}
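/// Derives a peer ID from a public key by dropping the leading 0x04 tag byte
/// of the 65-byte uncompressed SEC1 encoding.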
pub fn pk2id(pk: &PublicKey) -> PeerId {
PeerId::from_slice(&pk.serialize_uncompressed()[1..])
}
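/// Reconstructs a public key from a peer ID by prepending the 0x04 tag byte.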
pub fn id2pk(id: PeerId) -> Result<PublicKey, secp256k1::Error> {
let mut s = [0_u8; 65];
s[0] = 4;
s[1..].copy_from_slice(&id.as_bytes());
PublicKey::from_slice(&s)
}
pub fn hex_debug<T: AsRef<[u8]>>(s: &T, f: &mut Formatter) -> fmt::Result {
f.write_str(&hex::encode(&s))
}
#[cfg(test)]
mod tests {
use super::*;
use secp256k1::{SecretKey, SECP256K1};
#[test]
fn pk2id2pk() {
let prikey = SecretKey::new(&mut secp256k1::rand::thread_rng());
let pubkey = PublicKey::from_secret_key(SECP256K1, &prikey);
assert_eq!(pubkey, id2pk(pk2id(&pubkey)).unwrap());
}
}
<|file_name|>issue-17718-const-borrow.rs<|end_file_name|>
// Copyright 2014 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
use std::cell::UnsafeCell;
const A: UnsafeCell<usize> = UnsafeCell::new(1);
const B: &'static UnsafeCell<usize> = &A;
//~^ ERROR: cannot borrow a constant which may contain interior mutability
struct C { a: UnsafeCell<usize> }
const D: C = C { a: UnsafeCell::new(1) };
const E: &'static UnsafeCell<usize> = &D.a;
//~^ ERROR: cannot borrow a constant which may contain interior mutability
const F: &'static C = &D;
//~^ ERROR: cannot borrow a constant which may contain interior mutability
fn main() {}
<|file_name|>0002_auto_20190430_1520.py<|end_file_name|>
# Generated by Django 2.1.7 on 2019-04-30 13:20
from django.db import migrations, models
import uuid
class Migration(migrations.Migration):
dependencies = [
('core', '0001_initial'),
]
    operations = [
        migrations.AlterField(
            model_name='publishablemodel',
            name='id',
            field=models.UUIDField(default=uuid.uuid4, editable=False, primary_key=True, serialize=False),
),
    ]
<|file_name|>SpongeSignaledOutputData.java<|end_file_name|>
/*
* This file is part of Sponge, licensed under the MIT License (MIT).
*
* Copyright (c) SpongePowered <https://www.spongepowered.org>
* Copyright (c) contributors
*
* Permission is hereby granted, free of charge, to any person obtaining a copy
* of this software and associated documentation files (the "Software"), to deal
* in the Software without restriction, including without limitation the rights
 * to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
 * copies of the Software, and to permit persons to whom the Software is
 * furnished to do so, subject to the following conditions:
 *
* The above copyright notice and this permission notice shall be included in
* all copies or substantial portions of the Software.
*
* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
* IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
* FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
* AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
* LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
* OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
* THE SOFTWARE.
*/
package org.spongepowered.common.data.manipulator.block;
import static org.spongepowered.api.data.DataQuery.of;
import org.spongepowered.api.data.DataContainer;
import org.spongepowered.api.data.DataQuery;
import org.spongepowered.api.data.MemoryDataContainer;
import org.spongepowered.api.data.manipulator.block.SignaledOutputData;
import org.spongepowered.common.data.manipulator.AbstractIntData;
public class SpongeSignaledOutputData extends AbstractIntData<SignaledOutputData> implements SignaledOutputData {
public static final DataQuery OUTPUT_SIGNAL_STRENGTH = of("OutputSignalStrength");
public SpongeSignaledOutputData() {
super(SignaledOutputData.class, 0, 0, 15);
}
@Override
public int getOutputSignal() {
return this.getValue();
}
@Override
public SignaledOutputData setOutputSignal(int signal) {
return this.setValue(signal);
}
@Override
public SignaledOutputData copy() {
return new SpongeSignaledOutputData().setValue(this.getValue());
}
@Override
public DataContainer toContainer() {
return new MemoryDataContainer().set(OUTPUT_SIGNAL_STRENGTH, this.getValue());
}
}
<|file_name|>Alignment.py<|end_file_name|>
#!/usr/bin/python
# ZetCode PyGTK tutorial
#
# This example shows how to use
# the Alignment widget
#
# author: jan bodnar
# website: zetcode.com
# last edited: February 2009
import gtk
import gobject
class PyApp(gtk.Window):
    def __init__(self):
        super(PyApp, self).__init__()
        self.set_title("Alignment")
        self.set_size_request(260, 150)
        self.set_position(gtk.WIN_POS_CENTER)
vbox = gtk.VBox(False, 5)
hbox = gtk.HBox(True, 3)
valign = gtk.Alignment(0, 1, 0, 0)
vbox.pack_start(valign)
ok = gtk.Button("OK")
ok.set_size_request(70, 30)
close = gtk.Button("Close")
hbox.add(ok)
hbox.add(close)
halign = gtk.Alignment(1, 0, 0, 0)
halign.add(hbox)
vbox.pack_start(halign, False, False, 3)
self.add(vbox)
self.connect("destroy", gtk.main_quit)
self.show_all()
PyApp()
gtk.main()
<|file_name|>curved.cpp<|end_file_name|>
// This file is part of Hermes2D.
//
// Hermes2D is free software: you can redistribute it and/or modify
// it under the terms of the GNU General Public License as published by
// the Free Software Foundation, either version 2 of the License, or
// (at your option) any later version.
//
// Hermes2D is distributed in the hope that it will be useful,
// but WITHOUT ANY WARRANTY; without even the implied warranty of
// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
// GNU General Public License for more details.
//
// You should have received a copy of the GNU General Public License
// along with Hermes2D. If not, see <http://www.gnu.org/licenses/>.
#include "curved.h"
#include <algorithm>
#include "global.h"
#include "shapeset/shapeset_h1_all.h"
#include "shapeset/shapeset_common.h"
#include "shapeset/precalc.h"
#include "mesh.h"
#include "quad_all.h"
#include "matrix.h"
#include "algebra/dense_matrix_operations.h"
using namespace Hermes::Algebra::DenseMatrixOperations;
namespace Hermes
{
namespace Hermes2D
{
HERMES_API Quad1DStd g_quad_1d_std;
HERMES_API Quad2DStd g_quad_2d_std;
H1ShapesetJacobi ref_map_shapeset;
PrecalcShapesetAssembling ref_map_pss_static(&ref_map_shapeset);
CurvMapStatic::CurvMapStatic()
{
int order = ref_map_shapeset.get_max_order();
this->edge_proj_matrix_size = order - 1;
// Edges.
this->edge_proj_matrix = new_matrix<double>(edge_proj_matrix_size, edge_proj_matrix_size);
edge_p = malloc_with_check<double>(edge_proj_matrix_size);
// Bubbles - triangles.
this->tri_bubble_np = ref_map_shapeset.get_num_bubbles(order, HERMES_MODE_TRIANGLE);
bubble_proj_matrix_tri = new_matrix<double>(tri_bubble_np, tri_bubble_np);
bubble_tri_p = malloc_with_check<double>(tri_bubble_np);
// Bubbles - quads.
order = H2D_MAKE_QUAD_ORDER(order, order);
this->quad_bubble_np = ref_map_shapeset.get_num_bubbles(order, HERMES_MODE_QUAD);
bubble_proj_matrix_quad = new_matrix<double>(quad_bubble_np, quad_bubble_np);
bubble_quad_p = malloc_with_check<double>(quad_bubble_np);
this->precalculate_cholesky_projection_matrices_bubble();
this->precalculate_cholesky_projection_matrix_edge();
}
CurvMapStatic::~CurvMapStatic()
{
free_with_check(edge_proj_matrix, true);
free_with_check(bubble_proj_matrix_tri, true);
free_with_check(bubble_proj_matrix_quad, true);
free_with_check(edge_p);
free_with_check(bubble_tri_p);
free_with_check(bubble_quad_p);
}
double** CurvMapStatic::calculate_bubble_projection_matrix(short* indices, ElementMode2D mode)
{
unsigned short nb;
double** mat;
if (mode == HERMES_MODE_TRIANGLE)
{
mat = this->bubble_proj_matrix_tri;
nb = this->tri_bubble_np;
}
else
{
mat = this->bubble_proj_matrix_quad;
nb = this->quad_bubble_np;
}
PrecalcShapesetAssembling ref_map_pss_static_temp(&ref_map_shapeset);
ref_map_pss_static_temp.set_active_element(ref_map_pss_static.get_active_element());
for (unsigned short i = 0; i < nb; i++)
{
for (unsigned short j = i; j < nb; j++)
{
short ii = indices[i], ij = indices[j];
unsigned short o = ref_map_shapeset.get_order(ii, mode) + ref_map_shapeset.get_order(ij, mode);
o = std::max(H2D_GET_V_ORDER(o), H2D_GET_H_ORDER(o));
ref_map_pss_static.set_active_shape(ii);
ref_map_pss_static.set_quad_order(o, H2D_FN_VAL);
const double* fni = ref_map_pss_static.get_fn_values();
ref_map_pss_static_temp.set_active_shape(ij);
ref_map_pss_static_temp.set_quad_order(o, H2D_FN_VAL);
const double* fnj = ref_map_pss_static_temp.get_fn_values();
double3* pt = g_quad_2d_std.get_points(o, mode);
double val = 0.0;
for (unsigned short k = 0; k < g_quad_2d_std.get_num_points(o, mode); k++)
val += pt[k][2] * (fni[k] * fnj[k]);
mat[i][j] = mat[j][i] = val;
}
}
return mat;
}
void CurvMapStatic::precalculate_cholesky_projection_matrices_bubble()
{
// *** triangles ***
// calculate projection matrix of maximum order
{
Element e;
e.nvert = 3;
e.cm = nullptr;
e.id = -1;
ref_map_pss_static.set_active_element(&e);
short* indices = ref_map_shapeset.get_bubble_indices(ref_map_shapeset.get_max_order(), HERMES_MODE_TRIANGLE);
curvMapStatic.bubble_proj_matrix_tri = calculate_bubble_projection_matrix(indices, HERMES_MODE_TRIANGLE);
// cholesky factorization of the matrix
choldc(curvMapStatic.bubble_proj_matrix_tri, this->tri_bubble_np, curvMapStatic.bubble_tri_p);
}
// *** quads ***
// calculate projection matrix of maximum order
{
Element e;
e.nvert = 4;
e.cm = nullptr;
e.id = -1;
ref_map_pss_static.set_active_element(&e);
short *indices = ref_map_shapeset.get_bubble_indices(H2D_MAKE_QUAD_ORDER(ref_map_shapeset.get_max_order(), ref_map_shapeset.get_max_order()), HERMES_MODE_QUAD);
curvMapStatic.bubble_proj_matrix_quad = calculate_bubble_projection_matrix(indices, HERMES_MODE_QUAD);
// cholesky factorization of the matrix
choldc(curvMapStatic.bubble_proj_matrix_quad, this->quad_bubble_np, curvMapStatic.bubble_quad_p);
}
}
void CurvMapStatic::precalculate_cholesky_projection_matrix_edge()
{
// calculate projection matrix of maximum order
for (int i = 0; i < this->edge_proj_matrix_size; i++)
{
for (int j = i; j < this->edge_proj_matrix_size; j++)
{
int o = i + j + 4;
double2* pt = g_quad_1d_std.get_points(o);
double val = 0.0;
for (int k = 0; k < g_quad_1d_std.get_num_points(o); k++)
{
double fi = 0;
double fj = 0;
double x = pt[k][0];
switch (i + 2)
{
case 0:
fi = l0(x);
break;
case 1:
fi = l1(x);
break;
case 2:
fi = l2(x);
break;
case 3:
fi = l3(x);
break;
case 4:
fi = l4(x);
break;
case 5:
fi = l5(x);
break;
case 6:
fi = l6(x);
break;
case 7:
fi = l7(x);
break;
case 8:
fi = l8(x);
break;
case 9:
fi = l9(x);
break;
case 10:
fi = l10(x);
break;
              case 11:
                fi = l11(x);
                break;
              }
              switch (j + 2)
              {
case 0:
fj = l0(x);
break;
case 1:
fj = l1(x);
break;
case 2:
fj = l2(x);
break;
case 3:
fj = l3(x);
break;
case 4:
fj = l4(x);
break;
case 5:
fj = l5(x);
break;
case 6:
fj = l6(x);
break;
case 7:
fj = l7(x);
break;
case 8:
fj = l8(x);
break;
case 9:
fj = l9(x);
break;
case 10:
fj = l10(x);
break;
case 11:
fj = l11(x);
break;
}
val += pt[k][1] * (fi * fj);
}
this->edge_proj_matrix[i][j] = this->edge_proj_matrix[j][i] = val;
}
}
// Cholesky factorization of the matrix
choldc(this->edge_proj_matrix, this->edge_proj_matrix_size, this->edge_p);
}
CurvMapStatic curvMapStatic;
Curve::Curve(CurvType type) : type(type)
{
}
Curve::~Curve()
{
}
Arc::Arc() : Curve(ArcType)
{
kv[0] = kv[1] = kv[2] = 0;
kv[3] = kv[4] = kv[5] = 1;
}
Arc::Arc(double angle) : Curve(ArcType), angle(angle)
{
kv[0] = kv[1] = kv[2] = 0;
kv[3] = kv[4] = kv[5] = 1;
}
Arc::Arc(const Arc* other) : Curve(ArcType)
{
this->angle = other->angle;
memcpy(this->kv, other->kv, 6 * sizeof(double));
memcpy(this->pt, other->pt, 3 * sizeof(double3));
}
Nurbs::Nurbs() : Curve(NurbsType)
{
pt = nullptr;
kv = nullptr;
};
Nurbs::~Nurbs()
{
free_with_check(pt);
free_with_check(kv);
};
Nurbs::Nurbs(const Nurbs* other) : Curve(NurbsType)
{
this->degree = other->degree;
this->nk = other->nk;
this->np = other->np;
this->kv = malloc_with_check<double>(nk);
this->pt = malloc_with_check<double3>(np);
}
static double lambda_0(double x, double y)
{
return -0.5 * (x + y);
}
static double lambda_1(double x, double y)
{
return 0.5 * (x + 1);
}
static double lambda_2(double x, double y)
{
return 0.5 * (y + 1);
}
CurvMap::CurvMap() : ref_map_pss(&ref_map_shapeset)
{
coeffs = nullptr;
ctm = nullptr;
memset(curves, 0, sizeof(Curve*)* H2D_MAX_NUMBER_EDGES);
this->parent = nullptr;
this->sub_idx = 0;
}
CurvMap::CurvMap(const CurvMap* cm) : ref_map_pss(&ref_map_shapeset)
{
this->nc = cm->nc;
this->order = cm->order;
/// \todo Find out if this is safe.
this->ctm = cm->ctm;
this->coeffs = malloc_with_check<double2>(nc, true);
memcpy(coeffs, cm->coeffs, sizeof(double2)* nc);
this->toplevel = cm->toplevel;
if (this->toplevel)
{
for (int i = 0; i < 4; i++)
{
if (cm->curves[i])
{
if (cm->curves[i]->type == NurbsType)
this->curves[i] = new Nurbs((Nurbs*)cm->curves[i]);
else
this->curves[i] = new Arc((Arc*)cm->curves[i]);
}
else
this->curves[i] = nullptr;
}
this->parent = nullptr;
this->sub_idx = 0;
}
else
{
memset(curves, 0, sizeof(Curve*)* H2D_MAX_NUMBER_EDGES);
this->parent = cm->parent;
this->sub_idx = cm->sub_idx;
}
}
CurvMap::~CurvMap()
{
this->free();
}
void CurvMap::free()
{
free_with_check(this->coeffs, true);
if (toplevel)
{
for (int i = 0; i < 4; i++)
if (curves[i])
{
delete curves[i];
curves[i] = nullptr;
}
}
}
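    // Cox-de Boor recursion: evaluates the i-th B-spline basis function of
    // degree k at parameter t over the given knot vector.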
double CurvMap::nurbs_basis_fn(unsigned short i, unsigned short k, double t, double* knot)
{
if (k == 0)
{
return (t >= knot[i] && t <= knot[i + 1] && knot[i] < knot[i + 1]) ? 1.0 : 0.0;
}
else
{
double N1 = nurbs_basis_fn(i, k - 1, t, knot);
double N2 = nurbs_basis_fn(i + 1, k - 1, t, knot);
if ((N1 > HermesEpsilon) || (N2 > HermesEpsilon))
{
double result = 0.0;
if ((N1 > HermesEpsilon) && knot[i + k] != knot[i])
result += ((t - knot[i]) / (knot[i + k] - knot[i])) * N1;
if ((N2 > HermesEpsilon) && knot[i + k + 1] != knot[i + 1])
result += ((knot[i + k + 1] - t) / (knot[i + k + 1] - knot[i + 1])) * N2;
return result;
}
else
return 0.0;
}
}
void CurvMap::nurbs_edge(Element* e, Curve* curve, int edge, double t, double& x,
double& y)
{
// Nurbs curves are parametrized from 0 to 1.
t = (t + 1.0) / 2.0;
// Start point A, end point B.
double2 A = { e->vn[edge]->x, e->vn[edge]->y };
double2 B = { e->vn[e->next_vert(edge)]->x, e->vn[e->next_vert(edge)]->y };
// Vector pointing from A to B.
double2 v = { B[0] - A[0], B[1] - A[1] };
// Straight line.
if (!curve)
{
x = A[0] + t * v[0];
y = A[1] + t * v[1];
}
else
{
double3* cp;
int degree, np;
double* kv;
if (curve->type == ArcType)
{
cp = ((Arc*)curve)->pt;
np = ((Arc*)curve)->np;
degree = ((Arc*)curve)->degree;
kv = ((Arc*)curve)->kv;
}
else
{
cp = ((Nurbs*)curve)->pt;
np = ((Nurbs*)curve)->np;
degree = ((Nurbs*)curve)->degree;
kv = ((Nurbs*)curve)->kv;
}
// sum of basis fns and weights
double sum = 0.0;
x = y = 0.0;
for (int i = 0; i < np; i++)
{
double basis = nurbs_basis_fn(i, degree, t, kv);
sum += cp[i][2] * basis;
double x_i = cp[i][0];
double y_i = cp[i][1];
double w_i = cp[i][2];
x += w_i * basis * x_i;
y += w_i * basis * y_i;
}
x /= sum;
y /= sum;
}
}
const double2 CurvMap::ref_vert[2][H2D_MAX_NUMBER_VERTICES] =
{
{ { -1.0, -1.0 }, { 1.0, -1.0 }, { -1.0, 1.0 }, { 0.0, 0.0 } },
{ { -1.0, -1.0 }, { 1.0, -1.0 }, { 1.0, 1.0 }, { -1.0, 1.0 } }
};
void CurvMap::nurbs_edge_0(Element* e, Curve* curve, unsigned short edge, double t, double& x, double& y, double& n_x, double& n_y, double& t_x, double& t_y)
{
unsigned short va = edge;
unsigned short vb = e->next_vert(edge);
nurbs_edge(e, curve, edge, t, x, y);
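      // Subtract the linear interpolant between the edge endpoints, leaving
      // only the deviation of the curved edge from the straight chord.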
x -= 0.5 * ((1 - t) * (e->vn[va]->x) + (1 + t) * (e->vn[vb]->x));
y -= 0.5 * ((1 - t) * (e->vn[va]->y) + (1 + t) * (e->vn[vb]->y));
double k = 4.0 / ((1 - t) * (1 + t));
x *= k;
y *= k;
}
void CurvMap::calc_ref_map_tri(Element* e, Curve** curve, double xi_1, double xi_2, double& x, double& y)
{
double fx, fy;
x = y = 0.0;
double l[3] = { lambda_0(xi_1, xi_2), lambda_1(xi_1, xi_2), lambda_2(xi_1, xi_2) };
for (unsigned char j = 0; j < e->get_nvert(); j++)
{
int va = j;
int vb = e->next_vert(j);
double la = l[va];
double lb = l[vb];
// vertex part
x += e->vn[j]->x * la;
y += e->vn[j]->y * la;
if (!(((ref_vert[0][va][0] == xi_1) && (ref_vert[0][va][1] == xi_2)) || ((ref_vert[0][vb][0] == xi_1) && (ref_vert[0][vb][1] == xi_2))))
{
// edge part
double t = lb - la;
double n_x, n_y, t_x, t_y;
nurbs_edge_0(e, curve[j], j, t, fx, fy, n_x, n_y, t_x, t_y);
x += fx * lb * la;
y += fy * lb * la;
}
}
}
void CurvMap::calc_ref_map_quad(Element* e, Curve** curve, double xi_1, double xi_2,
double& x, double& y)
{
double ex[H2D_MAX_NUMBER_EDGES], ey[H2D_MAX_NUMBER_EDGES];
nurbs_edge(e, curve[0], 0, xi_1, ex[0], ey[0]);
nurbs_edge(e, curve[1], 1, xi_2, ex[1], ey[1]);
nurbs_edge(e, curve[2], 2, -xi_1, ex[2], ey[2]);
nurbs_edge(e, curve[3], 3, -xi_2, ex[3], ey[3]);
x = (1 - xi_2) / 2.0 * ex[0] + (1 + xi_1) / 2.0 * ex[1] +
(1 + xi_2) / 2.0 * ex[2] + (1 - xi_1) / 2.0 * ex[3] -
(1 - xi_1)*(1 - xi_2) / 4.0 * e->vn[0]->x - (1 + xi_1)*(1 - xi_2) / 4.0 * e->vn[1]->x -
(1 + xi_1)*(1 + xi_2) / 4.0 * e->vn[2]->x - (1 - xi_1)*(1 + xi_2) / 4.0 * e->vn[3]->x;
y = (1 - xi_2) / 2.0 * ey[0] + (1 + xi_1) / 2.0 * ey[1] +
(1 + xi_2) / 2.0 * ey[2] + (1 - xi_1) / 2.0 * ey[3] -
(1 - xi_1)*(1 - xi_2) / 4.0 * e->vn[0]->y - (1 + xi_1)*(1 - xi_2) / 4.0 * e->vn[1]->y -
(1 + xi_1)*(1 + xi_2) / 4.0 * e->vn[2]->y - (1 - xi_1)*(1 + xi_2) / 4.0 * e->vn[3]->y;
}
void CurvMap::calc_ref_map(Element* e, Curve** curve, double xi_1, double xi_2, double2& f)
{
if (e->get_mode() == HERMES_MODE_QUAD)
calc_ref_map_quad(e, curve, xi_1, xi_2, f[0], f[1]);
else
calc_ref_map_tri(e, curve, xi_1, xi_2, f[0], f[1]);
}
void CurvMap::edge_coord(Element* e, unsigned short edge, double t, double2& x) const
{
unsigned short mode = e->get_mode();
double2 a, b;
a[0] = ctm->m[0] * ref_vert[mode][edge][0] + ctm->t[0];
a[1] = ctm->m[1] * ref_vert[mode][edge][1] + ctm->t[1];
b[0] = ctm->m[0] * ref_vert[mode][e->next_vert(edge)][0] + ctm->t[0];
b[1] = ctm->m[1] * ref_vert[mode][e->next_vert(edge)][1] + ctm->t[1];
for (int i = 0; i < 2; i++)
{
x[i] = a[i] + (t + 1.0) / 2.0 * (b[i] - a[i]);
}
}
void CurvMap::calc_edge_projection(Element* e, unsigned short edge, Curve** nurbs, unsigned short order, double2* proj) const
{
unsigned short i, j, k;
unsigned short mo1 = g_quad_1d_std.get_max_order();
unsigned char np = g_quad_1d_std.get_num_points(mo1);
unsigned short ne = order - 1;
unsigned short mode = e->get_mode();
assert(np <= 15 && ne <= 10);
double2 fn[15];
double rhside[2][10];
memset(rhside[0], 0, sizeof(double)* ne);
memset(rhside[1], 0, sizeof(double)* ne);
double a_1, a_2, b_1, b_2;
a_1 = ctm->m[0] * ref_vert[mode][edge][0] + ctm->t[0];
a_2 = ctm->m[1] * ref_vert[mode][edge][1] + ctm->t[1];
b_1 = ctm->m[0] * ref_vert[mode][e->next_vert(edge)][0] + ctm->t[0];
b_2 = ctm->m[1] * ref_vert[mode][e->next_vert(edge)][1] + ctm->t[1];
// values of nonpolynomial function in two vertices
double2 fa, fb;
calc_ref_map(e, nurbs, a_1, a_2, fa);
calc_ref_map(e, nurbs, b_1, b_2, fb);
double2* pt = g_quad_1d_std.get_points(mo1);
// over all integration points
for (j = 0; j < np; j++)
{
double2 x;
double t = pt[j][0];
edge_coord(e, edge, t, x);
calc_ref_map(e, nurbs, x[0], x[1], fn[j]);
for (k = 0; k < 2; k++)
fn[j][k] = fn[j][k] - (fa[k] + (t + 1) / 2.0 * (fb[k] - fa[k]));
}
double2* result = proj + e->get_nvert() + edge * (order - 1);
for (k = 0; k < 2; k++)
{
for (i = 0; i < ne; i++)
{
for (j = 0; j < np; j++)
{
double t = pt[j][0];
double fi = 0;
switch (i + 2)
{
case 0:
fi = l0(t);
break;
case 1:
fi = l1(t);
break;
case 2:
fi = l2(t);
break;
case 3:
fi = l3(t);
break;
case 4:
fi = l4(t);
break;
case 5:
fi = l5(t);
break;
case 6:
fi = l6(t);
break;
case 7:
fi = l7(t);
break;
case 8:
fi = l8(t);
break;
case 9:
fi = l9(t);
break;
case 10:
fi = l10(t);
break;
case 11:
fi = l11(t);
break;
}
rhside[k][i] += pt[j][1] * (fi * fn[j][k]);
}
}
// solve
cholsl(curvMapStatic.edge_proj_matrix, ne, curvMapStatic.edge_p, rhside[k], rhside[k]);
for (i = 0; i < ne; i++)
result[i][k] = rhside[k][i];
}
}
void CurvMap::old_projection(Element* e, unsigned short order, double2* proj, double* old[2])
{
unsigned short mo2 = g_quad_2d_std.get_max_order(e->get_mode());
unsigned char np = g_quad_2d_std.get_num_points(mo2, e->get_mode());
unsigned short nvert = e->get_nvert();
for (unsigned int k = 0; k < nvert; k++) // loop over vertices
{
// vertex basis functions in all integration points
int index_v = ref_map_shapeset.get_vertex_index(k, e->get_mode());
ref_map_pss.set_active_shape(index_v);
ref_map_pss.set_quad_order(mo2, H2D_FN_VAL_0);
const double* vd = ref_map_pss.get_fn_values();
for (int m = 0; m < 2; m++) // part 0 or 1
for (int j = 0; j < np; j++)
old[m][j] += proj[k][m] * vd[j];
for (int ii = 0; ii < order - 1; ii++)
{
// edge basis functions in all integration points
int index_e = ref_map_shapeset.get_edge_index(k, 0, ii + 2, e->get_mode());
ref_map_pss.set_active_shape(index_e);
ref_map_pss.set_quad_order(mo2, H2D_FN_VAL_0);
const double* ed = ref_map_pss.get_fn_values();
for (int m = 0; m < 2; m++) //part 0 or 1
for (int j = 0; j < np; j++)
old[m][j] += proj[nvert + k * (order - 1) + ii][m] * ed[j];
}
}
}
void CurvMap::calc_bubble_projection(Element* e, Curve** curve, unsigned short order, double2* proj)
{
ref_map_pss.set_active_element(e);
unsigned short i, j, k;
unsigned short mo2 = g_quad_2d_std.get_max_order(e->get_mode());
unsigned char np = g_quad_2d_std.get_num_points(mo2, e->get_mode());
unsigned short qo = e->is_quad() ? H2D_MAKE_QUAD_ORDER(order, order) : order;
unsigned short nb = ref_map_shapeset.get_num_bubbles(qo, e->get_mode());
double2* fn = new double2[np];
memset(fn, 0, np * sizeof(double2));
double* rhside[2];
double* old[2];
for (i = 0; i < 2; i++)
{
rhside[i] = new double[nb];
old[i] = new double[np];
memset(rhside[i], 0, sizeof(double)* nb);
memset(old[i], 0, sizeof(double)* np);
}
// compute known part of projection (vertex and edge part)
old_projection(e, order, proj, old);
// fn values of both components of nonpolynomial function
double3* pt = g_quad_2d_std.get_points(mo2, e->get_mode());
for (j = 0; j < np; j++) // over all integration points
{
double2 a;
a[0] = ctm->m[0] * pt[j][0] + ctm->t[0];
a[1] = ctm->m[1] * pt[j][1] + ctm->t[1];
calc_ref_map(e, curve, a[0], a[1], fn[j]);
}
double2* result = proj + e->get_nvert() + e->get_nvert() * (order - 1);
for (k = 0; k < 2; k++)
{
for (i = 0; i < nb; i++) // loop over bubble basis functions
{
// bubble basis functions in all integration points
int index_i = ref_map_shapeset.get_bubble_indices(qo, e->get_mode())[i];
ref_map_pss.set_active_shape(index_i);
ref_map_pss.set_quad_order(mo2, H2D_FN_VAL_0);
const double *bfn = ref_map_pss.get_fn_values();
for (j = 0; j < np; j++) // over all integration points
rhside[k][i] += pt[j][2] * (bfn[j] * (fn[j][k] - old[k][j]));
}
// solve
if (e->get_mode() == HERMES_MODE_TRIANGLE)
cholsl(curvMapStatic.bubble_proj_matrix_tri, nb, curvMapStatic.bubble_tri_p, rhside[k], rhside[k]);
else
cholsl(curvMapStatic.bubble_proj_matrix_quad, nb, curvMapStatic.bubble_quad_p, rhside[k], rhside[k]);
for (i = 0; i < nb; i++)
result[i][k] = rhside[k][i];
}
for (i = 0; i < 2; i++)
{
delete[] rhside[i];
delete[] old[i];
}
delete[] fn;
}
void CurvMap::update_refmap_coeffs(Element* e)
{
ref_map_pss.set_quad_2d(&g_quad_2d_std);
ref_map_pss.set_active_element(e);
// allocate projection coefficients
unsigned char nvert = e->get_nvert();
unsigned char ne = order - 1;
unsigned short qo = e->is_quad() ? H2D_MAKE_QUAD_ORDER(order, order) : order;
unsigned short nb = ref_map_shapeset.get_num_bubbles(qo, e->get_mode());
this->nc = nvert + nvert*ne + nb;
this->coeffs = realloc_with_check<double2>(this->coeffs, nc);
// WARNING: do not change the format of the array 'coeffs'. If it changes,
// RefMap::set_active_element() has to be changed too.
Curve** curves;
if (toplevel == false)
{
ref_map_pss.set_active_element(e);
ref_map_pss.set_transform(this->sub_idx);
curves = parent->cm->curves;
}
else
{
ref_map_pss.reset_transform();
curves = e->cm->curves;
}
ctm = ref_map_pss.get_ctm();
      // calculation of new projection coefficients
// vertex part
for (unsigned char i = 0; i < nvert; i++)
{
coeffs[i][0] = e->vn[i]->x;
coeffs[i][1] = e->vn[i]->y;
}
if (!e->cm->toplevel)
e = e->cm->parent;
// edge part
for (unsigned char edge = 0; edge < nvert; edge++)
calc_edge_projection(e, edge, curves, order, coeffs);
//bubble part
calc_bubble_projection(e, curves, order, coeffs);
}
void CurvMap::get_mid_edge_points(Element* e, double2* pt, unsigned short n)
{
Curve** curves = this->curves;
Transformable tran;
tran.set_active_element(e);
if (toplevel == false)
{
tran.set_transform(this->sub_idx);
e = e->cm->parent;
curves = e->cm->curves;
}
ctm = tran.get_ctm();
double xi_1, xi_2;
for (unsigned short i = 0; i < n; i++)
{
xi_1 = ctm->m[0] * pt[i][0] + ctm->t[0];
xi_2 = ctm->m[1] * pt[i][1] + ctm->t[1];
calc_ref_map(e, curves, xi_1, xi_2, pt[i]);
}
}
CurvMap* CurvMap::create_son_curv_map(Element* e, int son)
{
// if the top three bits of part are nonzero, we would overflow
// -- make the element non-curvilinear
if (e->cm->sub_idx & 0xe000000000000000ULL)
return nullptr;
// if the parent element is already almost straight-edged,
// the son will be even more straight-edged
if (e->iro_cache == 0)
return nullptr;
CurvMap* cm = new CurvMap;
if (e->cm->toplevel == false)
{
cm->parent = e->cm->parent;
cm->sub_idx = (e->cm->sub_idx << 3) + son + 1;
}
else
{
cm->parent = e;
cm->sub_idx = (son + 1);
}
cm->toplevel = false;
cm->order = 4;
return cm;
}
}
}
<|file_name|>test_designate_client.py<|end_file_name|>
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import mock
import six
from designateclient import exceptions as designate_exceptions
from designateclient import v1 as designate_client
from heat.common import exception as heat_exception
from heat.engine.clients.os import designate as client
from heat.tests import common
class DesignateDomainConstraintTest(common.HeatTestCase):
def test_expected_exceptions(self):
self.assertEqual((heat_exception.EntityNotFound,),
client.DesignateDomainConstraint.expected_exceptions,
"DesignateDomainConstraint expected exceptions error")
def test_constrain(self):
constrain = client.DesignateDomainConstraint()
client_mock = mock.MagicMock()
client_plugin_mock = mock.MagicMock()
client_plugin_mock.get_domain_id.return_value = None
client_mock.client_plugin.return_value = client_plugin_mock
self.assertIsNone(constrain.validate_with_client(client_mock,
'domain_1'))
client_plugin_mock.get_domain_id.assert_called_once_with('domain_1')
class DesignateClientPluginTest(common.HeatTestCase):
@mock.patch.object(designate_client, 'Client')
@mock.patch.object(client.DesignateClientPlugin, '_get_client_args')
def test_client(self,
get_client_args,
client_designate):
args = dict(
auth_url='auth_url',
project_id='project_id',
token=lambda: '',
os_endpoint='os_endpoint',
cacert='cacert',
            insecure='insecure'
        )
get_client_args.return_value = args
client_plugin = client.DesignateClientPlugin(
context=mock.MagicMock()
)
client_plugin.client()
# Make sure the right args are created
get_client_args.assert_called_once_with(
service_name='designate',
service_type='dns'
)
# Make sure proper client is created with expected args
client_designate.assert_called_once_with(
auth_url='auth_url',
project_id='project_id',
token='',
endpoint='os_endpoint',
cacert='cacert',
insecure='insecure'
)
class DesignateClientPluginDomainTest(common.HeatTestCase):
sample_uuid = '477e8273-60a7-4c41-b683-fdb0bc7cd152'
sample_name = 'test-domain.com'
def _get_mock_domain(self):
domain = mock.MagicMock()
domain.id = self.sample_uuid
domain.name = self.sample_name
return domain
def setUp(self):
super(DesignateClientPluginDomainTest, self).setUp()
self._client = mock.MagicMock()
self.client_plugin = client.DesignateClientPlugin(
context=mock.MagicMock()
)
@mock.patch.object(client.DesignateClientPlugin, 'client')
def test_get_domain_id(self, client_designate):
self._client.domains.get.return_value = self._get_mock_domain()
client_designate.return_value = self._client
self.assertEqual(self.sample_uuid,
self.client_plugin.get_domain_id(self.sample_uuid))
self._client.domains.get.assert_called_once_with(
self.sample_uuid)
@mock.patch.object(client.DesignateClientPlugin, 'client')
def test_get_domain_id_not_found(self, client_designate):
self._client.domains.get.side_effect = (designate_exceptions
.NotFound)
client_designate.return_value = self._client
ex = self.assertRaises(heat_exception.EntityNotFound,
self.client_plugin.get_domain_id,
self.sample_uuid)
msg = ("The Designate Domain (%(name)s) could not be found." %
{'name': self.sample_uuid})
self.assertEqual(msg, six.text_type(ex))
self._client.domains.get.assert_called_once_with(
self.sample_uuid)
@mock.patch.object(client.DesignateClientPlugin, 'client')
def test_get_domain_id_by_name(self, client_designate):
self._client.domains.get.side_effect = (designate_exceptions
.NotFound)
self._client.domains.list.return_value = [self._get_mock_domain()]
client_designate.return_value = self._client
self.assertEqual(self.sample_uuid,
self.client_plugin.get_domain_id(self.sample_name))
self._client.domains.get.assert_called_once_with(
self.sample_name)
self._client.domains.list.assert_called_once_with()
@mock.patch.object(client.DesignateClientPlugin, 'client')
def test_get_domain_id_by_name_not_found(self, client_designate):
self._client.domains.get.side_effect = (designate_exceptions
.NotFound)
self._client.domains.list.return_value = []
client_designate.return_value = self._client
ex = self.assertRaises(heat_exception.EntityNotFound,
self.client_plugin.get_domain_id,
self.sample_name)
msg = ("The Designate Domain (%(name)s) could not be found." %
{'name': self.sample_name})
self.assertEqual(msg, six.text_type(ex))
self._client.domains.get.assert_called_once_with(
self.sample_name)
self._client.domains.list.assert_called_once_with()
@mock.patch.object(client.DesignateClientPlugin, 'client')
@mock.patch('designateclient.v1.domains.Domain')
def test_domain_create(self, mock_domain, client_designate):
self._client.domains.create.return_value = None
client_designate.return_value = self._client
domain = dict(
name='test-domain.com',
description='updated description',
ttl=4200,
email='[email protected]'
)
mock_sample_domain = mock.Mock()
mock_domain.return_value = mock_sample_domain
self.client_plugin.domain_create(**domain)
# Make sure domain entity is created with right arguments
mock_domain.assert_called_once_with(**domain)
self._client.domains.create.assert_called_once_with(
mock_sample_domain)
@mock.patch.object(client.DesignateClientPlugin, 'client')
def test_domain_update(self, client_designate):
self._client.domains.update.return_value = None
mock_domain = self._get_mock_domain()
self._client.domains.get.return_value = mock_domain
client_designate.return_value = self._client
domain = dict(
id='sample-id',
description='updated description',
ttl=4200,
email='[email protected]'
)
self.client_plugin.domain_update(**domain)
self._client.domains.get.assert_called_once_with(
mock_domain.id)
for key in domain.keys():
setattr(mock_domain, key, domain[key])
self._client.domains.update.assert_called_once_with(
mock_domain)
class DesignateClientPluginRecordTest(common.HeatTestCase):
sample_uuid = '477e8273-60a7-4c41-b683-fdb0bc7cd152'
sample_domain_id = '477e8273-60a7-4c41-b683-fdb0bc7cd153'
def _get_mock_record(self):
record = mock.MagicMock()
record.id = self.sample_uuid
record.domain_id = self.sample_domain_id
return record
def setUp(self):
super(DesignateClientPluginRecordTest, self).setUp()
self._client = mock.MagicMock()
self.client_plugin = client.DesignateClientPlugin(
context=mock.MagicMock()
)
self.client_plugin.get_domain_id = mock.Mock(
return_value=self.sample_domain_id)
@mock.patch.object(client.DesignateClientPlugin, 'client')
@mock.patch('designateclient.v1.records.Record')
def test_record_create(self, mock_record, client_designate):
self._client.records.create.return_value = None
client_designate.return_value = self._client
record = dict(
name='test-record.com',
description='updated description',
ttl=4200,
type='',
priority=1,
data='1.1.1.1',
domain=self.sample_domain_id
)
mock_sample_record = mock.Mock()
mock_record.return_value = mock_sample_record
self.client_plugin.record_create(**record)
# Make sure record entity is created with right arguments
domain_id = record.pop('domain')
mock_record.assert_called_once_with(**record)
self._client.records.create.assert_called_once_with(
domain_id,
mock_sample_record)
@mock.patch.object(client.DesignateClientPlugin, 'client')
@mock.patch('designateclient.v1.records.Record')
def test_record_update(self, mock_record, client_designate):
self._client.records.update.return_value = None
mock_record = self._get_mock_record()
self._client.records.get.return_value = mock_record
client_designate.return_value = self._client
record = dict(
id=self.sample_uuid,
name='test-record.com',
description='updated description',
ttl=4200,
type='',
priority=1,
data='1.1.1.1',
domain=self.sample_domain_id
)
self.client_plugin.record_update(**record)
self._client.records.get.assert_called_once_with(
self.sample_domain_id,
self.sample_uuid)
for key in record.keys():
setattr(mock_record, key, record[key])
self._client.records.update.assert_called_once_with(
self.sample_domain_id,
mock_record)
@mock.patch.object(client.DesignateClientPlugin, 'client')
@mock.patch('designateclient.v1.records.Record')
def test_record_delete(self, mock_record, client_designate):
self._client.records.delete.return_value = None
client_designate.return_value = self._client
record = dict(
id=self.sample_uuid,
domain=self.sample_domain_id
)
self.client_plugin.record_delete(**record)
self._client.records.delete.assert_called_once_with(
self.sample_domain_id,
self.sample_uuid)
@mock.patch.object(client.DesignateClientPlugin, 'client')
@mock.patch('designateclient.v1.records.Record')
def test_record_show(self, mock_record, client_designate):
self._client.records.get.return_value = None
client_designate.return_value = self._client
record = dict(
id=self.sample_uuid,
domain=self.sample_domain_id
)
self.client_plugin.record_show(**record)
self._client.records.get.assert_called_once_with(
self.sample_domain_id,
            self.sample_uuid)
<|file_name|>FormatIndentDecrease.js<|end_file_name|>
import createSvgIcon from './utils/createSvgIcon';
import { jsx as _jsx } from "react/jsx-runtime";
export default createSvgIcon( /*#__PURE__*/_jsx("path", {
d: "M11 17h10v-2H11v2zm-8-5 4 4V8l-4 4zm0 9h18v-2H3v2zM3 3v2h18V3H3zm8 6h10V7H11v2zm0 4h10v-2H11v2z"<|fim▁hole|>}), 'FormatIndentDecrease');<|fim▁end|>
<|file_name|>config_parser.py<|end_file_name|>
import configparser
CONFIG_PATH = 'accounting.conf'
class MyConfigParser():
def __init__(self, config_path=CONFIG_PATH):
self.config = configparser.ConfigParser(allow_no_value=True)
self.config.read(config_path)
def config_section_map(self, section):
""" returns all configuration options in 'section' in a dict with
key: config_option and value: the read value in the file"""
dict1 = {}
options = self.config.options(section)
for option in options:
try:
dict1[option] = self.config.get(section, option)
if dict1[option] == -1:
                    print("skip: %s" % option)
            except Exception:
dict1[option] = None
return dict1
    # getint(section, option)
    # getboolean(section, option)
<|file_name|>px.rs<|end_file_name|>
use crate::formats;
pub use rgb::alt::Gray;
pub use rgb::RGB;
pub use rgb::RGBA;
/// Use [`Pixel`](crate::Pixel) presets to specify pixel format.
///
/// The trait represents a temporary object that adds pixels together.
pub trait PixelFormat {
/// Pixel type in the source image
type InputPixel: Copy;
/// Pixel type in the destination image (usually the same as Input)
type OutputPixel;
/// Temporary struct for the pixel in floating-point
type Accumulator: Copy;
<|fim▁hole|> /// Add bunch of accumulated pixels with a weight (second axis)
fn add_acc(acc: &mut Self::Accumulator, inp: Self::Accumulator, coeff: f32);
/// Finalize, convert to output pixel format
fn into_pixel(&self, acc: Self::Accumulator) -> Self::OutputPixel;
}
impl<F: ToFloat, T: ToFloat> PixelFormat for formats::Rgb<T, F> {
type InputPixel = RGB<F>;
type OutputPixel = RGB<T>;
type Accumulator = RGB<f32>;
#[inline(always)]
fn new() -> Self::Accumulator {
RGB::new(0.,0.,0.)
}
#[inline(always)]
fn add(&self, acc: &mut Self::Accumulator, inp: RGB<F>, coeff: f32) {
acc.r += inp.r.to_float() * coeff;
acc.g += inp.g.to_float() * coeff;
acc.b += inp.b.to_float() * coeff;
}
#[inline(always)]
fn add_acc(acc: &mut Self::Accumulator, inp: Self::Accumulator, coeff: f32) {
acc.r += inp.r * coeff;
acc.g += inp.g * coeff;
acc.b += inp.b * coeff;
}
#[inline(always)]
fn into_pixel(&self, acc: Self::Accumulator) -> RGB<T> {
RGB {
r: T::from_float(acc.r),
g: T::from_float(acc.g),
b: T::from_float(acc.b),
}
}
}
impl<F: ToFloat, T: ToFloat> PixelFormat for formats::Rgba<T, F> {
type InputPixel = RGBA<F>;
type OutputPixel = RGBA<T>;
type Accumulator = RGBA<f32>;
#[inline(always)]
fn new() -> Self::Accumulator {
RGBA::new(0.,0.,0.,0.)
}
#[inline(always)]
fn add(&self, acc: &mut Self::Accumulator, inp: RGBA<F>, coeff: f32) {
acc.r += inp.r.to_float() * coeff;
acc.g += inp.g.to_float() * coeff;
acc.b += inp.b.to_float() * coeff;
acc.a += inp.a.to_float() * coeff;
}
#[inline(always)]
fn add_acc(acc: &mut Self::Accumulator, inp: Self::Accumulator, coeff: f32) {
acc.r += inp.r * coeff;
acc.g += inp.g * coeff;
acc.b += inp.b * coeff;
acc.a += inp.a * coeff;
}
#[inline(always)]
fn into_pixel(&self, acc: Self::Accumulator) -> RGBA<T> {
RGBA {
r: T::from_float(acc.r),
g: T::from_float(acc.g),
b: T::from_float(acc.b),
a: T::from_float(acc.a),
}
}
}
impl<F: ToFloat, T: ToFloat> PixelFormat for formats::RgbaPremultiply<T, F> {
type InputPixel = RGBA<F>;
type OutputPixel = RGBA<T>;
type Accumulator = RGBA<f32>;
#[inline(always)]
fn new() -> Self::Accumulator {
RGBA::new(0.,0.,0.,0.)
}
#[inline(always)]
fn add(&self, acc: &mut Self::Accumulator, inp: RGBA<F>, coeff: f32) {
let a_coeff = inp.a.to_float() * coeff;
acc.r += inp.r.to_float() * a_coeff;
acc.g += inp.g.to_float() * a_coeff;
acc.b += inp.b.to_float() * a_coeff;
acc.a += a_coeff;
}
#[inline(always)]
fn add_acc(acc: &mut Self::Accumulator, inp: Self::Accumulator, coeff: f32) {
acc.r += inp.r * coeff;
acc.g += inp.g * coeff;
acc.b += inp.b * coeff;
acc.a += inp.a * coeff;
}
#[inline(always)]
fn into_pixel(&self, acc: Self::Accumulator) -> RGBA<T> {
if acc.a > 0. {
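            // un-premultiply: the color channels were accumulated weighted
            // by alpha, so scale them back by 1/alpha before output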
let inv = 1.0 / acc.a;
RGBA {
r: T::from_float(acc.r * inv),
g: T::from_float(acc.g * inv),
b: T::from_float(acc.b * inv),
a: T::from_float(acc.a),
}
} else {
let zero = T::from_float(0.);
RGBA::new(zero, zero, zero, zero)
}
}
}
impl<F: ToFloat, T: ToFloat> PixelFormat for formats::Gray<F, T> {
type InputPixel = Gray<F>;
type OutputPixel = Gray<T>;
type Accumulator = Gray<f32>;
#[inline(always)]
fn new() -> Self::Accumulator {
Gray::new(0.)
}
#[inline(always)]
fn add(&self, acc: &mut Self::Accumulator, inp: Gray<F>, coeff: f32) {
acc.0 += inp.0.to_float() * coeff;
}
#[inline(always)]
fn add_acc(acc: &mut Self::Accumulator, inp: Self::Accumulator, coeff: f32) {
acc.0 += inp.0 * coeff;
}
#[inline(always)]
fn into_pixel(&self, acc: Self::Accumulator) -> Gray<T> {
Gray::new(T::from_float(acc.0))
}
}
use self::f::ToFloat;
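// Illustrative sketch (not part of the original crate): a weighted blend of
// two input pixels expressed through the accumulator protocol above; the
// `blend` name and signature are assumptions for demonstration only.
#[allow(dead_code)]
fn blend<P: PixelFormat>(fmt: &P, a: P::InputPixel, b: P::InputPixel, t: f32) -> P::OutputPixel {
    // start from a zeroed floating-point accumulator
    let mut acc = P::new();
    // first axis: add each input pixel with its weight
    fmt.add(&mut acc, a, 1.0 - t);
    fmt.add(&mut acc, b, t);
    // finalize back into the output pixel type
    fmt.into_pixel(acc)
}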
mod f {
/// Internal, please don't use
pub trait ToFloat: Sized + Copy + 'static {
fn to_float(self) -> f32;
fn from_float(f: f32) -> Self;
}
impl ToFloat for u8 {
#[inline(always)]
fn to_float(self) -> f32 {
self as f32
}
#[inline(always)]
fn from_float(f: f32) -> Self {
unsafe {
(0f32).max(f.round()).min(255.).to_int_unchecked()
}
}
}
impl ToFloat for u16 {
#[inline(always)]
fn to_float(self) -> f32 {
self as f32
}
#[inline(always)]
fn from_float(f: f32) -> Self {
unsafe {
(0f32).max(f.round()).min(65535.).to_int_unchecked()
}
}
}
impl ToFloat for f32 {
#[inline(always)]
fn to_float(self) -> f32 {
self
}
#[inline(always)]
fn from_float(f: f32) -> Self {
f
}
}
impl ToFloat for f64 {
#[inline(always)]
fn to_float(self) -> f32 {
self as f32
}
#[inline(always)]
fn from_float(f: f32) -> Self {
f as f64
}
}
// Inherent methods are preferred over traits, so this won't be used in newer rust
trait OldRustWorkaround<T> {
unsafe fn to_int_unchecked(self) -> T;
}
impl OldRustWorkaround<u16> for f32 {
unsafe fn to_int_unchecked(self) -> u16 { self as u16 }
}
impl OldRustWorkaround<u8> for f32 {
unsafe fn to_int_unchecked(self) -> u8 { self as u8 }
}
}
<|fim▁end|>
|
/// Create new floating-point pixel
fn new() -> Self::Accumulator;
/// Add new pixel with a given weight (first axis)
fn add(&self, acc: &mut Self::Accumulator, inp: Self::InputPixel, coeff: f32);
|
<|file_name|>__init__.py<|end_file_name|><|fim▁begin|><|fim▁hole|><|fim▁end|>
|
import purchase_requisition
|
<|file_name|>ParallelBeamBlobKernelProjector2D.cpp<|end_file_name|><|fim▁begin|>/*
-----------------------------------------------------------------------
Copyright: 2010-2018, imec Vision Lab, University of Antwerp
2014-2018, CWI, Amsterdam
Contact: [email protected]
Website: http://www.astra-toolbox.com/
This file is part of the ASTRA Toolbox.
The ASTRA Toolbox is free software: you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation, either version 3 of the License, or
(at your option) any later version.
The ASTRA Toolbox is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
You should have received a copy of the GNU General Public License
along with the ASTRA Toolbox. If not, see <http://www.gnu.org/licenses/>.
-----------------------------------------------------------------------
*/
#include "astra/ParallelBeamBlobKernelProjector2D.h"
#include <cmath>
#include <algorithm>
#include "astra/DataProjectorPolicies.h"
using namespace std;
using namespace astra;
#include "astra/ParallelBeamBlobKernelProjector2D.inl"
// type of the projector, needed to register with CProjectorFactory
std::string CParallelBeamBlobKernelProjector2D::type = "blob";
//----------------------------------------------------------------------------------------
// default constructor
CParallelBeamBlobKernelProjector2D::CParallelBeamBlobKernelProjector2D()
{
_clear();
}
//----------------------------------------------------------------------------------------
// constructor
CParallelBeamBlobKernelProjector2D::CParallelBeamBlobKernelProjector2D(CParallelProjectionGeometry2D* _pProjectionGeometry,
CVolumeGeometry2D* _pReconstructionGeometry,
float32 _fBlobSize,
float32 _fBlobSampleRate,
int _iBlobSampleCount,
float32* _pfBlobValues)
{
_clear();
initialize(_pProjectionGeometry, _pReconstructionGeometry, _fBlobSize, _fBlobSampleRate, _iBlobSampleCount, _pfBlobValues);
}
//----------------------------------------------------------------------------------------
// destructor
CParallelBeamBlobKernelProjector2D::~CParallelBeamBlobKernelProjector2D()
{
clear();
}
//---------------------------------------------------------------------------------------
// Clear - Constructors
void CParallelBeamBlobKernelProjector2D::_clear()
{
CProjector2D::_clear();
m_pfBlobValues = NULL;
m_iBlobSampleCount = 0;
m_fBlobSize = 0;
m_fBlobSampleRate = 0;
m_bIsInitialized = false;
}
//---------------------------------------------------------------------------------------
// Clear - Public
void CParallelBeamBlobKernelProjector2D::clear()
{
CProjector2D::clear();
if (m_pfBlobValues) {
delete[] m_pfBlobValues;
m_pfBlobValues = NULL;
}
m_iBlobSampleCount = 0;
m_fBlobSize = 0;
m_fBlobSampleRate = 0;
m_bIsInitialized = false;
}
<|fim▁hole|>//---------------------------------------------------------------------------------------
// Check
bool CParallelBeamBlobKernelProjector2D::_check()
{
// check base class
ASTRA_CONFIG_CHECK(CProjector2D::_check(), "ParallelBeamBlobKernelProjector2D", "Error in Projector2D initialization");
ASTRA_CONFIG_CHECK(dynamic_cast<CParallelProjectionGeometry2D*>(m_pProjectionGeometry) || dynamic_cast<CParallelVecProjectionGeometry2D*>(m_pProjectionGeometry), "ParallelBeamBlobKernelProjector2D", "Unsupported projection geometry");
ASTRA_CONFIG_CHECK(m_iBlobSampleCount > 0, "ParallelBeamBlobKernelProjector2D", "m_iBlobSampleCount should be strictly positive.");
	ASTRA_CONFIG_CHECK(m_pfBlobValues, "ParallelBeamBlobKernelProjector2D", "Blob kernel values not initialized.");
// success
return true;
}
//---------------------------------------------------------------------------------------
// Initialize, use a Config object
bool CParallelBeamBlobKernelProjector2D::initialize(const Config& _cfg)
{
ASTRA_ASSERT(_cfg.self);
// if already initialized, clear first
if (m_bIsInitialized) {
clear();
}
// initialization of parent class
if (!CProjector2D::initialize(_cfg)) {
return false;
}
// required: Kernel
XMLNode node = _cfg.self.getSingleNode("Kernel");
ASTRA_CONFIG_CHECK(node, "BlobProjector", "No Kernel tag specified.");
{
// Required: KernelSize
XMLNode node2 = node.getSingleNode("KernelSize");
ASTRA_CONFIG_CHECK(node2, "BlobProjector", "No Kernel/KernelSize tag specified.");
m_fBlobSize = node2.getContentNumerical();
// Required: SampleRate
node2 = node.getSingleNode("SampleRate");
ASTRA_CONFIG_CHECK(node2, "BlobProjector", "No Kernel/SampleRate tag specified.");
m_fBlobSampleRate = node2.getContentNumerical();
// Required: SampleCount
node2 = node.getSingleNode("SampleCount");
ASTRA_CONFIG_CHECK(node2, "BlobProjector", "No Kernel/SampleCount tag specified.");
m_iBlobSampleCount = node2.getContentInt();
// Required: KernelValues
node2 = node.getSingleNode("KernelValues");
ASTRA_CONFIG_CHECK(node2, "BlobProjector", "No Kernel/KernelValues tag specified.");
vector<float32> values = node2.getContentNumericalArray();
ASTRA_CONFIG_CHECK(values.size() == (unsigned int)m_iBlobSampleCount, "BlobProjector", "Number of specified values doesn't match SampleCount.");
m_pfBlobValues = new float32[m_iBlobSampleCount];
for (int i = 0; i < m_iBlobSampleCount; i++) {
m_pfBlobValues[i] = values[i];
}
}
// success
m_bIsInitialized = _check();
return m_bIsInitialized;
}
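// An illustrative configuration fragment matching the parsing above
// (the tag names are the ones read by this method; the numeric values are
// made-up examples, with SampleCount equal to the number of KernelValues):
//
//   <Kernel>
//     <KernelSize>2.0</KernelSize>
//     <SampleRate>0.01</SampleRate>
//     <SampleCount>3</SampleCount>
//     <KernelValues>1.0 0.5 0.1</KernelValues>
//   </Kernel>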
//----------------------------------------------------------------------------------------
// initialize
bool CParallelBeamBlobKernelProjector2D::initialize(CParallelProjectionGeometry2D* _pProjectionGeometry,
CVolumeGeometry2D* _pVolumeGeometry,
float32 _fBlobSize,
float32 _fBlobSampleRate,
int _iBlobSampleCount,
float32* _pfBlobValues)
{
// if already initialized, clear first
if (m_bIsInitialized) {
clear();
}
ASTRA_CONFIG_CHECK(_pProjectionGeometry, "BlobProjector", "Invalid ProjectionGeometry Object");
	ASTRA_CONFIG_CHECK(_pVolumeGeometry, "BlobProjector", "Invalid VolumeGeometry Object");
m_pProjectionGeometry = _pProjectionGeometry->clone();
m_pVolumeGeometry = _pVolumeGeometry->clone();
m_fBlobSize = _fBlobSize;
m_fBlobSampleRate = _fBlobSampleRate;
m_iBlobSampleCount = _iBlobSampleCount;
m_pfBlobValues = new float32[_iBlobSampleCount];
	for (int i = 0; i < _iBlobSampleCount; i++) {
m_pfBlobValues[i] = _pfBlobValues[i];
}
// success
m_bIsInitialized = _check();
return m_bIsInitialized;
}
//----------------------------------------------------------------------------------------
// Get maximum amount of weights on a single ray
int CParallelBeamBlobKernelProjector2D::getProjectionWeightsCount(int _iProjectionIndex)
{
int maxDim = max(m_pVolumeGeometry->getGridRowCount(), m_pVolumeGeometry->getGridColCount());
return (int)(maxDim * 2 * (m_fBlobSize+2) + 1);
}
//----------------------------------------------------------------------------------------
// Single Ray Weights
void CParallelBeamBlobKernelProjector2D::computeSingleRayWeights(int _iProjectionIndex,
int _iDetectorIndex,
SPixelWeight* _pWeightedPixels,
int _iMaxPixelCount,
int& _iStoredPixelCount)
{
ASTRA_ASSERT(m_bIsInitialized);
StorePixelWeightsPolicy p(_pWeightedPixels, _iMaxPixelCount);
projectSingleRay(_iProjectionIndex, _iDetectorIndex, p);
_iStoredPixelCount = p.getStoredPixelCount();
}<|fim▁end|>
| |
<|file_name|>canvas_data.rs<|end_file_name|><|fim▁begin|>/* This Source Code Form is subject to the terms of the Mozilla Public
* License, v. 2.0. If a copy of the MPL was not distributed with this
* file, You can obtain one at http://mozilla.org/MPL/2.0/. */
use azure::azure::AzFloat;
use azure::azure_hl::{AntialiasMode, CapStyle, CompositionOp, JoinStyle};
use azure::azure_hl::{BackendType, DrawOptions, DrawTarget, Pattern, StrokeOptions, SurfaceFormat};
use azure::azure_hl::{Color, ColorPattern, DrawSurfaceOptions, Filter, PathBuilder};
use azure::azure_hl::{ExtendMode, GradientStop, LinearGradientPattern, RadialGradientPattern};
use azure::azure_hl::SurfacePattern;
use canvas_traits::canvas::*;
use cssparser::RGBA;
use euclid::{Transform2D, Point2D, Vector2D, Rect, Size2D};
use ipc_channel::ipc::IpcSender;
use num_traits::ToPrimitive;
use serde_bytes::ByteBuf;
use std::mem;
use std::sync::Arc;
use webrender_api;
pub struct CanvasData<'a> {
drawtarget: DrawTarget,
/// TODO(pcwalton): Support multiple paths.
path_builder: PathBuilder,
state: CanvasPaintState<'a>,
saved_states: Vec<CanvasPaintState<'a>>,
webrender_api: webrender_api::RenderApi,
image_key: Option<webrender_api::ImageKey>,
/// An old webrender image key that can be deleted when the next epoch ends.
old_image_key: Option<webrender_api::ImageKey>,
/// An old webrender image key that can be deleted when the current epoch ends.
very_old_image_key: Option<webrender_api::ImageKey>,
pub canvas_id: CanvasId,
}
impl<'a> CanvasData<'a> {
pub fn new(
size: Size2D<i32>,
webrender_api_sender: webrender_api::RenderApiSender,
antialias: AntialiasMode,
canvas_id: CanvasId
) -> CanvasData<'a> {
let draw_target = CanvasData::create(size);
let path_builder = draw_target.create_path_builder();
let webrender_api = webrender_api_sender.create_api();
CanvasData {
drawtarget: draw_target,
path_builder: path_builder,
state: CanvasPaintState::new(antialias),
saved_states: vec![],
webrender_api: webrender_api,
image_key: None,
old_image_key: None,
very_old_image_key: None,
canvas_id: canvas_id,
}
}
pub fn draw_image(
&self,
image_data: Vec<u8>,
image_size: Size2D<f64>,
dest_rect: Rect<f64>,
source_rect: Rect<f64>,
smoothing_enabled: bool
) {
// We round up the floating pixel values to draw the pixels
let source_rect = source_rect.ceil();
// It discards the extra pixels (if any) that won't be painted
let image_data = crop_image(image_data, image_size, source_rect);
let writer = |draw_target: &DrawTarget| {
write_image(&draw_target, image_data, source_rect.size, dest_rect,
smoothing_enabled, self.state.draw_options.composition,
self.state.draw_options.alpha);
};
if self.need_to_draw_shadow() {
let rect = Rect::new(Point2D::new(dest_rect.origin.x as f32, dest_rect.origin.y as f32),
Size2D::new(dest_rect.size.width as f32, dest_rect.size.height as f32));
self.draw_with_shadow(&rect, writer);
} else {
writer(&self.drawtarget);
}
}
pub fn draw_image_self(
&self,
image_size: Size2D<f64>,
dest_rect: Rect<f64>,<|fim▁hole|> smoothing_enabled: bool
) {
// Reads pixels from source image
// In this case source and target are the same canvas
let image_data = self.read_pixels(source_rect.to_i32(), image_size);
// The dimensions of image_data are source_rect.size
self.draw_image(image_data, source_rect.size, dest_rect, source_rect, smoothing_enabled);
}
pub fn save_context_state(&mut self) {
self.saved_states.push(self.state.clone());
}
pub fn restore_context_state(&mut self) {
if let Some(state) = self.saved_states.pop() {
mem::replace(&mut self.state, state);
self.drawtarget.set_transform(&self.state.transform);
self.drawtarget.pop_clip();
}
}
pub fn fill_text(&self, text: String, x: f64, y: f64, max_width: Option<f64>) {
error!("Unimplemented canvas2d.fillText. Values received: {}, {}, {}, {:?}.", text, x, y, max_width);
}
pub fn fill_rect(&self, rect: &Rect<f32>) {
if is_zero_size_gradient(&self.state.fill_style) {
return; // Paint nothing if gradient size is zero.
}
let draw_rect = Rect::new(rect.origin,
match self.state.fill_style {
Pattern::Surface(ref surface) => {
let surface_size = surface.size();
match (surface.repeat_x, surface.repeat_y) {
(true, true) => rect.size,
(true, false) => Size2D::new(rect.size.width, surface_size.height as f32),
(false, true) => Size2D::new(surface_size.width as f32, rect.size.height),
(false, false) => Size2D::new(surface_size.width as f32, surface_size.height as f32),
}
},
_ => rect.size,
}
);
if self.need_to_draw_shadow() {
self.draw_with_shadow(&draw_rect, |new_draw_target: &DrawTarget| {
new_draw_target.fill_rect(&draw_rect, self.state.fill_style.to_pattern_ref(),
Some(&self.state.draw_options));
});
} else {
self.drawtarget.fill_rect(&draw_rect, self.state.fill_style.to_pattern_ref(),
Some(&self.state.draw_options));
}
}
pub fn clear_rect(&self, rect: &Rect<f32>) {
self.drawtarget.clear_rect(rect);
}
pub fn stroke_rect(&self, rect: &Rect<f32>) {
if is_zero_size_gradient(&self.state.stroke_style) {
return; // Paint nothing if gradient size is zero.
}
if self.need_to_draw_shadow() {
self.draw_with_shadow(&rect, |new_draw_target: &DrawTarget| {
new_draw_target.stroke_rect(rect, self.state.stroke_style.to_pattern_ref(),
&self.state.stroke_opts, &self.state.draw_options);
});
} else if rect.size.width == 0. || rect.size.height == 0. {
let cap = match self.state.stroke_opts.line_join {
JoinStyle::Round => CapStyle::Round,
_ => CapStyle::Butt
};
let stroke_opts =
StrokeOptions::new(self.state.stroke_opts.line_width,
self.state.stroke_opts.line_join,
cap,
self.state.stroke_opts.miter_limit,
self.state.stroke_opts.mDashPattern);
self.drawtarget.stroke_line(rect.origin, rect.bottom_right(),
self.state.stroke_style.to_pattern_ref(),
&stroke_opts, &self.state.draw_options);
} else {
self.drawtarget.stroke_rect(rect, self.state.stroke_style.to_pattern_ref(),
&self.state.stroke_opts, &self.state.draw_options);
}
}
pub fn begin_path(&mut self) {
self.path_builder = self.drawtarget.create_path_builder()
}
pub fn close_path(&self) {
self.path_builder.close()
}
pub fn fill(&self) {
if is_zero_size_gradient(&self.state.fill_style) {
return; // Paint nothing if gradient size is zero.
}
self.drawtarget.fill(&self.path_builder.finish(),
self.state.fill_style.to_pattern_ref(),
&self.state.draw_options);
}
pub fn stroke(&self) {
if is_zero_size_gradient(&self.state.stroke_style) {
return; // Paint nothing if gradient size is zero.
}
self.drawtarget.stroke(&self.path_builder.finish(),
self.state.stroke_style.to_pattern_ref(),
&self.state.stroke_opts,
&self.state.draw_options);
}
pub fn clip(&self) {
self.drawtarget.push_clip(&self.path_builder.finish());
}
pub fn is_point_in_path(
&mut self,
x: f64,
y: f64,
_fill_rule: FillRule,
chan: IpcSender<bool>
) {
let path = self.path_builder.finish();
let result = path.contains_point(x, y, &self.state.transform);
self.path_builder = path.copy_to_builder();
chan.send(result).unwrap();
}
pub fn move_to(&self, point: &Point2D<AzFloat>) {
self.path_builder.move_to(*point)
}
pub fn line_to(&self, point: &Point2D<AzFloat>) {
self.path_builder.line_to(*point)
}
pub fn rect(&self, rect: &Rect<f32>) {
self.path_builder.move_to(Point2D::new(rect.origin.x, rect.origin.y));
self.path_builder.line_to(Point2D::new(rect.origin.x + rect.size.width, rect.origin.y));
self.path_builder.line_to(Point2D::new(rect.origin.x + rect.size.width,
rect.origin.y + rect.size.height));
self.path_builder.line_to(Point2D::new(rect.origin.x, rect.origin.y + rect.size.height));
self.path_builder.close();
}
pub fn quadratic_curve_to(
&self,
cp: &Point2D<AzFloat>,
endpoint: &Point2D<AzFloat>
) {
self.path_builder.quadratic_curve_to(cp, endpoint)
}
pub fn bezier_curve_to(
&self,
cp1: &Point2D<AzFloat>,
cp2: &Point2D<AzFloat>,
endpoint: &Point2D<AzFloat>
) {
self.path_builder.bezier_curve_to(cp1, cp2, endpoint)
}
pub fn arc(
&self,
center: &Point2D<AzFloat>,
radius: AzFloat,
start_angle: AzFloat,
end_angle: AzFloat,
ccw: bool
) {
self.path_builder.arc(*center, radius, start_angle, end_angle, ccw)
}
pub fn arc_to(
&self,
cp1: &Point2D<AzFloat>,
cp2: &Point2D<AzFloat>,
radius: AzFloat
) {
let cp0 = self.path_builder.get_current_point();
let cp1 = *cp1;
let cp2 = *cp2;
if (cp0.x == cp1.x && cp0.y == cp1.y) || cp1 == cp2 || radius == 0.0 {
self.line_to(&cp1);
return;
}
// if all three control points lie on a single straight line,
// connect the first two by a straight line
let direction = (cp2.x - cp1.x) * (cp0.y - cp1.y) + (cp2.y - cp1.y) * (cp1.x - cp0.x);
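        // (this is the z-component of the cross product (cp2 - cp1) × (cp0 - cp1);
        // zero means the three control points are collinear)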
if direction == 0.0 {
self.line_to(&cp1);
return;
}
// otherwise, draw the Arc
let a2 = (cp0.x - cp1.x).powi(2) + (cp0.y - cp1.y).powi(2);
let b2 = (cp1.x - cp2.x).powi(2) + (cp1.y - cp2.y).powi(2);
let d = {
let c2 = (cp0.x - cp2.x).powi(2) + (cp0.y - cp2.y).powi(2);
let cosx = (a2 + b2 - c2) / (2.0 * (a2 * b2).sqrt());
let sinx = (1.0 - cosx.powi(2)).sqrt();
radius / ((1.0 - cosx) / sinx)
};
// first tangent point
let anx = (cp1.x - cp0.x) / a2.sqrt();
let any = (cp1.y - cp0.y) / a2.sqrt();
let tp1 = Point2D::new(cp1.x - anx * d, cp1.y - any * d);
// second tangent point
let bnx = (cp1.x - cp2.x) / b2.sqrt();
let bny = (cp1.y - cp2.y) / b2.sqrt();
let tp2 = Point2D::new(cp1.x - bnx * d, cp1.y - bny * d);
// arc center and angles
let anticlockwise = direction < 0.0;
let cx = tp1.x + any * radius * if anticlockwise { 1.0 } else { -1.0 };
let cy = tp1.y - anx * radius * if anticlockwise { 1.0 } else { -1.0 };
let angle_start = (tp1.y - cy).atan2(tp1.x - cx);
let angle_end = (tp2.y - cy).atan2(tp2.x - cx);
self.line_to(&tp1);
if [cx, cy, angle_start, angle_end].iter().all(|x| x.is_finite()) {
self.arc(&Point2D::new(cx, cy), radius,
angle_start, angle_end, anticlockwise);
}
}
pub fn ellipse(
&mut self,
center: &Point2D<AzFloat>,
radius_x: AzFloat,
radius_y: AzFloat,
rotation_angle: AzFloat,
start_angle: AzFloat,
end_angle: AzFloat,
ccw: bool
) {
self.path_builder.ellipse(*center, radius_x, radius_y, rotation_angle, start_angle, end_angle, ccw);
}
pub fn set_fill_style(&mut self, style: FillOrStrokeStyle) {
if let Some(pattern) = style.to_azure_pattern(&self.drawtarget) {
self.state.fill_style = pattern
}
}
pub fn set_stroke_style(&mut self, style: FillOrStrokeStyle) {
if let Some(pattern) = style.to_azure_pattern(&self.drawtarget) {
self.state.stroke_style = pattern
}
}
pub fn set_line_width(&mut self, width: f32) {
self.state.stroke_opts.line_width = width;
}
pub fn set_line_cap(&mut self, cap: LineCapStyle) {
self.state.stroke_opts.line_cap = cap.to_azure_style();
}
pub fn set_line_join(&mut self, join: LineJoinStyle) {
self.state.stroke_opts.line_join = join.to_azure_style();
}
pub fn set_miter_limit(&mut self, limit: f32) {
self.state.stroke_opts.miter_limit = limit;
}
pub fn set_transform(&mut self, transform: &Transform2D<f32>) {
self.state.transform = transform.clone();
self.drawtarget.set_transform(transform)
}
pub fn set_global_alpha(&mut self, alpha: f32) {
self.state.draw_options.alpha = alpha;
}
pub fn set_global_composition(&mut self, op: CompositionOrBlending) {
self.state.draw_options.set_composition_op(op.to_azure_style());
}
pub fn create(size: Size2D<i32>) -> DrawTarget {
DrawTarget::new(BackendType::Skia, size, SurfaceFormat::B8G8R8A8)
}
pub fn recreate(&mut self, size: Size2D<i32>) {
self.drawtarget = CanvasData::create(size);
self.state = CanvasPaintState::new(self.state.draw_options.antialias);
self.saved_states.clear();
// Webrender doesn't let images change size, so we clear the webrender image key.
        // TODO: there is an annoying race condition here: the display list builder
// might still be using the old image key. Really, we should be scheduling the image
// for later deletion, not deleting it immediately.
// https://github.com/servo/servo/issues/17534
if let Some(image_key) = self.image_key.take() {
// If this executes, then we are in a new epoch since we last recreated the canvas,
// so `old_image_key` must be `None`.
debug_assert!(self.old_image_key.is_none());
self.old_image_key = Some(image_key);
}
}
pub fn send_pixels(&mut self, chan: IpcSender<Option<ByteBuf>>) {
self.drawtarget.snapshot().get_data_surface().with_data(|element| {
chan.send(Some(Vec::from(element).into())).unwrap();
})
}
pub fn send_data(&mut self, chan: IpcSender<CanvasImageData>) {
self.drawtarget.snapshot().get_data_surface().with_data(|element| {
let size = self.drawtarget.get_size();
let descriptor = webrender_api::ImageDescriptor {
size: webrender_api::DeviceUintSize::new(size.width as u32, size.height as u32),
stride: None,
format: webrender_api::ImageFormat::BGRA8,
offset: 0,
is_opaque: false,
allow_mipmaps: false,
};
let data = webrender_api::ImageData::Raw(Arc::new(element.into()));
let mut txn = webrender_api::Transaction::new();
match self.image_key {
Some(image_key) => {
debug!("Updating image {:?}.", image_key);
txn.update_image(image_key, descriptor, data, None);
}
None => {
self.image_key = Some(self.webrender_api.generate_image_key());
debug!("New image {:?}.", self.image_key);
txn.add_image(self.image_key.unwrap(), descriptor, data, None);
}
}
if let Some(image_key) = mem::replace(&mut self.very_old_image_key, self.old_image_key.take()) {
txn.delete_image(image_key);
}
self.webrender_api.update_resources(txn.resource_updates);
let data = CanvasImageData {
image_key: self.image_key.unwrap(),
};
chan.send(data).unwrap();
})
}
pub fn image_data(
&self,
dest_rect: Rect<i32>,
canvas_size: Size2D<f64>,
chan: IpcSender<ByteBuf>,
) {
let mut dest_data = self.read_pixels(dest_rect, canvas_size);
// bgra -> rgba
byte_swap(&mut dest_data);
chan.send(dest_data.into()).unwrap();
}
// https://html.spec.whatwg.org/multipage/#dom-context-2d-putimagedata
pub fn put_image_data(
&mut self,
imagedata: Vec<u8>,
offset: Vector2D<f64>,
image_data_size: Size2D<f64>,
mut dirty_rect: Rect<f64>
) {
if image_data_size.width <= 0.0 || image_data_size.height <= 0.0 {
return
}
assert_eq!(image_data_size.width * image_data_size.height * 4.0, imagedata.len() as f64);
// Step 1. TODO (neutered data)
// Step 2.
if dirty_rect.size.width < 0.0f64 {
dirty_rect.origin.x += dirty_rect.size.width;
dirty_rect.size.width = -dirty_rect.size.width;
}
if dirty_rect.size.height < 0.0f64 {
dirty_rect.origin.y += dirty_rect.size.height;
dirty_rect.size.height = -dirty_rect.size.height;
}
// Step 3.
if dirty_rect.origin.x < 0.0f64 {
dirty_rect.size.width += dirty_rect.origin.x;
dirty_rect.origin.x = 0.0f64;
}
if dirty_rect.origin.y < 0.0f64 {
dirty_rect.size.height += dirty_rect.origin.y;
dirty_rect.origin.y = 0.0f64;
}
// Step 4.
if dirty_rect.max_x() > image_data_size.width {
dirty_rect.size.width = image_data_size.width - dirty_rect.origin.x;
}
if dirty_rect.max_y() > image_data_size.height {
dirty_rect.size.height = image_data_size.height - dirty_rect.origin.y;
}
// 5) If either dirtyWidth or dirtyHeight is negative or zero,
// stop without affecting any bitmaps
if dirty_rect.size.width <= 0.0 || dirty_rect.size.height <= 0.0 {
return
}
// Step 6.
let dest_rect = dirty_rect.translate(&offset).to_i32();
// azure_hl operates with integers. We need to cast the image size
let image_size = image_data_size.to_i32();
let first_pixel = dest_rect.origin - offset.to_i32();
let mut src_line = (first_pixel.y * (image_size.width * 4) + first_pixel.x * 4) as usize;
let mut dest =
Vec::with_capacity((dest_rect.size.width * dest_rect.size.height * 4) as usize);
for _ in 0 .. dest_rect.size.height {
let mut src_offset = src_line;
for _ in 0 .. dest_rect.size.width {
let alpha = imagedata[src_offset + 3] as u16;
// add 127 before dividing for more accurate rounding
let premultiply_channel = |channel: u8| (((channel as u16 * alpha) + 127) / 255) as u8;
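                // e.g. channel = 200, alpha = 128: (200 * 128 + 127) / 255 = 100;
                // u16 arithmetic keeps the worst case (255 * 255 + 127) below overflow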
dest.push(premultiply_channel(imagedata[src_offset + 2]));
dest.push(premultiply_channel(imagedata[src_offset + 1]));
dest.push(premultiply_channel(imagedata[src_offset + 0]));
dest.push(imagedata[src_offset + 3]);
src_offset += 4;
}
src_line += (image_size.width * 4) as usize;
}
if let Some(source_surface) = self.drawtarget.create_source_surface_from_data(
&dest,
dest_rect.size,
dest_rect.size.width * 4,
SurfaceFormat::B8G8R8A8) {
self.drawtarget.copy_surface(source_surface,
Rect::new(Point2D::new(0, 0), dest_rect.size),
dest_rect.origin);
}
}
pub fn set_shadow_offset_x(&mut self, value: f64) {
self.state.shadow_offset_x = value;
}
pub fn set_shadow_offset_y(&mut self, value: f64) {
self.state.shadow_offset_y = value;
}
pub fn set_shadow_blur(&mut self, value: f64) {
self.state.shadow_blur = value;
}
pub fn set_shadow_color(&mut self, value: Color) {
self.state.shadow_color = value;
}
// https://html.spec.whatwg.org/multipage/#when-shadows-are-drawn
fn need_to_draw_shadow(&self) -> bool {
self.state.shadow_color.a != 0.0f32 &&
(self.state.shadow_offset_x != 0.0f64 ||
self.state.shadow_offset_y != 0.0f64 ||
self.state.shadow_blur != 0.0f64)
}
fn create_draw_target_for_shadow(&self, source_rect: &Rect<f32>) -> DrawTarget {
let draw_target = self.drawtarget.create_similar_draw_target(&Size2D::new(source_rect.size.width as i32,
source_rect.size.height as i32),
self.drawtarget.get_format());
let matrix = Transform2D::identity()
.pre_translate(-source_rect.origin.to_vector().cast())
.pre_mul(&self.state.transform);
draw_target.set_transform(&matrix);
draw_target
}
fn draw_with_shadow<F>(&self, rect: &Rect<f32>, draw_shadow_source: F)
where F: FnOnce(&DrawTarget)
{
let shadow_src_rect = self.state.transform.transform_rect(rect);
let new_draw_target = self.create_draw_target_for_shadow(&shadow_src_rect);
draw_shadow_source(&new_draw_target);
self.drawtarget.draw_surface_with_shadow(new_draw_target.snapshot(),
&Point2D::new(shadow_src_rect.origin.x as AzFloat,
shadow_src_rect.origin.y as AzFloat),
&self.state.shadow_color,
&Vector2D::new(self.state.shadow_offset_x as AzFloat,
self.state.shadow_offset_y as AzFloat),
(self.state.shadow_blur / 2.0f64) as AzFloat,
self.state.draw_options.composition);
}
/// It reads image data from the canvas
/// canvas_size: The size of the canvas we're reading from
/// read_rect: The area of the canvas we want to read from
pub fn read_pixels(&self, read_rect: Rect<i32>, canvas_size: Size2D<f64>) -> Vec<u8> {
let canvas_size = canvas_size.to_i32();
let canvas_rect = Rect::new(Point2D::new(0i32, 0i32), canvas_size);
let src_read_rect = canvas_rect.intersection(&read_rect).unwrap_or(Rect::zero());
let mut image_data = vec![];
if src_read_rect.is_empty() || canvas_size.width <= 0 && canvas_size.height <= 0 {
return image_data;
}
let data_surface = self.drawtarget.snapshot().get_data_surface();
let mut src_data = Vec::new();
data_surface.with_data(|element| { src_data = element.to_vec(); });
let stride = data_surface.stride();
//start offset of the copyable rectangle
let mut src = (src_read_rect.origin.y * stride + src_read_rect.origin.x * 4) as usize;
//copy the data to the destination vector
for _ in 0..src_read_rect.size.height {
let row = &src_data[src .. src + (4 * src_read_rect.size.width) as usize];
image_data.extend_from_slice(row);
src += stride as usize;
}
image_data
}
}
impl<'a> Drop for CanvasData<'a> {
fn drop(&mut self) {
let mut txn = webrender_api::Transaction::new();
if let Some(image_key) = self.old_image_key.take() {
txn.delete_image(image_key);
}
if let Some(image_key) = self.very_old_image_key.take() {
txn.delete_image(image_key);
}
self.webrender_api.update_resources(txn.resource_updates);
}
}
#[derive(Clone)]
struct CanvasPaintState<'a> {
draw_options: DrawOptions,
fill_style: Pattern,
stroke_style: Pattern,
stroke_opts: StrokeOptions<'a>,
/// The current 2D transform matrix.
transform: Transform2D<f32>,
shadow_offset_x: f64,
shadow_offset_y: f64,
shadow_blur: f64,
shadow_color: Color,
}
impl<'a> CanvasPaintState<'a> {
fn new(antialias: AntialiasMode) -> CanvasPaintState<'a> {
CanvasPaintState {
draw_options: DrawOptions::new(1.0, CompositionOp::Over, antialias),
fill_style: Pattern::Color(ColorPattern::new(Color::black())),
stroke_style: Pattern::Color(ColorPattern::new(Color::black())),
stroke_opts: StrokeOptions::new(1.0, JoinStyle::MiterOrBevel, CapStyle::Butt, 10.0, &[]),
transform: Transform2D::identity(),
shadow_offset_x: 0.0,
shadow_offset_y: 0.0,
shadow_blur: 0.0,
shadow_color: Color::transparent(),
}
}
}
fn is_zero_size_gradient(pattern: &Pattern) -> bool {
if let &Pattern::LinearGradient(ref gradient) = pattern {
if gradient.is_zero_size() {
return true;
}
}
false
}
/// Used by drawImage to get rid of the extra pixels of the image data that
/// won't be copied to the canvas
/// image_data: Color pixel data of the image
/// image_size: Image dimensions
/// crop_rect: It determines the area of the image we want to keep
fn crop_image(
image_data: Vec<u8>,
image_size: Size2D<f64>,
crop_rect: Rect<f64>
) -> Vec<u8> {
// We're going to iterate over a pixel values array so we need integers
let crop_rect = crop_rect.to_i32();
let image_size = image_size.to_i32();
// Assuming 4 bytes per pixel and row-major order for storage
// (consecutive elements in a pixel row of the image are contiguous in memory)
let stride = image_size.width * 4;
let image_bytes_length = image_size.height * image_size.width * 4;
let crop_area_bytes_length = crop_rect.size.height * crop_rect.size.width * 4;
// If the image size is less or equal than the crop area we do nothing
if image_bytes_length <= crop_area_bytes_length {
return image_data;
}
let mut new_image_data = Vec::new();
let mut src = (crop_rect.origin.y * stride + crop_rect.origin.x * 4) as usize;
for _ in 0..crop_rect.size.height {
let row = &image_data[src .. src + (4 * crop_rect.size.width) as usize];
new_image_data.extend_from_slice(row);
src += stride as usize;
}
new_image_data
}
/// It writes an image to the destination target
/// draw_target: the destination target where the image_data will be copied
/// image_data: Pixel information of the image to be written. It takes RGBA8
/// image_size: The size of the image to be written
/// dest_rect: Area of the destination target where the pixels will be copied
/// smoothing_enabled: It determines if smoothing is applied to the image result
fn write_image(
draw_target: &DrawTarget,
mut image_data: Vec<u8>,
image_size: Size2D<f64>,
dest_rect: Rect<f64>,
smoothing_enabled: bool,
composition_op: CompositionOp,
global_alpha: f32
) {
if image_data.is_empty() {
return
}
let image_rect = Rect::new(Point2D::zero(), image_size);
// rgba -> bgra
byte_swap(&mut image_data);
// From spec https://html.spec.whatwg.org/multipage/#dom-context-2d-drawimage
// When scaling up, if the imageSmoothingEnabled attribute is set to true, the user agent should attempt
// to apply a smoothing algorithm to the image data when it is scaled.
// Otherwise, the image must be rendered using nearest-neighbor interpolation.
let filter = if smoothing_enabled {
Filter::Linear
} else {
Filter::Point
};
// azure_hl operates with integers. We need to cast the image size
let image_size = image_size.to_i32();
if let Some(source_surface) =
draw_target.create_source_surface_from_data(&image_data,
image_size,
image_size.width * 4,
SurfaceFormat::B8G8R8A8) {
let draw_surface_options = DrawSurfaceOptions::new(filter, true);
let draw_options = DrawOptions::new(global_alpha, composition_op, AntialiasMode::None);
draw_target.draw_surface(source_surface,
dest_rect.to_azure_style(),
image_rect.to_azure_style(),
draw_surface_options,
draw_options);
}
}
pub trait PointToi32 {
fn to_i32(&self) -> Point2D<i32>;
}
impl PointToi32 for Point2D<f64> {
fn to_i32(&self) -> Point2D<i32> {
Point2D::new(self.x.to_i32().unwrap(),
self.y.to_i32().unwrap())
}
}
pub trait SizeToi32 {
fn to_i32(&self) -> Size2D<i32>;
}
impl SizeToi32 for Size2D<f64> {
fn to_i32(&self) -> Size2D<i32> {
Size2D::new(self.width.to_i32().unwrap(),
self.height.to_i32().unwrap())
}
}
pub trait RectToi32 {
fn to_i32(&self) -> Rect<i32>;
fn ceil(&self) -> Rect<f64>;
}
impl RectToi32 for Rect<f64> {
fn to_i32(&self) -> Rect<i32> {
Rect::new(Point2D::new(self.origin.x.to_i32().unwrap(),
self.origin.y.to_i32().unwrap()),
Size2D::new(self.size.width.to_i32().unwrap(),
self.size.height.to_i32().unwrap()))
}
fn ceil(&self) -> Rect<f64> {
Rect::new(Point2D::new(self.origin.x.ceil(),
self.origin.y.ceil()),
Size2D::new(self.size.width.ceil(),
self.size.height.ceil()))
}
}
pub trait ToAzureStyle {
type Target;
fn to_azure_style(self) -> Self::Target;
}
impl ToAzureStyle for Rect<f64> {
type Target = Rect<AzFloat>;
fn to_azure_style(self) -> Rect<AzFloat> {
Rect::new(Point2D::new(self.origin.x as AzFloat, self.origin.y as AzFloat),
Size2D::new(self.size.width as AzFloat, self.size.height as AzFloat))
}
}
impl ToAzureStyle for LineCapStyle {
type Target = CapStyle;
fn to_azure_style(self) -> CapStyle {
match self {
LineCapStyle::Butt => CapStyle::Butt,
LineCapStyle::Round => CapStyle::Round,
LineCapStyle::Square => CapStyle::Square,
}
}
}
impl ToAzureStyle for LineJoinStyle {
type Target = JoinStyle;
fn to_azure_style(self) -> JoinStyle {
match self {
LineJoinStyle::Round => JoinStyle::Round,
LineJoinStyle::Bevel => JoinStyle::Bevel,
LineJoinStyle::Miter => JoinStyle::Miter,
}
}
}
impl ToAzureStyle for CompositionStyle {
type Target = CompositionOp;
fn to_azure_style(self) -> CompositionOp {
match self {
CompositionStyle::SrcIn => CompositionOp::In,
CompositionStyle::SrcOut => CompositionOp::Out,
CompositionStyle::SrcOver => CompositionOp::Over,
CompositionStyle::SrcAtop => CompositionOp::Atop,
CompositionStyle::DestIn => CompositionOp::DestIn,
CompositionStyle::DestOut => CompositionOp::DestOut,
CompositionStyle::DestOver => CompositionOp::DestOver,
CompositionStyle::DestAtop => CompositionOp::DestAtop,
CompositionStyle::Copy => CompositionOp::Source,
CompositionStyle::Lighter => CompositionOp::Add,
CompositionStyle::Xor => CompositionOp::Xor,
}
}
}
impl ToAzureStyle for BlendingStyle {
type Target = CompositionOp;
fn to_azure_style(self) -> CompositionOp {
match self {
BlendingStyle::Multiply => CompositionOp::Multiply,
BlendingStyle::Screen => CompositionOp::Screen,
BlendingStyle::Overlay => CompositionOp::Overlay,
BlendingStyle::Darken => CompositionOp::Darken,
BlendingStyle::Lighten => CompositionOp::Lighten,
BlendingStyle::ColorDodge => CompositionOp::ColorDodge,
BlendingStyle::ColorBurn => CompositionOp::ColorBurn,
BlendingStyle::HardLight => CompositionOp::HardLight,
BlendingStyle::SoftLight => CompositionOp::SoftLight,
BlendingStyle::Difference => CompositionOp::Difference,
BlendingStyle::Exclusion => CompositionOp::Exclusion,
BlendingStyle::Hue => CompositionOp::Hue,
BlendingStyle::Saturation => CompositionOp::Saturation,
BlendingStyle::Color => CompositionOp::Color,
BlendingStyle::Luminosity => CompositionOp::Luminosity,
}
}
}
impl ToAzureStyle for CompositionOrBlending {
type Target = CompositionOp;
fn to_azure_style(self) -> CompositionOp {
match self {
CompositionOrBlending::Composition(op) => op.to_azure_style(),
CompositionOrBlending::Blending(op) => op.to_azure_style(),
}
}
}
pub trait ToAzurePattern {
fn to_azure_pattern(&self, drawtarget: &DrawTarget) -> Option<Pattern>;
}
impl ToAzurePattern for FillOrStrokeStyle {
fn to_azure_pattern(&self, drawtarget: &DrawTarget) -> Option<Pattern> {
match *self {
FillOrStrokeStyle::Color(ref color) => {
Some(Pattern::Color(ColorPattern::new(color.to_azure_style())))
},
FillOrStrokeStyle::LinearGradient(ref linear_gradient_style) => {
let gradient_stops: Vec<GradientStop> = linear_gradient_style.stops.iter().map(|s| {
GradientStop {
offset: s.offset as AzFloat,
color: s.color.to_azure_style()
}
}).collect();
Some(Pattern::LinearGradient(LinearGradientPattern::new(
&Point2D::new(linear_gradient_style.x0 as AzFloat, linear_gradient_style.y0 as AzFloat),
&Point2D::new(linear_gradient_style.x1 as AzFloat, linear_gradient_style.y1 as AzFloat),
drawtarget.create_gradient_stops(&gradient_stops, ExtendMode::Clamp),
&Transform2D::identity())))
},
FillOrStrokeStyle::RadialGradient(ref radial_gradient_style) => {
let gradient_stops: Vec<GradientStop> = radial_gradient_style.stops.iter().map(|s| {
GradientStop {
offset: s.offset as AzFloat,
color: s.color.to_azure_style()
}
}).collect();
Some(Pattern::RadialGradient(RadialGradientPattern::new(
&Point2D::new(radial_gradient_style.x0 as AzFloat, radial_gradient_style.y0 as AzFloat),
&Point2D::new(radial_gradient_style.x1 as AzFloat, radial_gradient_style.y1 as AzFloat),
radial_gradient_style.r0 as AzFloat, radial_gradient_style.r1 as AzFloat,
drawtarget.create_gradient_stops(&gradient_stops, ExtendMode::Clamp),
&Transform2D::identity())))
},
FillOrStrokeStyle::Surface(ref surface_style) => {
drawtarget.create_source_surface_from_data(&surface_style.surface_data,
surface_style.surface_size,
surface_style.surface_size.width * 4,
SurfaceFormat::B8G8R8A8)
.map(|source_surface| {
Pattern::Surface(SurfacePattern::new(
source_surface.azure_source_surface,
surface_style.repeat_x,
surface_style.repeat_y,
&Transform2D::identity()))
})
}
}
}
}
impl ToAzureStyle for RGBA {
type Target = Color;
fn to_azure_style(self) -> Color {
Color::rgba(self.red_f32() as AzFloat,
self.green_f32() as AzFloat,
self.blue_f32() as AzFloat,
self.alpha_f32() as AzFloat)
}
}<|fim▁end|>
|
source_rect: Rect<f64>,
|
<|file_name|>pubsublite_v1_generated_topic_stats_service_compute_time_cursor_async.py<|end_file_name|><|fim▁begin|># -*- coding: utf-8 -*-
# Copyright 2022 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
# Generated code. DO NOT EDIT!
#
# Snippet for ComputeTimeCursor
# NOTE: This snippet has been automatically generated for illustrative purposes only.
# It may require modifications to work in your environment.
# To install the latest published package dependency, execute the following:
# python3 -m pip install google-cloud-pubsublite
# [START pubsublite_v1_generated_TopicStatsService_ComputeTimeCursor_async]<|fim▁hole|>async def sample_compute_time_cursor():
# Create a client
client = pubsublite_v1.TopicStatsServiceAsyncClient()
# Initialize request argument(s)
request = pubsublite_v1.ComputeTimeCursorRequest(
topic="topic_value",
partition=986,
)
# Make the request
response = await client.compute_time_cursor(request=request)
# Handle the response
print(response)
# [END pubsublite_v1_generated_TopicStatsService_ComputeTimeCursor_async]<|fim▁end|>
|
from google.cloud import pubsublite_v1
|
<|file_name|>updates.js<|end_file_name|><|fim▁begin|>// telegram.link
// Copyright 2014 Enrico Stara '[email protected]'
// Released under the MIT License
// http://telegram.link
// Dependencies:
var api = require('../api');
var utility = require('../utility');
// ***<|fim▁hole|>// This module wraps API methods required to manage the session updates
// See [Api Methods](https://core.telegram.org/methods#working-with-updates)
// Access only via Client object (like client.updates) and `updates` instance property
function Updates(client) {
this.client = client;
}
// ***
// **Event: **`'method name'`
// Each of the following methods emits an event with the same name when done, an `error` event otherwise.
// ***
// updates.**getState([callback])**
// Return a Promise to get the current state of updates.
// [Click here for more details](https://core.telegram.org/method/updates.getState)
// The code:
Updates.prototype.getState = function (callback) {
return utility.callService(api.service.updates.getState, this.client, this.client._channel, callback, arguments);
};
// ***
// updates.**getDifference(pts, date, qts, [callback])**
// Return a Promise to get the difference between the current state of updates and transmitted.
// [Click here for more details](https://core.telegram.org/method/updates.getDifference)
// The code:
Updates.prototype.getDifference = function (pts, date, qts, callback) {
return utility.callService(api.service.updates.getDifference, this.client, this.client._channel, callback, arguments);
};
// Export the class
module.exports = exports = Updates;<|fim▁end|>
| |
<|file_name|>secrets.py<|end_file_name|><|fim▁begin|>from key_vault_agent import KeyVaultAgent
<|fim▁hole|> def get_secret(self):
        self.data_client.restore_secret()<|fim▁end|>
|
class SecretsAgent(KeyVaultAgent):
|
<|file_name|>get_thumbnails.py<|end_file_name|><|fim▁begin|>#! /usr/bin/env python
"""
Author: Gary Foreman
Created: August 6, 2016
This script scrapes thumbnail images from thread links in the For Sale: Bass
Guitars forum at talkbass.com
"""
from __future__ import print_function
from glob import glob
import os
import sys
import urllib
from PIL import Image, ImageOps
import pymongo
sys.path.append('..')
from utilities.utilities import pause_scrape, report_progress
MIN_PAUSE_SECONDS = 0.15
MAX_PAUSE_SECONDS = 0.5
REPORT_MESSAGE = 'Scraped image'
REPORT_FREQUENCY = 300
DATA_PATH = os.path.join('..', 'data', 'images')
def make_data_dir():
"""
Checks to see whether DATA_PATH exists. If not, creates it.
"""
if not os.path.isdir(DATA_PATH):
os.makedirs(DATA_PATH)
def filename_from_url(thumbnail_url):
"""
thumbnail_url : a string with a url to a bass image
Strips filename from the end of thumbnail_url and prepends DATA_PATH.
Also ensures the file extension is jpg
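    e.g. 'http://example.com/imgs/bass.png/' -> '../data/images/bass.jpg'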
"""
filename = thumbnail_url.strip('/').split('/')[-1]
basename, ext = os.path.splitext(filename)
return os.path.join(DATA_PATH, basename + '.jpg')
def download_thumb(thumbnail_url):
"""
thumbnail_url : a string with a url to a bass image
    Pulls down the image from thumbnail_url and stores it in DATA_PATH
"""
filename = filename_from_url(thumbnail_url)
try:
urllib.urlretrieve(thumbnail_url, filename)
except IOError:
# URL is not an image file
pass
except UnicodeError:
# URL contains non-ASCII characters
pass
def crop_image(filename):<|fim▁hole|> """
try:
img = Image.open(filename)
img = ImageOps.fit(img, (128, 128), Image.ANTIALIAS)
img.save(filename)
    except IOError:
        # File does not exist or the image data is corrupted
try:
os.remove(filename)
except OSError:
# Filename is too long
pass
def main():
make_data_dir()
# Establish connection to MongoDB open on port 27017
client = pymongo.MongoClient()
# Access threads database
db = client.for_sale_bass_guitars
# Get database documents
cursor = db.threads.find()
# Get list of images that have already been scraped
scraped_image_list = glob(os.path.join(DATA_PATH, '*.jpg'))
thumbnail_url_list = []
for document in cursor:
thumbnail_url = document[u'image_url']
try:
filename = filename_from_url(thumbnail_url)
if filename not in scraped_image_list:
thumbnail_url_list.append(thumbnail_url)
except AttributeError:
# thread has no associated thumbnail
pass
client.close()
thumbnail_count = 1
for thumbnail_url in thumbnail_url_list:
download_thumb(thumbnail_url)
filename = filename_from_url(thumbnail_url)
crop_image(filename)
pause_scrape(MIN_PAUSE_SECONDS, MAX_PAUSE_SECONDS)
report_progress(thumbnail_count, REPORT_MESSAGE, REPORT_FREQUENCY)
thumbnail_count += 1
if __name__ == "__main__":
main()<|fim▁end|>
|
"""
filename: a string with the name to a locally stored image file
Crops image at filename to 128 x 128 pixels and overwrites original
|
<|file_name|>__init__.py<|end_file_name|><|fim▁begin|># -*- coding: utf-8 -*-
r"""
.. _SoftiMAX:
SoftiMAX at MAX IV
------------------
The images below are produced by scripts in
``\examples\withRaycing\14_SoftiMAX``.
The beamline will have two branches:
- STXM (Scanning Transmission X-ray Microscopy) and
- CXI (Coherent X-ray Imaging),
see the scheme provided by K. Thånell.
.. imagezoom:: _images/softiMAX_layout.*
STXM branch
~~~~~~~~~~~
.. rubric:: Rays vs. hybrid
The propagation through the first optical elements – from undulator to front
end (FE) slit, to M1, to M2 and to plane grating (PG) – is done with rays:
+------------+------------+------------+------------+
| FE | M1 | M2 | PG |
+============+============+============+============+
| |st_rFE| | |st_rM1| | |st_rM2| | |st_rPG| |
+------------+------------+------------+------------+
.. |st_rFE| imagezoom:: _images/stxm-2D-1-rays-0emit-0enSpread-monoE-00-FE.*
.. |st_rM1| imagezoom:: _images/stxm-2D-1-rays-0emit-0enSpread-monoE-01-M1local.*
.. |st_rM2| imagezoom:: _images/stxm-2D-1-rays-0emit-0enSpread-monoE-02-M2local.*
.. |st_rPG| imagezoom:: _images/stxm-2D-1-rays-0emit-0enSpread-monoE-02a-PGlocal.*
:loc: upper-right-corner
Starting from PG – to M3, to exit slit, to Fresnel zone plate (FZP) and to
variously positioned sample screen – the propagation is done by rays or waves,
as compared below. Although the M3 footprint looks imperfect (not black at
the periphery), the field at normal surfaces (exit slit, FZP (not shown) and sample
screen) is of perfect quality. At the best focus, rays and waves result in a
similar image. Notice a micron-sized depth of focus.
+-----------+---------------------+---------------------+
| | rays | wave |
+===========+=====================+=====================+
| M3 | |st_rM3| | |st_hM3| |
+-----------+---------------------+---------------------+
| exit slit | |st_rES| | |st_hES| |
+-----------+---------------------+---------------------+
| sample | |st_rS| | |st_hS| |
+-----------+---------------------+---------------------+
.. |st_rM3| imagezoom:: _images/stxm-2D-1-rays-0emit-0enSpread-monoE-03-M3local.*
.. |st_hM3| imagezoom:: _images/stxm-2D-2-hybr-0emit-0enSpread-monoE-03-M3local.*
:loc: upper-right-corner
.. |st_rES| imagezoom:: _images/stxm-2D-1-rays-0emit-0enSpread-monoE-04-ExitSlit.*
.. |st_hES| imagezoom:: _images/stxm-2D-2-hybr-0emit-0enSpread-monoE-04-ExitSlit.*
:loc: upper-right-corner
.. |st_rS| animation:: _images/stxm-2D-1-rays-0emit-0enSpread-monoE-06i-ExpFocus-Is
.. |st_hS| imagezoom:: _images/stxm-2D-2-hybr-0emit-0enSpread-monoE-06i-ExpFocus-Is
:loc: upper-right-corner
.. rubric:: Influence of emittance
Non-zero emittance radiation is treated in xrt by incoherent addition of single
electron intensities. The single electron (filament) fields are considered as
fully coherent and result from filament trajectories (one per repeat)
that attain positional and angular shifts within the given emittance
distribution. The following images are calculated for the exit slit and the
focus screen for zero and non-zero emittance
(for MAX IV 3 GeV ring: ε\ :sub:`x`\ =263 pm·rad,
β\ :sub:`x`\ =9 m, ε\ :sub:`z`\ =8 pm·rad, β\ :sub:`z`\ =2 m). At the real
emittance, the horizontal focal size increases by ~75%. A finite energy band,
as determined by vertical size of the exit slit, results in somewhat bigger
broadening due to a chromatic dependence of the focal length.
+-----------+---------------------+---------------------+---------------------+
| | 0 emittance | real emittance | |refeb| |
+===========+=====================+=====================+=====================+
| exit slit | |st_hESb| | |st_hES2| | |st_hES3| |
+-----------+---------------------+---------------------+---------------------+
| sample | |st_hSb| | |st_hS2| | |st_hS3| |
+-----------+---------------------+---------------------+---------------------+
.. |refeb| replace:: real emittance, finite energy band
.. |st_hESb| imagezoom:: _images/stxm-2D-2-hybr-0emit-0enSpread-monoE-04-ExitSlit.*
.. |st_hES2| imagezoom:: _images/stxm-2D-2-hybr-non0e-0enSpread-monoE-04-ExitSlit.*
.. |st_hS2| animation:: _images/stxm-2D-2-hybr-non0e-0enSpread-monoE-06i-ExpFocus-Is
.. |st_hES3| imagezoom:: _images/stxm-2D-2-hybr-non0e-0enSpread-wideE-04-ExitSlit.*
:loc: upper-right-corner
.. |st_hSb| imagezoom:: _images/stxm-2D-2-hybr-0emit-0enSpread-monoE-06i-ExpFocus-Is
.. |st_hS3| animation:: _images/stxm-2D-2-hybr-non0e-0enSpread-wideE-06i-ExpFocus-Is
:loc: upper-right-corner
.. rubric:: Correction of emittance effects
The increased focal size can be amended by closing the exit slit. With flux
loss of about 2/3, the focal size is almost restored.
+-----------+--------------------+--------------------+
| | 80 µm exit slit | 20 µm exit slit |
+===========+====================+====================+<|fim▁hole|>+-----------+--------------------+--------------------+
.. |st_hES2b| imagezoom:: _images/stxm-2D-2-hybr-non0e-0enSpread-monoE-04-ExitSlit.*
.. |st_hES4| imagezoom:: _images/stxm-2D-2-hybr-non0e-0enSpread-monoE-025H-04-ExitSlit.*
:loc: upper-right-corner
.. |st_hS2b| animation:: _images/stxm-2D-2-hybr-non0e-0enSpread-monoE-06i-ExpFocus-Is
.. |st_hS4| animation:: _images/stxm-2D-2-hybr-non0e-0enSpread-monoE-025H-06i-ExpFocus-Is
:loc: upper-right-corner
.. rubric:: Coherence signatures
The beam improvement can also be viewed via the coherence properties by the
four available methods (see :ref:`coh_signs`). As the horizontal exit slit
becomes smaller, one can observe the increase of the coherent fraction ζ and
the increase of the primary (coherent) mode weight. The width of degree of
coherence (DoC) relative to the width of the intensity distribution determines
the coherent beam fraction. Both widths vary with varying screen position
around the focal point such that their ratio is not invariant, so that the
coherent fraction also varies, which is counter-intuitive. An important
advantage of the eigen-mode or PCA methods is a simple definition of the
coherent fraction as the eigenvalue of the zeroth mode (component); this
eigenvalue appears to be invariant around the focal point, see below. Note that
the methods 2 and 3 give equal results. The method 4 that gives the degree of
transverse coherence (DoTC) is also invariant around the focal point, see DoTC
values on the pictures of Principal Components. A minimal numpy illustration
of this eigen-mode bookkeeping is sketched after the table below.
+-----------+--------------------------+--------------------------+
| | 80 µm exit slit | 20 µm exit slit |
+===========+==========================+==========================+
| method 1 | |st_hS80m1| | |st_hS20m1| |
+-----------+--------------------------+--------------------------+
| method 2 | |st_hS80m3| | |st_hS20m3| |
+-----------+--------------------------+--------------------------+
| method 3, | |st_hS80m4| | |st_hS20m4| |
| method 4b | | |
+-----------+--------------------------+--------------------------+
.. |st_hS80m1| animation:: _images/stxm-IDOC-2D-2-hybr-non0e-0enSpread-monoE
.. |st_hS20m1| animation:: _images/stxm-IDOC-2D-2-hybr-non0e-0enSpread-monoE-025H
:loc: upper-right-corner
.. |st_hS80m3| animation:: _images/stxm-Modes-2D-2-hybr-non0e-0enSpread-monoE
.. |st_hS20m3| animation:: _images/stxm-Modes-2D-2-hybr-non0e-0enSpread-monoE-025H
:loc: upper-right-corner
.. |st_hS80m4| animation:: _images/stxm-PCA-2D-2-hybr-non0e-0enSpread-monoE
.. |st_hS20m4| animation:: _images/stxm-PCA-2D-2-hybr-non0e-0enSpread-monoE-025H
:loc: upper-right-corner
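
The following sketch (illustrative, not part of xrt) condenses the eigen-mode
analysis referred to above: the mutual intensity accumulated from sampled
filament fields is diagonalized and the normalized zeroth eigenvalue is taken
as the coherent fraction. The array shape and the function name are
assumptions made for this example.

.. code-block:: python

    import numpy as np

    def coherent_fraction(fields):
        # fields: complex array (nRepeats, nPoints) of one-electron field
        # samples on a 1D cut of the screen, one row per repeat
        J = np.dot(fields.conj().T, fields) / fields.shape[0]  # mutual intensity
        w = np.linalg.eigvalsh(J)  # real eigenvalues, ascending order
        return w[-1] / w.sum()     # weight of the zeroth (coherent) mode
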
CXI branch
~~~~~~~~~~
.. rubric:: 2D vs 1D
Although the sample screen images are of good quality (the dark field is almost
black), the mirror footprints may be noisy and not well convergent in the
periphery. Compare the M3 footprint with that in the previous section (STXM
branch) where the difference is in the mirror area and thus in the sample
density. The used 10\ :sup:`6` wave samples (i.e. 10\ :sup:`12` possible paths)
are not enough for the slightly enlarged area in the present example. The
propagation is therefore performed in separated horizontal and vertical
directions, which dramatically improves the quality of the footprints.
Disadvantages of the cuts are losses in visual representation and incorrect
evaluation of the flux.
+------+----------------------+-----------------------+-----------------------+
| | 2D | 1D horizontal cut | 1D vertical cut |
+======+======================+=======================+=======================+
| |M3| | |cxiM32D| | |cxiM31Dh| | |cxiM31Dv| |
+------+----------------------+-----------------------+-----------------------+
| |SS| | |cxiS2D| | |cxiS1Dh| | |cxiS1Dv| |
+------+----------------------+-----------------------+-----------------------+
.. |M3| replace:: M3 footprint
.. |SS| replace:: sample screen
.. |cxiM32D| imagezoom:: _images/cxi_2D-2-hybr-0emit-0enSpread-monoE-03-M3local.*
.. |cxiM31Dh| imagezoom:: _images/cxi_1D-2-hybr-1e6hor-0emit-0enSpread-monoE-03-M3local.*
.. |cxiM31Dv| imagezoom:: _images/cxi_1D-2-hybr-1e6ver-0emit-0enSpread-monoE-03-M3local.*
:loc: upper-right-corner
.. |cxiS2D| animation:: _images/cxi_S2D
.. |cxiS1Dh| animation:: _images/cxi_S1Dh
.. |cxiS1Dv| animation:: _images/cxi_S1Dv
:loc: upper-right-corner
.. _wavefronts:
.. rubric:: Flat screen vs normal-to-k screen (wave front)
The following images demonstrate the correctness of the directional
Kirchhoff-like integral (see :ref:`seq_prop`). Five diffraction integrals are
calculated on flat screens around the focus position: for two polarizations
and for three directional components. The latter define the wave fronts at
every flat screen position; these wave fronts are then used as new curved
screens. The diffraction fields calculated on these curved screens have
narrow phase distributions, as shown by the color histograms, which is indeed
expected for a wave front by its definition. In contrast, the *flat* screens
at the same positions show rapid phase variation over several Fresnel zones.
A short sketch after the table below shows how such a phase histogram can be
computed.
.. note::
   In the process of wave propagation, wave fronts -- surfaces of
   constant phase -- are not used in any way. We therefore call it “wave
   propagation”, not “wave *front* propagation” as it is frequently called
   by others. The wave fronts in this example were calculated solely to
   demonstrate the correctness of the local propagation directions after
   the diffracted field had been calculated.
+------------------------------+------------------------------+
| flat screen | curved screen (wave front) |
+==============================+==============================+
| |cxiFlat| | |cxiFront| |
+------------------------------+------------------------------+
.. |cxiFlat| animation:: _images/cxi-S1DhFlat
.. |cxiFront| animation:: _images/cxi-S1DhFront
:loc: upper-right-corner
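The narrowness of the phase distribution can be checked directly (a sketch
under assumptions, not an xrt call: ``field`` is taken to be the complex
diffracted field sampled on a screen as a NumPy array)::

    import numpy as np

    phase = np.angle(field)                     # wrapped phase, in (-pi, pi]
    hist, edges = np.histogram(phase, bins=64)  # narrow peak expected on a wave front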
The curvature of the calculated wave fronts varies across the focus position:
the wave fronts become flatter as one approaches the focus, see the figure
below. This is in contrast to *ray* propagation, where the angular ray
distribution is invariant at any position between two optical elements.
.. imagezoom:: _images/cxi_waveFronts.*
.. rubric:: Rays, waves and hybrid
The following images are horizontal cuts at the footprints and sample screens
calculated by
- rays,
- a rays + waves hybrid (rays up to the PG, waves from the PG onward) and
- purely by waves.
+-----------------+-------------------+-------------------+-------------------+
| | rays | hybrid | waves |
+=================+===================+===================+===================+
| front end slit | |cxi-hFE| | same as rays | |cxi-wFE| |
+-----------------+-------------------+-------------------+-------------------+
| footprint on M1 | |cxi-hM1| | same as rays | |cxi-wM1| |
+-----------------+-------------------+-------------------+-------------------+
| footprint on M2 | |cxi-hM2| | same as rays | |cxi-wM2| |
+-----------------+-------------------+-------------------+-------------------+
| footprint on PG | |cxi-hPG| | same as rays | |cxi-wPG| |
+-----------------+-------------------+-------------------+-------------------+
| footprint on M3 | |cxi-rM3| | |cxi-hM3| | |cxi-wM3| |
+-----------------+-------------------+-------------------+-------------------+
| exit slit | |cxi-rES| | |cxi-hES| | |cxi-wES| |
+-----------------+-------------------+-------------------+-------------------+
| footprint on M4 | |cxi-rM4| | |cxi-hM4| | |cxi-wM4| |
+-----------------+-------------------+-------------------+-------------------+
| footprint on M5 | |cxi-rM5| | |cxi-hM5| | |cxi-wM5| |
+-----------------+-------------------+-------------------+-------------------+
| sample screen | |cxi-rS| | |cxi-hS| | |cxi-wS| |
+-----------------+-------------------+-------------------+-------------------+
.. |cxi-hFE| imagezoom:: _images/cxi_1D-1-rays-hor-0emit-0enSpread-monoE-00-FE.*
.. |cxi-wFE| imagezoom:: _images/cxi_1D-3-wave-hor-0emit-0enSpread-monoE-00-FE.*
:loc: upper-right-corner
.. |cxi-hM1| imagezoom:: _images/cxi_1D-1-rays-hor-0emit-0enSpread-monoE-01-M1local.*
.. |cxi-wM1| imagezoom:: _images/cxi_1D-3-wave-hor-0emit-0enSpread-monoE-01-M1local.*
:loc: upper-right-corner
.. |cxi-hM2| imagezoom:: _images/cxi_1D-1-rays-hor-0emit-0enSpread-monoE-02-M2local.*
.. |cxi-wM2| imagezoom:: _images/cxi_1D-3-wave-hor-0emit-0enSpread-monoE-02-M2local.*
:loc: upper-right-corner
.. |cxi-hPG| imagezoom:: _images/cxi_1D-1-rays-hor-0emit-0enSpread-monoE-02-PGlocal.*
.. |cxi-wPG| imagezoom:: _images/cxi_1D-3-wave-hor-0emit-0enSpread-monoE-02-PGlocal.*
:loc: upper-right-corner
.. |cxi-rM3| imagezoom:: _images/cxi_1D-1-rays-hor-0emit-0enSpread-monoE-03-M3local.*
.. |cxi-hM3| imagezoom:: _images/cxi_1D-2-hybr-hor-0emit-0enSpread-monoE-03-M3local.*
.. |cxi-wM3| imagezoom:: _images/cxi_1D-3-wave-hor-0emit-0enSpread-monoE-03-M3local.*
:loc: upper-right-corner
.. |cxi-rES| imagezoom:: _images/cxi_1D-1-rays-hor-0emit-0enSpread-monoE-04-ExitSlit.*
.. |cxi-hES| imagezoom:: _images/cxi_1D-2-hybr-hor-0emit-0enSpread-monoE-04-ExitSlit.*
.. |cxi-wES| imagezoom:: _images/cxi_1D-3-wave-hor-0emit-0enSpread-monoE-04-ExitSlit.*
:loc: upper-right-corner
.. |cxi-rM4| imagezoom:: _images/cxi_1D-1-rays-hor-0emit-0enSpread-monoE-05-M4local.*
.. |cxi-hM4| imagezoom:: _images/cxi_1D-2-hybr-hor-0emit-0enSpread-monoE-05-M4local.*
.. |cxi-wM4| imagezoom:: _images/cxi_1D-3-wave-hor-0emit-0enSpread-monoE-05-M4local.*
:loc: upper-right-corner
.. |cxi-rM5| imagezoom:: _images/cxi_1D-1-rays-hor-0emit-0enSpread-monoE-06-M5local.*
.. |cxi-hM5| imagezoom:: _images/cxi_1D-2-hybr-hor-0emit-0enSpread-monoE-06-M5local.*
.. |cxi-wM5| imagezoom:: _images/cxi_1D-3-wave-hor-0emit-0enSpread-monoE-06-M5local.*
:loc: upper-right-corner
.. |cxi-rS| animation:: _images/cxi-rS
.. |cxi-hS| animation:: _images/cxi-hS
.. |cxi-wS| animation:: _images/cxi-wS
:loc: upper-right-corner
.. rubric:: Coherence signatures
This section demonstrates methods 1 and 3 from :ref:`coh_signs`. Notice again
the difficulty of determining the width of the DoC owing to its complex shape
(at real emittance) or the restricted field of view (in the zero emittance
case). In contrast, the eigen-mode analysis yields an almost invariant,
well-defined coherent fraction.
+-----------+--------------------------+--------------------------+
| | 0 emittance | real emittance |
+===========+==========================+==========================+
| method 1 | |cxi-coh1-0emit| | |cxi-coh1-non0e| |
+-----------+--------------------------+--------------------------+
| method 3 | |cxi-coh3-0emit| | |cxi-coh3-non0e| |
+-----------+--------------------------+--------------------------+
.. |cxi-coh1-0emit| animation:: _images/cxi-coh1-0emit
.. |cxi-coh1-non0e| animation:: _images/cxi-coh1-non0e
.. |cxi-coh3-0emit| animation:: _images/cxi-coh3-0emit
.. |cxi-coh3-non0e| animation:: _images/cxi-coh3-non0e
:loc: upper-right-corner
"""
pass<|fim▁end|>
|
| exit slit | |st_hES2b| | |st_hES4| |
+-----------+--------------------+--------------------+
| sample | |st_hS2b| | |st_hS4| |
|
<|file_name|>SlotBackpack.java<|end_file_name|><|fim▁begin|>package com.darkona.adventurebackpack.inventory;
import com.darkona.adventurebackpack.common.IInventoryAdventureBackpack;
import com.darkona.adventurebackpack.init.ModBlocks;
import com.darkona.adventurebackpack.item.ItemAdventureBackpack;
import com.darkona.adventurebackpack.util.Utils;
import net.minecraft.entity.player.EntityPlayer;
import net.minecraft.item.Item;
import net.minecraft.item.ItemStack;
/**
* Created by Darkona on 12/10/2014.
*/
public class SlotBackpack extends SlotAdventureBackpack
{
public SlotBackpack(IInventoryAdventureBackpack inventory, int id, int x, int y)
{
super(inventory, id, x, y);
}
@Override
public boolean isItemValid(ItemStack stack)
{
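        // Disallow backpacks (both the item and its placed-block form) inside
        // a backpack, preventing recursive nesting.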
        return !(stack.getItem() instanceof ItemAdventureBackpack)
                && stack.getItem() != Item.getItemFromBlock(ModBlocks.blockBackpack);
}<|fim▁hole|>
@Override
public void onPickupFromSlot(EntityPlayer p_82870_1_, ItemStack p_82870_2_)
{
super.onPickupFromSlot(p_82870_1_, p_82870_2_);
}
}<|fim▁end|>
| |
<|file_name|>step3_load_model.py<|end_file_name|><|fim▁begin|>#!/usr/bin/env python
# Author: Shao Zhang and Phil Saltzman
# Last Updated: 2015-03-13
#
# This tutorial is intended as an initial Panda scripting lesson going over
# display initialization, loading models, placing objects, and the scene graph.
#
# Step 3: In this step, we create a function called loadPlanets, which will
# eventually be used to load all of the planets in our simulation. For now
# we will load just the sun and the sky-sphere we use to create the
# star-field.
from direct.showbase.ShowBase import ShowBase
base = ShowBase()
from panda3d.core import NodePath, TextNode
from direct.gui.DirectGui import *
import sys
class World(object):
def __init__(self):<|fim▁hole|> parent=base.a2dBottomRight, align=TextNode.A_right,
style=1, fg=(1, 1, 1, 1), pos=(-0.1, 0.1), scale=.07)
base.setBackgroundColor(0, 0, 0) # Set the background to black
base.disableMouse() # disable mouse control of the camera
camera.setPos(0, 0, 45) # Set the camera position (X, Y, Z)
camera.setHpr(0, -90, 0) # Set the camera orientation
#(heading, pitch, roll) in degrees
# We will now define a variable to help keep a consistent scale in
# our model. As we progress, we will continue to add variables here as we
# need them
# The value of this variable scales the size of the planets. True scale size
# would be 1
self.sizescale = 0.6
# Now that we have finished basic initialization, we call loadPlanets which
# will handle actually getting our objects in the world
self.loadPlanets()
def loadPlanets(self):
# Here, inside our class, is where we are creating the loadPlanets function
# For now we are just loading the star-field and sun. In the next step we
# will load all of the planets
# Loading objects in Panda is done via the command loader.loadModel, which
# takes one argument, the path to the model file. Models in Panda come in
# two types, .egg (which is readable in a text editor), and .bam (which is
# not readable but makes smaller files). When you load a file you leave the
# extension off so that it can choose the right version
# Load model returns a NodePath, which you can think of as an object
# containing your model
# Here we load the sky model. For all the planets we will use the same
# sphere model and simply change textures. However, even though the sky is
# a sphere, it is different from the planet model because its polygons
#(which are always one-sided in Panda) face inside the sphere instead of
# outside (this is known as a model with reversed normals). Because of
# that it has to be a separate model.
self.sky = loader.loadModel("models/solar_sky_sphere")
# After the object is loaded, it must be placed in the scene. We do this by
# changing the parent of self.sky to render, which is a special NodePath.
# Each frame, Panda starts with render and renders everything attached to
# it.
self.sky.reparentTo(render)
# You can set the position, orientation, and scale on a NodePath the same
# way that you set those properties on the camera. In fact, the camera is
# just another special NodePath
self.sky.setScale(40)
# Very often, the egg file will know what textures are needed and load them
# automatically. But sometimes we want to set our textures manually, (for
# instance we want to put different textures on the same planet model)
# Loading textures works the same way as loading models, but instead of
# calling loader.loadModel, we call loader.loadTexture
self.sky_tex = loader.loadTexture("models/stars_1k_tex.jpg")
# Finally, the following line sets our new sky texture on our sky model.
# The second argument must be one or the command will be ignored.
self.sky.setTexture(self.sky_tex, 1)
# Now we load the sun.
self.sun = loader.loadModel("models/planet_sphere")
# Now we repeat our other steps
self.sun.reparentTo(render)
self.sun_tex = loader.loadTexture("models/sun_1k_tex.jpg")
self.sun.setTexture(self.sun_tex, 1)
        # The sun is really much bigger than this, but to be able to see the
        # planets we're making it smaller
        self.sun.setScale(2 * self.sizescale)
# end loadPlanets()
# end class world
# instantiate the class
w = World()
base.run()<|fim▁end|>
|
# This is the initialization we had before
self.title = OnscreenText( # Create the title
text="Panda3D: Tutorial 1 - Solar System",
|
<|file_name|>UltimateGoalInsights.ts<|end_file_name|><|fim▁begin|>import Insights from '../Insights';
import Match from '../Match';
import {ISerializable} from '../ISerializable';
export default class UltimateGoalInsights extends Insights implements ISerializable {
private _autoAverageRingsScoredHigh: number;
private _autoAverageRingsScoredMid: number;
private _autoAverageRingsScoredLow: number;
private _autoAveragePowerShots: number;
private _autoPercentWobblesDelivered: number;
private _autoPercentNavigated: number;
private _teleAverageRingsScoredHigh: number;
private _teleAverageRingsScoredMid: number;
private _teleAverageRingsScoredLow: number;
private _endAverageRingsOnWobble: number;
private _endPercentWobblesOnStart: number;
private _endPercentWobblesInDropZone: number;
private _endAveragePowerShots: number;
constructor() {
super();
this._autoAverageRingsScoredHigh = 0;
this._autoAverageRingsScoredMid = 0;
this._autoAverageRingsScoredLow = 0;
this._autoAveragePowerShots = 0;
this._autoPercentWobblesDelivered = 0;
this._autoPercentNavigated = 0;
this._teleAverageRingsScoredHigh = 0;
this._teleAverageRingsScoredMid = 0;
this._teleAverageRingsScoredLow = 0;
this._endAverageRingsOnWobble = 0;
this._endPercentWobblesOnStart = 0;
this._endPercentWobblesInDropZone = 0;
this._endAveragePowerShots = 0;
}
toJSON(): object {<|fim▁hole|> return {
high_score_match: this.highScoreMatch,
average_match_score: this.averageMatchScore,
average_winning_score: this.averageWinningScore,
average_winning_margin: this.averageWinningMargin,
game: {
auto_average_rings_scored_high: this.autoAverageRingsScoredHigh,
auto_average_rings_scored_mid: this.autoAverageRingsScoredMid,
auto_average_rings_scored_low: this.autoAverageRingsScoredLow,
auto_average_power_shots: this.autoAveragePowerShots,
auto_percent_wobbles_delivered: this.autoPercentWobblesDelivered,
auto_percent_navigated: this.autoPercentNavigated,
tele_average_rings_scored_high: this.teleAverageRingsScoredHigh,
tele_average_rings_scored_mid: this.teleAverageRingsScoredMid,
tele_average_rings_scored_low: this.teleAverageRingsScoredLow,
end_average_rings_on_wobble: this.endAverageRingsOnWobble,
end_percent_wobbles_on_start: this.endPercentWobblesOnStart,
end_percent_wobbles_in_drop_zone: this.endPercentWobblesInDropZone,
end_average_power_shots: this.endAveragePowerShots
}
}
}
fromJSON(json: any): UltimateGoalInsights {
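    // Rebuild an insights object from an API payload; high_score_match may
    // be absent, in which case it is stored as null.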
const insights = new UltimateGoalInsights();
insights.highScoreMatch = json.high_score_match ? new Match().fromJSON(json.high_score_match) : null;
insights.averageMatchScore = json.average_match_score;
insights.averageWinningScore = json.average_winning_score;
insights.averageWinningMargin = json.average_winning_margin;
insights.averageMajorPenalties = json.average_major_penalty;
insights.averageMinorPenalties = json.average_minor_penalty;
insights.autoAverageRingsScoredHigh = json.game.auto_average_rings_scored_high;
insights.autoAverageRingsScoredMid = json.game.auto_average_rings_scored_mid;
insights.autoAverageRingsScoredLow = json.game.auto_average_rings_scored_low;
insights.autoAveragePowerShots = json.game.auto_average_power_shots;
insights.autoPercentWobblesDelivered = json.game.auto_percent_wobbles_delivered;
insights.autoPercentNavigated = json.game.auto_percent_navigated;
insights.teleAverageRingsScoredHigh = json.game.tele_average_rings_scored_high;
insights.teleAverageRingsScoredMid = json.game.tele_average_rings_scored_mid;
insights.teleAverageRingsScoredLow = json.game.tele_average_rings_scored_low;
insights.endAverageRingsOnWobble = json.game.end_average_rings_on_wobble;
insights.endPercentWobblesOnStart = json.game.end_percent_wobbles_on_start;
insights.endPercentWobblesInDropZone = json.game.end_percent_wobbles_in_drop_zone;
insights.endAveragePowerShots = json.game.end_average_power_shots;
return insights;
}
get autoAverageRingsScoredHigh(): number {
return this._autoAverageRingsScoredHigh;
}
set autoAverageRingsScoredHigh(value: number) {
this._autoAverageRingsScoredHigh = value;
}
get autoAverageRingsScoredMid(): number {
return this._autoAverageRingsScoredMid;
}
set autoAverageRingsScoredMid(value: number) {
this._autoAverageRingsScoredMid = value;
}
get autoAverageRingsScoredLow(): number {
return this._autoAverageRingsScoredLow;
}
set autoAverageRingsScoredLow(value: number) {
this._autoAverageRingsScoredLow = value;
}
get autoAveragePowerShots(): number {
return this._autoAveragePowerShots;
}
set autoAveragePowerShots(value: number) {
this._autoAveragePowerShots = value;
}
get autoPercentWobblesDelivered(): number {
return this._autoPercentWobblesDelivered;
}
set autoPercentWobblesDelivered(value: number) {
this._autoPercentWobblesDelivered = value;
}
get autoPercentNavigated(): number {
return this._autoPercentNavigated;
}
set autoPercentNavigated(value: number) {
this._autoPercentNavigated = value;
}
get teleAverageRingsScoredHigh(): number {
return this._teleAverageRingsScoredHigh;
}
set teleAverageRingsScoredHigh(value: number) {
this._teleAverageRingsScoredHigh = value;
}
get teleAverageRingsScoredMid(): number {
return this._teleAverageRingsScoredMid;
}
set teleAverageRingsScoredMid(value: number) {
this._teleAverageRingsScoredMid = value;
}
get teleAverageRingsScoredLow(): number {
return this._teleAverageRingsScoredLow;
}
set teleAverageRingsScoredLow(value: number) {
this._teleAverageRingsScoredLow = value;
}
get endAverageRingsOnWobble(): number {
return this._endAverageRingsOnWobble;
}
set endAverageRingsOnWobble(value: number) {
this._endAverageRingsOnWobble = value;
}
get endPercentWobblesOnStart(): number {
return this._endPercentWobblesOnStart;
}
set endPercentWobblesOnStart(value: number) {
this._endPercentWobblesOnStart = value;
}
get endPercentWobblesInDropZone(): number {
return this._endPercentWobblesInDropZone;
}
set endPercentWobblesInDropZone(value: number) {
this._endPercentWobblesInDropZone = value;
}
get endAveragePowerShots(): number {
return this._endAveragePowerShots;
}
set endAveragePowerShots(value: number) {
this._endAveragePowerShots = value;
}
}<|fim▁end|>
| |
<|file_name|>tshirt-outline.js<|end_file_name|><|fim▁begin|>import React from 'react'
import Icon from 'react-icon-base'
const IoTshirtOutline = props => (
<Icon viewBox="0 0 40 40" {...props}>
<g><path d="m11.4 6.7l-8.1 2.4 0.8 2.5 3.1-0.3 3-0.4-0.2 3-1.1 19.9h17.2l-1.1-19.9-0.2-3 3 0.4 3.1 0.3 0.8-2.5-8.1-2.4c-0.5 0.6-1 1.1-1.6 1.5-1.2 0.8-2.7 1.2-4.5 1.2-2.7-0.1-4.6-0.9-6.1-2.7z m11.1-2.9l12.5 3.7-2.5 6.9-5-0.6 1.3 22.5h-22.5l1.2-22.5-5 0.6-2.5-6.9 12.5-3.7c1.1 2.1 2.4 3 5 3.1 2.6 0 3.9-1 5-3.1z"/></g><|fim▁hole|><|fim▁end|>
|
</Icon>
)
export default IoTshirtOutline
|
<|file_name|>test_weights.py<|end_file_name|><|fim▁begin|># Copyright 2011-2012 OpenStack Foundation
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
"""
Tests For Scheduler weights.
"""
from nova import context
from nova import exception
from nova.openstack.common.fixture import mockpatch
from nova.scheduler import weights
from nova import test
from nova.tests import matchers
from nova.tests.scheduler import fakes
class TestWeighedHost(test.NoDBTestCase):
def test_dict_conversion(self):
host_state = fakes.FakeHostState('somehost', None, {})
host = weights.WeighedHost(host_state, 'someweight')
expected = {'weight': 'someweight',
'host': 'somehost'}
self.assertThat(host.to_dict(), matchers.DictMatches(expected))
def test_all_weighers(self):
classes = weights.all_weighers()
class_names = [cls.__name__ for cls in classes]
self.assertEqual(len(classes), 2)
self.assertIn('RAMWeigher', class_names)
self.assertIn('MetricsWeigher', class_names)
class RamWeigherTestCase(test.NoDBTestCase):
def setUp(self):
super(RamWeigherTestCase, self).setUp()
self.useFixture(mockpatch.Patch(
'nova.db.compute_node_get_all',
return_value=fakes.COMPUTE_NODES))
self.host_manager = fakes.FakeHostManager()
self.weight_handler = weights.HostWeightHandler()
self.weight_classes = self.weight_handler.get_matching_classes(
['nova.scheduler.weights.ram.RAMWeigher'])
def _get_weighed_host(self, hosts, weight_properties=None):<|fim▁hole|>
def _get_all_hosts(self):
ctxt = context.get_admin_context()
return self.host_manager.get_all_host_states(ctxt)
def test_default_of_spreading_first(self):
hostinfo_list = self._get_all_hosts()
# host1: free_ram_mb=512
# host2: free_ram_mb=1024
# host3: free_ram_mb=3072
# host4: free_ram_mb=8192
# so, host4 should win:
weighed_host = self._get_weighed_host(hostinfo_list)
self.assertEqual(weighed_host.weight, 1.0)
self.assertEqual(weighed_host.obj.host, 'host4')
def test_ram_filter_multiplier1(self):
self.flags(ram_weight_multiplier=0.0)
hostinfo_list = self._get_all_hosts()
# host1: free_ram_mb=512
# host2: free_ram_mb=1024
# host3: free_ram_mb=3072
# host4: free_ram_mb=8192
# We do not know the host, all have same weight.
weighed_host = self._get_weighed_host(hostinfo_list)
self.assertEqual(weighed_host.weight, 0.0)
def test_ram_filter_multiplier2(self):
self.flags(ram_weight_multiplier=2.0)
hostinfo_list = self._get_all_hosts()
# host1: free_ram_mb=512
# host2: free_ram_mb=1024
# host3: free_ram_mb=3072
# host4: free_ram_mb=8192
# so, host4 should win:
weighed_host = self._get_weighed_host(hostinfo_list)
self.assertEqual(weighed_host.weight, 1.0 * 2)
self.assertEqual(weighed_host.obj.host, 'host4')
def test_ram_filter_negative(self):
self.flags(ram_weight_multiplier=1.0)
hostinfo_list = self._get_all_hosts()
host_attr = {'id': 100, 'memory_mb': 8192, 'free_ram_mb': -512}
host_state = fakes.FakeHostState('negative', 'negative', host_attr)
hostinfo_list = list(hostinfo_list) + [host_state]
# host1: free_ram_mb=512
# host2: free_ram_mb=1024
# host3: free_ram_mb=3072
# host4: free_ram_mb=8192
# negativehost: free_ram_mb=-512
# so, host4 should win
weights = self.weight_handler.get_weighed_objects(self.weight_classes,
hostinfo_list, {})
weighed_host = weights[0]
self.assertEqual(weighed_host.weight, 1)
self.assertEqual(weighed_host.obj.host, "host4")
# and negativehost should lose
weighed_host = weights[-1]
self.assertEqual(weighed_host.weight, 0)
self.assertEqual(weighed_host.obj.host, "negative")
class MetricsWeigherTestCase(test.NoDBTestCase):
def setUp(self):
super(MetricsWeigherTestCase, self).setUp()
self.useFixture(mockpatch.Patch(
'nova.db.compute_node_get_all',
return_value=fakes.COMPUTE_NODES_METRICS))
self.host_manager = fakes.FakeHostManager()
self.weight_handler = weights.HostWeightHandler()
self.weight_classes = self.weight_handler.get_matching_classes(
['nova.scheduler.weights.metrics.MetricsWeigher'])
def _get_weighed_host(self, hosts, setting, weight_properties=None):
if not weight_properties:
weight_properties = {}
self.flags(weight_setting=setting, group='metrics')
return self.weight_handler.get_weighed_objects(self.weight_classes,
hosts, weight_properties)[0]
def _get_all_hosts(self):
ctxt = context.get_admin_context()
return self.host_manager.get_all_host_states(ctxt)
def _do_test(self, settings, expected_weight, expected_host):
hostinfo_list = self._get_all_hosts()
weighed_host = self._get_weighed_host(hostinfo_list, settings)
self.assertEqual(weighed_host.weight, expected_weight)
self.assertEqual(weighed_host.obj.host, expected_host)
def test_single_resource(self):
# host1: foo=512
# host2: foo=1024
# host3: foo=3072
# host4: foo=8192
# so, host4 should win:
setting = ['foo=1']
self._do_test(setting, 1.0, 'host4')
def test_multiple_resource(self):
# host1: foo=512, bar=1
# host2: foo=1024, bar=2
# host3: foo=3072, bar=1
# host4: foo=8192, bar=0
# so, host2 should win:
setting = ['foo=0.0001', 'bar=1']
self._do_test(setting, 1.0, 'host2')
def test_single_resourcenegtive_ratio(self):
# host1: foo=512
# host2: foo=1024
# host3: foo=3072
# host4: foo=8192
# so, host1 should win:
setting = ['foo=-1']
self._do_test(setting, 1.0, 'host1')
def test_multiple_resource_missing_ratio(self):
# host1: foo=512, bar=1
# host2: foo=1024, bar=2
# host3: foo=3072, bar=1
# host4: foo=8192, bar=0
# so, host4 should win:
setting = ['foo=0.0001', 'bar']
self._do_test(setting, 1.0, 'host4')
def test_multiple_resource_wrong_ratio(self):
# host1: foo=512, bar=1
# host2: foo=1024, bar=2
# host3: foo=3072, bar=1
# host4: foo=8192, bar=0
# so, host4 should win:
setting = ['foo=0.0001', 'bar = 2.0t']
self._do_test(setting, 1.0, 'host4')
def _check_parsing_result(self, weigher, setting, results):
self.flags(weight_setting=setting, group='metrics')
weigher._parse_setting()
self.assertEqual(len(weigher.setting), len(results))
for item in results:
self.assertIn(item, weigher.setting)
def test_parse_setting(self):
weigher = self.weight_classes[0]()
self._check_parsing_result(weigher,
['foo=1'],
[('foo', 1.0)])
self._check_parsing_result(weigher,
['foo=1', 'bar=-2.1'],
[('foo', 1.0), ('bar', -2.1)])
self._check_parsing_result(weigher,
['foo=a1', 'bar=-2.1'],
[('bar', -2.1)])
self._check_parsing_result(weigher,
['foo', 'bar=-2.1'],
[('bar', -2.1)])
self._check_parsing_result(weigher,
['=5', 'bar=-2.1'],
[('bar', -2.1)])
def test_metric_not_found_required(self):
setting = ['foo=1', 'zot=2']
self.assertRaises(exception.ComputeHostMetricNotFound,
self._do_test,
setting,
8192,
'host4')
def test_metric_not_found_non_required(self):
# host1: foo=512, bar=1
# host2: foo=1024, bar=2
# host3: foo=3072, bar=1
# host4: foo=8192, bar=0
# host5: foo=768, bar=0, zot=1
# host6: foo=2048, bar=0, zot=2
# so, host5 should win:
self.flags(required=False, group='metrics')
setting = ['foo=0.0001', 'zot=-1']
self._do_test(setting, 1.0, 'host5')<|fim▁end|>
|
if weight_properties is None:
weight_properties = {}
return self.weight_handler.get_weighed_objects(self.weight_classes,
hosts, weight_properties)[0]
|
<|file_name|>factory.js<|end_file_name|><|fim▁begin|>/**
* @license Apache-2.0
*
* Copyright (c) 2020 The Stdlib Authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
'use strict';
// MODULES //
var isPositiveInteger = require( '@stdlib/math/base/assert/is-positive-integer' );
var constantFunction = require( '@stdlib/utils/constant-function' );
var isfinite = require( '@stdlib/math/base/assert/is-finite' );
var round = require( '@stdlib/math/base/special/round' );
var isnan = require( '@stdlib/math/base/assert/is-nan' );
var exp = require( '@stdlib/math/base/special/exp' );
var LN2 = require( '@stdlib/constants/float64/ln-two' );
var weights = require( './weights.js' );
// MAIN //
/**
* Returns a function for evaluating the cumulative distribution function (CDF) for the distribution of the Wilcoxon signed rank test statistic with `n` observations.
*
* @param {PositiveInteger} n - number of observations<|fim▁hole|>* @example
* var cdf = factory( 8 );
* var y = cdf( 3.9 );
* // returns ~0.027
*
* y = cdf( 17.0 );
* // returns ~0.473
*/
function factory( n ) {
var mlim;
var pui;
if ( !isPositiveInteger( n ) || !isfinite( n ) ) {
return constantFunction( NaN );
}
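	// pui is the probability mass 2^(-n) of each equally likely sign
	// assignment; mlim = n*(n+1)/2 is the largest attainable statistic.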
pui = exp( -n * LN2 );
mlim = n * ( n + 1 ) / 2;
return cdf;
/**
* Evaluates the cumulative distribution function (CDF) for the distribution of the Wilcoxon signed rank test statistic.
*
* @private
* @param {number} x - input value
* @returns {Probability} evaluated CDF
*
* @example
* var y = cdf( 2 );
* // returns <number>
*/
function cdf( x ) {
var i;
var p;
if ( isnan( x ) ) {
return NaN;
}
if ( x < 0.0 ) {
return 0.0;
}
x = round( x );
if ( x >= mlim ) {
return 1.0;
}
p = 0;
for ( i = 0; i <= x; i++ ) {
p += weights( i, n ) * pui;
}
return p;
}
}
// EXPORTS //
module.exports = factory;<|fim▁end|>
|
* @returns {Function} CDF
*
|
<|file_name|>console.py<|end_file_name|><|fim▁begin|>"""a readline console module (unix only).
[email protected]
the module starts a subprocess for the readline console and
communicates through pipes (prompt/cmd).
the console is polled through a timer, which depends on PySide.
"""
from select import select
import os
import sys
import signal
if __name__ == '__main__':
import readline
# prompt input stream
fd_in = int(sys.argv[1])
file_in = os.fdopen( fd_in )
# cmd output stream<|fim▁hole|> fd_out = int(sys.argv[2])
file_out = os.fdopen( fd_out, 'w' )
# some helpers
def send(data):
file_out.write(data + '\n')
file_out.flush()
def recv():
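        # drain any queued prompt lines, returning only the most recent one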
while True:
res = file_in.readline().rstrip('\n')
read, _, _ = select([ file_in ], [], [], 0)
if not read: return res
class History:
"""readline history safe open/close"""
def __init__(self, filename):
self.filename = os.path.expanduser( filename )
def __enter__(self):
try:
readline.read_history_file(self.filename)
# print 'loaded console history from', self.filename
except IOError:
pass
return self
def __exit__(self, type, value, traceback):
readline.write_history_file( self.filename )
def cleanup(*args):
print('console cleanup')
os.system('stty sane')
    def make_handler(sig, old):
        # bind sig/old per signal; a closure over the loop variables would
        # see only the last iteration's values when the handler finally fires
        def new(*args):
            cleanup()
            signal.signal(sig, old)
            os.kill(os.getpid(), sig)
        return new
    for sig in [signal.SIGQUIT,
                signal.SIGTERM,
                signal.SIGILL,
                signal.SIGSEGV]:
        signal.signal(sig, make_handler(sig, signal.getsignal(sig)))
# main loop
try:
with History( "~/.sofa-console" ):
print 'console started'
while True:
send( raw_input( recv() ) )
except KeyboardInterrupt:
print 'console exited (SIGINT)'
except EOFError:
ppid = os.getppid()
try:
os.kill(os.getppid(), signal.SIGTERM)
print 'console exited (EOF), terminating parent process'
except OSError:
pass
else:
import subprocess
import code
import atexit
_cleanup = None
def _register( c ):
global _cleanup
if _cleanup: _cleanup()
_cleanup = c
class Console(code.InteractiveConsole):
def __init__(self, locals = None, timeout = 100):
"""
python interpreter taking input from console subprocess
scope is provided through 'locals' (usually: locals() or globals())
'timeout' (in milliseconds) sets how often is the console polled.
"""
code.InteractiveConsole.__init__(self, locals)
if timeout >= 0:
def callback():
self.poll()
from PySide import QtCore
self.timer = QtCore.QTimer()
self.timer.timeout.connect( callback )
self.timer.start( timeout )
_register( lambda: self.timer.stop() )
# execute next command, blocks on console input
def next(self):
line = recv()
data = '>>> '
if self.push( line ):
data = '... '
send( data )
# convenience
def poll(self):
if ready(): self.next()
# send prompt to indicate we are ready
def send(data):
prompt_out.write(data + '\n')
prompt_out.flush()
# receive command line
def recv():
res = cmd_in.readline()
if res: return res.rstrip('\n')
return res
# is there any available command ?
def ready():
read, _, _ = select([ cmd_in ], [], [], 0)
return read
# communication pipes
prompt = os.pipe()
cmd = os.pipe()
# subprocess with in/out fd, and forwarding stdin
sub = subprocess.Popen(['python', __file__,
str(prompt[0]), str(cmd[1])],
stdin = sys.stdin)
# open the tubes !
prompt_out = os.fdopen(prompt[1], 'w')
cmd_in = os.fdopen(cmd[0], 'r')
# we're ready
send('>>> ')
# def cleanup(*args):
# print('console cleanup')
# os.system('stty sane')
# def exit(*args):
# print 'exit'
# cleanup()
# sys.exit(0) forces cleanup *from python* before the gui
# closes. otherwise pyside causes segfault on python finalize.
def handler(*args):
sub.terminate()
sub.wait()
sys.exit(0)
from PySide import QtCore
app = QtCore.QCoreApplication.instance()
app.aboutToQuit.connect( handler )
# import atexit
# atexit.register( handler )
# import atexit
# atexit.register( exit )
# for sig in [signal.SIGSEGV, signal.SIGILL]:
# old = signal.getsignal(sig)
# def h(*args):
# print args
# sub.terminate()
# signal.signal(sig, old)
# os.kill(os.getpid(), sig)
# signal.signal(sig, h)<|fim▁end|>
| |
<|file_name|>ipmitool.py<|end_file_name|><|fim▁begin|># coding=utf-8
# Copyright 2012 Hewlett-Packard Development Company, L.P.
# Copyright (c) 2012 NTT DOCOMO, INC.
# Copyright 2014 International Business Machines Corporation
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
"""
IPMI power manager driver.
Uses the 'ipmitool' command (http://ipmitool.sourceforge.net/) to remotely
manage hardware. This includes setting the boot device, getting a
serial-over-LAN console, and controlling the power state of the machine.
NOTE THAT CERTAIN DISTROS MAY INSTALL openipmi BY DEFAULT, INSTEAD OF ipmitool,
WHICH PROVIDES DIFFERENT COMMAND-LINE OPTIONS AND *IS NOT SUPPORTED* BY THIS
DRIVER.
"""
import contextlib
import os
import re
import subprocess
import tempfile
import time
from oslo_concurrency import processutils
from oslo_config import cfg
from oslo_log import log as logging
from oslo_service import loopingcall
from oslo_utils import excutils
from ironic.common import boot_devices
from ironic.common import exception
from ironic.common.i18n import _
from ironic.common.i18n import _LE
from ironic.common.i18n import _LI
from ironic.common.i18n import _LW
from ironic.common import states
from ironic.common import utils
from ironic.conductor import task_manager
from ironic.drivers import base
from ironic.drivers.modules import console_utils
CONF = cfg.CONF
CONF.import_opt('retry_timeout',
'ironic.drivers.modules.ipminative',
group='ipmi')
CONF.import_opt('min_command_interval',
'ironic.drivers.modules.ipminative',
group='ipmi')
LOG = logging.getLogger(__name__)
VALID_PRIV_LEVELS = ['ADMINISTRATOR', 'CALLBACK', 'OPERATOR', 'USER']
VALID_PROTO_VERSIONS = ('2.0', '1.5')
REQUIRED_PROPERTIES = {<|fim▁hole|> 'ipmi_priv_level': _("privilege level; default is ADMINISTRATOR. One of "
"%s. Optional.") % ', '.join(VALID_PRIV_LEVELS),
'ipmi_username': _("username; default is NULL user. Optional."),
'ipmi_bridging': _("bridging_type; default is \"no\". One of \"single\", "
"\"dual\", \"no\". Optional."),
'ipmi_transit_channel': _("transit channel for bridged request. Required "
"only if ipmi_bridging is set to \"dual\"."),
'ipmi_transit_address': _("transit address for bridged request. Required "
"only if ipmi_bridging is set to \"dual\"."),
'ipmi_target_channel': _("destination channel for bridged request. "
"Required only if ipmi_bridging is set to "
"\"single\" or \"dual\"."),
'ipmi_target_address': _("destination address for bridged request. "
"Required only if ipmi_bridging is set "
"to \"single\" or \"dual\"."),
'ipmi_local_address': _("local IPMB address for bridged requests. "
"Used only if ipmi_bridging is set "
"to \"single\" or \"dual\". Optional."),
'ipmi_protocol_version': _('the version of the IPMI protocol; default '
'is "2.0". One of "1.5", "2.0". Optional.'),
}
COMMON_PROPERTIES = REQUIRED_PROPERTIES.copy()
COMMON_PROPERTIES.update(OPTIONAL_PROPERTIES)
CONSOLE_PROPERTIES = {
'ipmi_terminal_port': _("node's UDP port to connect to. Only required for "
"console access.")
}
BRIDGING_OPTIONS = [('local_address', '-m'),
('transit_channel', '-B'), ('transit_address', '-T'),
('target_channel', '-b'), ('target_address', '-t')]
LAST_CMD_TIME = {}
TIMING_SUPPORT = None
SINGLE_BRIDGE_SUPPORT = None
DUAL_BRIDGE_SUPPORT = None
TMP_DIR_CHECKED = None
ipmitool_command_options = {
'timing': ['ipmitool', '-N', '0', '-R', '0', '-h'],
'single_bridge': ['ipmitool', '-m', '0', '-b', '0', '-t', '0', '-h'],
'dual_bridge': ['ipmitool', '-m', '0', '-b', '0', '-t', '0',
'-B', '0', '-T', '0', '-h']}
# Note(TheJulia): This string is hardcoded in ipmitool's lanplus driver
# and is substituted in return for the error code received from the IPMI
# controller. As of 1.8.15, no internationalization support appears to
# be in ipmitool which means the string should always be returned in this
# form regardless of locale.
IPMITOOL_RETRYABLE_FAILURES = ['insufficient resources for session']
def _check_option_support(options):
"""Checks if the specific ipmitool options are supported on host.
This method updates the module-level variables indicating whether
an option is supported so that it is accessible by any driver
interface class in this module. It is intended to be called from
the __init__ method of such classes only.
:param options: list of ipmitool options to be checked
:raises: OSError
"""
for opt in options:
if _is_option_supported(opt) is None:
try:
cmd = ipmitool_command_options[opt]
# NOTE(cinerama): use subprocess.check_call to
# check options & suppress ipmitool output to
# avoid alarming people
with open(os.devnull, 'wb') as nullfile:
subprocess.check_call(cmd, stdout=nullfile,
stderr=nullfile)
except subprocess.CalledProcessError:
LOG.info(_LI("Option %(opt)s is not supported by ipmitool"),
{'opt': opt})
_is_option_supported(opt, False)
else:
LOG.info(_LI("Option %(opt)s is supported by ipmitool"),
{'opt': opt})
_is_option_supported(opt, True)
def _is_option_supported(option, is_supported=None):
"""Indicates whether the particular ipmitool option is supported.
:param option: specific ipmitool option
:param is_supported: Optional Boolean. when specified, this value
is assigned to the module-level variable indicating
whether the option is supported. Used only if a value
is not already assigned.
:returns: True, indicates the option is supported
:returns: False, indicates the option is not supported
:returns: None, indicates that it is not aware whether the option
is supported
"""
global SINGLE_BRIDGE_SUPPORT
global DUAL_BRIDGE_SUPPORT
global TIMING_SUPPORT
if option == 'single_bridge':
if (SINGLE_BRIDGE_SUPPORT is None) and (is_supported is not None):
SINGLE_BRIDGE_SUPPORT = is_supported
return SINGLE_BRIDGE_SUPPORT
elif option == 'dual_bridge':
if (DUAL_BRIDGE_SUPPORT is None) and (is_supported is not None):
DUAL_BRIDGE_SUPPORT = is_supported
return DUAL_BRIDGE_SUPPORT
elif option == 'timing':
if (TIMING_SUPPORT is None) and (is_supported is not None):
TIMING_SUPPORT = is_supported
return TIMING_SUPPORT
def _console_pwfile_path(uuid):
"""Return the file path for storing the ipmi password for a console."""
file_name = "%(uuid)s.pw" % {'uuid': uuid}
return os.path.join(CONF.tempdir, file_name)
@contextlib.contextmanager
def _make_password_file(password):
"""Makes a temporary file that contains the password.
:param password: the password
:returns: the absolute pathname of the temporary file
:raises: PasswordFileFailedToCreate from creating or writing to the
temporary file
"""
f = None
try:
f = tempfile.NamedTemporaryFile(mode='w', dir=CONF.tempdir)
f.write(str(password))
f.flush()
except (IOError, OSError) as exc:
if f is not None:
f.close()
raise exception.PasswordFileFailedToCreate(error=exc)
except Exception:
with excutils.save_and_reraise_exception():
if f is not None:
f.close()
try:
# NOTE(jlvillal): This yield can not be in the try/except block above
# because an exception by the caller of this function would then get
# changed to a PasswordFileFailedToCreate exception which would mislead
# about the problem and its cause.
yield f.name
finally:
if f is not None:
f.close()
def _parse_driver_info(node):
"""Gets the parameters required for ipmitool to access the node.
:param node: the Node of interest.
:returns: dictionary of parameters.
:raises: InvalidParameterValue when an invalid value is specified
:raises: MissingParameterValue when a required ipmi parameter is missing.
"""
info = node.driver_info or {}
bridging_types = ['single', 'dual']
missing_info = [key for key in REQUIRED_PROPERTIES if not info.get(key)]
if missing_info:
raise exception.MissingParameterValue(_(
"Missing the following IPMI credentials in node's"
" driver_info: %s.") % missing_info)
address = info.get('ipmi_address')
username = info.get('ipmi_username')
password = info.get('ipmi_password')
port = info.get('ipmi_terminal_port')
priv_level = info.get('ipmi_priv_level', 'ADMINISTRATOR')
bridging_type = info.get('ipmi_bridging', 'no')
local_address = info.get('ipmi_local_address')
transit_channel = info.get('ipmi_transit_channel')
transit_address = info.get('ipmi_transit_address')
target_channel = info.get('ipmi_target_channel')
target_address = info.get('ipmi_target_address')
protocol_version = str(info.get('ipmi_protocol_version', '2.0'))
if protocol_version not in VALID_PROTO_VERSIONS:
valid_versions = ', '.join(VALID_PROTO_VERSIONS)
raise exception.InvalidParameterValue(_(
"Invalid IPMI protocol version value %(version)s, the valid "
"value can be one of %(valid_versions)s") %
{'version': protocol_version, 'valid_versions': valid_versions})
if port:
try:
port = int(port)
except ValueError:
raise exception.InvalidParameterValue(_(
"IPMI terminal port is not an integer."))
# check if ipmi_bridging has proper value
if bridging_type == 'no':
# if bridging is not selected, then set all bridging params to None
(local_address, transit_channel, transit_address, target_channel,
target_address) = (None,) * 5
elif bridging_type in bridging_types:
# check if the particular bridging option is supported on host
if not _is_option_supported('%s_bridge' % bridging_type):
raise exception.InvalidParameterValue(_(
"Value for ipmi_bridging is provided as %s, but IPMI "
"bridging is not supported by the IPMI utility installed "
"on host. Ensure ipmitool version is > 1.8.11"
) % bridging_type)
# ensure that all the required parameters are provided
params_undefined = [param for param, value in [
("ipmi_target_channel", target_channel),
('ipmi_target_address', target_address)] if value is None]
if bridging_type == 'dual':
params_undefined2 = [param for param, value in [
("ipmi_transit_channel", transit_channel),
('ipmi_transit_address', transit_address)
] if value is None]
params_undefined.extend(params_undefined2)
else:
# if single bridging was selected, set dual bridge params to None
transit_channel = transit_address = None
# If the required parameters were not provided,
# raise an exception
if params_undefined:
raise exception.MissingParameterValue(_(
"%(param)s not provided") % {'param': params_undefined})
else:
raise exception.InvalidParameterValue(_(
"Invalid value for ipmi_bridging: %(bridging_type)s,"
" the valid value can be one of: %(bridging_types)s"
) % {'bridging_type': bridging_type,
'bridging_types': bridging_types + ['no']})
if priv_level not in VALID_PRIV_LEVELS:
valid_priv_lvls = ', '.join(VALID_PRIV_LEVELS)
raise exception.InvalidParameterValue(_(
"Invalid privilege level value:%(priv_level)s, the valid value"
" can be one of %(valid_levels)s") %
{'priv_level': priv_level, 'valid_levels': valid_priv_lvls})
return {
'address': address,
'username': username,
'password': password,
'port': port,
'uuid': node.uuid,
'priv_level': priv_level,
'local_address': local_address,
'transit_channel': transit_channel,
'transit_address': transit_address,
'target_channel': target_channel,
'target_address': target_address,
'protocol_version': protocol_version,
}
def _exec_ipmitool(driver_info, command):
"""Execute the ipmitool command.
:param driver_info: the ipmitool parameters for accessing a node.
:param command: the ipmitool command to be executed.
:returns: (stdout, stderr) from executing the command.
:raises: PasswordFileFailedToCreate from creating or writing to the
temporary file.
:raises: processutils.ProcessExecutionError from executing the command.
"""
ipmi_version = ('lanplus'
if driver_info['protocol_version'] == '2.0'
else 'lan')
args = ['ipmitool',
'-I',
ipmi_version,
'-H',
driver_info['address'],
'-L', driver_info['priv_level']
]
if driver_info['username']:
args.append('-U')
args.append(driver_info['username'])
for name, option in BRIDGING_OPTIONS:
if driver_info[name] is not None:
args.append(option)
args.append(driver_info[name])
# specify retry timing more precisely, if supported
num_tries = max(
(CONF.ipmi.retry_timeout // CONF.ipmi.min_command_interval), 1)
if _is_option_supported('timing'):
args.append('-R')
args.append(str(num_tries))
args.append('-N')
args.append(str(CONF.ipmi.min_command_interval))
end_time = (time.time() + CONF.ipmi.retry_timeout)
while True:
num_tries = num_tries - 1
# NOTE(deva): ensure that no communications are sent to a BMC more
# often than once every min_command_interval seconds.
time_till_next_poll = CONF.ipmi.min_command_interval - (
time.time() - LAST_CMD_TIME.get(driver_info['address'], 0))
if time_till_next_poll > 0:
time.sleep(time_till_next_poll)
# Resetting the list that will be utilized so the password arguments
# from any previous execution are preserved.
cmd_args = args[:]
# 'ipmitool' command will prompt password if there is no '-f'
# option, we set it to '\0' to write a password file to support
# empty password
with _make_password_file(driver_info['password'] or '\0') as pw_file:
cmd_args.append('-f')
cmd_args.append(pw_file)
cmd_args.extend(command.split(" "))
try:
out, err = utils.execute(*cmd_args)
return out, err
except processutils.ProcessExecutionError as e:
with excutils.save_and_reraise_exception() as ctxt:
err_list = [x for x in IPMITOOL_RETRYABLE_FAILURES
if x in e.args[0]]
if ((time.time() > end_time) or
(num_tries == 0) or
not err_list):
LOG.error(_LE('IPMI Error while attempting "%(cmd)s"'
'for node %(node)s. Error: %(error)s'), {
'node': driver_info['uuid'],
'cmd': e.cmd, 'error': e
})
else:
ctxt.reraise = False
LOG.warning(_LW('IPMI Error encountered, retrying '
'"%(cmd)s" for node %(node)s. '
'Error: %(error)s'), {
'node': driver_info['uuid'],
'cmd': e.cmd, 'error': e
})
finally:
LAST_CMD_TIME[driver_info['address']] = time.time()
def _sleep_time(iter):
"""Return the time-to-sleep for the n'th iteration of a retry loop.
This implementation increases exponentially.
:param iter: iteration number
:returns: number of seconds to sleep
"""
if iter <= 1:
return 1
return iter ** 2
def _set_and_wait(target_state, driver_info):
"""Helper function for DynamicLoopingCall.
    This method changes the power state and polls the BMC until the desired
power state is reached, or CONF.ipmi.retry_timeout would be exceeded by the
next iteration.
This method assumes the caller knows the current power state and does not
check it prior to changing the power state. Most BMCs should be fine, but
if a driver is concerned, the state should be checked prior to calling this
method.
:param target_state: desired power state
:param driver_info: the ipmitool parameters for accessing a node.
:returns: one of ironic.common.states
"""
if target_state == states.POWER_ON:
state_name = "on"
elif target_state == states.POWER_OFF:
state_name = "off"
def _wait(mutable):
try:
# Only issue power change command once
if mutable['iter'] < 0:
_exec_ipmitool(driver_info, "power %s" % state_name)
else:
mutable['power'] = _power_status(driver_info)
except (exception.PasswordFileFailedToCreate,
processutils.ProcessExecutionError,
exception.IPMIFailure):
# Log failures but keep trying
LOG.warning(_LW("IPMI power %(state)s failed for node %(node)s."),
{'state': state_name, 'node': driver_info['uuid']})
finally:
mutable['iter'] += 1
if mutable['power'] == target_state:
raise loopingcall.LoopingCallDone()
sleep_time = _sleep_time(mutable['iter'])
if (sleep_time + mutable['total_time']) > CONF.ipmi.retry_timeout:
# Stop if the next loop would exceed maximum retry_timeout
LOG.error(_LE('IPMI power %(state)s timed out after '
'%(tries)s retries on node %(node_id)s.'),
{'state': state_name, 'tries': mutable['iter'],
'node_id': driver_info['uuid']})
mutable['power'] = states.ERROR
raise loopingcall.LoopingCallDone()
else:
mutable['total_time'] += sleep_time
return sleep_time
# Use mutable objects so the looped method can change them.
# Start 'iter' from -1 so that the first two checks are one second apart.
status = {'power': None, 'iter': -1, 'total_time': 0}
timer = loopingcall.DynamicLoopingCall(_wait, status)
timer.start().wait()
return status['power']
def _power_on(driver_info):
"""Turn the power ON for this node.
:param driver_info: the ipmitool parameters for accessing a node.
:returns: one of ironic.common.states POWER_ON or ERROR.
:raises: IPMIFailure on an error from ipmitool (from _power_status call).
"""
return _set_and_wait(states.POWER_ON, driver_info)
def _power_off(driver_info):
"""Turn the power OFF for this node.
:param driver_info: the ipmitool parameters for accessing a node.
:returns: one of ironic.common.states POWER_OFF or ERROR.
:raises: IPMIFailure on an error from ipmitool (from _power_status call).
"""
return _set_and_wait(states.POWER_OFF, driver_info)
def _power_status(driver_info):
"""Get the power status for a node.
:param driver_info: the ipmitool access parameters for a node.
:returns: one of ironic.common.states POWER_OFF, POWER_ON or ERROR.
:raises: IPMIFailure on an error from ipmitool.
"""
cmd = "power status"
try:
out_err = _exec_ipmitool(driver_info, cmd)
except (exception.PasswordFileFailedToCreate,
processutils.ProcessExecutionError) as e:
LOG.warning(_LW("IPMI power status failed for node %(node_id)s with "
"error: %(error)s."),
{'node_id': driver_info['uuid'], 'error': e})
raise exception.IPMIFailure(cmd=cmd)
if out_err[0] == "Chassis Power is on\n":
return states.POWER_ON
elif out_err[0] == "Chassis Power is off\n":
return states.POWER_OFF
else:
return states.ERROR
def _process_sensor(sensor_data):
sensor_data_fields = sensor_data.split('\n')
sensor_data_dict = {}
for field in sensor_data_fields:
if not field:
continue
kv_value = field.split(':')
if len(kv_value) != 2:
continue
sensor_data_dict[kv_value[0].strip()] = kv_value[1].strip()
return sensor_data_dict
def _get_sensor_type(node, sensor_data_dict):
    # There are only three sensor type name IDs: 'Sensor Type (Analog)',
    # 'Sensor Type (Discrete)' and 'Sensor Type (Threshold)'.
for key in ('Sensor Type (Analog)', 'Sensor Type (Discrete)',
'Sensor Type (Threshold)'):
try:
return sensor_data_dict[key].split(' ', 1)[0]
except KeyError:
continue
raise exception.FailedToParseSensorData(
node=node.uuid,
error=(_("parse ipmi sensor data failed, unknown sensor type"
" data: %(sensors_data)s"),
{'sensors_data': sensor_data_dict}))
def _parse_ipmi_sensors_data(node, sensors_data):
"""Parse the IPMI sensors data and format to the dict grouping by type.
We run 'ipmitool' command with 'sdr -v' options, which can return sensor
details in human-readable format, we need to format them to JSON string
dict-based data for Ceilometer Collector which can be sent it as payload
out via notification bus and consumed by Ceilometer Collector.
:param sensors_data: the sensor data returned by ipmitool command.
:returns: the sensor data with JSON format, grouped by sensor type.
:raises: FailedToParseSensorData when error encountered during parsing.
"""
sensors_data_dict = {}
if not sensors_data:
return sensors_data_dict
sensors_data_array = sensors_data.split('\n\n')
for sensor_data in sensors_data_array:
sensor_data_dict = _process_sensor(sensor_data)
if not sensor_data_dict:
continue
sensor_type = _get_sensor_type(node, sensor_data_dict)
# ignore the sensors which has no current 'Sensor Reading' data
if 'Sensor Reading' in sensor_data_dict:
sensors_data_dict.setdefault(
sensor_type,
{})[sensor_data_dict['Sensor ID']] = sensor_data_dict
# get nothing, no valid sensor data
if not sensors_data_dict:
raise exception.FailedToParseSensorData(
node=node.uuid,
error=(_("parse ipmi sensor data failed, get nothing with input"
" data: %(sensors_data)s")
% {'sensors_data': sensors_data}))
return sensors_data_dict
@task_manager.require_exclusive_lock
def send_raw(task, raw_bytes):
"""Send raw bytes to the BMC. Bytes should be a string of bytes.
:param task: a TaskManager instance.
:param raw_bytes: a string of raw bytes to send, e.g. '0x00 0x01'
:raises: IPMIFailure on an error from ipmitool.
:raises: MissingParameterValue if a required parameter is missing.
:raises: InvalidParameterValue when an invalid value is specified.
"""
node_uuid = task.node.uuid
LOG.debug('Sending node %(node)s raw bytes %(bytes)s',
{'bytes': raw_bytes, 'node': node_uuid})
driver_info = _parse_driver_info(task.node)
cmd = 'raw %s' % raw_bytes
try:
out, err = _exec_ipmitool(driver_info, cmd)
LOG.debug('send raw bytes returned stdout: %(stdout)s, stderr:'
' %(stderr)s', {'stdout': out, 'stderr': err})
except (exception.PasswordFileFailedToCreate,
processutils.ProcessExecutionError) as e:
LOG.exception(_LE('IPMI "raw bytes" failed for node %(node_id)s '
'with error: %(error)s.'),
{'node_id': node_uuid, 'error': e})
raise exception.IPMIFailure(cmd=cmd)
def _check_temp_dir():
"""Check for Valid temp directory."""
global TMP_DIR_CHECKED
# because a temporary file is used to pass the password to ipmitool,
# we should check the directory
if TMP_DIR_CHECKED is None:
try:
utils.check_dir()
except (exception.PathNotFound,
exception.DirectoryNotWritable,
exception.InsufficientDiskSpace) as e:
with excutils.save_and_reraise_exception():
TMP_DIR_CHECKED = False
err_msg = (_("Ipmitool drivers need to be able to create "
"temporary files to pass password to ipmitool. "
"Encountered error: %s") % e)
e.message = err_msg
LOG.error(err_msg)
else:
TMP_DIR_CHECKED = True
class IPMIPower(base.PowerInterface):
def __init__(self):
try:
_check_option_support(['timing', 'single_bridge', 'dual_bridge'])
except OSError:
raise exception.DriverLoadError(
driver=self.__class__.__name__,
reason=_("Unable to locate usable ipmitool command in "
"the system path when checking ipmitool version"))
_check_temp_dir()
def get_properties(self):
return COMMON_PROPERTIES
def validate(self, task):
"""Validate driver_info for ipmitool driver.
Check that node['driver_info'] contains IPMI credentials.
:param task: a TaskManager instance containing the node to act on.
:raises: InvalidParameterValue if required ipmi parameters are missing.
:raises: MissingParameterValue if a required parameter is missing.
"""
_parse_driver_info(task.node)
# NOTE(deva): don't actually touch the BMC in validate because it is
# called too often, and BMCs are too fragile.
# This is a temporary measure to mitigate problems while
# 1314954 and 1314961 are resolved.
def get_power_state(self, task):
"""Get the current power state of the task's node.
:param task: a TaskManager instance containing the node to act on.
:returns: one of ironic.common.states POWER_OFF, POWER_ON or ERROR.
:raises: InvalidParameterValue if required ipmi parameters are missing.
:raises: MissingParameterValue if a required parameter is missing.
:raises: IPMIFailure on an error from ipmitool (from _power_status
call).
"""
driver_info = _parse_driver_info(task.node)
return _power_status(driver_info)
@task_manager.require_exclusive_lock
def set_power_state(self, task, pstate):
"""Turn the power on or off.
:param task: a TaskManager instance containing the node to act on.
:param pstate: The desired power state, one of ironic.common.states
POWER_ON, POWER_OFF.
:raises: InvalidParameterValue if an invalid power state was specified.
:raises: MissingParameterValue if required ipmi parameters are missing
:raises: PowerStateFailure if the power couldn't be set to pstate.
"""
driver_info = _parse_driver_info(task.node)
if pstate == states.POWER_ON:
state = _power_on(driver_info)
elif pstate == states.POWER_OFF:
state = _power_off(driver_info)
else:
raise exception.InvalidParameterValue(
_("set_power_state called "
"with invalid power state %s.") % pstate)
if state != pstate:
raise exception.PowerStateFailure(pstate=pstate)
@task_manager.require_exclusive_lock
def reboot(self, task):
"""Cycles the power to the task's node.
:param task: a TaskManager instance containing the node to act on.
:raises: MissingParameterValue if required ipmi parameters are missing.
:raises: InvalidParameterValue if an invalid power state was specified.
:raises: PowerStateFailure if the final state of the node is not
POWER_ON.
"""
driver_info = _parse_driver_info(task.node)
_power_off(driver_info)
state = _power_on(driver_info)
if state != states.POWER_ON:
raise exception.PowerStateFailure(pstate=states.POWER_ON)
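    # Illustrative usage sketch (``task`` is a hypothetical TaskManager task;
    # in practice the conductor drives this interface):
    #   power = IPMIPower()
    #   power.validate(task)
    #   if power.get_power_state(task) != states.POWER_ON:
    #       power.set_power_state(task, states.POWER_ON)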
class IPMIManagement(base.ManagementInterface):
def get_properties(self):
return COMMON_PROPERTIES
def __init__(self):
try:
_check_option_support(['timing', 'single_bridge', 'dual_bridge'])
except OSError:
raise exception.DriverLoadError(
driver=self.__class__.__name__,
reason=_("Unable to locate usable ipmitool command in "
"the system path when checking ipmitool version"))
_check_temp_dir()
def validate(self, task):
"""Check that 'driver_info' contains IPMI credentials.
Validates whether the 'driver_info' property of the supplied
task's node contains the required credentials information.
:param task: a task from TaskManager.
:raises: InvalidParameterValue if required IPMI parameters
are missing.
:raises: MissingParameterValue if a required parameter is missing.
"""
_parse_driver_info(task.node)
def get_supported_boot_devices(self, task):
"""Get a list of the supported boot devices.
:param task: a task from TaskManager.
:returns: A list with the supported boot devices defined
in :mod:`ironic.common.boot_devices`.
"""
return [boot_devices.PXE, boot_devices.DISK, boot_devices.CDROM,
boot_devices.BIOS, boot_devices.SAFE]
@task_manager.require_exclusive_lock
def set_boot_device(self, task, device, persistent=False):
"""Set the boot device for the task's node.
Set the boot device to use on next reboot of the node.
:param task: a task from TaskManager.
:param device: the boot device, one of
:mod:`ironic.common.boot_devices`.
:param persistent: Boolean value. True if the boot device will
persist to all future boots, False if not.
Default: False.
:raises: InvalidParameterValue if an invalid boot device is specified
:raises: MissingParameterValue if required ipmi parameters are missing.
:raises: IPMIFailure on an error from ipmitool.
"""
if device not in self.get_supported_boot_devices(task):
raise exception.InvalidParameterValue(_(
"Invalid boot device %s specified.") % device)
        # note(JayF): The IPMI spec indicates that unless these raw bytes are
        # sent, the boot device setting times out after 60s. Since it could be
        # more than 60s before a node is rebooted, we should always send them.
        # This mimics pyghmi's current behavior, and the "option=timeout"
        # setting on newer ipmitool binaries.
timeout_disable = "0x00 0x08 0x03 0x08"
send_raw(task, timeout_disable)
cmd = "chassis bootdev %s" % device
if persistent:
cmd = cmd + " options=persistent"
driver_info = _parse_driver_info(task.node)
try:
out, err = _exec_ipmitool(driver_info, cmd)
except (exception.PasswordFileFailedToCreate,
processutils.ProcessExecutionError) as e:
LOG.warning(_LW('IPMI set boot device failed for node %(node)s '
'when executing "ipmitool %(cmd)s". '
'Error: %(error)s'),
{'node': driver_info['uuid'], 'cmd': cmd, 'error': e})
raise exception.IPMIFailure(cmd=cmd)
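    # For example, set_boot_device(task, boot_devices.PXE, persistent=True)
    # roughly amounts to "ipmitool ... chassis bootdev pxe options=persistent",
    # issued after the raw bytes above disable the 60s boot-flag timeout
    # (illustrative note only).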
def get_boot_device(self, task):
"""Get the current boot device for the task's node.
Returns the current boot device of the node.
:param task: a task from TaskManager.
:raises: InvalidParameterValue if required IPMI parameters
are missing.
:raises: IPMIFailure on an error from ipmitool.
:raises: MissingParameterValue if a required parameter is missing.
:returns: a dictionary containing:
:boot_device: the boot device, one of
:mod:`ironic.common.boot_devices` or None if it is unknown.
:persistent: Whether the boot device will persist to all
future boots or not, None if it is unknown.
"""
cmd = "chassis bootparam get 5"
driver_info = _parse_driver_info(task.node)
response = {'boot_device': None, 'persistent': None}
try:
out, err = _exec_ipmitool(driver_info, cmd)
except (exception.PasswordFileFailedToCreate,
processutils.ProcessExecutionError) as e:
LOG.warning(_LW('IPMI get boot device failed for node %(node)s '
'when executing "ipmitool %(cmd)s". '
'Error: %(error)s'),
{'node': driver_info['uuid'], 'cmd': cmd, 'error': e})
raise exception.IPMIFailure(cmd=cmd)
re_obj = re.search('Boot Device Selector : (.+)?\n', out)
if re_obj:
boot_selector = re_obj.groups('')[0]
if 'PXE' in boot_selector:
response['boot_device'] = boot_devices.PXE
elif 'Hard-Drive' in boot_selector:
if 'Safe-Mode' in boot_selector:
response['boot_device'] = boot_devices.SAFE
else:
response['boot_device'] = boot_devices.DISK
elif 'BIOS' in boot_selector:
response['boot_device'] = boot_devices.BIOS
elif 'CD/DVD' in boot_selector:
response['boot_device'] = boot_devices.CDROM
response['persistent'] = 'Options apply to all future boots' in out
return response
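    # The parsing above looks for output lines such as the following
    # (exact wording varies by BMC):
    #   Boot Device Selector : Force PXE
    #   Options apply to all future boots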
def get_sensors_data(self, task):
"""Get sensors data.
:param task: a TaskManager instance.
:raises: FailedToGetSensorData when getting the sensor data fails.
:raises: FailedToParseSensorData when parsing sensor data fails.
:raises: InvalidParameterValue if required ipmi parameters are missing
:raises: MissingParameterValue if a required parameter is missing.
:returns: returns a dict of sensor data group by sensor type.
"""
driver_info = _parse_driver_info(task.node)
        # with the '-v' option, we can get the entire sensor data including
        # the extended sensor information
cmd = "sdr -v"
try:
out, err = _exec_ipmitool(driver_info, cmd)
except (exception.PasswordFileFailedToCreate,
processutils.ProcessExecutionError) as e:
raise exception.FailedToGetSensorData(node=task.node.uuid,
error=e)
return _parse_ipmi_sensors_data(task.node, out)
class VendorPassthru(base.VendorInterface):
def __init__(self):
try:
_check_option_support(['single_bridge', 'dual_bridge'])
except OSError:
raise exception.DriverLoadError(
driver=self.__class__.__name__,
reason=_("Unable to locate usable ipmitool command in "
"the system path when checking ipmitool version"))
_check_temp_dir()
@base.passthru(['POST'])
@task_manager.require_exclusive_lock
def send_raw(self, task, http_method, raw_bytes):
"""Send raw bytes to the BMC. Bytes should be a string of bytes.
:param task: a TaskManager instance.
:param http_method: the HTTP method used on the request.
:param raw_bytes: a string of raw bytes to send, e.g. '0x00 0x01'
:raises: IPMIFailure on an error from ipmitool.
:raises: MissingParameterValue if a required parameter is missing.
:raises: InvalidParameterValue when an invalid value is specified.
"""
send_raw(task, raw_bytes)
@base.passthru(['POST'])
@task_manager.require_exclusive_lock
def bmc_reset(self, task, http_method, warm=True):
"""Reset BMC with IPMI command 'bmc reset (warm|cold)'.
:param task: a TaskManager instance.
:param http_method: the HTTP method used on the request.
:param warm: boolean parameter to decide on warm or cold reset.
:raises: IPMIFailure on an error from ipmitool.
:raises: MissingParameterValue if a required parameter is missing.
:raises: InvalidParameterValue when an invalid value is specified
"""
node_uuid = task.node.uuid
if warm:
warm_param = 'warm'
else:
warm_param = 'cold'
LOG.debug('Doing %(warm)s BMC reset on node %(node)s',
{'warm': warm_param, 'node': node_uuid})
driver_info = _parse_driver_info(task.node)
cmd = 'bmc reset %s' % warm_param
try:
out, err = _exec_ipmitool(driver_info, cmd)
LOG.debug('bmc reset returned stdout: %(stdout)s, stderr:'
' %(stderr)s', {'stdout': out, 'stderr': err})
except (exception.PasswordFileFailedToCreate,
processutils.ProcessExecutionError) as e:
LOG.exception(_LE('IPMI "bmc reset" failed for node %(node_id)s '
'with error: %(error)s.'),
{'node_id': node_uuid, 'error': e})
raise exception.IPMIFailure(cmd=cmd)
def get_properties(self):
return COMMON_PROPERTIES
def validate(self, task, method, **kwargs):
"""Validate vendor-specific actions.
If invalid, raises an exception; otherwise returns None.
Valid methods:
* send_raw
* bmc_reset
:param task: a task from TaskManager.
:param method: method to be validated
:param kwargs: info for action.
:raises: InvalidParameterValue when an invalid parameter value is
specified.
:raises: MissingParameterValue if a required parameter is missing.
"""
if method == 'send_raw':
if not kwargs.get('raw_bytes'):
raise exception.MissingParameterValue(_(
'Parameter raw_bytes (string of bytes) was not '
'specified.'))
_parse_driver_info(task.node)
class IPMIShellinaboxConsole(base.ConsoleInterface):
"""A ConsoleInterface that uses ipmitool and shellinabox."""
def __init__(self):
try:
_check_option_support(['timing', 'single_bridge', 'dual_bridge'])
except OSError:
raise exception.DriverLoadError(
driver=self.__class__.__name__,
reason=_("Unable to locate usable ipmitool command in "
"the system path when checking ipmitool version"))
_check_temp_dir()
def get_properties(self):
d = COMMON_PROPERTIES.copy()
d.update(CONSOLE_PROPERTIES)
return d
def validate(self, task):
"""Validate the Node console info.
:param task: a task from TaskManager.
:raises: InvalidParameterValue
:raises: MissingParameterValue when a required parameter is missing
"""
driver_info = _parse_driver_info(task.node)
if not driver_info['port']:
raise exception.MissingParameterValue(_(
"Missing 'ipmi_terminal_port' parameter in node's"
" driver_info."))
if driver_info['protocol_version'] != '2.0':
raise exception.InvalidParameterValue(_(
"Serial over lan only works with IPMI protocol version 2.0. "
"Check the 'ipmi_protocol_version' parameter in "
"node's driver_info"))
def start_console(self, task):
"""Start a remote console for the node.
:param task: a task from TaskManager
:raises: InvalidParameterValue if required ipmi parameters are missing
:raises: PasswordFileFailedToCreate if unable to create a file
containing the password
:raises: ConsoleError if the directory for the PID file cannot be
created
:raises: ConsoleSubprocessFailed when invoking the subprocess failed
"""
driver_info = _parse_driver_info(task.node)
path = _console_pwfile_path(driver_info['uuid'])
pw_file = console_utils.make_persistent_password_file(
path, driver_info['password'])
ipmi_cmd = ("/:%(uid)s:%(gid)s:HOME:ipmitool -H %(address)s"
" -I lanplus -U %(user)s -f %(pwfile)s"
% {'uid': os.getuid(),
'gid': os.getgid(),
'address': driver_info['address'],
'user': driver_info['username'],
'pwfile': pw_file})
for name, option in BRIDGING_OPTIONS:
if driver_info[name] is not None:
ipmi_cmd = " ".join([ipmi_cmd,
option, driver_info[name]])
if CONF.debug:
ipmi_cmd += " -v"
ipmi_cmd += " sol activate"
try:
console_utils.start_shellinabox_console(driver_info['uuid'],
driver_info['port'],
ipmi_cmd)
except (exception.ConsoleError, exception.ConsoleSubprocessFailed):
with excutils.save_and_reraise_exception():
utils.unlink_without_raise(path)
def stop_console(self, task):
"""Stop the remote console session for the node.
:param task: a task from TaskManager
:raises: InvalidParameterValue if required ipmi parameters are missing
:raises: ConsoleError if unable to stop the console
"""
driver_info = _parse_driver_info(task.node)
try:
console_utils.stop_shellinabox_console(driver_info['uuid'])
finally:
utils.unlink_without_raise(
_console_pwfile_path(driver_info['uuid']))
def get_console(self, task):
"""Get the type and connection information about the console."""
driver_info = _parse_driver_info(task.node)
url = console_utils.get_shellinabox_console_url(driver_info['port'])
return {'type': 'shellinabox', 'url': url}<|fim▁end|>
|
'ipmi_address': _("IP address or hostname of the node. Required.")
}
OPTIONAL_PROPERTIES = {
'ipmi_password': _("password. Optional."),
|
<|file_name|>smtp_notification.py<|end_file_name|><|fim▁begin|>import requests
import json
import time
import subprocess
import re
import os
from collections import OrderedDict
from test_framework.test_framework import OpenBazaarTestFramework, TestFailure
from test_framework.smtp_server import SMTP_DUMPFILE
class SMTPTest(OpenBazaarTestFramework):
def __init__(self):
super().__init__()
self.num_nodes = 3
def run_test(self):
alice = self.nodes[1]
bob = self.nodes[2]
# post profile for alice
with open('testdata/'+ self.vendor_version +'/profile.json') as profile_file:
profile_json = json.load(profile_file, object_pairs_hook=OrderedDict)
api_url = alice["gateway_url"] + "ob/profile"
requests.post(api_url, data=json.dumps(profile_json, indent=4))
# configure SMTP notifications
time.sleep(4)
api_url = alice["gateway_url"] + "ob/settings"
smtp = {
"smtpSettings" : {
"notifications": True,
"serverAddress": "0.0.0.0:1024",
"username": "usr",
"password": "passwd",
"senderEmail": "[email protected]",
"recipientEmail": "[email protected]"
}
}
r = requests.post(api_url, data=json.dumps(smtp, indent=4))
if r.status_code == 404:
raise TestFailure("SMTPTest - FAIL: Settings POST endpoint not found")
elif r.status_code != 200:
resp = json.loads(r.text)
raise TestFailure("SMTPTest - FAIL: Settings POST failed. Reason: %s", resp["reason"])
time.sleep(4)
# check SMTP settings
api_url = alice["gateway_url"] + "ob/settings"
r = requests.get(api_url)
if r.status_code == 404:
raise TestFailure("SMTPTest - FAIL: Settings GET endpoint not found")
elif r.status_code != 200:
resp = json.loads(r.text)
raise TestFailure("SMTPTest - FAIL: Settings GET failed. Reason: %s", resp["reason"])
# check notifications
addr = "0.0.0.0:1024"
class_name = "test_framework.smtp_server.SMTPTestServer"
proc = subprocess.Popen(["python", "-m", "smtpd", "-n", "-c", class_name, addr])
# generate some coins and send them to bob
time.sleep(4)
api_url = bob["gateway_url"] + "wallet/address/" + self.cointype
r = requests.get(api_url)
if r.status_code == 200:
resp = json.loads(r.text)
address = resp["address"]
elif r.status_code == 404:
raise TestFailure("SMTPTest - FAIL: Address endpoint not found")
else:<|fim▁hole|> raise TestFailure("SMTPTest - FAIL: Unknown response")
self.send_bitcoin_cmd("sendtoaddress", address, 10)
time.sleep(20)
# post listing to alice
with open('testdata/'+ self.vendor_version +'/listing.json') as listing_file:
listing_json = json.load(listing_file, object_pairs_hook=OrderedDict)
if self.vendor_version == "v4":
listing_json["metadata"]["priceCurrency"] = "t" + self.cointype
else:
listing_json["item"]["priceCurrency"]["code"] = "t" + self.cointype
listing_json["metadata"]["acceptedCurrencies"] = ["t" + self.cointype]
api_url = alice["gateway_url"] + "ob/listing"
r = requests.post(api_url, data=json.dumps(listing_json, indent=4))
if r.status_code == 404:
raise TestFailure("SMTPTest - FAIL: Listing post endpoint not found")
elif r.status_code != 200:
resp = json.loads(r.text)
raise TestFailure("SMTPTest - FAIL: Listing POST failed. Reason: %s", resp["reason"])
time.sleep(4)
# get listing hash
api_url = alice["gateway_url"] + "ipns/" + alice["peerId"] + "/listings.json"
r = requests.get(api_url)
if r.status_code != 200:
raise TestFailure("SMTPTest - FAIL: Couldn't get listing index")
resp = json.loads(r.text)
listingId = resp[0]["hash"]
# bob send order
with open('testdata/'+ self.buyer_version +'/order_direct.json') as order_file:
order_json = json.load(order_file, object_pairs_hook=OrderedDict)
order_json["items"][0]["listingHash"] = listingId
order_json["paymentCoin"] = "t" + self.cointype
api_url = bob["gateway_url"] + "ob/purchase"
r = requests.post(api_url, data=json.dumps(order_json, indent=4))
if r.status_code == 404:
raise TestFailure("SMTPTest - FAIL: Purchase post endpoint not found")
elif r.status_code != 200:
resp = json.loads(r.text)
raise TestFailure("SMTPTest - FAIL: Purchase POST failed. Reason: %s", resp["reason"])
resp = json.loads(r.text)
orderId = resp["orderId"]
payment_address = resp["paymentAddress"]
payment_amount = resp["amount"]
# fund order
spend = {
"currencyCode": "T" + self.cointype,
"address": payment_address,
"amount": payment_amount["amount"],
"feeLevel": "NORMAL",
"requireAssociateOrder": False
}
if self.buyer_version == "v4":
spend["amount"] = payment_amount
spend["wallet"] = "T" + self.cointype
api_url = bob["gateway_url"] + "wallet/spend"
r = requests.post(api_url, data=json.dumps(spend, indent=4))
if r.status_code == 404:
raise TestFailure("SMTPTest - FAIL: Spend post endpoint not found")
elif r.status_code != 200:
resp = json.loads(r.text)
raise TestFailure("SMTPTest - FAIL: Spend POST failed. Reason: %s", resp["reason"])
time.sleep(20)
proc.terminate()
# check notification
expected = '''From: [email protected]
To: [email protected]
MIME-Version: 1.0
Content-Type: text/html; charset=UTF-8
Subject: [OpenBazaar] Order received
You received an order "Ron Swanson Tshirt".
Order ID: QmNiPgKNq27qQE8fRxMbtDfRcFDEYMH5wDRgdqtqoWBpGg
Buyer: Qmd5qDpcYkHCmkj9pMXU9TKBqEDWgEmtoHD5xjdJgumaHg
Thumbnail: QmXSEqXLCzpCByJU4wqbJ37TcBEj77FKMUWUP1qLh56847
Timestamp: 1487699826
'''
expected_lines = [e for e in expected.splitlines() if not e.startswith('Timestamp:') and not e.startswith('Order ID:')]
with open(SMTP_DUMPFILE, 'r') as f:
            res_lines = [l.strip() for l in f.readlines() if not l.startswith('Timestamp:') and not l.startswith('Order ID:')]
if res_lines != expected_lines:
os.remove(SMTP_DUMPFILE)
raise TestFailure("SMTPTest - FAIL: Incorrect mail data received")
os.remove(SMTP_DUMPFILE)
print("SMTPTest - PASS")
if __name__ == '__main__':
print("Running SMTPTest")
SMTPTest().main(["--regtest", "--disableexchangerates"])<|fim▁end|>
| |
<|file_name|>ThrMmt.java<|end_file_name|><|fim▁begin|>package org.cloudbus.cloudsim.examples.power.steady;
import java.io.IOException;
/**
 * A simulation of a heterogeneous power-aware data center that applies the
* Static Threshold (THR) VM allocation policy and Minimum Migration Time (MMT)
* VM selection policy.
*
* The remaining configuration parameters are in the Constants and
* SteadyConstants classes.
*
* If you are using any algorithms, policies or workload included in the power
 * package, please cite the following paper:
*
* Anton Beloglazov, and Rajkumar Buyya, "Optimal Online Deterministic
* Algorithms and Adaptive Heuristics for Energy and Performance Efficient
* Dynamic Consolidation of Virtual Machines in Cloud Data Centers", Concurrency
<|fim▁hole|> *
* @author Anton Beloglazov
* @since Jan 5, 2012
*/
public class ThrMmt {
/**
* The main method.
*
* @param args
* the arguments
* @throws IOException
* Signals that an I/O exception has occurred.
*/
public static void main(String[] args) throws IOException {
boolean enableOutput = true;
boolean outputToFile = false;
String inputFolder = "";
String outputFolder = "";
String workload = "steady"; // Steady workload
String vmAllocationPolicy = "thr"; // Static Threshold (THR) VM
// allocation policy
String vmSelectionPolicy = "mmt"; // Minimum Migration Time (MMT) VM
// selection policy
String parameter = "0.8"; // the static utilization threshold
new SteadyRunner(enableOutput, outputToFile, inputFolder, outputFolder,
workload, vmAllocationPolicy, vmSelectionPolicy, parameter);
}
}<|fim▁end|>
|
* and Computation: Practice and Experience (CCPE), Volume 24, Issue 13, Pages:
* 1397-1420, John Wiley & Sons, Ltd, New York, USA, 2012
|
<|file_name|>cloudresourcemanager_v3_generated_tag_keys_get_tag_key_async.py<|end_file_name|><|fim▁begin|># -*- coding: utf-8 -*-
# Copyright 2022 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.<|fim▁hole|># limitations under the License.
#
# Generated code. DO NOT EDIT!
#
# Snippet for GetTagKey
# NOTE: This snippet has been automatically generated for illustrative purposes only.
# It may require modifications to work in your environment.
# To install the latest published package dependency, execute the following:
# python3 -m pip install google-cloud-resourcemanager
# [START cloudresourcemanager_v3_generated_TagKeys_GetTagKey_async]
from google.cloud import resourcemanager_v3
async def sample_get_tag_key():
# Create a client
client = resourcemanager_v3.TagKeysAsyncClient()
# Initialize request argument(s)
request = resourcemanager_v3.GetTagKeyRequest(
name="name_value",
)
# Make the request
response = await client.get_tag_key(request=request)
# Handle the response
print(response)
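# (Usage note, not part of the generated snippet: from synchronous code this
# coroutine can be executed with asyncio.run(sample_get_tag_key()).)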
# [END cloudresourcemanager_v3_generated_TagKeys_GetTagKey_async]<|fim▁end|>
|
# See the License for the specific language governing permissions and
|
<|file_name|>redis.py<|end_file_name|><|fim▁begin|>from stream_framework.tests.feeds.base import TestBaseFeed, implementation
from stream_framework.feeds.redis import RedisFeed
from stream_framework.activity import Activity
from stream_framework.utils import datetime_to_epoch
class CustomActivity(Activity):
@property
def serialization_id(self):
'''
Shorter serialization id than used by default
'''
if self.object_id >= 10 ** 10 or self.verb.id >= 10 ** 3:
            raise TypeError('Fatal: object_id / verb have too many digits!')
if not self.time:
raise TypeError('Cant serialize activities without a time')
milliseconds = str(int(datetime_to_epoch(self.time) * 1000))
# shorter than the default version
serialization_id_str = '%s%0.2d%0.2d' % (
milliseconds, self.object_id % 100, self.verb.id)
serialization_id = int(serialization_id_str)
return serialization_id
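        # Worked example (illustrative values): object_id=42 and verb.id=1 at
        # epoch-millis 1487699826000 yield '1487699826000' + '42' + '01',
        # i.e. serialization_id == 14876998260004201.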
class RedisCustom(RedisFeed):
activity_class = CustomActivity
class TestRedisFeed(TestBaseFeed):
feed_cls = RedisFeed
class TestCustomRedisFeed(TestBaseFeed):
'''
Test if the option to customize the activity class works without troubles
'''
feed_cls = RedisCustom
activity_class = CustomActivity
@implementation<|fim▁hole|> self.feed_cls.insert_activity(
self.activity
)
self.test_feed.add(self.activity)
assert self.test_feed.count() == 1
assert self.activity == self.test_feed[:10][0]
assert type(self.activity) == type(self.test_feed[0][0])
# make sure nothing is wrong with the activity storage<|fim▁end|>
|
def test_custom_activity(self):
assert self.test_feed.count() == 0
|
<|file_name|>client.rs<|end_file_name|><|fim▁begin|>// Copyright 2015-2017 Parity Technologies (UK) Ltd.
// This file is part of Parity.
// Parity is free software: you can redistribute it and/or modify
// it under the terms of the GNU General Public License as published by
// the Free Software Foundation, either version 3 of the License, or
// (at your option) any later version.
// Parity is distributed in the hope that it will be useful,
// but WITHOUT ANY WARRANTY; without even the implied warranty of
// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
// GNU General Public License for more details.
// You should have received a copy of the GNU General Public License
// along with Parity. If not, see <http://www.gnu.org/licenses/>.
//! Hash-addressed content resolver & fetcher.
use std::{io, fs};
use std::io::Write;
use std::sync::Arc;
use std::path::PathBuf;
use hash::keccak_buffer;
use fetch::{Fetch, Response, Error as FetchError, Client as FetchClient};
use futures::Future;
use parity_reactor::Remote;
use urlhint::{ContractClient, URLHintContract, URLHint, URLHintResult};
use bigint::hash::H256;
/// API for fetching by hash.
pub trait HashFetch: Send + Sync + 'static {
/// Fetch hash-addressed content.
/// Parameters:
/// 1. `hash` - content hash
/// 2. `on_done` - callback function invoked when the content is ready (or there was an error during the fetch)
///
/// This function may fail immediately when fetch cannot be initialized or content cannot be resolved.
fn fetch(&self, hash: H256, on_done: Box<Fn(Result<PathBuf, Error>) + Send>);
}
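// Illustrative use of the trait by a hypothetical caller:
//   fetcher.fetch(hash, Box::new(|result| match result {
//       Ok(path) => println!("content stored at {:?}", path),
//       Err(err) => println!("fetch failed: {:?}", err),
//   }));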
/// Hash-fetching error.
#[derive(Debug)]
pub enum Error {
/// Hash could not be resolved to a valid content address.
NoResolution,
/// Downloaded content hash does not match.
HashMismatch {
/// Expected hash
expected: H256,
/// Computed hash
got: H256,
},
/// Server didn't respond with OK status.
InvalidStatus,
/// IO Error while validating hash.
IO(io::Error),
/// Error during fetch.
Fetch(FetchError),
}
#[cfg(test)]
impl PartialEq for Error {
fn eq(&self, other: &Self) -> bool {
use Error::*;
match (self, other) {
(&HashMismatch { expected, got }, &HashMismatch { expected: e, got: g }) => {
expected == e && got == g
},
(&NoResolution, &NoResolution) => true,
(&InvalidStatus, &InvalidStatus) => true,
(&IO(_), &IO(_)) => true,
(&Fetch(_), &Fetch(_)) => true,
_ => false,
}
}
}
impl From<FetchError> for Error {
fn from(error: FetchError) -> Self {
Error::Fetch(error)
}
}
impl From<io::Error> for Error {
fn from(error: io::Error) -> Self {
Error::IO(error)
}
}
fn validate_hash(path: PathBuf, hash: H256, result: Result<Response, FetchError>) -> Result<PathBuf, Error> {
let response = result?;
if !response.is_success() {
return Err(Error::InvalidStatus);
}
// Read the response
let mut reader = io::BufReader::new(response);
let mut writer = io::BufWriter::new(fs::File::create(&path)?);
io::copy(&mut reader, &mut writer)?;
writer.flush()?;
// And validate the hash
let mut file_reader = io::BufReader::new(fs::File::open(&path)?);
let content_hash = keccak_buffer(&mut file_reader)?;
if content_hash != hash {
Err(Error::HashMismatch{ got: content_hash, expected: hash })
} else {
Ok(path)
}
}
/// Default Hash-fetching client using on-chain contract to resolve hashes to URLs.
pub struct Client<F: Fetch + 'static = FetchClient> {
contract: URLHintContract,
fetch: F,
remote: Remote,
random_path: Arc<Fn() -> PathBuf + Sync + Send>,
}
impl Client {
/// Creates new instance of the `Client` given on-chain contract client and task runner.
pub fn new(contract: Arc<ContractClient>, remote: Remote) -> Self {
Client::with_fetch(contract, FetchClient::new().unwrap(), remote)
}
}
impl<F: Fetch + 'static> Client<F> {
/// Creates new instance of the `Client` given on-chain contract client, fetch service and task runner.
pub fn with_fetch(contract: Arc<ContractClient>, fetch: F, remote: Remote) -> Self {
Client {
contract: URLHintContract::new(contract),
fetch: fetch,
remote: remote,
random_path: Arc::new(random_temp_path),
}
}
}
impl<F: Fetch + 'static> HashFetch for Client<F> {
fn fetch(&self, hash: H256, on_done: Box<Fn(Result<PathBuf, Error>) + Send>) {
debug!(target: "fetch", "Fetching: {:?}", hash);
let random_path = self.random_path.clone();
let remote_fetch = self.fetch.clone();
let future = self.contract.resolve(hash.to_vec())
.map_err(|e| { warn!("Error resolving URL: {}", e); Error::NoResolution })
.and_then(|maybe_url| maybe_url.ok_or(Error::NoResolution))
.map(|content| match content {
URLHintResult::Dapp(dapp) => {
dapp.url()
},
URLHintResult::Content(content) => {
content.url
},
})
.and_then(move |url| {
debug!(target: "fetch", "Resolved {:?} to {:?}. Fetching...", hash, url);
let future = remote_fetch.fetch(&url).then(move |result| {
debug!(target: "fetch", "Content fetched, validating hash ({:?})", hash);
let path = random_path();
let res = validate_hash(path.clone(), hash, result);
if let Err(ref err) = res {
trace!(target: "fetch", "Error: {:?}", err);
// Remove temporary file in case of error
let _ = fs::remove_file(&path);
}
res
});
remote_fetch.process(future)
})
.then(move |res| { on_done(res); Ok(()) as Result<(), ()> });
self.remote.spawn(future);
}
}
fn random_temp_path() -> PathBuf {
use ::rand::Rng;
use ::std::env;
let mut rng = ::rand::OsRng::new().expect("Reliable random source is required to work.");
let file: String = rng.gen_ascii_chars().take(12).collect();
let mut path = env::temp_dir();
path.push(file);
path
}
#[cfg(test)]
mod tests {
use rustc_hex::FromHex;
use std::sync::{Arc, mpsc};
use parking_lot::Mutex;
use futures::future;
use fetch::{self, Fetch};
use parity_reactor::Remote;
use urlhint::tests::{FakeRegistrar, URLHINT};
use super::{Error, Client, HashFetch, random_temp_path};
#[derive(Clone)]
struct FakeFetch {
return_success: bool
}
impl Fetch for FakeFetch {
type Result = future::Ok<fetch::Response, fetch::Error>;
fn new() -> Result<Self, fetch::Error> where Self: Sized {
Ok(FakeFetch { return_success: true })
}
fn fetch_with_abort(&self, url: &str, _abort: fetch::Abort) -> Self::Result {
assert_eq!(url, "https://parity.io/assets/images/ethcore-black-horizontal.png");
future::ok(if self.return_success {
let cursor = ::std::io::Cursor::new(b"result");
fetch::Response::from_reader(cursor)
} else {
fetch::Response::not_found()
})
}
}
fn registrar() -> FakeRegistrar {
let mut registrar = FakeRegistrar::new();
registrar.responses = Mutex::new(vec![
Ok(format!("000000000000000000000000{}", URLHINT).from_hex().unwrap()),
Ok("00000000000000000000000000000000000000000000000000000000000000600000000000000000000000000000000000000000000000000000000000000000000000000000000000000000deadcafebeefbeefcafedeaddeedfeedffffffff000000000000000000000000000000000000000000000000000000000000003c68747470733a2f2f7061726974792e696f2f6173736574732f696d616765732f657468636f72652d626c61636b2d686f72697a6f6e74616c2e706e6700000000".from_hex().unwrap()),
]);
registrar
}
#[test]
fn should_return_error_if_hash_not_found() {
// given
let contract = Arc::new(FakeRegistrar::new());
let fetch = FakeFetch { return_success: false };
let client = Client::with_fetch(contract.clone(), fetch, Remote::new_sync());
// when
let (tx, rx) = mpsc::channel();
client.fetch(2.into(), Box::new(move |result| {
tx.send(result).unwrap();
}));
// then
let result = rx.recv().unwrap();
assert_eq!(result.unwrap_err(), Error::NoResolution);
}
#[test]
fn should_return_error_if_response_is_not_successful() {
// given
let registrar = Arc::new(registrar());
let fetch = FakeFetch { return_success: false };
let client = Client::with_fetch(registrar.clone(), fetch, Remote::new_sync());
// when
let (tx, rx) = mpsc::channel();
client.fetch(2.into(), Box::new(move |result| {
tx.send(result).unwrap();
}));
// then
let result = rx.recv().unwrap();
assert_eq!(result.unwrap_err(), Error::InvalidStatus);
}
#[test]
fn should_return_hash_mismatch() {
// given
let registrar = Arc::new(registrar());
let fetch = FakeFetch { return_success: true };
let mut client = Client::with_fetch(registrar.clone(), fetch, Remote::new_sync());
let path = random_temp_path();
let path2 = path.clone();
client.random_path = Arc::new(move || path2.clone());
// when
let (tx, rx) = mpsc::channel();
client.fetch(2.into(), Box::new(move |result| {
tx.send(result).unwrap();
}));
// then
let result = rx.recv().unwrap();
let hash = "0x06b0a4f426f6713234b2d4b2468640bc4e0bb72657a920ad24c5087153c593c8".into();
assert_eq!(result.unwrap_err(), Error::HashMismatch { expected: 2.into(), got: hash });
assert!(!path.exists(), "Temporary file should be removed.");
}
#[test]
fn should_return_path_if_hash_matches() {
// given
let registrar = Arc::new(registrar());
let fetch = FakeFetch { return_success: true };
let client = Client::with_fetch(registrar.clone(), fetch, Remote::new_sync());
// when
let (tx, rx) = mpsc::channel();
client.fetch("0x06b0a4f426f6713234b2d4b2468640bc4e0bb72657a920ad24c5087153c593c8".into(), Box::new(move |result| {
tx.send(result).unwrap();
}));
// then<|fim▁hole|> assert!(result.is_ok(), "Should return path, got: {:?}", result);
}
}<|fim▁end|>
|
let result = rx.recv().unwrap();
|
<|file_name|>setplot.py<|end_file_name|><|fim▁begin|>from __future__ import absolute_import
from __future__ import print_function
import os
import numpy
import matplotlib.pyplot as plt
import datetime
import clawpack.visclaw.colormaps as colormap
import clawpack.visclaw.gaugetools as gaugetools
import clawpack.clawutil.data as clawutil
import clawpack.amrclaw.data as amrclaw
import clawpack.geoclaw.data as geodata
from clawpack.geoclaw.util import fetch_noaa_tide_data
import clawpack.geoclaw.surge.plot as surgeplot
try:
from setplotfg import setplotfg
except:
setplotfg = None
def setplot(plotdata=None):
""""""
if plotdata is None:
from clawpack.visclaw.data import ClawPlotData
plotdata = ClawPlotData()
    # clear any old figures, axes, items data
plotdata.clearfigures()
plotdata.format = 'ascii'
# Load data from output
clawdata = clawutil.ClawInputData(2)
clawdata.read(os.path.join(plotdata.outdir, 'claw.data'))
physics = geodata.GeoClawData()
physics.read(os.path.join(plotdata.outdir, 'geoclaw.data'))
surge_data = geodata.SurgeData()
surge_data.read(os.path.join(plotdata.outdir, 'surge.data'))
friction_data = geodata.FrictionData()
friction_data.read(os.path.join(plotdata.outdir, 'friction.data'))
# Load storm track
track = surgeplot.track_data(os.path.join(plotdata.outdir, 'fort.track'))
# Set afteraxes function
def surge_afteraxes(cd):
surgeplot.surge_afteraxes(cd, track, plot_direction=False,
kwargs={"markersize": 4})
# Color limits
surface_limits = [-5.0, 5.0]
speed_limits = [0.0, 3.0]
wind_limits = [0, 64]
pressure_limits = [935, 1013]
friction_bounds = [0.01, 0.04]
def friction_after_axes(cd):
plt.title(r"Manning's $n$ Coefficient")
# ==========================================================================
# Plot specifications
# ==========================================================================
regions = {"Gulf": {"xlimits": (clawdata.lower[0], clawdata.upper[0]),
"ylimits": (clawdata.lower[1], clawdata.upper[1]),
"figsize": (6.4, 4.8)},
"Texas Gulf Coast": {"xlimits": (-99.2, -94.2),
"ylimits": (26.4, 30.4),
"figsize": (6, 6)}}
for (name, region_dict) in regions.items():
# Surface Figure
plotfigure = plotdata.new_plotfigure(name="Surface - %s" % name)
plotfigure.kwargs = {"figsize": region_dict['figsize']}
plotaxes = plotfigure.new_plotaxes()
plotaxes.title = "Surface"
plotaxes.xlimits = region_dict["xlimits"]
plotaxes.ylimits = region_dict["ylimits"]
plotaxes.afteraxes = surge_afteraxes
surgeplot.add_surface_elevation(plotaxes, bounds=surface_limits)
surgeplot.add_land(plotaxes)
plotaxes.plotitem_dict['surface'].amr_patchedges_show = [0] * 10
plotaxes.plotitem_dict['land'].amr_patchedges_show = [0] * 10
# Speed Figure
plotfigure = plotdata.new_plotfigure(name="Currents - %s" % name)
plotfigure.kwargs = {"figsize": region_dict['figsize']}
plotaxes = plotfigure.new_plotaxes()
plotaxes.title = "Currents"
plotaxes.xlimits = region_dict["xlimits"]
plotaxes.ylimits = region_dict["ylimits"]
plotaxes.afteraxes = surge_afteraxes
surgeplot.add_speed(plotaxes, bounds=speed_limits)
surgeplot.add_land(plotaxes)
plotaxes.plotitem_dict['speed'].amr_patchedges_show = [0] * 10
plotaxes.plotitem_dict['land'].amr_patchedges_show = [0] * 10
#
# Friction field
#
plotfigure = plotdata.new_plotfigure(name='Friction')
plotfigure.show = friction_data.variable_friction and True
plotaxes = plotfigure.new_plotaxes()
plotaxes.xlimits = regions['Gulf']['xlimits']
plotaxes.ylimits = regions['Gulf']['ylimits']
# plotaxes.title = "Manning's N Coefficient"
plotaxes.afteraxes = friction_after_axes
plotaxes.scaled = True
surgeplot.add_friction(plotaxes, bounds=friction_bounds, shrink=0.9)
plotaxes.plotitem_dict['friction'].amr_patchedges_show = [0] * 10
plotaxes.plotitem_dict['friction'].colorbar_label = "$n$"
#
# Hurricane Forcing fields
#
# Pressure field
plotfigure = plotdata.new_plotfigure(name='Pressure')
plotfigure.show = surge_data.pressure_forcing and True
plotaxes = plotfigure.new_plotaxes()
plotaxes.xlimits = regions['Gulf']['xlimits']
plotaxes.ylimits = regions['Gulf']['ylimits']
plotaxes.title = "Pressure Field"
plotaxes.afteraxes = surge_afteraxes
plotaxes.scaled = True
surgeplot.add_pressure(plotaxes, bounds=pressure_limits)
surgeplot.add_land(plotaxes)
# Wind field
plotfigure = plotdata.new_plotfigure(name='Wind Speed')
plotfigure.show = surge_data.wind_forcing and True
plotaxes = plotfigure.new_plotaxes()
plotaxes.xlimits = regions['Gulf']['xlimits']
plotaxes.ylimits = regions['Gulf']['ylimits']
plotaxes.title = "Wind Field"
plotaxes.afteraxes = surge_afteraxes
plotaxes.scaled = True
surgeplot.add_wind(plotaxes, bounds=wind_limits)
surgeplot.add_land(plotaxes)
# ========================================================================
# Figures for gauges
# ========================================================================
plotfigure = plotdata.new_plotfigure(name='Gauge Surfaces', figno=300,
type='each_gauge')
plotfigure.show = True
plotfigure.clf_each_gauge = True
# Set up for axes in this figure:
plotaxes = plotfigure.new_plotaxes()
    # Time conversions
def days2seconds(days):
return days * 60.0**2 * 24.0
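    # e.g. days2seconds(1) == 86400.0 and days2seconds(-1) == -86400.0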
stations = [('8773037', 'Seadrift'),
('8773701', 'Port OConnor'),
('8774230', 'Aransas Wildlife Refuge'),
('8775237', 'Port Aransas'),
('8775296', 'USS Lexington')]
landfall_time = numpy.datetime64('2017-08-25T10:00')
begin_date = datetime.datetime(2017, 8, 24)
end_date = datetime.datetime(2017, 8, 28)
def get_actual_water_levels(station_id):
# Fetch water levels and tide predictions for given station
date_time, water_level, tide = fetch_noaa_tide_data(station_id,
begin_date, end_date)
# Calculate times relative to landfall
seconds_rel_landfall = (date_time - landfall_time) / numpy.timedelta64(1, 's')
# Subtract tide predictions from measured water levels
water_level -= tide
return seconds_rel_landfall, water_level
def gauge_afteraxes(cd):
station_id, station_name = stations[cd.gaugeno - 1]
seconds_rel_landfall, actual_level = get_actual_water_levels(station_id)
axes = plt.gca()
surgeplot.plot_landfall_gauge(cd.gaugesoln, axes)
axes.plot(seconds_rel_landfall, actual_level, 'g')
# Fix up plot - in particular fix time labels
axes.set_title(station_name)
axes.set_xlabel('Seconds relative to landfall')
axes.set_ylabel('Surface (m)')
axes.set_xlim([days2seconds(-1), days2seconds(3)])
axes.set_ylim([-1, 5])
        axes.set_xticks([days2seconds(-1), 0, days2seconds(1), days2seconds(2), days2seconds(3)])
#axes.set_xticklabels([r"$-1$", r"$0$", r"$1$", r"$2$", r"$3$"])
#axes.grid(True)
# Set up for axes in this figure:
plotaxes = plotfigure.new_plotaxes()
plotaxes.afteraxes = gauge_afteraxes
# Plot surface as blue curve:
plotitem = plotaxes.new_plotitem(plot_type='1d_plot')
plotitem.plot_var = 3
plotitem.plotstyle = 'b-'
#
# Gauge Location Plot
#
def gauge_location_afteraxes(cd):<|fim▁hole|> gaugetools.plot_gauge_locations(cd.plotdata, gaugenos='all',
format_string='ko', add_labels=False)
    # Plot for gauge location 1
plotfigure = plotdata.new_plotfigure(name="Gauge Location 1")
plotfigure.show = True
# Set up for axes in this figure:
plotaxes = plotfigure.new_plotaxes()
plotaxes.title = 'Gauge Location 1'
plotaxes.scaled = True
plotaxes.xlimits = [-96.83, -96.63]
plotaxes.ylimits = [28.33, 28.43]
plotaxes.afteraxes = gauge_location_afteraxes
surgeplot.add_surface_elevation(plotaxes, bounds=surface_limits)
surgeplot.add_land(plotaxes)
plotaxes.plotitem_dict['surface'].amr_patchedges_show = [0] * 10
plotaxes.plotitem_dict['land'].amr_patchedges_show = [0] * 10
    # Plot for gauge location 2
plotfigure = plotdata.new_plotfigure(name="Gauge Location 2")
plotfigure.show = True
# Set up for axes in this figure:
plotaxes = plotfigure.new_plotaxes()
plotaxes.title = 'Gauge Location 2'
plotaxes.scaled = True
plotaxes.xlimits = [-96.48, -96.28]
plotaxes.ylimits = [28.40, 28.50]
plotaxes.afteraxes = gauge_location_afteraxes
surgeplot.add_surface_elevation(plotaxes, bounds=surface_limits)
surgeplot.add_land(plotaxes)
plotaxes.plotitem_dict['surface'].amr_patchedges_show = [0] * 10
plotaxes.plotitem_dict['land'].amr_patchedges_show = [0] * 10
    # Plot for gauge location 3
plotfigure = plotdata.new_plotfigure(name="Gauge Location 3")
plotfigure.show = True
# Set up for axes in this figure:
plotaxes = plotfigure.new_plotaxes()
plotaxes.title = 'Gauge Location 3'
plotaxes.scaled = True
plotaxes.xlimits = [-96.85, -96.65]
plotaxes.ylimits = [28.17, 28.27]
plotaxes.afteraxes = gauge_location_afteraxes
surgeplot.add_surface_elevation(plotaxes, bounds=surface_limits)
surgeplot.add_land(plotaxes)
plotaxes.plotitem_dict['surface'].amr_patchedges_show = [0] * 10
plotaxes.plotitem_dict['land'].amr_patchedges_show = [0] * 10
    # Plot for gauge location 4
plotfigure = plotdata.new_plotfigure(name="Gauge Location 4")
plotfigure.show = True
# Set up for axes in this figure:
plotaxes = plotfigure.new_plotaxes()
plotaxes.title = 'Gauge Location 4'
plotaxes.scaled = True
plotaxes.xlimits = [-97.15, -96.95]
plotaxes.ylimits = [27.79, 27.89]
plotaxes.afteraxes = gauge_location_afteraxes
surgeplot.add_surface_elevation(plotaxes, bounds=surface_limits)
surgeplot.add_land(plotaxes)
plotaxes.plotitem_dict['surface'].amr_patchedges_show = [0] * 10
plotaxes.plotitem_dict['land'].amr_patchedges_show = [0] * 10
    # Plot for gauge location 5
plotfigure = plotdata.new_plotfigure(name="Gauge Location 5")
plotfigure.show = True
# Set up for axes in this figure:
plotaxes = plotfigure.new_plotaxes()
plotaxes.title = 'Gauge Location 5'
plotaxes.scaled = True
plotaxes.xlimits = [-97.48, -97.28]
plotaxes.ylimits = [27.75, 27.85]
plotaxes.afteraxes = gauge_location_afteraxes
surgeplot.add_surface_elevation(plotaxes, bounds=surface_limits)
surgeplot.add_land(plotaxes)
plotaxes.plotitem_dict['surface'].amr_patchedges_show = [0] * 10
plotaxes.plotitem_dict['land'].amr_patchedges_show = [0] * 10
# -----------------------------------------
# Parameters used only when creating html and/or latex hardcopy
# e.g., via pyclaw.plotters.frametools.printframes:
plotdata.printfigs = True # print figures
plotdata.print_format = 'png' # file format
plotdata.print_framenos = 'all' # list of frames to print
plotdata.print_gaugenos = [1, 2, 3, 4, 5] # list of gauges to print
plotdata.print_fignos = 'all' # list of figures to print
plotdata.html = True # create html files of plots?
plotdata.latex = True # create latex file of plots?
plotdata.latex_figsperline = 2 # layout of plots
plotdata.latex_framesperline = 1 # layout of plots
plotdata.latex_makepdf = False # also run pdflatex?
plotdata.parallel = True # parallel plotting
return plotdata<|fim▁end|>
|
plt.subplots_adjust(left=0.12, bottom=0.06, right=0.97, top=0.97)
surge_afteraxes(cd)
|
<|file_name|>account_move.py<|end_file_name|><|fim▁begin|># License AGPL-3.0 or later (http://www.gnu.org/licenses/agpl).
from odoo import _, api, fields, models
from odoo.exceptions import UserError
class AccountMove(models.Model):
_inherit = "account.move"
pricelist_id = fields.Many2one(
comodel_name="product.pricelist",
string="Pricelist",
readonly=True,
states={"draft": [("readonly", False)]},
)
@api.constrains("pricelist_id", "currency_id")
def _check_currency(self):
for sel in self.filtered(lambda a: a.pricelist_id and a.is_invoice()):
if sel.pricelist_id.currency_id != sel.currency_id:
raise UserError(
_("Pricelist and Invoice need to use the same currency.")
)
@api.onchange("partner_id", "company_id")
def _onchange_partner_id_account_invoice_pricelist(self):
if self.is_invoice():
if (
self.partner_id
and self.move_type in ("out_invoice", "out_refund")
and self.partner_id.property_product_pricelist
):
self.pricelist_id = self.partner_id.property_product_pricelist
self._set_pricelist_currency()
@api.onchange("pricelist_id")
def _set_pricelist_currency(self):
if (
self.is_invoice()
and self.pricelist_id
and self.currency_id != self.pricelist_id.currency_id
):
self.currency_id = self.pricelist_id.currency_id
def button_update_prices_from_pricelist(self):
for inv in self.filtered(lambda r: r.state == "draft"):
inv.invoice_line_ids._onchange_product_id_account_invoice_pricelist()
self.filtered(lambda r: r.state == "draft").with_context(
check_move_validity=False
)._move_autocomplete_invoice_lines_values()
self.filtered(lambda r: r.state == "draft").with_context(
check_move_validity=False
)._recompute_tax_lines()
def _reverse_move_vals(self, default_values, cancel=True):
move_vals = super(AccountMove, self)._reverse_move_vals(
default_values, cancel=cancel
)
if self.pricelist_id:
move_vals["pricelist_id"] = self.pricelist_id.id
return move_vals
class AccountMoveLine(models.Model):
_inherit = "account.move.line"
@api.onchange("product_id", "quantity")
def _onchange_product_id_account_invoice_pricelist(self):
for sel in self:
if not sel.move_id.pricelist_id:
return
sel.with_context(check_move_validity=False).update(
{"price_unit": sel._get_price_with_pricelist()}
)
@api.onchange("product_uom_id")
def _onchange_uom_id(self):
for sel in self:
if (
sel.move_id.is_invoice()
and sel.move_id.state == "draft"
and sel.move_id.pricelist_id
):
price_unit = sel._get_computed_price_unit()
taxes = sel._get_computed_taxes()
if taxes and sel.move_id.fiscal_position_id:
price_subtotal = sel._get_price_total_and_subtotal(
price_unit=price_unit, taxes=taxes
)["price_subtotal"]
accounting_vals = sel._get_fields_onchange_subtotal(
price_subtotal=price_subtotal,
currency=self.move_id.company_currency_id,
)
amount_currency = accounting_vals["amount_currency"]
price_unit = sel._get_fields_onchange_balance(
amount_currency=amount_currency
).get("price_unit", price_unit)
sel.with_context(check_move_validity=False).update(
{"price_unit": price_unit}
)
else:
super(AccountMoveLine, self)._onchange_uom_id()
def _get_real_price_currency(self, product, rule_id, qty, uom, pricelist_id):
PricelistItem = self.env["product.pricelist.item"]
field_name = "lst_price"
currency_id = None
product_currency = product.currency_id
if rule_id:
pricelist_item = PricelistItem.browse(rule_id)
while (
pricelist_item.base == "pricelist"
and pricelist_item.base_pricelist_id
and pricelist_item.base_pricelist_id.discount_policy
== "without_discount"
):
price, rule_id = pricelist_item.base_pricelist_id.with_context(
uom=uom.id
).get_product_price_rule(product, qty, self.move_id.partner_id)
pricelist_item = PricelistItem.browse(rule_id)
if pricelist_item.base == "standard_price":
field_name = "standard_price"
product_currency = product.cost_currency_id
elif (
pricelist_item.base == "pricelist" and pricelist_item.base_pricelist_id
):
field_name = "price"
product = product.with_context(
pricelist=pricelist_item.base_pricelist_id.id
)
product_currency = pricelist_item.base_pricelist_id.currency_id
currency_id = pricelist_item.pricelist_id.currency_id
if not currency_id:<|fim▁hole|> currency_id = product_currency
cur_factor = 1.0
else:
if currency_id.id == product_currency.id:
cur_factor = 1.0
else:
cur_factor = currency_id._get_conversion_rate(
product_currency,
currency_id,
self.company_id or self.env.company,
self.move_id.invoice_date or fields.Date.today(),
)
product_uom = self.env.context.get("uom") or product.uom_id.id
if uom and uom.id != product_uom:
uom_factor = uom._compute_price(1.0, product.uom_id)
else:
uom_factor = 1.0
return product[field_name] * uom_factor * cur_factor, currency_id
def _calculate_discount(self, base_price, final_price):
        if not base_price:
            # guard against division by zero when the base price is 0
            return 0.0
        discount = (base_price - final_price) / base_price * 100
if (discount < 0 and base_price > 0) or (discount > 0 and base_price < 0):
discount = 0.0
return discount
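    # e.g. _calculate_discount(100.0, 80.0) returns 20.0 (a 20% discount); a
    # final price above the base would give a negative value, which the sign
    # check above resets to 0.0.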
def _get_price_with_pricelist(self):
price_unit = 0.0
if self.move_id.pricelist_id and self.product_id and self.move_id.is_invoice():
if self.move_id.pricelist_id.discount_policy == "with_discount":
product = self.product_id.with_context(
lang=self.move_id.partner_id.lang,
partner=self.move_id.partner_id.id,
quantity=self.quantity,
date_order=self.move_id.invoice_date,
date=self.move_id.invoice_date,
pricelist=self.move_id.pricelist_id.id,
product_uom_id=self.product_uom_id.id,
fiscal_position=(
self.move_id.partner_id.property_account_position_id.id
),
)
tax_obj = self.env["account.tax"]
recalculated_price_unit = (
product.price * self.product_id.uom_id.factor
) / (self.product_uom_id.factor or 1.0)
price_unit = tax_obj._fix_tax_included_price_company(
recalculated_price_unit,
product.taxes_id,
self.tax_ids,
self.company_id,
)
self.with_context(check_move_validity=False).discount = 0.0
else:
product_context = dict(
self.env.context,
partner_id=self.move_id.partner_id.id,
date=self.move_id.invoice_date or fields.Date.today(),
uom=self.product_uom_id.id,
)
final_price, rule_id = self.move_id.pricelist_id.with_context(
product_context
).get_product_price_rule(
self.product_id, self.quantity or 1.0, self.move_id.partner_id
)
base_price, currency = self.with_context(
product_context
)._get_real_price_currency(
self.product_id,
rule_id,
self.quantity,
self.product_uom_id,
self.move_id.pricelist_id.id,
)
if currency != self.move_id.pricelist_id.currency_id:
base_price = currency._convert(
base_price,
self.move_id.pricelist_id.currency_id,
self.move_id.company_id or self.env.company,
self.move_id.invoice_date or fields.Date.today(),
)
price_unit = max(base_price, final_price)
self.with_context(
check_move_validity=False
).discount = self._calculate_discount(base_price, final_price)
return price_unit
def _get_computed_price_unit(self):
price_unit = super(AccountMoveLine, self)._get_computed_price_unit()
if self.move_id.pricelist_id and self.move_id.is_invoice():
price_unit = self._get_price_with_pricelist()
return price_unit<|fim▁end|>
| |
<|file_name|>index.tsx<|end_file_name|><|fim▁begin|>import React, { useState } from 'react';
import { StyleSheet, ImageStyle, LayoutChangeEvent } from 'react-native';
import { Gesture, GestureDetector } from 'react-native-gesture-handler';
import Animated, {
useAnimatedStyle,
useDerivedValue,
useSharedValue,
withSpring,
} from 'react-native-reanimated';
import { SafeAreaView } from 'react-native-safe-area-context';
import { useHeaderHeight } from '@react-navigation/stack';
const CHAT_HEADS = [
{ imageUrl: 'https://avatars0.githubusercontent.com/u/379606?v=4&s=460' },
{ imageUrl: 'https://avatars3.githubusercontent.com/u/90494?v=4&s=460' },
{ imageUrl: 'https://avatars3.githubusercontent.com/u/726445?v=4&s=460' },
{ imageUrl: 'https://avatars.githubusercontent.com/u/15989228?v=4&s=460' },
];
interface AnimatedOffset {
x: Animated.SharedValue<number>;
y: Animated.SharedValue<number>;
}
interface FollowingChatHeadProps {
imageUri: string;
offset: AnimatedOffset;
offsetToFollow: AnimatedOffset;
style?: ImageStyle;
}
function FollowingChatHead({
imageUri,
style,
offset,
offsetToFollow,
}: FollowingChatHeadProps) {
useDerivedValue(() => {
offset.x.value = withSpring(offsetToFollow.x.value);
offset.y.value = withSpring(offsetToFollow.y.value);
}, []);
const animatedStyle = useAnimatedStyle(() => {
return {
transform: [
{ translateX: offset.x.value },
{ translateY: offset.y.value },
],
};
});
return (
<Animated.Image
style={[styles.box, style, animatedStyle]}
source={{
uri: imageUri,
}}
/>
);
}
function useOffsetAnimatedValue() {
return {
x: useSharedValue(0),
y: useSharedValue(0),
};
}
function clampToValues({
value,
bottom,
top,
}: {
value: number;
bottom: number;
top: number;
}) {
'worklet';
return Math.max(bottom, Math.min(value, top));
}
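// e.g. clampToValues({ value: 250, bottom: -100, top: 100 }) === 100 and
// clampToValues({ value: -180, bottom: -100, top: 100 }) === -100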
const Example = () => {
const [dimensions, setDimensions] = useState({ width: 0, height: 0 });
const panOffset = useOffsetAnimatedValue();
const mainChatHeadPosition = useOffsetAnimatedValue();
const chatHeadsOffsets = CHAT_HEADS.map(useOffsetAnimatedValue);
const headerHeight = useHeaderHeight();
const onLayout = ({ nativeEvent }: LayoutChangeEvent) => {
const { width, height } = nativeEvent.layout;
setDimensions({ width, height });
};
const panHandler = Gesture.Pan()
.onUpdate(({ translationX, translationY }) => {
'worklet';
panOffset.x.value = mainChatHeadPosition.x.value + translationX;
panOffset.y.value = mainChatHeadPosition.y.value + translationY;
})
.onEnd(({ absoluteX, absoluteY, velocityX, velocityY }) => {
'worklet';
const { height, width } = dimensions;
const velocityDragX = clampToValues({
value: velocityX * 0.05,
bottom: -100,
top: 100,
});
const velocityDragY = clampToValues({
value: velocityY * 0.05,
bottom: -100,
top: 100,
});
const distFromTop = absoluteY + velocityDragY - headerHeight;
const distFromBottom = height + velocityDragY - absoluteY;
const distFromLeft = absoluteX + velocityDragX;
const distFromRight = width - absoluteX + velocityDragX;
const minDist = Math.min(
distFromTop,
distFromBottom,
distFromLeft,
distFromRight
);
// drag to the edge
switch (minDist) {
case distFromTop: {
panOffset.y.value = withSpring(-IMAGE_SIZE / 2);
panOffset.x.value = withSpring(panOffset.x.value + velocityDragX);
mainChatHeadPosition.y.value = -IMAGE_SIZE / 2;
mainChatHeadPosition.x.value = panOffset.x.value;
break;
}
case distFromBottom: {
panOffset.y.value = withSpring(height - IMAGE_SIZE / 2);
panOffset.x.value = withSpring(panOffset.x.value + velocityDragX);
mainChatHeadPosition.y.value = height - IMAGE_SIZE / 2;
mainChatHeadPosition.x.value = panOffset.x.value;
break;
}
case distFromLeft: {
panOffset.x.value = withSpring(-IMAGE_SIZE / 2);
panOffset.y.value = withSpring(panOffset.y.value + velocityDragY);
mainChatHeadPosition.x.value = -IMAGE_SIZE / 2;
mainChatHeadPosition.y.value = panOffset.y.value;
break;
}
case distFromRight: {
panOffset.x.value = withSpring(width - IMAGE_SIZE / 2);
panOffset.y.value = withSpring(panOffset.y.value + velocityDragY);
mainChatHeadPosition.x.value = width - IMAGE_SIZE / 2;
mainChatHeadPosition.y.value = panOffset.y.value;
break;
}
}
});
const headsComponents = CHAT_HEADS.map(({ imageUrl }, idx) => {<|fim▁hole|> return (
<GestureDetector gesture={panHandler} key={imageUrl}>
<FollowingChatHead
offsetToFollow={panOffset}
imageUri={imageUrl}
offset={headOffset}
/>
</GestureDetector>
);
}
return (
<FollowingChatHead
key={imageUrl}
imageUri={imageUrl}
style={{
marginLeft: idx * 5,
marginTop: idx * 5,
}}
offset={headOffset}
offsetToFollow={chatHeadsOffsets[idx - 1]}
/>
);
});
return (
<SafeAreaView style={styles.container} onLayout={onLayout}>
{/* we want ChatHead with gesture on top */}
{headsComponents.reverse()}
</SafeAreaView>
);
};
export default Example;
const IMAGE_SIZE = 80;
const styles = StyleSheet.create({
container: {
flex: 1,
backgroundColor: '#F5FCFF',
},
box: {
position: 'absolute',
width: IMAGE_SIZE,
height: IMAGE_SIZE,
borderColor: '#F5FCFF',
backgroundColor: 'plum',
borderRadius: IMAGE_SIZE / 2,
},
});<|fim▁end|>
|
const headOffset = chatHeadsOffsets[idx];
if (idx === 0) {
|
<|file_name|>custom.py<|end_file_name|><|fim▁begin|>from __future__ import absolute_import
from django import template
from django.utils.unittest import TestCase
from .templatetags import custom
class CustomFilterTests(TestCase):
def test_filter(self):
t = template.Template("{% load custom %}{{ string|trim:5 }}")
self.assertEqual(
t.render(template.Context({"string": "abcdefghijklmnopqrstuvwxyz"})),
u"abcde"
)
class CustomTagTests(TestCase):
def verify_tag(self, tag, name):
self.assertEqual(tag.__name__, name)
self.assertEqual(tag.__doc__, 'Expected %s __doc__' % name)
self.assertEqual(tag.__dict__['anything'], 'Expected %s __dict__' % name)
def test_simple_tags(self):
c = template.Context({'value': 42})
t = template.Template('{% load custom %}{% no_params %}')
self.assertEqual(t.render(c), u'no_params - Expected result')
t = template.Template('{% load custom %}{% one_param 37 %}')
self.assertEqual(t.render(c), u'one_param - Expected result: 37')
t = template.Template('{% load custom %}{% explicit_no_context 37 %}')
self.assertEqual(t.render(c), u'explicit_no_context - Expected result: 37')
t = template.Template('{% load custom %}{% no_params_with_context %}')
self.assertEqual(t.render(c), u'no_params_with_context - Expected result (context value: 42)')
t = template.Template('{% load custom %}{% params_and_context 37 %}')
self.assertEqual(t.render(c), u'params_and_context - Expected result (context value: 42): 37')
t = template.Template('{% load custom %}{% simple_two_params 37 42 %}')
self.assertEqual(t.render(c), u'simple_two_params - Expected result: 37, 42')
t = template.Template('{% load custom %}{% simple_one_default 37 %}')
self.assertEqual(t.render(c), u'simple_one_default - Expected result: 37, hi')
t = template.Template('{% load custom %}{% simple_one_default 37 two="hello" %}')
self.assertEqual(t.render(c), u'simple_one_default - Expected result: 37, hello')
t = template.Template('{% load custom %}{% simple_one_default one=99 two="hello" %}')
self.assertEqual(t.render(c), u'simple_one_default - Expected result: 99, hello')
self.assertRaisesRegexp(template.TemplateSyntaxError,
"'simple_one_default' received unexpected keyword argument 'three'",
template.Template, '{% load custom %}{% simple_one_default 99 two="hello" three="foo" %}')
t = template.Template('{% load custom %}{% simple_one_default 37 42 %}')
self.assertEqual(t.render(c), u'simple_one_default - Expected result: 37, 42')
t = template.Template('{% load custom %}{% simple_unlimited_args 37 %}')
self.assertEqual(t.render(c), u'simple_unlimited_args - Expected result: 37, hi')
t = template.Template('{% load custom %}{% simple_unlimited_args 37 42 56 89 %}')
self.assertEqual(t.render(c), u'simple_unlimited_args - Expected result: 37, 42, 56, 89')
t = template.Template('{% load custom %}{% simple_only_unlimited_args %}')
self.assertEqual(t.render(c), u'simple_only_unlimited_args - Expected result: ')
t = template.Template('{% load custom %}{% simple_only_unlimited_args 37 42 56 89 %}')
self.assertEqual(t.render(c), u'simple_only_unlimited_args - Expected result: 37, 42, 56, 89')
self.assertRaisesRegexp(template.TemplateSyntaxError,
"'simple_two_params' received too many positional arguments",
template.Template, '{% load custom %}{% simple_two_params 37 42 56 %}')
self.assertRaisesRegexp(template.TemplateSyntaxError,
"'simple_one_default' received too many positional arguments",
template.Template, '{% load custom %}{% simple_one_default 37 42 56 %}')
t = template.Template('{% load custom %}{% simple_unlimited_args_kwargs 37 40|add:2 56 eggs="scrambled" four=1|add:3 %}')
self.assertEqual(t.render(c), u'simple_unlimited_args_kwargs - Expected result: 37, 42, 56 / eggs=scrambled, four=4')
self.assertRaisesRegexp(template.TemplateSyntaxError,
"'simple_unlimited_args_kwargs' received some positional argument\(s\) after some keyword argument\(s\)",
template.Template, '{% load custom %}{% simple_unlimited_args_kwargs 37 40|add:2 eggs="scrambled" 56 four=1|add:3 %}')
self.assertRaisesRegexp(template.TemplateSyntaxError,
"'simple_unlimited_args_kwargs' received multiple values for keyword argument 'eggs'",
template.Template, '{% load custom %}{% simple_unlimited_args_kwargs 37 eggs="scrambled" eggs="scrambled" %}')
def test_simple_tag_registration(self):
# Test that the decorators preserve the decorated function's docstring, name and attributes.
self.verify_tag(custom.no_params, 'no_params')
self.verify_tag(custom.one_param, 'one_param')
self.verify_tag(custom.explicit_no_context, 'explicit_no_context')
self.verify_tag(custom.no_params_with_context, 'no_params_with_context')
self.verify_tag(custom.params_and_context, 'params_and_context')
self.verify_tag(custom.simple_unlimited_args_kwargs, 'simple_unlimited_args_kwargs')
self.verify_tag(custom.simple_tag_without_context_parameter, 'simple_tag_without_context_parameter')
def test_simple_tag_missing_context(self):
# The 'context' parameter must be present when takes_context is True
self.assertRaisesRegexp(template.TemplateSyntaxError,
"'simple_tag_without_context_parameter' is decorated with takes_context=True so it must have a first argument of 'context'",
template.Template, '{% load custom %}{% simple_tag_without_context_parameter 123 %}')
def test_inclusion_tags(self):
c = template.Context({'value': 42})
t = template.Template('{% load custom %}{% inclusion_no_params %}')
self.assertEqual(t.render(c), u'inclusion_no_params - Expected result\n')
t = template.Template('{% load custom %}{% inclusion_one_param 37 %}')
self.assertEqual(t.render(c), u'inclusion_one_param - Expected result: 37\n')
t = template.Template('{% load custom %}{% inclusion_explicit_no_context 37 %}')
self.assertEqual(t.render(c), u'inclusion_explicit_no_context - Expected result: 37\n')
t = template.Template('{% load custom %}{% inclusion_no_params_with_context %}')
self.assertEqual(t.render(c), u'inclusion_no_params_with_context - Expected result (context value: 42)\n')
t = template.Template('{% load custom %}{% inclusion_params_and_context 37 %}')
self.assertEqual(t.render(c), u'inclusion_params_and_context - Expected result (context value: 42): 37\n')
t = template.Template('{% load custom %}{% inclusion_two_params 37 42 %}')
self.assertEqual(t.render(c), u'inclusion_two_params - Expected result: 37, 42\n')
t = template.Template('{% load custom %}{% inclusion_one_default 37 %}')
self.assertEqual(t.render(c), u'inclusion_one_default - Expected result: 37, hi\n')
t = template.Template('{% load custom %}{% inclusion_one_default 37 two="hello" %}')
self.assertEqual(t.render(c), u'inclusion_one_default - Expected result: 37, hello\n')
t = template.Template('{% load custom %}{% inclusion_one_default one=99 two="hello" %}')
self.assertEqual(t.render(c), u'inclusion_one_default - Expected result: 99, hello\n')
self.assertRaisesRegexp(template.TemplateSyntaxError,
"'inclusion_one_default' received unexpected keyword argument 'three'",
template.Template, '{% load custom %}{% inclusion_one_default 99 two="hello" three="foo" %}')
t = template.Template('{% load custom %}{% inclusion_one_default 37 42 %}')
self.assertEqual(t.render(c), u'inclusion_one_default - Expected result: 37, 42\n')
t = template.Template('{% load custom %}{% inclusion_unlimited_args 37 %}')
self.assertEqual(t.render(c), u'inclusion_unlimited_args - Expected result: 37, hi\n')
t = template.Template('{% load custom %}{% inclusion_unlimited_args 37 42 56 89 %}')
self.assertEqual(t.render(c), u'inclusion_unlimited_args - Expected result: 37, 42, 56, 89\n')
t = template.Template('{% load custom %}{% inclusion_only_unlimited_args %}')
self.assertEqual(t.render(c), u'inclusion_only_unlimited_args - Expected result: \n')
t = template.Template('{% load custom %}{% inclusion_only_unlimited_args 37 42 56 89 %}')
self.assertEqual(t.render(c), u'inclusion_only_unlimited_args - Expected result: 37, 42, 56, 89\n')
self.assertRaisesRegexp(template.TemplateSyntaxError,
"'inclusion_two_params' received too many positional arguments",
template.Template, '{% load custom %}{% inclusion_two_params 37 42 56 %}')
self.assertRaisesRegexp(template.TemplateSyntaxError,
"'inclusion_one_default' received too many positional arguments",
template.Template, '{% load custom %}{% inclusion_one_default 37 42 56 %}')
self.assertRaisesRegexp(template.TemplateSyntaxError,
"'inclusion_one_default' did not receive value\(s\) for the argument\(s\): 'one'",
template.Template, '{% load custom %}{% inclusion_one_default %}')
self.assertRaisesRegexp(template.TemplateSyntaxError,
"'inclusion_unlimited_args' did not receive value\(s\) for the argument\(s\): 'one'",
template.Template, '{% load custom %}{% inclusion_unlimited_args %}')
t = template.Template('{% load custom %}{% inclusion_unlimited_args_kwargs 37 40|add:2 56 eggs="scrambled" four=1|add:3 %}')
self.assertEqual(t.render(c), u'inclusion_unlimited_args_kwargs - Expected result: 37, 42, 56 / eggs=scrambled, four=4\n')
self.assertRaisesRegexp(template.TemplateSyntaxError,
"'inclusion_unlimited_args_kwargs' received some positional argument\(s\) after some keyword argument\(s\)",
template.Template, '{% load custom %}{% inclusion_unlimited_args_kwargs 37 40|add:2 eggs="scrambled" 56 four=1|add:3 %}')
self.assertRaisesRegexp(template.TemplateSyntaxError,
"'inclusion_unlimited_args_kwargs' received multiple values for keyword argument 'eggs'",
template.Template, '{% load custom %}{% inclusion_unlimited_args_kwargs 37 eggs="scrambled" eggs="scrambled" %}')
def test_include_tag_missing_context(self):
# The 'context' parameter must be present when takes_context is True
self.assertRaisesRegexp(template.TemplateSyntaxError,
"'inclusion_tag_without_context_parameter' is decorated with takes_context=True so it must have a first argument of 'context'",
template.Template, '{% load custom %}{% inclusion_tag_without_context_parameter 123 %}')
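    # For reference, an inclusion tag such as `inclusion_one_param` above is
    # presumably registered along these lines (an assumed sketch, including the
    # template name; the templatetags module itself is not shown here):
    #
    #   @register.inclusion_tag('inclusion.html')
    #   def inclusion_one_param(one):
    #       """Expected inclusion_one_param __doc__"""
    #       return {'result': 'inclusion_one_param - Expected result: %s' % one}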
def test_inclusion_tags_from_template(self):
c = template.Context({'value': 42})
t = template.Template('{% load custom %}{% inclusion_no_params_from_template %}')
self.assertEqual(t.render(c), u'inclusion_no_params_from_template - Expected result\n')
t = template.Template('{% load custom %}{% inclusion_one_param_from_template 37 %}')
self.assertEqual(t.render(c), u'inclusion_one_param_from_template - Expected result: 37\n')
t = template.Template('{% load custom %}{% inclusion_explicit_no_context_from_template 37 %}')
self.assertEqual(t.render(c), u'inclusion_explicit_no_context_from_template - Expected result: 37\n')
t = template.Template('{% load custom %}{% inclusion_no_params_with_context_from_template %}')
self.assertEqual(t.render(c), u'inclusion_no_params_with_context_from_template - Expected result (context value: 42)\n')
t = template.Template('{% load custom %}{% inclusion_params_and_context_from_template 37 %}')
self.assertEqual(t.render(c), u'inclusion_params_and_context_from_template - Expected result (context value: 42): 37\n')
t = template.Template('{% load custom %}{% inclusion_two_params_from_template 37 42 %}')
self.assertEqual(t.render(c), u'inclusion_two_params_from_template - Expected result: 37, 42\n')
t = template.Template('{% load custom %}{% inclusion_one_default_from_template 37 %}')
self.assertEqual(t.render(c), u'inclusion_one_default_from_template - Expected result: 37, hi\n')
<|fim▁hole|> self.assertEqual(t.render(c), u'inclusion_unlimited_args_from_template - Expected result: 37, hi\n')
t = template.Template('{% load custom %}{% inclusion_unlimited_args_from_template 37 42 56 89 %}')
self.assertEqual(t.render(c), u'inclusion_unlimited_args_from_template - Expected result: 37, 42, 56, 89\n')
t = template.Template('{% load custom %}{% inclusion_only_unlimited_args_from_template %}')
self.assertEqual(t.render(c), u'inclusion_only_unlimited_args_from_template - Expected result: \n')
t = template.Template('{% load custom %}{% inclusion_only_unlimited_args_from_template 37 42 56 89 %}')
self.assertEqual(t.render(c), u'inclusion_only_unlimited_args_from_template - Expected result: 37, 42, 56, 89\n')
def test_inclusion_tag_registration(self):
# Test that the decorators preserve the decorated function's docstring, name and attributes.
self.verify_tag(custom.inclusion_no_params, 'inclusion_no_params')
self.verify_tag(custom.inclusion_one_param, 'inclusion_one_param')
self.verify_tag(custom.inclusion_explicit_no_context, 'inclusion_explicit_no_context')
self.verify_tag(custom.inclusion_no_params_with_context, 'inclusion_no_params_with_context')
self.verify_tag(custom.inclusion_params_and_context, 'inclusion_params_and_context')
self.verify_tag(custom.inclusion_two_params, 'inclusion_two_params')
self.verify_tag(custom.inclusion_one_default, 'inclusion_one_default')
self.verify_tag(custom.inclusion_unlimited_args, 'inclusion_unlimited_args')
self.verify_tag(custom.inclusion_only_unlimited_args, 'inclusion_only_unlimited_args')
self.verify_tag(custom.inclusion_tag_without_context_parameter, 'inclusion_tag_without_context_parameter')
self.verify_tag(custom.inclusion_tag_use_l10n, 'inclusion_tag_use_l10n')
self.verify_tag(custom.inclusion_tag_current_app, 'inclusion_tag_current_app')
self.verify_tag(custom.inclusion_unlimited_args_kwargs, 'inclusion_unlimited_args_kwargs')
def test_15070_current_app(self):
"""
Test that inclusion tag passes down `current_app` of context to the
Context of the included/rendered template as well.
"""
c = template.Context({})
t = template.Template('{% load custom %}{% inclusion_tag_current_app %}')
self.assertEqual(t.render(c).strip(), u'None')
c.current_app = 'advanced'
self.assertEqual(t.render(c).strip(), u'advanced')
def test_15070_use_l10n(self):
"""
Test that inclusion tag passes down `use_l10n` of context to the
Context of the included/rendered template as well.
"""
c = template.Context({})
t = template.Template('{% load custom %}{% inclusion_tag_use_l10n %}')
self.assertEqual(t.render(c).strip(), u'None')
c.use_l10n = True
self.assertEqual(t.render(c).strip(), u'True')
def test_assignment_tags(self):
c = template.Context({'value': 42})
t = template.Template('{% load custom %}{% assignment_no_params as var %}The result is: {{ var }}')
self.assertEqual(t.render(c), u'The result is: assignment_no_params - Expected result')
t = template.Template('{% load custom %}{% assignment_one_param 37 as var %}The result is: {{ var }}')
self.assertEqual(t.render(c), u'The result is: assignment_one_param - Expected result: 37')
t = template.Template('{% load custom %}{% assignment_explicit_no_context 37 as var %}The result is: {{ var }}')
self.assertEqual(t.render(c), u'The result is: assignment_explicit_no_context - Expected result: 37')
t = template.Template('{% load custom %}{% assignment_no_params_with_context as var %}The result is: {{ var }}')
self.assertEqual(t.render(c), u'The result is: assignment_no_params_with_context - Expected result (context value: 42)')
t = template.Template('{% load custom %}{% assignment_params_and_context 37 as var %}The result is: {{ var }}')
self.assertEqual(t.render(c), u'The result is: assignment_params_and_context - Expected result (context value: 42): 37')
t = template.Template('{% load custom %}{% assignment_two_params 37 42 as var %}The result is: {{ var }}')
self.assertEqual(t.render(c), u'The result is: assignment_two_params - Expected result: 37, 42')
t = template.Template('{% load custom %}{% assignment_one_default 37 as var %}The result is: {{ var }}')
self.assertEqual(t.render(c), u'The result is: assignment_one_default - Expected result: 37, hi')
t = template.Template('{% load custom %}{% assignment_one_default 37 two="hello" as var %}The result is: {{ var }}')
self.assertEqual(t.render(c), u'The result is: assignment_one_default - Expected result: 37, hello')
t = template.Template('{% load custom %}{% assignment_one_default one=99 two="hello" as var %}The result is: {{ var }}')
self.assertEqual(t.render(c), u'The result is: assignment_one_default - Expected result: 99, hello')
self.assertRaisesRegexp(template.TemplateSyntaxError,
"'assignment_one_default' received unexpected keyword argument 'three'",
template.Template, '{% load custom %}{% assignment_one_default 99 two="hello" three="foo" as var %}')
t = template.Template('{% load custom %}{% assignment_one_default 37 42 as var %}The result is: {{ var }}')
self.assertEqual(t.render(c), u'The result is: assignment_one_default - Expected result: 37, 42')
t = template.Template('{% load custom %}{% assignment_unlimited_args 37 as var %}The result is: {{ var }}')
self.assertEqual(t.render(c), u'The result is: assignment_unlimited_args - Expected result: 37, hi')
t = template.Template('{% load custom %}{% assignment_unlimited_args 37 42 56 89 as var %}The result is: {{ var }}')
self.assertEqual(t.render(c), u'The result is: assignment_unlimited_args - Expected result: 37, 42, 56, 89')
t = template.Template('{% load custom %}{% assignment_only_unlimited_args as var %}The result is: {{ var }}')
self.assertEqual(t.render(c), u'The result is: assignment_only_unlimited_args - Expected result: ')
t = template.Template('{% load custom %}{% assignment_only_unlimited_args 37 42 56 89 as var %}The result is: {{ var }}')
self.assertEqual(t.render(c), u'The result is: assignment_only_unlimited_args - Expected result: 37, 42, 56, 89')
self.assertRaisesRegexp(template.TemplateSyntaxError,
"'assignment_one_param' tag takes at least 2 arguments and the second last argument must be 'as'",
template.Template, '{% load custom %}{% assignment_one_param 37 %}The result is: {{ var }}')
self.assertRaisesRegexp(template.TemplateSyntaxError,
"'assignment_one_param' tag takes at least 2 arguments and the second last argument must be 'as'",
template.Template, '{% load custom %}{% assignment_one_param 37 as %}The result is: {{ var }}')
self.assertRaisesRegexp(template.TemplateSyntaxError,
"'assignment_one_param' tag takes at least 2 arguments and the second last argument must be 'as'",
template.Template, '{% load custom %}{% assignment_one_param 37 ass var %}The result is: {{ var }}')
self.assertRaisesRegexp(template.TemplateSyntaxError,
"'assignment_two_params' received too many positional arguments",
template.Template, '{% load custom %}{% assignment_two_params 37 42 56 as var %}The result is: {{ var }}')
self.assertRaisesRegexp(template.TemplateSyntaxError,
"'assignment_one_default' received too many positional arguments",
template.Template, '{% load custom %}{% assignment_one_default 37 42 56 as var %}The result is: {{ var }}')
self.assertRaisesRegexp(template.TemplateSyntaxError,
"'assignment_one_default' did not receive value\(s\) for the argument\(s\): 'one'",
template.Template, '{% load custom %}{% assignment_one_default as var %}The result is: {{ var }}')
self.assertRaisesRegexp(template.TemplateSyntaxError,
"'assignment_unlimited_args' did not receive value\(s\) for the argument\(s\): 'one'",
template.Template, '{% load custom %}{% assignment_unlimited_args as var %}The result is: {{ var }}')
t = template.Template('{% load custom %}{% assignment_unlimited_args_kwargs 37 40|add:2 56 eggs="scrambled" four=1|add:3 as var %}The result is: {{ var }}')
self.assertEqual(t.render(c), u'The result is: assignment_unlimited_args_kwargs - Expected result: 37, 42, 56 / eggs=scrambled, four=4')
self.assertRaisesRegexp(template.TemplateSyntaxError,
"'assignment_unlimited_args_kwargs' received some positional argument\(s\) after some keyword argument\(s\)",
template.Template, '{% load custom %}{% assignment_unlimited_args_kwargs 37 40|add:2 eggs="scrambled" 56 four=1|add:3 as var %}The result is: {{ var }}')
self.assertRaisesRegexp(template.TemplateSyntaxError,
"'assignment_unlimited_args_kwargs' received multiple values for keyword argument 'eggs'",
template.Template, '{% load custom %}{% assignment_unlimited_args_kwargs 37 eggs="scrambled" eggs="scrambled" as var %}The result is: {{ var }}')
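    # An assignment tag such as `assignment_one_param` above is presumably
    # registered roughly like this (an assumed sketch, using the
    # register.assignment_tag API that this Django version provides):
    #
    #   @register.assignment_tag
    #   def assignment_one_param(one):
    #       """Expected assignment_one_param __doc__"""
    #       return 'assignment_one_param - Expected result: %s' % one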
def test_assignment_tag_registration(self):
# Test that the decorators preserve the decorated function's docstring, name and attributes.
self.verify_tag(custom.assignment_no_params, 'assignment_no_params')
self.verify_tag(custom.assignment_one_param, 'assignment_one_param')
self.verify_tag(custom.assignment_explicit_no_context, 'assignment_explicit_no_context')
self.verify_tag(custom.assignment_no_params_with_context, 'assignment_no_params_with_context')
self.verify_tag(custom.assignment_params_and_context, 'assignment_params_and_context')
self.verify_tag(custom.assignment_one_default, 'assignment_one_default')
self.verify_tag(custom.assignment_two_params, 'assignment_two_params')
self.verify_tag(custom.assignment_unlimited_args, 'assignment_unlimited_args')
self.verify_tag(custom.assignment_only_unlimited_args, 'assignment_only_unlimited_args')
self.verify_tag(custom.assignment_unlimited_args, 'assignment_unlimited_args')
self.verify_tag(custom.assignment_unlimited_args_kwargs, 'assignment_unlimited_args_kwargs')
self.verify_tag(custom.assignment_tag_without_context_parameter, 'assignment_tag_without_context_parameter')
def test_assignment_tag_missing_context(self):
# The 'context' parameter must be present when takes_context is True
self.assertRaisesRegexp(template.TemplateSyntaxError,
"'assignment_tag_without_context_parameter' is decorated with takes_context=True so it must have a first argument of 'context'",
template.Template, '{% load custom %}{% assignment_tag_without_context_parameter 123 as var %}')<|fim▁end|>
|
t = template.Template('{% load custom %}{% inclusion_one_default_from_template 37 42 %}')
self.assertEqual(t.render(c), u'inclusion_one_default_from_template - Expected result: 37, 42\n')
t = template.Template('{% load custom %}{% inclusion_unlimited_args_from_template 37 %}')
|
<|file_name|>views.py<|end_file_name|><|fim▁begin|>def reference_case_attachment_view(request, domain, case_id, attachment_id):<|fim▁hole|><|fim▁end|>
|
    raise NotImplementedError("This view is to be overridden by the specific implementations for retrieving case attachments")
|
<|file_name|>NewRoomInfo.js<|end_file_name|><|fim▁begin|>import { Template } from 'meteor/templating';
import { Academy } from '/imports/api/databasedriver.js';
import { Challenges } from '/imports/api/databasedriver.js';
import { Rooms } from '/imports/api/databasedriver.js';
import { Badges } from '/imports/api/databasedriver.js';
import { ROOMS_ACTIVE_ELEMENT_KEY } from '/client/management/rooms/TabRooms.js';
Template.NewRoomInfo.helpers({
badges(){
var badges = Badges.find({}).fetch();
return badges;
},
teamBadge() {
let roomType = "Team";
let roomBadges = Badges.find({'type': roomType }).fetch();
return roomBadges;
}
});
const TABLE_ROOMS_ACTIVE_TEMPLATE_NAME = "TableRoomInfo";
const NEW_ROOM_ACTIVE_TEMPLATE_NAME = "NewRoomInfo";
const EDIT_ROOM_ACTIVE_TEMPLATE_NAME = "EditRoomInfo";
Template.NewRoomInfo.events({
'submit form' (event) {
event.preventDefault();
let data = {};
let roomName = $("#roomName").val();
let roomDecision = $("#roomDecision").val();
let roomDescription = $("#roomDescription").val();
let roomBadge = $("#roomBadge").val();<|fim▁hole|>
data =
{
name: roomName,
dailyDecision: roomDecision,
description: roomDescription,
badges: [{ badge: roomBadge }]
};
//Modal.show('roomsInsertModal', this);
Meteor.call("insertRoom", data, function(error, result) {
if (error) {
alert(error);
}
});
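        // The "insertRoom" method invoked above is assumed to be a server-side
        // Meteor method defined elsewhere in the app, roughly:
        //
        //   Meteor.methods({
        //     insertRoom(data) {
        //       return Rooms.insert(data);
        //     }
        //   });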
$("#addRoom")[0].reset();
Session.set(ROOMS_ACTIVE_ELEMENT_KEY, TABLE_ROOMS_ACTIVE_TEMPLATE_NAME);
},
'click #nopRoom' (event){
event.preventDefault();
Session.set(ROOMS_ACTIVE_ELEMENT_KEY, TABLE_ROOMS_ACTIVE_TEMPLATE_NAME);
}
});<|fim▁end|>
| |
<|file_name|>CUnmannedTraderGroupDivisionVersionInfo.hpp<|end_file_name|><|fim▁begin|>// This file auto generated by plugin for ida pro. Generated code only for x64. Please, dont change manually
#pragma once
#include <common/common.h>
#include <std__vector.hpp>
<|fim▁hole|>
START_ATF_NAMESPACE
#pragma pack(push, 8)
struct CUnmannedTraderGroupDivisionVersionInfo
{
int m_iType;
std::vector<unsigned long> m_vecuiVersion;
public:
CUnmannedTraderGroupDivisionVersionInfo(struct CUnmannedTraderGroupDivisionVersionInfo* lhs);
void ctor_CUnmannedTraderGroupDivisionVersionInfo(struct CUnmannedTraderGroupDivisionVersionInfo* lhs);
CUnmannedTraderGroupDivisionVersionInfo(int iType, unsigned int uiMaxCnt);
void ctor_CUnmannedTraderGroupDivisionVersionInfo(int iType, unsigned int uiMaxCnt);
bool GetVersion(char byClass, unsigned int* dwVer);
bool IncreaseVersion(char byClass);
bool IsEmpty();
~CUnmannedTraderGroupDivisionVersionInfo();
void dtor_CUnmannedTraderGroupDivisionVersionInfo();
};
#pragma pack(pop)
static_assert(ATF::checkSize<CUnmannedTraderGroupDivisionVersionInfo, 48>(), "CUnmannedTraderGroupDivisionVersionInfo");
END_ATF_NAMESPACE<|fim▁end|>
| |
<|file_name|>QSourceLocation.hpp<|end_file_name|><|fim▁begin|>/****************************************************************************
**
** Copyright (C) 2014-2015 Dinu SV.
** (contact: [email protected])
** This file is part of C++ Snippet Assist application.
**
** GNU General Public License Usage
**
** This file may be used under the terms of the GNU General Public License
** version 3.0 as published by the Free Software Foundation and appearing
** in the file LICENSE.GPL included in the packaging of this file. Please
** review the following information to ensure the GNU General Public License
** version 3.0 requirements will be met: http://www.gnu.org/copyleft/gpl.html.
**
****************************************************************************/
#ifndef QSOURCELOCATION_HPP
#define QSOURCELOCATION_HPP
#include "QCSAGlobal.hpp"
#include <QString>
#include <QObject>
namespace csa{
class Q_CSA_EXPORT QSourceLocation : public QObject{
Q_OBJECT
public:
QSourceLocation(
const QString& file,
unsigned int line,
unsigned int column,
unsigned int offset,
QObject* parent = 0);
QSourceLocation(
const char* file,
unsigned int line,
unsigned int column,
unsigned int offset,
QObject* parent = 0);
QSourceLocation(
const QSourceLocation& other,
QObject* parent = 0);
~QSourceLocation();
void assign(const QSourceLocation& other);
QSourceLocation& operator =(const QSourceLocation& other);
public slots:
unsigned int line() const;
unsigned int column() const;
unsigned int offset() const;
QString filePath() const;
QString fileName() const;
QString toString() const;
private:
QString m_filePath;
unsigned int m_line;
unsigned int m_column;
unsigned int m_offset;
};
inline unsigned int QSourceLocation::line() const{
return m_line;<|fim▁hole|>}
inline unsigned int QSourceLocation::column() const{
return m_column;
}
inline unsigned int QSourceLocation::offset() const{
return m_offset;
}
inline QString QSourceLocation::filePath() const{
return m_filePath;
}
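// Typical usage (an illustrative sketch; the exact fileName()/toString()
// output format is implementation-defined):
//
//   QSourceLocation loc("src/main.cpp", 10, 4, 120);
//   unsigned int l = loc.line();   // 10
//   QString name = loc.fileName(); // presumably "main.cpp"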
}// namespace
Q_DECLARE_METATYPE(csa::QSourceLocation*)
#endif // QSOURCELOCATION_HPP<|fim▁end|>
| |
<|file_name|>__init__.py<|end_file_name|><|fim▁begin|># -*- coding: utf-8 -*-
__author__ = 'Yacine Haddad'<|fim▁hole|>__version__ = '2.0.0'<|fim▁end|>
|
__email__ = '[email protected]'
|
<|file_name|>TestSourceModelPackageImpl.java<|end_file_name|><|fim▁begin|>/**
* <copyright>
* </copyright>
*
* $Id$
*/
package de.hub.specificmodels.tests.testsourcemodel.impl;
import org.eclipse.emf.ecore.EAttribute;
import org.eclipse.emf.ecore.EClass;
import org.eclipse.emf.ecore.EPackage;
import org.eclipse.emf.ecore.EReference;
import org.eclipse.emf.ecore.impl.EPackageImpl;
import de.hub.specificmodels.tests.testsourcemodel.ClassWithListFeatures;
import de.hub.specificmodels.tests.testsourcemodel.ListFeatureElementClass1;
import de.hub.specificmodels.tests.testsourcemodel.ListFeatureElementClass2;
import de.hub.specificmodels.tests.testsourcemodel.ListFeatureElementClass3;
import de.hub.specificmodels.tests.testsourcemodel.RootClass;
import de.hub.specificmodels.tests.testsourcemodel.TestSourceModelFactory;
import de.hub.specificmodels.tests.testsourcemodel.TestSourceModelPackage;
/**
* <!-- begin-user-doc -->
* An implementation of the model <b>Package</b>.
* <!-- end-user-doc -->
* @generated
*/
public class TestSourceModelPackageImpl extends EPackageImpl implements TestSourceModelPackage {
/**
* <!-- begin-user-doc -->
* <!-- end-user-doc -->
* @generated
*/
private EClass rootClassEClass = null;
/**
* <!-- begin-user-doc -->
* <!-- end-user-doc -->
* @generated
*/
private EClass classWithListFeaturesEClass = null;
/**
* <!-- begin-user-doc -->
* <!-- end-user-doc -->
* @generated
*/
private EClass listFeatureElementClass1EClass = null;
/**
* <!-- begin-user-doc -->
* <!-- end-user-doc -->
* @generated
*/
private EClass listFeatureElementClass2EClass = null;
/**
* <!-- begin-user-doc -->
* <!-- end-user-doc -->
* @generated
*/
private EClass listFeatureElementClass3EClass = null;
/**
* Creates an instance of the model <b>Package</b>, registered with
* {@link org.eclipse.emf.ecore.EPackage.Registry EPackage.Registry} by the package
* package URI value.
* <p>Note: the correct way to create the package is via the static
* factory method {@link #init init()}, which also performs
* initialization of the package, or returns the registered package,
* if one already exists.
* <!-- begin-user-doc -->
* <!-- end-user-doc -->
* @see org.eclipse.emf.ecore.EPackage.Registry
* @see de.hub.specificmodels.tests.testsourcemodel.TestSourceModelPackage#eNS_URI
* @see #init()
* @generated
*/
private TestSourceModelPackageImpl() {
super(eNS_URI, TestSourceModelFactory.eINSTANCE);
}
/**
* <!-- begin-user-doc -->
* <!-- end-user-doc -->
* @generated
*/
private static boolean isInited = false;
/**
* Creates, registers, and initializes the <b>Package</b> for this model, and for any others upon which it depends.
*
* <p>This method is used to initialize {@link TestSourceModelPackage#eINSTANCE} when that field is accessed.
* Clients should not invoke it directly. Instead, they should simply access that field to obtain the package.
* <!-- begin-user-doc -->
* <!-- end-user-doc -->
* @see #eNS_URI
* @see #createPackageContents()
* @see #initializePackageContents()
* @generated
*/
public static TestSourceModelPackage init() {
if (isInited) return (TestSourceModelPackage)EPackage.Registry.INSTANCE.getEPackage(TestSourceModelPackage.eNS_URI);
// Obtain or create and register package
TestSourceModelPackageImpl theTestSourceModelPackage = (TestSourceModelPackageImpl)(EPackage.Registry.INSTANCE.get(eNS_URI) instanceof TestSourceModelPackageImpl ? EPackage.Registry.INSTANCE.get(eNS_URI) : new TestSourceModelPackageImpl());
isInited = true;
// Create package meta-data objects
theTestSourceModelPackage.createPackageContents();
// Initialize created meta-data
theTestSourceModelPackage.initializePackageContents();
// Mark meta-data to indicate it can't be changed
theTestSourceModelPackage.freeze();
// Update the registry and return the package
EPackage.Registry.INSTANCE.put(TestSourceModelPackage.eNS_URI, theTestSourceModelPackage);
return theTestSourceModelPackage;
}
/**
* <!-- begin-user-doc -->
* <!-- end-user-doc -->
* @generated
*/
public EClass getRootClass() {
return rootClassEClass;
}
/**
* <!-- begin-user-doc -->
* <!-- end-user-doc -->
* @generated
*/
public EAttribute getRootClass_AnAttribute1() {
return (EAttribute)rootClassEClass.getEStructuralFeatures().get(0);
}
/**
* <!-- begin-user-doc -->
* <!-- end-user-doc -->
* @generated
*/
public EReference getRootClass_NormalReference() {
return (EReference)rootClassEClass.getEStructuralFeatures().get(1);
}
/**
* <!-- begin-user-doc -->
* <!-- end-user-doc -->
* @generated
*/
public EAttribute getRootClass_Any() {
return (EAttribute)rootClassEClass.getEStructuralFeatures().get(2);
}
/**
* <!-- begin-user-doc -->
* <!-- end-user-doc -->
* @generated
*/
public EReference getRootClass_NonManyReference() {
return (EReference)rootClassEClass.getEStructuralFeatures().get(3);
}
/**
* <!-- begin-user-doc -->
* <!-- end-user-doc -->
* @generated
*/
public EClass getClassWithListFeatures() {
return classWithListFeaturesEClass;
}
/**
* <!-- begin-user-doc -->
* <!-- end-user-doc -->
* @generated
*/
public EReference getClassWithListFeatures_ListFeature1() {
return (EReference)classWithListFeaturesEClass.getEStructuralFeatures().get(0);
}
/**
* <!-- begin-user-doc -->
* <!-- end-user-doc -->
* @generated
*/
public EReference getClassWithListFeatures_ListFeature2() {
return (EReference)classWithListFeaturesEClass.getEStructuralFeatures().get(1);
}
/**
* <!-- begin-user-doc -->
* <!-- end-user-doc -->
* @generated
*/
public EAttribute getClassWithListFeatures_AnAttribute1() {
return (EAttribute)classWithListFeaturesEClass.getEStructuralFeatures().get(2);
}
/**
* <!-- begin-user-doc -->
* <!-- end-user-doc -->
* @generated
*/
public EClass getListFeatureElementClass1() {
return listFeatureElementClass1EClass;
}
/**
* <!-- begin-user-doc -->
* <!-- end-user-doc -->
* @generated
*/
public EAttribute getListFeatureElementClass1_Name() {
return (EAttribute)listFeatureElementClass1EClass.getEStructuralFeatures().get(0);
}
/**
* <!-- begin-user-doc -->
* <!-- end-user-doc -->
* @generated
*/
public EReference getListFeatureElementClass1_ListFeature3() {
return (EReference)listFeatureElementClass1EClass.getEStructuralFeatures().get(1);
}
/**
* <!-- begin-user-doc -->
* <!-- end-user-doc -->
* @generated
*/
public EAttribute getListFeatureElementClass1_AnAttributeOfFeatureClass1() {
return (EAttribute)listFeatureElementClass1EClass.getEStructuralFeatures().get(2);
}
/**
* <!-- begin-user-doc -->
* <!-- end-user-doc -->
* @generated
*/
public EAttribute getListFeatureElementClass1_Any() {
return (EAttribute)listFeatureElementClass1EClass.getEStructuralFeatures().get(3);
}
/**
* <!-- begin-user-doc -->
* <!-- end-user-doc -->
* @generated
*/
public EClass getListFeatureElementClass2() {
return listFeatureElementClass2EClass;
}
<|fim▁hole|> */
public EAttribute getListFeatureElementClass2_Name() {
return (EAttribute)listFeatureElementClass2EClass.getEStructuralFeatures().get(0);
}
/**
* <!-- begin-user-doc -->
* <!-- end-user-doc -->
* @generated
*/
public EAttribute getListFeatureElementClass2_AnAttributeOfFeatureClass2() {
return (EAttribute)listFeatureElementClass2EClass.getEStructuralFeatures().get(1);
}
/**
* <!-- begin-user-doc -->
* <!-- end-user-doc -->
* @generated
*/
public EClass getListFeatureElementClass3() {
return listFeatureElementClass3EClass;
}
/**
* <!-- begin-user-doc -->
* <!-- end-user-doc -->
* @generated
*/
public EAttribute getListFeatureElementClass3_Name() {
return (EAttribute)listFeatureElementClass3EClass.getEStructuralFeatures().get(0);
}
/**
* <!-- begin-user-doc -->
* <!-- end-user-doc -->
* @generated
*/
public EAttribute getListFeatureElementClass3_AnAttributeOfFeatureClass3() {
return (EAttribute)listFeatureElementClass3EClass.getEStructuralFeatures().get(1);
}
/**
* <!-- begin-user-doc -->
* <!-- end-user-doc -->
* @generated
*/
public TestSourceModelFactory getTestSourceModelFactory() {
return (TestSourceModelFactory)getEFactoryInstance();
}
/**
* <!-- begin-user-doc -->
* <!-- end-user-doc -->
* @generated
*/
private boolean isCreated = false;
/**
* Creates the meta-model objects for the package. This method is
	 * guarded to have no effect on any invocation but its first.
* <!-- begin-user-doc -->
* <!-- end-user-doc -->
* @generated
*/
public void createPackageContents() {
if (isCreated) return;
isCreated = true;
// Create classes and their features
rootClassEClass = createEClass(ROOT_CLASS);
createEAttribute(rootClassEClass, ROOT_CLASS__AN_ATTRIBUTE1);
createEReference(rootClassEClass, ROOT_CLASS__NORMAL_REFERENCE);
createEAttribute(rootClassEClass, ROOT_CLASS__ANY);
createEReference(rootClassEClass, ROOT_CLASS__NON_MANY_REFERENCE);
classWithListFeaturesEClass = createEClass(CLASS_WITH_LIST_FEATURES);
createEReference(classWithListFeaturesEClass, CLASS_WITH_LIST_FEATURES__LIST_FEATURE1);
createEReference(classWithListFeaturesEClass, CLASS_WITH_LIST_FEATURES__LIST_FEATURE2);
createEAttribute(classWithListFeaturesEClass, CLASS_WITH_LIST_FEATURES__AN_ATTRIBUTE1);
listFeatureElementClass1EClass = createEClass(LIST_FEATURE_ELEMENT_CLASS1);
createEAttribute(listFeatureElementClass1EClass, LIST_FEATURE_ELEMENT_CLASS1__NAME);
createEReference(listFeatureElementClass1EClass, LIST_FEATURE_ELEMENT_CLASS1__LIST_FEATURE3);
createEAttribute(listFeatureElementClass1EClass, LIST_FEATURE_ELEMENT_CLASS1__AN_ATTRIBUTE_OF_FEATURE_CLASS1);
createEAttribute(listFeatureElementClass1EClass, LIST_FEATURE_ELEMENT_CLASS1__ANY);
listFeatureElementClass2EClass = createEClass(LIST_FEATURE_ELEMENT_CLASS2);
createEAttribute(listFeatureElementClass2EClass, LIST_FEATURE_ELEMENT_CLASS2__NAME);
createEAttribute(listFeatureElementClass2EClass, LIST_FEATURE_ELEMENT_CLASS2__AN_ATTRIBUTE_OF_FEATURE_CLASS2);
listFeatureElementClass3EClass = createEClass(LIST_FEATURE_ELEMENT_CLASS3);
createEAttribute(listFeatureElementClass3EClass, LIST_FEATURE_ELEMENT_CLASS3__NAME);
createEAttribute(listFeatureElementClass3EClass, LIST_FEATURE_ELEMENT_CLASS3__AN_ATTRIBUTE_OF_FEATURE_CLASS3);
}
/**
* <!-- begin-user-doc -->
* <!-- end-user-doc -->
* @generated
*/
private boolean isInitialized = false;
/**
* Complete the initialization of the package and its meta-model. This
	 * method is guarded to have no effect on any invocation but its first.
* <!-- begin-user-doc -->
* <!-- end-user-doc -->
* @generated
*/
public void initializePackageContents() {
if (isInitialized) return;
isInitialized = true;
// Initialize package
setName(eNAME);
setNsPrefix(eNS_PREFIX);
setNsURI(eNS_URI);
// Create type parameters
// Set bounds for type parameters
// Add supertypes to classes
// Initialize classes and features; add operations and parameters
initEClass(rootClassEClass, RootClass.class, "RootClass", !IS_ABSTRACT, !IS_INTERFACE, IS_GENERATED_INSTANCE_CLASS);
initEAttribute(getRootClass_AnAttribute1(), ecorePackage.getEString(), "anAttribute1", null, 0, 1, RootClass.class, !IS_TRANSIENT, !IS_VOLATILE, IS_CHANGEABLE, !IS_UNSETTABLE, !IS_ID, IS_UNIQUE, !IS_DERIVED, IS_ORDERED);
initEReference(getRootClass_NormalReference(), this.getClassWithListFeatures(), null, "normalReference", null, 0, -1, RootClass.class, !IS_TRANSIENT, !IS_VOLATILE, IS_CHANGEABLE, IS_COMPOSITE, !IS_RESOLVE_PROXIES, !IS_UNSETTABLE, IS_UNIQUE, !IS_DERIVED, IS_ORDERED);
initEAttribute(getRootClass_Any(), ecorePackage.getEFeatureMapEntry(), "any", null, 0, -1, RootClass.class, !IS_TRANSIENT, !IS_VOLATILE, IS_CHANGEABLE, !IS_UNSETTABLE, !IS_ID, IS_UNIQUE, !IS_DERIVED, IS_ORDERED);
initEReference(getRootClass_NonManyReference(), this.getClassWithListFeatures(), null, "nonManyReference", null, 0, 1, RootClass.class, !IS_TRANSIENT, !IS_VOLATILE, IS_CHANGEABLE, IS_COMPOSITE, !IS_RESOLVE_PROXIES, !IS_UNSETTABLE, IS_UNIQUE, !IS_DERIVED, IS_ORDERED);
initEClass(classWithListFeaturesEClass, ClassWithListFeatures.class, "ClassWithListFeatures", !IS_ABSTRACT, !IS_INTERFACE, IS_GENERATED_INSTANCE_CLASS);
initEReference(getClassWithListFeatures_ListFeature1(), this.getListFeatureElementClass1(), null, "listFeature1", null, 0, -1, ClassWithListFeatures.class, !IS_TRANSIENT, !IS_VOLATILE, IS_CHANGEABLE, IS_COMPOSITE, !IS_RESOLVE_PROXIES, !IS_UNSETTABLE, IS_UNIQUE, !IS_DERIVED, IS_ORDERED);
initEReference(getClassWithListFeatures_ListFeature2(), this.getListFeatureElementClass2(), null, "listFeature2", null, 0, -1, ClassWithListFeatures.class, !IS_TRANSIENT, !IS_VOLATILE, IS_CHANGEABLE, IS_COMPOSITE, !IS_RESOLVE_PROXIES, !IS_UNSETTABLE, IS_UNIQUE, !IS_DERIVED, IS_ORDERED);
initEAttribute(getClassWithListFeatures_AnAttribute1(), ecorePackage.getEInt(), "anAttribute1", null, 0, 1, ClassWithListFeatures.class, !IS_TRANSIENT, !IS_VOLATILE, IS_CHANGEABLE, !IS_UNSETTABLE, !IS_ID, IS_UNIQUE, !IS_DERIVED, IS_ORDERED);
initEClass(listFeatureElementClass1EClass, ListFeatureElementClass1.class, "ListFeatureElementClass1", !IS_ABSTRACT, !IS_INTERFACE, IS_GENERATED_INSTANCE_CLASS);
initEAttribute(getListFeatureElementClass1_Name(), ecorePackage.getEString(), "name", null, 0, 1, ListFeatureElementClass1.class, !IS_TRANSIENT, !IS_VOLATILE, IS_CHANGEABLE, !IS_UNSETTABLE, !IS_ID, IS_UNIQUE, !IS_DERIVED, IS_ORDERED);
initEReference(getListFeatureElementClass1_ListFeature3(), this.getListFeatureElementClass3(), null, "listFeature3", null, 0, -1, ListFeatureElementClass1.class, !IS_TRANSIENT, !IS_VOLATILE, IS_CHANGEABLE, IS_COMPOSITE, !IS_RESOLVE_PROXIES, !IS_UNSETTABLE, IS_UNIQUE, !IS_DERIVED, IS_ORDERED);
initEAttribute(getListFeatureElementClass1_AnAttributeOfFeatureClass1(), ecorePackage.getEString(), "anAttributeOfFeatureClass1", null, 0, 1, ListFeatureElementClass1.class, !IS_TRANSIENT, !IS_VOLATILE, IS_CHANGEABLE, !IS_UNSETTABLE, !IS_ID, IS_UNIQUE, !IS_DERIVED, IS_ORDERED);
initEAttribute(getListFeatureElementClass1_Any(), ecorePackage.getEFeatureMapEntry(), "any", null, 0, -1, ListFeatureElementClass1.class, !IS_TRANSIENT, !IS_VOLATILE, IS_CHANGEABLE, !IS_UNSETTABLE, !IS_ID, IS_UNIQUE, !IS_DERIVED, IS_ORDERED);
initEClass(listFeatureElementClass2EClass, ListFeatureElementClass2.class, "ListFeatureElementClass2", !IS_ABSTRACT, !IS_INTERFACE, IS_GENERATED_INSTANCE_CLASS);
initEAttribute(getListFeatureElementClass2_Name(), ecorePackage.getEString(), "name", null, 0, 1, ListFeatureElementClass2.class, !IS_TRANSIENT, !IS_VOLATILE, IS_CHANGEABLE, !IS_UNSETTABLE, !IS_ID, IS_UNIQUE, !IS_DERIVED, IS_ORDERED);
initEAttribute(getListFeatureElementClass2_AnAttributeOfFeatureClass2(), ecorePackage.getEString(), "anAttributeOfFeatureClass2", null, 0, 1, ListFeatureElementClass2.class, !IS_TRANSIENT, !IS_VOLATILE, IS_CHANGEABLE, !IS_UNSETTABLE, !IS_ID, IS_UNIQUE, !IS_DERIVED, IS_ORDERED);
initEClass(listFeatureElementClass3EClass, ListFeatureElementClass3.class, "ListFeatureElementClass3", !IS_ABSTRACT, !IS_INTERFACE, IS_GENERATED_INSTANCE_CLASS);
initEAttribute(getListFeatureElementClass3_Name(), ecorePackage.getEString(), "name", null, 0, 1, ListFeatureElementClass3.class, !IS_TRANSIENT, !IS_VOLATILE, IS_CHANGEABLE, !IS_UNSETTABLE, !IS_ID, IS_UNIQUE, !IS_DERIVED, IS_ORDERED);
initEAttribute(getListFeatureElementClass3_AnAttributeOfFeatureClass3(), ecorePackage.getEString(), "anAttributeOfFeatureClass3", null, 0, 1, ListFeatureElementClass3.class, !IS_TRANSIENT, !IS_VOLATILE, IS_CHANGEABLE, !IS_UNSETTABLE, !IS_ID, IS_UNIQUE, !IS_DERIVED, IS_ORDERED);
// Create resource
createResource(eNS_URI);
// Create annotations
// http:///org/eclipse/emf/ecore/util/ExtendedMetaData
createExtendedMetaDataAnnotations();
}
/**
* Initializes the annotations for <b>http:///org/eclipse/emf/ecore/util/ExtendedMetaData</b>.
* <!-- begin-user-doc -->
* <!-- end-user-doc -->
* @generated
*/
protected void createExtendedMetaDataAnnotations() {
String source = "http:///org/eclipse/emf/ecore/util/ExtendedMetaData";
addAnnotation
(getRootClass_Any(),
source,
new String[] {
"kind", "elementWildcard",
"name", ":1",
"processing", "lax",
"wildcards", "##any"
});
addAnnotation
(getListFeatureElementClass1_Any(),
source,
new String[] {
"kind", "elementWildcard",
"name", ":1",
"processing", "lax",
"wildcards", "##any"
});
}
} //TestSourceModelPackageImpl<|fim▁end|>
|
/**
* <!-- begin-user-doc -->
* <!-- end-user-doc -->
* @generated
|
<|file_name|>common_objects.py<|end_file_name|><|fim▁begin|>import os
import sys
import string
import random
import math
#################################################
# State
balance = 0
def deposit(amount):
global balance
balance += amount
return balance
def withdraw(amount):
global balance
balance -= amount
return balance
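# Usage (the balance is module-level state shared by every caller):
# >>> deposit(100)
# 100
# >>> withdraw(30)
# 70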
#################################################
# Dict like
def make_account():
return {'balance': 0}
def deposit(account, amount):
account['balance'] += amount
return account['balance']
def withdraw(account, amount):
account['balance'] -= amount
return account['balance']
# >>> a = make_account()
# >>> b = make_account()
# >>> deposit(a, 100)
# 100
# >>> deposit(b, 50)
# 50
# >>> withdraw(b, 10)
# 40
# >>> withdraw(a, 10)
# 90
#################################################
# Class
class BankAccount:
def __init__(self, balance=0):
self.balance = balance
def withdraw(self, amount):
self.balance -= amount
return self.balance
def deposit(self, amount):
self.balance += amount<|fim▁hole|># >>> a = BankAccount()
# >>> b = BankAccount()
# >>> a.deposit(100)
# 100
# >>> b.deposit(50)
# 50
# >>> b.withdraw(10)
# 40
# >>> a.withdraw(10)
# 90
#################################################
# Inheritance
class MinimumBalanceAccount(BankAccount):
def __init__(self, minimum_balance):
BankAccount.__init__(self)
self.minimum_balance = minimum_balance
def withdraw(self, amount):
if self.balance - amount < self.minimum_balance:
print('Sorry, minimum balance must be maintained.')
else:
BankAccount.withdraw(self, amount)
# >>> a = MinimumBalanceAccount(0)
# >>> a.deposit(100)
# 100
# >>> b.withdraw(101)
# 'Sorry, minimum balance must be maintained.'
########################################
# Mangling, Exceptions
def generate_id(n=16):
alphabet = string.ascii_letters + string.digits
return ''.join(random.choice(alphabet) for _ in range(n))
class WithdrawError(Exception):
"""Not enough money"""
def __init__(self, amount):
super().__init__()
self.amount = amount
class AdvancedBankAccount:
MAX_BALANCE = 2 ** 64
def __init__(self):
self._balance = 0
self.__id = generate_id()
def withdraw(self, amount):
if not isinstance(amount, int):
raise ValueError
if self._balance < amount:
raise WithdrawError(amount)
self._balance -= amount
return self._balance
def deposit(self, amount):
self._balance += amount
return self._balance
    @staticmethod
    def get_max_balance():
return AdvancedBankAccount.MAX_BALANCE
if __name__ == '__main__':
a = AdvancedBankAccount()
b = a
c = AdvancedBankAccount()
a.deposit(10)
# AdvancedBankAccount.deposit(a, 10) # the same
print('UNACCEPTABLE! b balance:', b._balance)
# print(b.__id) # error, name mangling
a.get_id = lambda self: self.__id
# print(a.get_id()) # TypeError
# print(a.get_id(a)) # AttributeError
################################################
# UNACCEPTABLE!
print("UNACCEPTABLE! b id:", b._AdvancedBankAccount__id) # name unmangling
# static
AdvancedBankAccount.MAX_BALANCE = 2 ** 32
print('max balance:', AdvancedBankAccount.get_max_balance())
a.MAX_BALANCE = 2 ** 64
print('a max: {}, c max: {}'.format(a.MAX_BALANCE,
c.MAX_BALANCE))
################################################
# Exceptions
# in module import
try:
a.withdraw("100")
except:
pass
    # UNACCEPTABLE!
try:
a.withdraw(100)
except WithdrawError as e:
pass
try:
a.withdraw(100)
except (ValueError, WithdrawError) as e:
print('exception raised')
else:
print('no exception')
finally:
print('Finally')
def tricky():
try:
print('Tricky called')
return 1
finally:
print('Tricky finally called')
return 42
return 0
print(tricky())
# how about with statement?
# module is object -> import
class Shape:
def area(self):
raise NotImplementedError
class Circle(Shape):
def __init__(self, radius):
self.radius = radius
def area(self):
return math.pi * self.radius ** 2
class Square(Shape):
def __init__(self, side):
self.side = side
def area(self):
return self.side ** 2
if __name__ == "__main__":
a = [Square(10), Circle(2)]
s = sum(s.area() for s in a)
print(s)<|fim▁end|>
|
return self.balance
|
<|file_name|>setup.py<|end_file_name|><|fim▁begin|># -*- coding: utf-8 -*-
# Copyright 2020 The StackStorm Authors.
# Copyright 2019 Extreme Networks, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from __future__ import absolute_import
import os.path
from setuptools import setup, find_packages
from dist_utils import fetch_requirements
from dist_utils import apply_vagrant_workaround
from st2actions import __version__
ST2_COMPONENT = "st2actions"<|fim▁hole|>install_reqs, dep_links = fetch_requirements(REQUIREMENTS_FILE)
apply_vagrant_workaround()
setup(
name=ST2_COMPONENT,
version=__version__,
description="{} StackStorm event-driven automation platform component".format(
ST2_COMPONENT
),
author="StackStorm",
author_email="[email protected]",
license="Apache License (2.0)",
url="https://stackstorm.com/",
install_requires=install_reqs,
dependency_links=dep_links,
test_suite=ST2_COMPONENT,
zip_safe=False,
include_package_data=True,
packages=find_packages(exclude=["setuptools", "tests"]),
scripts=[
"bin/st2actionrunner",
"bin/st2notifier",
"bin/st2workflowengine",
"bin/st2scheduler",
],
)<|fim▁end|>
|
BASE_DIR = os.path.dirname(os.path.abspath(__file__))
REQUIREMENTS_FILE = os.path.join(BASE_DIR, "requirements.txt")
|
<|file_name|>polytope.py<|end_file_name|><|fim▁begin|>#!python
import math
import fractions
import pygame
import argparse
import os.path
import sys
import subprocess
import time
from itertools import combinations,islice
from ntracer import NTracer,Material,ImageFormat,Channel,BlockingRenderer,CUBE
from ntracer.pygame_render import PygameRenderer
ROT_SENSITIVITY = 0.005
WHEEL_INCREMENT = 8
def excepthook(type,value,traceback):
if isinstance(value,Exception):
print('error: '+str(value),file=sys.stderr)
else:
sys.__excepthook__(type,value,traceback)
sys.excepthook = excepthook
def schlafli_component(x):
x = x.partition('/')
p = int(x[0],10)
if p < 3: raise argparse.ArgumentTypeError('a component cannot be less than 3')
if not x[2]: return fractions.Fraction(p)
s = int(x[2],10)
if s < 1: raise argparse.ArgumentTypeError('for component p/q: q cannot be less than 1')
if s >= p: raise argparse.ArgumentTypeError('for component p/q: q must be less than p')
    if math.gcd(s,p) != 1: raise argparse.ArgumentTypeError('for component p/q: p and q must be co-prime')
return fractions.Fraction(p,s)
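# e.g. schlafli_component('4') -> Fraction(4, 1)
#      schlafli_component('5/2') -> Fraction(5, 2), a star-polygon component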
def positive_int(x):
x = int(x,10)
if x < 1: raise argparse.ArgumentTypeError('a positive number is required')
return x
def screen_size(x):
w,_,h = x.partition('x')
w = int(w,10)
h = int(h,10)
if w < 1 or h < 1: raise argparse.ArgumentTypeError('invalid screen size')
return w,h
def fov_type(x):
x = float(x)
if x <= 0 or x >= 180: raise argparse.ArgumentTypeError('fov must be between 0 and 180 degrees')
return x/180*math.pi
parser = argparse.ArgumentParser(
description='Display a regular polytope given its Schl\u00e4fli symbol.')
parser.add_argument('schlafli',metavar='N',type=schlafli_component,nargs='+',help='the Schl\u00e4fli symbol components')
parser.add_argument('-o','--output',metavar='PATH',help='save an animation to PATH instead of displaying the polytope')
parser.add_argument('-t','--type',metavar='TYPE',default='h264',
help='Specifies output type when --output is used. If TYPE is "png", the '+
'output is a series of PNG images. For any other value, it is used '+
'as the video codec for ffmpeg.')
parser.add_argument('-f','--frames',metavar='F',type=positive_int,default=160,help='when creating an animation or benchmarking, the number of frames to render')
parser.add_argument('-s','--screen',metavar='WIDTHxHEIGHT',type=screen_size,default=(800,600),help='screen size')
parser.add_argument('-a','--fov',metavar='FOV',type=fov_type,default=0.8,help='field of vision in degrees')
parser.add_argument('-d','--cam-dist',metavar='DIST',type=float,default=4,
help='How far the view-port is from the center of the polytope. The '+
'value is a multiple of the outer raidius of the polytope.')
parser.add_argument('--benchmark',action='store_true',help='measure the speed of rendering the scene')
parser.add_argument('--no-special',action='store_true',help='use the slower generic version of library even if a specialized version exists')
args = parser.parse_args()
material = Material((1,0.5,0.5))
nt = NTracer(max(len(args.schlafli)+1,3),force_generic=args.no_special)
def higher_dihedral_supplement(schlafli,ds):
a = math.pi*schlafli.denominator/schlafli.numerator
return 2*math.asin(math.sin(math.acos(1/(math.tan(ds/2)*math.tan(a))))*math.sin(a))
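# Worked example: for a cube {4,3} each square face has dihedral_s = pi/2, and
# higher_dihedral_supplement(Fraction(3), math.pi/2) returns pi/2, the
# supplement of the cube's 90-degree dihedral angle (the two coincide here).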
def almost_equal(a,b,threshold=0.001):
return (a-b).absolute() < threshold
def radial_vector(angle):
return nt.Vector.axis(0,math.sin(angle)) + nt.Vector.axis(1,math.cos(angle))
class Instance:
def __init__(self,shape,position,orientation=nt.Matrix.identity()):
self.shape = shape
self.position = position
self.orientation = orientation
self.inv_orientation = orientation.inverse()
def translated(self,position=nt.Vector(),orientation=nt.Matrix.identity()):
return (
position + (orientation * self.position),
orientation * self.orientation)
def tesselate(self,*args):
return self.shape.tesselate(*self.translated(*args))
def tesselate_inner(self,*args):
return self.shape.tesselate_inner(*self.translated(*args))
def any_point(self,*args):
return self.shape.any_point(*self.translated(*args))
def contains(self,p):
return self.shape.contains(self.inv_orientation * (p - self.position))
def star_component(x):
return (x.numerator - 1) > x.denominator > 1
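# e.g. Fraction(5, 2), a pentagram, is a star component (5-1 > 2 > 1), while
# Fraction(5, 1), a pentagon, is not, since its denominator is 1.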
class LineSegment:
star = False
def __init__(self,index,convex_ds,polygon):
self.index = index
self.p = polygon
self.position = radial_vector(index*convex_ds)
def tesselate(self,position,orientation):
return [
orientation*self.p.base_points[self.index-1]+position,
orientation*self.p.base_points[self.index]+position]
class Polygon:
apothem = 1
def __init__(self,schlafli):
self.star = star_component(schlafli)
convex_ds = 2 * math.pi / schlafli.numerator
self.dihedral_s = convex_ds * schlafli.denominator
self.parts = [LineSegment(i,convex_ds,self) for i in range(schlafli.numerator)]
self._circumradius = 1/math.cos(convex_ds/2)
self.base_points = [self._circumradius * radial_vector((i+0.5) * convex_ds) for i in range(schlafli.numerator)]
if self.star:
self._circumradius = math.tan(convex_ds)*math.tan(convex_ds/2) + 1
self.outer_points = [self._circumradius * radial_vector(i * convex_ds) for i in range(schlafli.numerator)]
def points(self,position,orientation,pset=None):
if pset is None: pset = self.base_points
return (orientation * bp + position for bp in pset)
def tesselate_inner(self,position,orientation):
points = list(self.points(position,orientation))
r = [points[0:3]]
for i in range(len(points)-3):
r.append([points[0],points[i+2],points[i+3]])
return r
def tesselate(self,position,orientation):
if not self.star:
return self.tesselate_inner(position,orientation)
points = list(self.points(position,orientation))
opoints = list(self.points(position,orientation,self.outer_points))
return [[opoints[i],points[i-1],points[i]] for i in range(len(points))]
def any_point(self,position,orientation):
return next(self.points(position,orientation))
def contains(self,p):
return any(almost_equal(p,test_p) for test_p in self.base_points)
def hull(self,position=nt.Vector(),orientation=nt.Matrix.identity()):
tris = [nt.TrianglePrototype(tri,material) for tri in self.tesselate_inner(position,orientation)]
if self.star: tris.extend(nt.TrianglePrototype(tri,material) for tri in
self.tesselate(position,orientation))
return tris
def circumradius(self):
return self._circumradius
def circumradius_square(self):
return self._circumradius*self._circumradius
def line_apothem_square(self):
return 1
class Plane:
def __init__(self,nt,position):
self.normal = position.unit()
self.d = -position.absolute()
self._dot = nt.dot
def distance(self,point):
return self._dot(point,self.normal) + self.d
class Line:
def __init__(self,nt,p0,v,planes,outer=False):
self.p0 = p0
self.v = v
self.planes = set(planes)
self.outer = outer
self._dot = nt.dot
def point_at(self,t):
return self.p0 + self.v*t
def dist_square(self,point):
a = point - self.p0
b = self._dot(a,self.v)
return a.square() - b*b/self.v.square()
def __repr__(self):
return 'Line({0!r},{1!r})'.format(self.p0,self.v)
def plane_point_intersection(nt,planes):
assert nt.dimension == len(planes)
try:
return nt.Matrix(p.normal for p in planes).inverse()*nt.Vector(-p.d for p in planes)
except ValueError:
return None
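# e.g. in 3 dimensions the planes x=1, y=2 and z=3 (axis-aligned normals)
# intersect in the single point (1,2,3); when the normals are linearly
# dependent the matrix inverse raises ValueError and None is returned.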
def plane_line_intersection(nt,planes):
assert nt.dimension - 1 == len(planes)
v = nt.cross(p.normal for p in planes).unit()
return Line(
nt,
nt.Matrix([p.normal for p in planes] + [v]).inverse() * nt.Vector([-p.d for p in planes] + [0]),
v,
planes)
def line_intersection(nt,l1,l2):
d = nt.dot(l1.v,l2.v)
denom = 1 - d*d
if not denom: return None
id = 1/denom
a = nt.dot(l2.p0 - l1.p0,l1.v)
b = nt.dot(l1.p0 - l2.p0,l2.v)
t1 = id*(a + d*b)
t2 = id*(d*a + b)
p1 = l1.point_at(t1)
p2 = l2.point_at(t2)
if abs(p1-p2) > 0.01: return None
return (p1 + p2) * 0.5, t1, t2
class Node:
def __init__(self,pos,planes,outer,alive=True):
self.pos = pos
self.planes = planes
self.outer = outer
self.neighbors = set() if alive else None
def detach(self):
for n in self.neighbors:
n.neighbors.remove(self)
self.neighbors = None
@property
def dead(self):
return self.neighbors is None
def find_cycles(self,length,sequence=None,exclude=None):
if sequence is None: sequence = [self]
if len(sequence) < length:
exclude = exclude.copy() if exclude is not None else set([self])
for n in self.neighbors:
if n not in exclude:
exclude.add(n)
for r in n.find_cycles(length,sequence + [n],exclude):
yield r
else:
for n in self.neighbors:
if n is sequence[0] and n.planes.intersection(*(sequence[i].planes for i in range(1,len(sequence)))):
yield sequence
def join(a,b):
if not (a.dead or b.dead):
a.neighbors.add(b)
b.neighbors.add(a)
class FuzzyGraph:
def __init__(self):
self.nodes = []
def add(self,pos,planes,outer):
for n in self.nodes:
if almost_equal(n.pos,pos):
n.planes |= planes
return n
n = Node(pos,planes,outer)
self.nodes.append(n)
return n
def remove_at(self,i):
self.nodes[i].detach()
if i+1 != len(self.nodes):
self.nodes[i] = self.nodes[-1]
del self.nodes[-1]
def remove(self,pos):
if isinstance(pos,Node):
if not pos.dead:
self.remove_at(self.nodes.index(pos))
else:
for i,n in enumerate(self.nodes):
if almost_equal(n.pos,pos):
self.remove_at(i)
break
# Cells are enlarged ever so slightly to prevent the view frustum from being
# wedged exactly between two adjacent primitives, which, due to limited
# precision, can cause that volume to appear to vanish.
fuzz_scale = nt.Matrix.scale(1.00001)
class PolyTope:
def __init__(self,dimension,schlafli,dihedral_s,face_apothem):
self.dimension = dimension
self.schlafli = schlafli
self.dihedral_s = dihedral_s
self.apothem = math.tan((math.pi - dihedral_s)/2) * face_apothem
self.star = star_component(schlafli)
self.parts = []
@property
def facet(self):
return self.parts[0].shape
def propogate_faces(self,potentials):
new_p = []
for instance,p in potentials:
dir = (instance.orientation * p.position).unit()
reflect = nt.Matrix.reflection(dir)
turn = nt.Matrix.rotation(
instance.position.unit(),
dir,
self.dihedral_s)
new_p += self.add_face(Instance(
instance.shape,
turn * instance.position,
fuzz_scale * turn * reflect * instance.orientation))
return new_p
def add_face(self,instance):
for p in self.parts:
if almost_equal(instance.position,p.position): return []
self.parts.append(instance)
return [(instance,p) for p in instance.shape.parts]
def star_tesselation(self):
t = getattr(self,'_star_tesselation',None)
if t is None:
co_nt = NTracer(self.dimension)
lines = []
planes = [Plane(co_nt,co_nt.Vector(islice(part.position,co_nt.dimension))) for part in self.parts]
las = self.line_apothem_square()
for pgroup in combinations(planes,co_nt.dimension-1):
try:
line = plane_line_intersection(co_nt,pgroup)
except ValueError:
pass
else:
if line:
for lineb in lines:
if almost_equal(line.p0,lineb.p0) and almost_equal(line.v,lineb.v):
lineb.planes |= line.planes
break
else:
outer_dist = line.dist_square(co_nt.Vector()) - las
if outer_dist < 0.1:
line.outer = outer_dist > -0.1
lines.append(line)
pmap = {}
for line in lines:
pmap[line] = {}
graph = FuzzyGraph()
maxr = self.circumradius_square() + 0.1
for l1,l2 in combinations(lines,2):
inter = line_intersection(co_nt,l1,l2)
if inter and inter[0].square() < maxr:
n = graph.add(inter[0],l1.planes | l2.planes,l1.outer or l2.outer)
pmap[l1][n] = inter[1]
pmap[l2][n] = inter[2]
for line,poss in pmap.items():
if len(poss) == 0: continue
if len(poss) == 1:
                    graph.remove(next(iter(poss)))  # poss maps Node -> t; drop the lone node
continue
poss = sorted(poss.items(),key=(lambda x: x[1]))
if line.outer:
for i in range(len(poss)-1):
join(poss[i][0],poss[i+1][0])
elif len(poss) == 2:
join(poss[0][0],poss[1][0])
elif len(poss) > 3:
for i in range(2,len(poss)-2):
graph.remove(poss[i][0])
join(poss[0][0],poss[1][0])
join(poss[-1][0],poss[-2][0])
t = []
self._star_tesselation = t
for n in islice(graph.nodes,0,len(graph.nodes)-co_nt.dimension):
for cycle in n.find_cycles(co_nt.dimension):
t.append([nt.Vector(tuple(x.pos) + (0,) * (nt.dimension-co_nt.dimension)) for x in cycle] + [nt.Vector()])
n.detach()
return t
def tesselate(self,position,orientation):
if self.star or self.facet.star:
return [[orientation * p + position for p in tri] for tri in self.star_tesselation()]
return self.tesselate_inner(position,orientation)
def tesselate_inner(self,position,orientation):
tris = []
point1 = self.parts[0].any_point(position,orientation)
inv_orientation = orientation.inverse()
for part in self.parts[1:]:
if not part.contains(inv_orientation * (point1 - position)):
new_t = part.tesselate(position,orientation)
for t in new_t: t.append(point1)
tris += new_t
return tris
def hull(self,position=nt.Vector(),orientation=nt.Matrix.identity()):
tris = []
for p in self.parts:
tris += p.tesselate(position,orientation)
return [nt.TrianglePrototype(tri,material) for tri in tris]
def any_point(self,position,orientation):
return self.parts[0].any_point(position,orientation)
def contains(self,p):
return any(part.contains(p) for part in self.parts)
def circumradius_square(self):
return self.apothem*self.apothem + self.facet.circumradius_square()
def circumradius(self):
return math.sqrt(self.circumradius_square())
def line_apothem_square(self):
return self.apothem*self.apothem + self.facet.line_apothem_square()
def compose(part,order,schlafli):
if schlafli.numerator * (math.pi - part.dihedral_s) >= math.pi * 2 * schlafli.denominator:
exit("Component #{0} ({1}) is invalid because the angles of the parts add up to 360\u00b0 or\nmore and thus can't be folded inward".format(order,schlafli))
higher = PolyTope(
order+1,
schlafli,
higher_dihedral_supplement(schlafli,part.dihedral_s),
part.apothem)
potentials = higher.add_face(Instance(part,nt.Vector.axis(order,higher.apothem)))
while potentials:
potentials = higher.propogate_faces(potentials)
return higher
jitter = nt.Vector((0,0,0) + (0.0001,) * (nt.dimension-3))
def process_movement():
global x_move, y_move, w_move
if x_move or y_move or w_move:
h = math.sqrt(x_move*x_move + y_move*y_move + w_move*w_move)
a2 = camera.axes[0]*(x_move/h) + camera.axes[1]*(-y_move/h)
if w_move: a2 += camera.axes[3] * (w_move / h)
camera.transform(nt.Matrix.rotation(
camera.axes[2],
a2,
h * ROT_SENSITIVITY))
camera.normalize()
camera.origin = camera.axes[2] * cam_distance + jitter
scene.set_camera(camera)
x_move = 0
y_move = 0
w_move = 0
run()
def run():
global running
running = True
render.begin_render(screen,scene)
try:
timer = time.perf_counter
except AttributeError:
timer = time.clock
if args.benchmark and not sys.platform.startswith('win'):
print('''warning: on multi-core systems, Python\'s high-resolution timer may combine
time spent on all cores, making the reported time spent rendering much higher
than the actual time''',file=sys.stderr)
class RotatingCamera(object):
incr = 2 * math.pi / args.frames
h = 1/math.sqrt(nt.dimension-1)
_timer = staticmethod(timer if args.benchmark else (lambda: 0))
def __enter__(self):
self.frame = 0
self.total_time = 0
return self
def __exit__(self,type,value,tb):
if type is None and self.total_time:
print('''rendered {0} frame(s) in {1} seconds
time per frame: {2} seconds
frames per second: {3}'''.format(self.frame,self.total_time,self.total_time/self.frame,self.frame/self.total_time))
def start_timer(self):
self.t = self._timer()
def end_timer(self):
self.total_time += self._timer() - self.t
def advance_camera(self):
self.frame += 1
if self.frame >= args.frames: return False
a2 = camera.axes[0]*self.h + camera.axes[1]*self.h
for i in range(nt.dimension-3): a2 += camera.axes[i+3]*self.h
camera.transform(nt.Matrix.rotation(camera.axes[2],a2,self.incr))
camera.normalize()
camera.origin = camera.axes[2] * cam_distance
scene.set_camera(camera)
return True
if nt.dimension >= 3 and args.schlafli[0] == 4 and all(c == 3 for c in args.schlafli[1:]):
cam_distance = -math.sqrt(nt.dimension) * args.cam_dist
scene = nt.BoxScene()
else:
print('building geometry...')
timing = timer()
p = Polygon(args.schlafli[0])
for i,s in enumerate(args.schlafli[1:]):
p = compose(p,i+2,s)
hull = p.hull()
timing = timer() - timing
print('done in {0} seconds'.format(timing))
cam_distance = -math.sqrt(p.circumradius_square()) * args.cam_dist
print('partitioning scene...')
timing = timer()
scene = nt.build_composite_scene(hull)
timing = timer() - timing
print('done in {0} seconds'.format(timing))
del p
del hull
camera = nt.Camera()
camera.translate(nt.Vector.axis(2,cam_distance) + jitter)
scene.set_camera(camera)
scene.set_fov(args.fov)
if args.output is not None:
if args.type != 'png':
render = BlockingRenderer()
format = ImageFormat(
args.screen[0],
args.screen[1],
[Channel(16,1,0,0),
Channel(16,0,1,0),
Channel(16,0,0,1)])
surf = bytearray(args.screen[0]*args.screen[1]*format.bytes_per_pixel)
pipe = subprocess.Popen(['ffmpeg',
'-y',
'-f','rawvideo',
'-vcodec','rawvideo',
'-s','{0}x{1}'.format(*args.screen),
'-pix_fmt','rgb48be',
'-r','60',
'-i','-',
'-an',
'-vcodec',args.type,
'-crf','10',
args.output],
stdin=subprocess.PIPE)
try:
with RotatingCamera() as rc:
while True:
rc.start_timer()
render.render(surf,format,scene)
rc.end_timer()
                    pipe.stdin.write(surf)  # raw frame bytes; print() would emit the bytearray's repr
if not rc.advance_camera(): break
finally:
pipe.stdin.close()
r = pipe.wait()
sys.exit(r)
pygame.display.init()
render = PygameRenderer()
surf = pygame.Surface(args.screen,depth=24)
def announce_frame(frame):
print('drawing frame {0}/{1}'.format(frame+1,args.frames))
with RotatingCamera() as rc:
announce_frame(0)
rc.start_timer()
render.begin_render(surf,scene)
while True:
e = pygame.event.wait()
if e.type == pygame.USEREVENT:
rc.end_timer()
pygame.image.save(
surf,
os.path.join(args.output,'frame{0:04}.png'.format(rc.frame)))
if not rc.advance_camera(): break
announce_frame(rc.frame)
rc.start_timer()
render.begin_render(surf,scene)
elif e.type == pygame.QUIT:
render.abort_render()
break
else:
pygame.display.init()
render = PygameRenderer()
screen = pygame.display.set_mode(args.screen)
if args.benchmark:
with RotatingCamera() as rc:
rc.start_timer()
render.begin_render(screen,scene)
while True:
e = pygame.event.wait()
if e.type == pygame.USEREVENT:
rc.end_timer()
pygame.display.flip()
if not rc.advance_camera(): break
rc.start_timer()
render.begin_render(screen,scene)
elif e.type == pygame.QUIT:
render.abort_render()
break
else:
running = False
run()
x_move = 0
y_move = 0
w_move = 0<|fim▁hole|> if e.type == pygame.MOUSEMOTION:
if e.buttons[0]:
x_move += e.rel[0]
y_move += e.rel[1]
if not running:
process_movement()
elif e.type == pygame.MOUSEBUTTONDOWN:
if nt.dimension > 3:
if e.button == 4 or e.button == 5:
if e.button == 4:
w_move += WHEEL_INCREMENT
else:
w_move -= WHEEL_INCREMENT
if not running:
process_movement()
elif e.type == pygame.USEREVENT:
running = False
pygame.display.flip()
process_movement()
elif e.type == pygame.KEYDOWN:
if e.key == pygame.K_c:
x,y = pygame.mouse.get_pos()
fovI = (2 * math.tan(scene.fov/2)) / screen.get_width()
print(camera.origin)
print((camera.axes[2] + camera.axes[0] * (fovI * (x - screen.get_width()/2)) - camera.axes[1] * (fovI * (y - screen.get_height()/2))).unit())
elif e.type == pygame.QUIT:
render.abort_render()
break<|fim▁end|>
|
while True:
e = pygame.event.wait()
|
<|file_name|>router.d.ts<|end_file_name|><|fim▁begin|>// Type definitions for Angular v2.0.0-alpha.35
// Project: http://angular.io/
// Definitions by: angular team <https://github.com/angular/>
// Definitions: https://github.com/borisyankov/DefinitelyTyped
// ***********************************************************
// This file is generated by the Angular build process.
// Please do not create manual edits or send pull requests
// modifying this file.
// ***********************************************************
// angular2/router depends transitively on these libraries.
// If you don't have them installed you can install them using TSD
// https://github.com/DefinitelyTyped/tsd
///<reference path="./angular2.d.ts"/>
/**
* @module
* @description
* Maps application URLs into application states, to support deep-linking and navigation.
*/
declare module ngRouter {
/**
* # Router
* The router is responsible for mapping URLs to components.
*
* You can see the state of the router by inspecting the read-only field `router.navigating`.
* This may be useful for showing a spinner, for instance.
*
* ## Concepts
* Routers and component instances have a 1:1 correspondence.
*
   * The router holds references to a number of "outlets." An outlet is a placeholder that the
* router dynamically fills in depending on the current URL.
*
   * When the router navigates from a URL, it must first recognize it and serialize it into an
* `Instruction`.
* The router uses the `RouteRegistry` to get an `Instruction`.
*/
class Router {
navigating: boolean;
lastNavigationAttempt: string;
registry: RouteRegistry;
parent: Router;
hostComponent: any;
/**
* Constructs a child router. You probably don't need to use this unless you're writing a reusable
* component.
*/
childRouter(hostComponent: any): Router;
/**
   * Register an object to be notified of route changes. You probably don't need to use this unless
* you're writing a reusable component.
*/
registerOutlet(outlet: RouterOutlet): Promise<boolean>;
/**
* Dynamically update the routing configuration and trigger a navigation.
*
* # Usage
*
* ```
* router.config([
* { 'path': '/', 'component': IndexComp },
* { 'path': '/user/:id', 'component': UserComp },
* ]);
* ```
*/
config(definitions: List<RouteDefinition>): Promise<any>;
/**
* Navigate to a URL. Returns a promise that resolves when navigation is complete.
*
* If the given URL begins with a `/`, router will navigate absolutely.
* If the given URL does not begin with `/`, the router will navigate relative to this component.
*/
navigate(url: string, _skipLocationChange?: boolean): Promise<any>;
/**
* Navigate via the provided instruction. Returns a promise that resolves when navigation is
* complete.
*/
navigateInstruction(instruction: Instruction, _skipLocationChange?: boolean): Promise<any>;
/**
* Updates this router and all descendant routers according to the given instruction
*/
commit(instruction: Instruction, _skipLocationChange?: boolean): Promise<any>;
/**
* Subscribe to URL updates from the router
*/
subscribe(onNext: (value: any) => void): void;
/**
* Removes the contents of this router's outlet and all descendant outlets
*/
deactivate(instruction: Instruction): Promise<any>;
/**
* Given a URL, returns an instruction representing the component graph
*/
recognize(url: string): Promise<Instruction>;
/**
* Navigates to either the last URL successfully navigated to, or the last URL requested if the
* router has yet to successfully navigate.
*/
renavigate(): Promise<any>;
/**
* Generate a URL from a component name and optional map of parameters. The URL is relative to the
* app's base href.
*/
generate(linkParams: List<any>): Instruction;
}
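  // Illustrative usage sketch (not part of the generated typings; `router` and the
  // route definition are assumed for the example):
  //
  //   router.config([{ path: '/user/:id', component: UserCmp }])
  //     .then(() => router.navigate('/user/42'))
  //     .then(() => console.log(router.navigating)); // false once navigation settles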
class RootRouter extends Router {
commit(instruction: Instruction, _skipLocationChange?: boolean): Promise<any>;
}
<|fim▁hole|> *
* ## Use
*
* ```
* <router-outlet></router-outlet>
* ```
*/
class RouterOutlet {
childRouter: Router;
name: string;
/**
* Given an instruction, update the contents of this outlet.
*/
commit(instruction: Instruction): Promise<any>;
/**
* Called by Router during recognition phase
*/
canDeactivate(nextInstruction: Instruction): Promise<boolean>;
/**
* Called by Router during recognition phase
*/
canReuse(nextInstruction: Instruction): Promise<boolean>;
deactivate(nextInstruction: Instruction): Promise<any>;
}
/**
* The RouterLink directive lets you link to specific parts of your app.
*
* Consider the following route configuration:
*
* ```
* @RouteConfig([
* { path: '/user', component: UserCmp, as: 'user' }
* ]);
* class MyComp {}
* ```
*
* When linking to this `user` route, you can write:
*
* ```
* <a [router-link]="['./user']">link to user component</a>
* ```
*
* RouterLink expects the value to be an array of route names, followed by the params
* for that level of routing. For instance `['/team', {teamId: 1}, 'user', {userId: 2}]`
* means that we want to generate a link for the `team` route with params `{teamId: 1}`,
* and with a child route `user` with params `{userId: 2}`.
*
* The first route name should be prepended with `/`, `./`, or `../`.
* If the route begins with `/`, the router will look up the route from the root of the app.
* If the route begins with `./`, the router will instead look in the current component's
* children for the route. And if the route begins with `../`, the router will look at the
* current component's parent.
*/
class RouterLink {
visibleHref: string;
routeParams: void;
onClick(): boolean;
}
class RouteParams {
params: StringMap<string, string>;
get(param: string): string;
}
/**
* The RouteRegistry holds route configurations for each component in an Angular app.
* It is responsible for creating Instructions from URLs, and generating URLs based on route and
* parameters.
*/
class RouteRegistry {
/**
* Given a component and a configuration object, add the route to this registry
*/
config(parentComponent: any, config: RouteDefinition): void;
/**
* Reads the annotations of a component and configures the registry based on them
*/
configFromComponent(component: any): void;
/**
* Given a URL and a parent component, return the most specific instruction for navigating
* the application into the state specified by the url
*/
recognize(url: string, parentComponent: any): Promise<Instruction>;
/**
* Given a normalized list with component names and params like: `['user', {id: 3 }]`
* generates a url with a leading slash relative to the provided `parentComponent`.
*/
generate(linkParams: List<any>, parentComponent: any): Instruction;
}
class LocationStrategy {
path(): string;
pushState(ctx: any, title: string, url: string): void;
forward(): void;
back(): void;
onPopState(fn: (_: any) => any): void;
getBaseHref(): string;
}
class HashLocationStrategy extends LocationStrategy {
onPopState(fn: EventListener): void;
getBaseHref(): string;
path(): string;
pushState(state: any, title: string, url: string): void;
forward(): void;
back(): void;
}
class HTML5LocationStrategy extends LocationStrategy {
onPopState(fn: EventListener): void;
getBaseHref(): string;
path(): string;
pushState(state: any, title: string, url: string): void;
forward(): void;
back(): void;
}
/**
* This is the service that an application developer will directly interact with.
*
* Responsible for normalizing the URL against the application's base href.
* A normalized URL is absolute from the URL host, includes the application's base href, and has no
* trailing slash:
* - `/my/app/user/123` is normalized
* - `my/app/user/123` **is not** normalized
* - `/my/app/user/123/` **is not** normalized
*/
class Location {
path(): string;
normalize(url: string): string;
normalizeAbsolutely(url: string): string;
go(url: string): void;
forward(): void;
back(): void;
subscribe(onNext: (value: any) => void, onThrow?: (exception: any) => void, onReturn?: () => void): void;
}
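  // Illustrative sketch (assumes the app's base href is '/my/app', matching the
  // examples above): location.normalize('my/app/user/123/') === '/my/app/user/123'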
const APP_BASE_HREF : OpaqueToken ;
/**
* Responsible for performing each step of navigation.
* "Steps" are conceptually similar to "middleware"
*/
class Pipeline {
steps: List<Function>;
process(instruction: Instruction): Promise<any>;
}
/**
* Defines route lifecycle method [onActivate], which is called by the router at the end of a
* successful route navigation.
*
* For a single component's navigation, only one of either [onActivate] or [onReuse] will be called,
* depending on the result of [canReuse].
*
* If `onActivate` returns a promise, the route change will wait until the promise settles to
* instantiate and activate child components.
*
* ## Example
* ```
* @Directive({
* selector: 'my-cmp'
* })
* class MyCmp implements OnActivate {
* onActivate(next, prev) {
* this.log = 'Finished navigating from ' + prev.urlPath + ' to ' + next.urlPath;
* }
* }
* ```
*/
interface OnActivate {
onActivate(nextInstruction: ComponentInstruction, prevInstruction: ComponentInstruction): any;
}
/**
* Defines route lifecycle method [onDeactivate], which is called by the router before destroying
* a component as part of a route change.
*
* If `onDeactivate` returns a promise, the route change will wait until the promise settles.
*
* ## Example
* ```
* @Directive({
* selector: 'my-cmp'
* })
* class MyCmp implements CanReuse, OnReuse {
* canReuse() {
* return true;
* }
*
* onReuse(next, prev) {
* this.params = next.params;
* }
* }
* ```
*/
interface OnDeactivate {
onDeactivate(nextInstruction: ComponentInstruction, prevInstruction: ComponentInstruction): any;
}
/**
* Defines route lifecycle method [onReuse], which is called by the router at the end of a
* successful route navigation when [canReuse] is implemented and returns or resolves to true.
*
* For a single component's navigation, only one of either [onActivate] or [onReuse] will be called,
* depending on the result of [canReuse].
*
* ## Example
* ```
* @Directive({
* selector: 'my-cmp'
* })
* class MyCmp implements CanReuse, OnReuse {
* canReuse() {
* return true;
* }
*
* onReuse(next, prev) {
* this.params = next.params;
* }
* }
* ```
*/
interface OnReuse {
onReuse(nextInstruction: ComponentInstruction, prevInstruction: ComponentInstruction): any;
}
/**
* Defines route lifecycle method [canDeactivate], which is called by the router to determine
* if a component can be removed as part of a navigation.
*
* If `canDeactivate` returns or resolves to `false`, the navigation is cancelled.
*
* If `canDeactivate` throws or rejects, the navigation is also cancelled.
*
* ## Example
* ```
* @Directive({
* selector: 'my-cmp'
* })
* class MyCmp implements CanDeactivate {
* canDeactivate(next, prev) {
* return askUserIfTheyAreSureTheyWantToQuit();
* }
* }
* ```
*/
interface CanDeactivate {
canDeactivate(nextInstruction: ComponentInstruction, prevInstruction: ComponentInstruction): any;
}
/**
* Defines route lifecycle method [canReuse], which is called by the router to determine whether a
* component should be reused across routes, or whether to destroy and instantiate a new component.
*
* If `canReuse` returns or resolves to `true`, the component instance will be reused.
*
* If `canReuse` throws or rejects, the navigation will be cancelled.
*
* ## Example
* ```
* @Directive({
* selector: 'my-cmp'
* })
* class MyCmp implements CanReuse, OnReuse {
* canReuse(next, prev) {
* return next.params.id == prev.params.id;
* }
*
* onReuse(next, prev) {
* this.id = next.params.id;
* }
* }
* ```
*/
interface CanReuse {
canReuse(nextInstruction: ComponentInstruction, prevInstruction: ComponentInstruction): any;
}
/**
* Defines route lifecycle method [canActivate], which is called by the router to determine
* if a component can be instantiated as part of a navigation.
*
* Note that unlike other lifecycle hooks, this one uses an annotation rather than an interface.
* This is because [canActivate] is called before the component is instantiated.
*
* If `canActivate` returns or resolves to `false`, the navigation is cancelled.
*
* If `canActivate` throws or rejects, the navigation is also cancelled.
*
* ## Example
* ```
* @Directive({
* selector: 'control-panel-cmp'
* })
* @CanActivate(() => checkIfUserIsLoggedIn())
* class ControlPanelCmp {
* // ...
* }
* ```
*/
var CanActivate : (hook: (next: ComponentInstruction, prev: ComponentInstruction) => Promise<boolean>| boolean) =>
ClassDecorator ;
/**
* `Instruction` is a tree of `ComponentInstructions`, with all the information needed
* to transition each component in the app to a given route, including all auxiliary routes.
*
* This is a public API.
*/
class Instruction {
component: ComponentInstruction;
child: Instruction;
auxInstruction: StringMap<string, Instruction>;
replaceChild(child: Instruction): Instruction;
}
/**
* A `ComponentInstruction` represents the route state for a single component. An `Instruction` is
* composed of a tree of these `ComponentInstruction`s.
*
* `ComponentInstructions` is a public API. Instances of `ComponentInstruction` are passed
* to route lifecycle hooks, like {@link CanActivate}.
*/
class ComponentInstruction {
reuse: boolean;
urlPath: string;
urlParams: List<string>;
params: StringMap<string, any>;
componentType: void;
resolveComponentType(): Promise<Type>;
specificity: void;
terminal: void;
routeData(): Object;
}
class Url {
path: string;
child: Url;
auxiliary: List<Url>;
params: StringMap<string, any>;
toString(): string;
segmentToString(): string;
}
class OpaqueToken {
toString(): string;
}
/**
* Runtime representation of a type.
*
* In JavaScript a Type is a constructor function.
*/
interface Type extends Function {
new(args: any): any;
}
const routerDirectives : List<any> ;
var routerInjectables : List<any> ;
class Route implements RouteDefinition {
data: any;
path: string;
component: Type;
as: string;
loader: Function;
redirectTo: string;
}
class Redirect implements RouteDefinition {
path: string;
redirectTo: string;
as: string;
loader: Function;
data: any;
}
class AuxRoute implements RouteDefinition {
data: any;
path: string;
component: Type;
as: string;
loader: Function;
redirectTo: string;
}
class AsyncRoute implements RouteDefinition {
data: any;
path: string;
loader: Function;
as: string;
}
interface RouteDefinition {
path: string;
component?: Type | ComponentDefinition;
loader?: Function;
redirectTo?: string;
as?: string;
data?: any;
}
const ROUTE_DATA : OpaqueToken ;
var RouteConfig : (configs: List<RouteDefinition>) => ClassDecorator ;
interface ComponentDefinition {
type: string;
loader?: Function;
component?: Type;
}
}
declare module "angular2/router" {
export = ngRouter;
}<|fim▁end|>
|
/**
* A router outlet is a placeholder that Angular dynamically fills based on the application's route.
|
<|file_name|>meteor-methods.js<|end_file_name|><|fim▁begin|>import { Class as Model } from 'meteor/jagi:astronomy';
import * as Errors from './errors.js';
export function init(config) {
config.collection = new Mongo.Collection(config.collectionName);
config.model = Model.create({
name: config.modelName,
collection: config.collection,
fields: config.modelFields,
});
config.saveMethod = 'save' + config.modelName;
config.removeMethod = 'remove' + config.modelName;
var methods = {};
methods[config.saveMethod] = saveDoc;
methods[config.removeMethod] = removeDoc;
Meteor.methods(methods);<|fim▁hole|>
let colFieldsFunc = function () {
        let result = [];
for (var i = 0; i < config.formFields.length; i++) {
if (config.formFields[i].colClass) {
result[i] = config.formFields[i];
}
}
return result;
}
config.colFields = colFieldsFunc();
}
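// Illustrative usage sketch (the collection, model, and form field values here
// are assumptions for the example; the keys match what init() reads above):
//
//   init({
//     collectionName: 'books',
//     modelName: 'Book',
//     modelFields: { title: { type: String } },
//     formFields: [{ name: 'title', colClass: 'col-title' }],
//   });
//   // defines Meteor methods 'saveBook' and 'removeBook' and fills config.colFields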
export function saveDoc (doc) {
if ( !Meteor.userId() ) {
return;
}
try {
doc.save();
} catch (e) {
Errors.handle(e);
}
}
export function removeDoc (doc) {
if ( !Meteor.userId() ) {
return;
}
doc.remove();
}<|fim▁end|>
| |
<|file_name|>StatefulOperatorSpec.java<|end_file_name|><|fim▁begin|>/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*<|fim▁hole|> *
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.apache.samza.operators.spec;
import java.util.Collection;
/**
* Spec for stateful operators.
*/
public interface StatefulOperatorSpec {
/**
* Get the store descriptors for stores required by this operator.
*
* @return store descriptors for this operator's stores
*/
Collection<StoreDescriptor> getStoreDescriptors();
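  // Illustrative implementation sketch (hypothetical operator class; the
  // StoreDescriptor instance is assumed to be built elsewhere):
  //
  //   public class MyJoinOperatorSpec implements StatefulOperatorSpec {
  //     private final StoreDescriptor joinStore;
  //     MyJoinOperatorSpec(StoreDescriptor joinStore) { this.joinStore = joinStore; }
  //     @Override
  //     public Collection<StoreDescriptor> getStoreDescriptors() {
  //       return java.util.Collections.singletonList(joinStore);
  //     }
  //   }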
}<|fim▁end|>
|
* http://www.apache.org/licenses/LICENSE-2.0
|
<|file_name|>parter.hpp<|end_file_name|><|fim▁begin|>#ifndef NT2_GALLERY_INCLUDE_FUNCTIONS_SCALAR_PARTER_HPP_INCLUDED
#define NT2_GALLERY_INCLUDE_FUNCTIONS_SCALAR_PARTER_HPP_INCLUDED
<|fim▁hole|>
#endif<|fim▁end|>
|
#include <nt2/gallery/functions/parter.hpp>
|
<|file_name|>dir_40645110f4b881381ac11b52da3dfc1e.js<|end_file_name|><|fim▁begin|><|fim▁hole|><|fim▁end|>
|
var dir_40645110f4b881381ac11b52da3dfc1e =
[
[ "provider", "dir_9a95dbcede8719bb251f64fc00e6b0a1.html", "dir_9a95dbcede8719bb251f64fc00e6b0a1" ]
];
|
<|file_name|>test_append.py<|end_file_name|><|fim▁begin|># -*- coding: utf-8 -*-
import pytest
import time
import sys
import cPickle as pickle
from test_base_class import TestBaseClass
aerospike = pytest.importorskip("aerospike")
try:
from aerospike.exception import *
except:
print "Please install aerospike python client."
sys.exit(1)
class TestAppend(object):
def setup_class(cls):
"""
Setup method.
"""
hostlist, user, password = TestBaseClass.get_hosts()
config = {'hosts': hostlist}
        if user is None and password is None:
TestAppend.client = aerospike.client(config).connect()
else:
TestAppend.client = aerospike.client(config).connect(user, password)
def teardown_class(cls):
TestAppend.client.close()
def setup_method(self, method):
for i in xrange(5):
key = ('test', 'demo', i)
rec = {'name': 'name%s' % (str(i)), 'age': i}
TestAppend.client.put(key, rec)
def teardown_method(self, method):
"""
        Teardown method.
"""
#time.sleep(1)
for i in xrange(5):
key = ('test', 'demo', i)
TestAppend.client.remove(key)
def test_append_with_no_parameters(self):
"""
Invoke append() without any mandatory parameters.
"""
with pytest.raises(TypeError) as typeError:
TestAppend.client.append()
assert "Required argument 'key' (pos 1) not found" in typeError.value
    def test_append_with_correct_parameters(self):
"""
Invoke append() with correct parameters
"""
key = ('test', 'demo', 1)
TestAppend.client.append(key, "name", "str")
(key, meta, bins) = TestAppend.client.get(key)
assert bins == {'age': 1, 'name': 'name1str'}
def test_append_with_correct_policy(self):
"""
Invoke append() with correct policy
"""
key = ('test', 'demo', 1)
policy = {
'timeout': 1000,
'retry': aerospike.POLICY_RETRY_ONCE,
'commit_level': aerospike.POLICY_COMMIT_LEVEL_MASTER
}
TestAppend.client.append(key, "name", "str", {}, policy)
(key, meta, bins) = TestAppend.client.get(key)
assert bins == {'age': 1, 'name': 'name1str'}
def test_append_with_policy_key_send(self):
"""
Invoke append() with policy key send
"""
key = ('test', 'demo', 1)
policy = {
'timeout': 1000,
'key': aerospike.POLICY_KEY_SEND,
'retry': aerospike.POLICY_RETRY_ONCE,
'commit_level': aerospike.POLICY_COMMIT_LEVEL_ALL
}
TestAppend.client.append(key, "name", "str", {}, policy)
(key, meta, bins) = TestAppend.client.get(key)
assert bins == {'age': 1, 'name': 'name1str'}
assert key == ('test', 'demo', None, bytearray(
b'\xb7\xf4\xb88\x89\xe2\xdag\xdeh>\x1d\xf6\x91\x9a\x1e\xac\xc4F\xc8')
)
def test_append_with_policy_key_digest(self):
"""
Invoke append() with policy key digest
"""
key = ('test', 'demo', None, bytearray("asd;as[d'as;djk;uyfl",
"utf-8"))
rec = {'name': 'name%s' % (str(1)), 'age': 1, 'nolist': [1, 2, 3]}
TestAppend.client.put(key, rec)
policy = {
'timeout': 1000,
'key': aerospike.POLICY_KEY_DIGEST,
'retry': aerospike.POLICY_RETRY_NONE
}
TestAppend.client.append(key, "name", "str", {}, policy)
(key, meta, bins) = TestAppend.client.get(key)
assert bins == {'age': 1, 'name': 'name1str', 'nolist': [1, 2, 3]}
assert key == ('test', 'demo', None,
bytearray(b"asd;as[d\'as;djk;uyfl"))
TestAppend.client.remove(key)
def test_append_with_policy_key_gen_EQ_ignore(self):
"""
Invoke append() with gen eq positive ignore
"""
key = ('test', 'demo', 1)
policy = {
'timeout': 1000,
'key': aerospike.POLICY_KEY_SEND,
'retry': aerospike.POLICY_RETRY_ONCE,
'gen': aerospike.POLICY_GEN_IGNORE
}
meta = {'gen': 10, 'ttl': 1200}
TestAppend.client.append(key, "name", "str", meta, policy)
(key, meta, bins) = TestAppend.client.get(key)
assert bins == {'age': 1, 'name': 'name1str'}
assert key == ('test', 'demo', None, bytearray(
b'\xb7\xf4\xb88\x89\xe2\xdag\xdeh>\x1d\xf6\x91\x9a\x1e\xac\xc4F\xc8')
)
def test_append_with_policy_key_gen_EQ_positive(self):
"""
Invoke append() with gen eq positive
"""
key = ('test', 'demo', 1)
policy = {
'timeout': 1000,
'key': aerospike.POLICY_KEY_SEND,
'retry': aerospike.POLICY_RETRY_ONCE,
'gen': aerospike.POLICY_GEN_EQ
}
(key, meta) = TestAppend.client.exists(key)
gen = meta['gen']
meta = {'gen': gen, 'ttl': 1200}
TestAppend.client.append(key, "name", "str", meta, policy)
(key, meta, bins) = TestAppend.client.get(key)
assert bins == {'age': 1, 'name': 'name1str'}
assert key == ('test', 'demo', None, bytearray(
b'\xb7\xf4\xb88\x89\xe2\xdag\xdeh>\x1d\xf6\x91\x9a\x1e\xac\xc4F\xc8')
)
def test_append_with_policy_key_gen_GT_lesser(self):
"""
Invoke append() with gen GT lesser
"""
key = ('test', 'demo', 1)
policy = {
'timeout': 1000,
'key': aerospike.POLICY_KEY_SEND,
'retry': aerospike.POLICY_RETRY_ONCE,
'gen': aerospike.POLICY_GEN_GT
}
(key, meta) = TestAppend.client.exists(key)
gen = meta['gen']
meta = {
'gen': gen,
'ttl': 1200
}
try:
TestAppend.client.append(key, "name", "str", meta, policy)
except RecordGenerationError as exception:
assert exception.code == 3
assert exception.msg == "AEROSPIKE_ERR_RECORD_GENERATION"
assert exception.bin == "name"
(key , meta, bins) = TestAppend.client.get(key)
assert bins == {'age': 1, 'name': 'name1'}
assert key == ('test', 'demo', None, bytearray(
b'\xb7\xf4\xb88\x89\xe2\xdag\xdeh>\x1d\xf6\x91\x9a\x1e\xac\xc4F\xc8')
)
def test_append_with_policy_key_gen_GT_positive(self):
"""
Invoke append() with gen GT positive
"""
key = ('test', 'demo', 1)
policy = {
'timeout': 1000,
'key': aerospike.POLICY_KEY_SEND,
'retry': aerospike.POLICY_RETRY_ONCE,
'gen': aerospike.POLICY_GEN_GT
}
(key, meta) = TestAppend.client.exists(key)
gen = meta['gen']
meta = {'gen': gen + 2, 'ttl': 1200}
TestAppend.client.append(key, "name", "str", meta, policy)
(key, meta, bins) = TestAppend.client.get(key)
assert bins == {'age': 1, 'name': 'name1str'}
assert key == ('test', 'demo', None, bytearray(
b'\xb7\xf4\xb88\x89\xe2\xdag\xdeh>\x1d\xf6\x91\x9a\x1e\xac\xc4F\xc8')
)
def test_append_with_policy_key_gen_EQ_not_equal(self):
"""
Invoke append() with policy key EQ not equal
"""
key = ('test', 'demo', 1)
policy = {
'timeout': 1000,
'key': aerospike.POLICY_KEY_SEND,
'retry': aerospike.POLICY_RETRY_ONCE,
'gen': aerospike.POLICY_GEN_EQ
}
(key, meta) = TestAppend.client.exists(key)
gen = meta['gen']
meta = {
'gen': gen + 5,
'ttl': 1200
}
try:
TestAppend.client.append(key, "name", "str", meta, policy)
except RecordGenerationError as exception:
assert exception.code == 3
assert exception.msg == "AEROSPIKE_ERR_RECORD_GENERATION"
assert exception.bin == "name"
(key , meta, bins) = TestAppend.client.get(key)
assert bins == {'age': 1, 'name': 'name1'}
assert key == ('test', 'demo', None, bytearray(
b'\xb7\xf4\xb88\x89\xe2\xdag\xdeh>\x1d\xf6\x91\x9a\x1e\xac\xc4F\xc8')
)
def test_append_with_incorrect_policy(self):
"""
Invoke append() with incorrect policy
"""
key = ('test', 'demo', 1)
policy = {
'timeout': 0.5
}
try:
TestAppend.client.append(key, "name", "str", {}, policy)
except ParamError as exception:
assert exception.code == -2
assert exception.msg == "timeout is invalid"
def test_append_with_nonexistent_key(self):
"""
Invoke append() with non-existent key
"""
key = ('test', 'demo', 1000)
status = TestAppend.client.append(key, "name", "str")
assert status == 0L
TestAppend.client.remove(key)
def test_append_with_nonexistent_bin(self):
"""
Invoke append() with non-existent bin
"""
key = ('test', 'demo', 1)
status = TestAppend.client.append(key, "name1", "str")
assert status == 0L
def test_append_value_not_string(self):
"""
        Invoke append() with a value that is not a string
"""
key = ('test', 'demo', 1)
try:
TestAppend.client.append(key, "name", 2)
except ParamError as exception:
assert exception.code == -2
assert exception.msg == "Cannot concatenate 'str' and 'non-str' objects"
def test_append_with_extra_parameter(self):
"""
Invoke append() with extra parameter.
"""
key = ('test', 'demo', 1)
policy = {'timeout': 1000}
with pytest.raises(TypeError) as typeError:
TestAppend.client.append(key, "name", "str", {}, policy, "")
assert "append() takes at most 5 arguments (6 given)" in typeError.value
def test_append_policy_is_string(self):
"""
Invoke append() with policy is string
"""
key = ('test', 'demo', 1)
try:
TestAppend.client.append(key, "name", "pqr", {}, "")
except ParamError as exception:
assert exception.code == -2
assert exception.msg == "policy must be a dict"
def test_append_key_is_none(self):
"""
Invoke append() with key is none
"""
try:
TestAppend.client.append(None, "name", "str")
except ParamError as exception:
assert exception.code == -2
assert exception.msg == "key is invalid"
def test_append_bin_is_none(self):
"""
Invoke append() with bin is none
"""
key = ('test', 'demo', 1)
try:
TestAppend.client.append(key, None, "str")
except ParamError as exception:
assert exception.code == -2
assert exception.msg == "Bin name should be of type string"
def test_append_unicode_value(self):
"""
Invoke append() with unicode string
"""
key = ('test', 'demo', 1)
res = TestAppend.client.append(key, "name", u"address")
key, meta, bins = TestAppend.client.get(key)
assert bins['name'] == 'name1address'
def test_append_unicode_bin_name(self):
"""
Invoke append() with unicode string
"""
key = ('test', 'demo', 1)
res = TestAppend.client.append(key, u"add", u"address")
key, meta, bins = TestAppend.client.get(key)
assert bins['add'] == 'address'
    def test_append_with_correct_parameters_without_connection(self):
"""
Invoke append() with correct parameters without connection
"""
config = {'hosts': [('127.0.0.1', 3000)]}
client1 = aerospike.client(config)
key = ('test', 'demo', 1)
<|fim▁hole|> try:
client1.append(key, "name", "str")
except ClusterError as exception:
assert exception.code == 11L
assert exception.msg == 'No connection to aerospike cluster'<|fim▁end|>
| |
<|file_name|>externalMissingOnBase.ts<|end_file_name|><|fim▁begin|>import 'apollo-server-env';<|fim▁hole|>/**
* All fields marked with @external must exist on the base type
*/
export const externalMissingOnBase: PostCompositionValidator = ({ schema }) => {
const errors: GraphQLError[] = [];
const types = schema.getTypeMap();
for (const [typeName, namedType] of Object.entries(types)) {
// Only object types have fields
if (!isObjectType(namedType)) continue;
const typeFederationMetadata = getFederationMetadata(namedType);
// If externals is populated, we need to look at each one and confirm
// that field exists on base service
if (typeFederationMetadata?.externals) {
// loop over every service that has extensions with @external
for (const [serviceName, externalFieldsForService] of Object.entries(
typeFederationMetadata.externals,
)) {
// for a single service, loop over the external fields.
for (const { field: externalField } of externalFieldsForService) {
const externalFieldName = externalField.name.value;
const allFields = namedType.getFields();
const matchingBaseField = allFields[externalFieldName];
// @external field referenced a field that isn't defined anywhere
if (!matchingBaseField) {
errors.push(
errorWithCode(
'EXTERNAL_MISSING_ON_BASE',
logServiceAndType(serviceName, typeName, externalFieldName) +
`marked @external but ${externalFieldName} is not defined on the base service of ${typeName} (${typeFederationMetadata.serviceName})`,
),
);
continue;
}
// if the field has a serviceName, then it wasn't defined by the
// service that owns the type
const fieldFederationMetadata = getFederationMetadata(matchingBaseField);
if (fieldFederationMetadata?.serviceName) {
errors.push(
errorWithCode(
'EXTERNAL_MISSING_ON_BASE',
logServiceAndType(serviceName, typeName, externalFieldName) +
`marked @external but ${externalFieldName} was defined in ${fieldFederationMetadata.serviceName}, not in the service that owns ${typeName} (${typeFederationMetadata.serviceName})`,
),
);
}
}
}
}
}
return errors;
};<|fim▁end|>
|
import { isObjectType, GraphQLError } from 'graphql';
import { logServiceAndType, errorWithCode, getFederationMetadata } from '../../utils';
import { PostCompositionValidator } from '.';
|
<|file_name|>httpdatehelper.py<|end_file_name|><|fim▁begin|>"""
httpdatehelper
==============
:Module: pyfileserver.httpdatehelper
:Author: Ho Chun Wei, fuzzybr80(at)gmail.com
:Project: PyFileServer, http://pyfilesync.berlios.de/
:Copyright: Lesser GNU Public License, see LICENSE file attached with package
HTTP dates helper - an assorted library of helpful date functions:
* getstrftime(secs) - returns the RFC 1123 date/time representation of secs, where secs is the number
  of seconds since the epoch. If secs is not given, the current system time is used
* getsecstime(timetypestring) - returns as the number of seconds since the epoch, the date/time
described in timetypestring. Returns None for invalid input
* getgmtime(timetypestring) - returns as a standard time tuple (see time and calendar), the date/time
described in timetypestring. Returns None for invalid input
The following time type strings are supported by getsecstime() and getgmtime()::
Sun, 06 Nov 1994 08:49:37 GMT ; RFC 822, updated by RFC 1123
<|fim▁hole|> Sun Nov 6 08:49:37 1994 ; ANSI C's asctime() format
"""
__docformat__ = 'reStructuredText'
import calendar
import time
def getstrftime(secs=None):
# rfc 1123 date/time format
return time.strftime('%a, %d %b %Y %H:%M:%S GMT', time.gmtime(secs))
def getsecstime(timeformat):
result = getgmtime(timeformat)
if result:
return calendar.timegm(result)
else:
return None
def getgmtime(timeformat):
# Sun, 06 Nov 1994 08:49:37 GMT ; RFC 822, updated by RFC 1123
try:
vtime = time.strptime(timeformat, "%a, %d %b %Y %H:%M:%S GMT")
return vtime
except:
pass
# Sunday, 06-Nov-94 08:49:37 GMT ; RFC 850, obsoleted by RFC 1036
try:
vtime = time.strptime(timeformat, "%A %d-%b-%y %H:%M:%S GMT")
return vtime
except:
pass
# Sun Nov 6 08:49:37 1994 ; ANSI C's asctime() format
try:
vtime = time.strptime(timeformat, "%a %b %d %H:%M:%S %Y")
return vtime
except:
pass
return None<|fim▁end|>
|
Sunday, 06-Nov-94 08:49:37 GMT ; RFC 850, obsoleted by RFC 1036
|
<|file_name|>io.py<|end_file_name|><|fim▁begin|>## begin license ##
#
# "Weightless" is a High Performance Asynchronous Networking Library. See http://weightless.io
#
# Copyright (C) 2012-2013, 2017, 2020-2021 Seecr (Seek You Too B.V.) https://seecr.nl
#
# This file is part of "Weightless"
#
# "Weightless" is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by<|fim▁hole|># the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# "Weightless" is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with "Weightless"; if not, write to the Free Software
# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA
#
## end license ##
import sys
from contextlib import contextmanager
from functools import wraps
from io import StringIO
def _set_replaced_stream(name, replacement=None):
stream = getattr(sys, name)
def andBackAgain():
setattr(sys, name, stream)
streamReplacement = StringIO() if replacement is None else replacement
setattr(sys, name, streamReplacement)
return streamReplacement, andBackAgain
class _ContextMngrOrDecorated(object):
def __init__(self, streamName, replacement=None):
self._streamName = streamName
self._replacement = replacement
def __call__(self, func):
@wraps(func)
def wrapper(*args, **kwargs):
with self:
return func(*args, **kwargs)
return wrapper
def __enter__(self):
mockStream, self._back = _set_replaced_stream(self._streamName, self._replacement)
return mockStream
def __exit__(self, exc_type, exc_value, traceback):
self._back()
return False
def stderr_replaced(*func_arg):
if func_arg:
return _ContextMngrOrDecorated(streamName='stderr')(*func_arg)
return _ContextMngrOrDecorated(streamName='stderr')
def stdout_replaced(*func_arg):
if func_arg:
return _ContextMngrOrDecorated(streamName='stdout')(*func_arg)
return _ContextMngrOrDecorated(streamName='stdout')
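# Illustrative usage (sketch): each helper works both as a context manager and as
# a decorator; the captured value below follows from the StringIO replacement.
#
#   with stdout_replaced() as s:
#       print('captured')
#   assert s.getvalue() == 'captured\n'
#
#   @stderr_replaced
#   def quiet():
#       ...  # anything written to sys.stderr in here is swallowed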
def stdin_replaced(inStream=None):
return _ContextMngrOrDecorated(streamName='stdin', replacement=inStream)<|fim▁end|>
| |
<|file_name|>functions.py<|end_file_name|><|fim▁begin|>from django.utils.importlib import import_module
<|fim▁hole|> m = import_module(module)
return getattr(m, func)<|fim▁end|>
|
def function_from_string(string):
module, func = string.rsplit(".", 1)
|
<|file_name|>import6.rs<|end_file_name|><|fim▁begin|>// run-pass
#![allow(unused_imports)]
use foo::zed;
use bar::baz;<|fim▁hole|> pub fn baz() { println!("baz"); }
}
}
mod bar {
pub use foo::zed::baz;
}
pub fn main() { baz(); }<|fim▁end|>
|
mod foo {
pub mod zed {
|
<|file_name|>hwmp-protocol.cc<|end_file_name|><|fim▁begin|>/* -*- Mode:C++; c-file-style:"gnu"; indent-tabs-mode:nil; -*- */
/*
* Copyright (c) 2008,2009 IITP RAS
*
* This program is free software; you can redistribute it and/or modify
* it under the terms of the GNU General Public License version 2 as
* published by the Free Software Foundation;
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
* You should have received a copy of the GNU General Public License
* along with this program; if not, write to the Free Software
* Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
*
* Authors: Kirill Andreev <[email protected]>
*/
#include "hwmp-protocol.h"
#include "hwmp-protocol-mac.h"
#include "hwmp-tag.h"
#include "hwmp-rtable.h"
#include "ns3/log.h"
#include "ns3/simulator.h"
#include "ns3/packet.h"
#include "ns3/mesh-point-device.h"
#include "ns3/wifi-net-device.h"
#include "ns3/mesh-point-device.h"
#include "ns3/mesh-wifi-interface-mac.h"
#include "ns3/random-variable-stream.h"
#include "airtime-metric.h"
#include "ie-dot11s-preq.h"
#include "ie-dot11s-prep.h"
#include "ns3/trace-source-accessor.h"
#include "ie-dot11s-perr.h"
#include "ns3/arp-l3-protocol.h"
#include "ns3/ipv4-l3-protocol.h"
#include "ns3/udp-l4-protocol.h"
#include "ns3/tcp-l4-protocol.h"
#include "ns3/arp-header.h"
#include "ns3/ipv4-header.h"
#include "ns3/tcp-header.h"
#include "ns3/udp-header.h"
#include "ns3/rhoSigma-tag.h"
#include "ns3/llc-snap-header.h"
#include "ns3/wifi-mac-trailer.h"
#include "dot11s-mac-header.h"
NS_LOG_COMPONENT_DEFINE ("HwmpProtocol");
namespace ns3 {
namespace dot11s {
NS_OBJECT_ENSURE_REGISTERED (HwmpProtocol);
/* integration/qng.c
*
* Copyright (C) 1996, 1997, 1998, 1999, 2000, 2007 Brian Gough
*
* This program is free software; you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
* the Free Software Foundation; either version 3 of the License, or (at
* your option) any later version.
*
* This program is distributed in the hope that it will be useful, but
* WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
* General Public License for more details.
*
* You should have received a copy of the GNU General Public License
* along with this program; if not, write to the Free Software
* Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
*/
//#include <config.h>
#include <math.h>
#include <float.h>
TypeId
HwmpProtocol::GetTypeId ()
{
static TypeId tid = TypeId ("ns3::dot11s::HwmpProtocol")
.SetParent<MeshL2RoutingProtocol> ()
.AddConstructor<HwmpProtocol> ()
.AddAttribute ( "RandomStart",
"Random delay at first proactive PREQ",
TimeValue (Seconds (0.1)),
MakeTimeAccessor (
&HwmpProtocol::m_randomStart),
MakeTimeChecker ()
)
.AddAttribute ( "MaxQueueSize",
"Maximum number of packets we can store when resolving route",
UintegerValue (255),
MakeUintegerAccessor (
&HwmpProtocol::m_maxQueueSize),
MakeUintegerChecker<uint16_t> (1)
)
.AddAttribute ( "Dot11MeshHWMPmaxPREQretries",
"Maximum number of retries before we suppose the destination to be unreachable",
UintegerValue (3),
MakeUintegerAccessor (
&HwmpProtocol::m_dot11MeshHWMPmaxPREQretries),
MakeUintegerChecker<uint8_t> (1)
)
.AddAttribute ( "Dot11MeshHWMPnetDiameterTraversalTime",
"Time we suppose the packet to go from one edge of the network to another",
TimeValue (MicroSeconds (1024*100)),
MakeTimeAccessor (
&HwmpProtocol::m_dot11MeshHWMPnetDiameterTraversalTime),
MakeTimeChecker ()
)
.AddAttribute ( "Dot11MeshHWMPpreqMinInterval",
"Minimal interval between to successive PREQs",
TimeValue (MicroSeconds (1024*100)),
MakeTimeAccessor (
&HwmpProtocol::m_dot11MeshHWMPpreqMinInterval),
MakeTimeChecker ()
)
.AddAttribute ( "Dot11MeshHWMPperrMinInterval",
"Minimal interval between to successive PREQs",
TimeValue (MicroSeconds (1024*100)),
MakeTimeAccessor (&HwmpProtocol::m_dot11MeshHWMPperrMinInterval),
MakeTimeChecker ()
)
.AddAttribute ( "Dot11MeshHWMPactiveRootTimeout",
"Lifetime of poractive routing information",
TimeValue (MicroSeconds (1024*5000)),
MakeTimeAccessor (
&HwmpProtocol::m_dot11MeshHWMPactiveRootTimeout),
MakeTimeChecker ()
)
.AddAttribute ( "Dot11MeshHWMPactivePathTimeout",
"Lifetime of reactive routing information",
TimeValue (MicroSeconds (1024*5000)),
MakeTimeAccessor (
&HwmpProtocol::m_dot11MeshHWMPactivePathTimeout),
MakeTimeChecker ()
)
.AddAttribute ( "Dot11MeshHWMPpathToRootInterval",
"Interval between two successive proactive PREQs",
TimeValue (MicroSeconds (1024*2000)),
MakeTimeAccessor (
&HwmpProtocol::m_dot11MeshHWMPpathToRootInterval),
MakeTimeChecker ()
)
.AddAttribute ( "Dot11MeshHWMPrannInterval",
"Lifetime of poractive routing information",
TimeValue (MicroSeconds (1024*5000)),
MakeTimeAccessor (
&HwmpProtocol::m_dot11MeshHWMPrannInterval),
MakeTimeChecker ()
)
.AddAttribute ( "MaxTtl",
"Initial value of Time To Live field",
UintegerValue (32),
MakeUintegerAccessor (
&HwmpProtocol::m_maxTtl),
MakeUintegerChecker<uint8_t> (2)
)
.AddAttribute ( "UnicastPerrThreshold",
"Maximum number of PERR receivers, when we send a PERR as a chain of unicasts",
UintegerValue (32),
MakeUintegerAccessor (
&HwmpProtocol::m_unicastPerrThreshold),
MakeUintegerChecker<uint8_t> (1)
)
.AddAttribute ( "UnicastPreqThreshold",
"Maximum number of PREQ receivers, when we send a PREQ as a chain of unicasts",
UintegerValue (1),
MakeUintegerAccessor (
&HwmpProtocol::m_unicastPreqThreshold),
MakeUintegerChecker<uint8_t> (1)
)
.AddAttribute ( "UnicastDataThreshold",
"Maximum number ofbroadcast receivers, when we send a broadcast as a chain of unicasts",
UintegerValue (1),
MakeUintegerAccessor (
&HwmpProtocol::m_unicastDataThreshold),
MakeUintegerChecker<uint8_t> (1)
)
.AddAttribute ( "DoFlag",
"Destination only HWMP flag",
BooleanValue (true),
MakeBooleanAccessor (
&HwmpProtocol::m_doFlag),
MakeBooleanChecker ()
)
.AddAttribute ( "RfFlag",
"Reply and forward flag",
BooleanValue (false),
MakeBooleanAccessor (
&HwmpProtocol::m_rfFlag),
MakeBooleanChecker ()
)
.AddTraceSource ( "RouteDiscoveryTime",
"The time of route discovery procedure",
MakeTraceSourceAccessor (
&HwmpProtocol::m_routeDiscoveryTimeCallback)
)
//by hadi
.AddAttribute ( "VBMetricMargin",
"VBMetricMargin",
UintegerValue (2),
MakeUintegerAccessor (
&HwmpProtocol::m_VBMetricMargin),
MakeUintegerChecker<uint32_t> (1)
)
.AddAttribute ( "Gppm",
"G Packets Per Minutes",
UintegerValue (3600),
MakeUintegerAccessor (
&HwmpProtocol::m_Gppm),
MakeUintegerChecker<uint32_t> (1)
)
.AddTraceSource ( "TransmittingFromSource",
"",
MakeTraceSourceAccessor (
&HwmpProtocol::m_txed4mSourceCallback)
)
.AddTraceSource ( "WannaTransmittingFromSource",
"",
MakeTraceSourceAccessor (
&HwmpProtocol::m_wannaTx4mSourceCallback)
)
.AddTraceSource( "CbrCnnStateChanged",
"",
MakeTraceSourceAccessor(
&HwmpProtocol::m_CbrCnnStateChanged))
.AddTraceSource( "PacketBufferredAtSource",
"",
MakeTraceSourceAccessor(
&HwmpProtocol::m_packetBufferredAtSource))
;
return tid;
}
HwmpProtocol::HwmpProtocol () :
m_dataSeqno (1),
m_hwmpSeqno (1),
m_preqId (0),
m_rtable (CreateObject<HwmpRtable> ()),
m_randomStart (Seconds (0.1)),
m_maxQueueSize (255),
m_dot11MeshHWMPmaxPREQretries (3),
m_dot11MeshHWMPnetDiameterTraversalTime (MicroSeconds (1024*100)),
m_dot11MeshHWMPpreqMinInterval (MicroSeconds (1024*100)),
m_dot11MeshHWMPperrMinInterval (MicroSeconds (1024*100)),
m_dot11MeshHWMPactiveRootTimeout (MicroSeconds (1024*5000)),
m_dot11MeshHWMPactivePathTimeout (MicroSeconds (1024*5000)),
m_dot11MeshHWMPpathToRootInterval (MicroSeconds (1024*2000)),
m_dot11MeshHWMPrannInterval (MicroSeconds (1024*5000)),
m_isRoot (false),
m_maxTtl (32),
m_unicastPerrThreshold (32),
m_unicastPreqThreshold (1),
m_unicastDataThreshold (1),
m_doFlag (true),
m_rfFlag (false),
m_VBMetricMargin(2)
{
NS_LOG_FUNCTION_NOARGS ();
m_noDataPacketYet=true;
m_energyPerByte=0;
m_coefficient = CreateObject<UniformRandomVariable> ();
}
HwmpProtocol::~HwmpProtocol ()
{
NS_LOG_FUNCTION_NOARGS ();
}
void
HwmpProtocol::DoInitialize ()
{
m_coefficient->SetAttribute ("Max", DoubleValue (m_randomStart.GetSeconds ()));
if (m_isRoot)
{
SetRoot ();
}
Simulator::Schedule(Seconds(0.5),&HwmpProtocol::CheckCbrRoutes4Expiration,this);//hadi eo94
m_interfaces.begin ()->second->SetEnergyChangeCallback (MakeCallback(&HwmpProtocol::EnergyChange,this));
m_interfaces.begin ()->second->SetGammaChangeCallback (MakeCallback(&HwmpProtocol::GammaChange,this));
m_rtable->setSystemB (m_interfaces.begin ()->second->GetEres ());
m_rtable->setBPrim (m_rtable->systemB ());
m_rtable->setSystemBMax (m_interfaces.begin ()->second->GetBatteryCapacity ());
m_rtable->setBPrimMax (m_rtable->systemBMax ());
m_rtable->setAssignedGamma (0);
m_rtable->setGppm (m_Gppm);
GammaChange (m_rtable->systemGamma (),m_totalSimulationTime);
m_rtable->UpdateToken ();
}
void
HwmpProtocol::DoDispose ()
{
NS_LOG_FUNCTION_NOARGS ();
for (std::map<Mac48Address, PreqEvent>::iterator i = m_preqTimeouts.begin (); i != m_preqTimeouts.end (); i++)
{
i->second.preqTimeout.Cancel ();
}
m_proactivePreqTimer.Cancel ();
m_preqTimeouts.clear ();
m_lastDataSeqno.clear ();
m_hwmpSeqnoMetricDatabase.clear ();
for (std::vector<CnnBasedPreqEvent>::iterator cbpei = m_cnnBasedPreqTimeouts.begin (); cbpei != m_cnnBasedPreqTimeouts.end (); cbpei++)
{
cbpei->preqTimeout.Cancel();
}
m_cnnBasedPreqTimeouts.clear();
for(std::vector<DelayedPrepStruct>::iterator dpsi=m_delayedPrepStruct.begin ();dpsi!=m_delayedPrepStruct.end ();dpsi++)
{
dpsi->prepTimeout.Cancel ();
}
m_delayedPrepStruct.clear ();
m_interfaces.clear ();
m_rqueue.clear ();
m_rtable = 0;
m_mp = 0;
}
bool
HwmpProtocol::RequestRoute (
uint32_t sourceIface,
const Mac48Address source,
const Mac48Address destination,
Ptr<const Packet> constPacket,
uint16_t protocolType, //ethrnet 'Protocol' field
MeshL2RoutingProtocol::RouteReplyCallback routeReply
)
{
Ptr <Packet> packet = constPacket->Copy ();
HwmpTag tag;
if (sourceIface == GetMeshPoint ()->GetIfIndex ())
{
// packet from level 3
if (packet->PeekPacketTag (tag))
{
NS_FATAL_ERROR ("HWMP tag has come with a packet from upper layer. This must not occur...");
}
//Filling TAG:
if (destination == Mac48Address::GetBroadcast ())
{
tag.SetSeqno (m_dataSeqno++);
}
tag.SetTtl (m_maxTtl);
}
else
{
if (!packet->RemovePacketTag (tag))
{
NS_FATAL_ERROR ("HWMP tag is supposed to be here at this point.");
}
tag.DecrementTtl ();
if (tag.GetTtl () == 0)
{
m_stats.droppedTtl++;
return false;
}
}
if (destination == Mac48Address::GetBroadcast ())
{
m_stats.txBroadcast++;
m_stats.txBytes += packet->GetSize ();
//channel IDs where we have already sent broadcast:
std::vector<uint16_t> channels;
for (HwmpProtocolMacMap::const_iterator plugin = m_interfaces.begin (); plugin != m_interfaces.end (); plugin++)
{
bool shouldSend = true;
for (std::vector<uint16_t>::const_iterator chan = channels.begin (); chan != channels.end (); chan++)
{
if ((*chan) == plugin->second->GetChannelId ())
{
shouldSend = false;
}
}
if (!shouldSend)
{
continue;
}
channels.push_back (plugin->second->GetChannelId ());
std::vector<Mac48Address> receivers = GetBroadcastReceivers (plugin->first);
for (std::vector<Mac48Address>::const_iterator i = receivers.begin (); i != receivers.end (); i++)
{
Ptr<Packet> packetCopy = packet->Copy ();
//
// 64-bit Intel valgrind complains about tag.SetAddress (*i). It
// likes this just fine.
//
Mac48Address address = *i;
tag.SetAddress (address);
packetCopy->AddPacketTag (tag);
routeReply (true, packetCopy, source, destination, protocolType, plugin->first);
}
}
}
else
{
return ForwardUnicast (sourceIface, source, destination, packet, protocolType, routeReply, tag.GetTtl ());
}
return true;
}
bool
HwmpProtocol::RemoveRoutingStuff (uint32_t fromIface, const Mac48Address source,
const Mac48Address destination, Ptr<Packet> packet, uint16_t& protocolType)
{
HwmpTag tag;
if (!packet->RemovePacketTag (tag))
{
NS_FATAL_ERROR ("HWMP tag must exist when packet received from the network");
}
return true;
}
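// Forwards a unicast frame along a per-connection (flow-based) route: the frame's
// ARP/IPv4 and TCP/UDP headers are dissected into a connection tuple, a reactive
// lookup is done in the connection-based routing table, and when no route exists
// yet a connection-based PREQ is issued while the packet waits in the queue.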
bool
HwmpProtocol::ForwardUnicast (uint32_t sourceIface, const Mac48Address source, const Mac48Address destination,
Ptr<Packet> packet, uint16_t protocolType, RouteReplyCallback routeReply, uint32_t ttl)
{
RhoSigmaTag rsTag;
packet->RemovePacketTag (rsTag);
Ptr<Packet> pCopy=packet->Copy();
  uint8_t cnnType; // connection granularity: CNN_TYPE_MAC_ONLY, CNN_TYPE_IP_ONLY, CNN_TYPE_PKT_BASED (TCP) or CNN_TYPE_IP_PORT (UDP)
Ipv4Address srcIpv4Addr;
Ipv4Address dstIpv4Addr;
uint16_t srcPort;
uint16_t dstPort;
if(protocolType==ArpL3Protocol::PROT_NUMBER)
{
ArpHeader arpHdr;
pCopy->RemoveHeader(arpHdr);
srcIpv4Addr = arpHdr.GetSourceIpv4Address();
dstIpv4Addr = arpHdr.GetDestinationIpv4Address();
cnnType=HwmpRtable::CNN_TYPE_IP_ONLY;
// NS_LOG_HADI(m_address << " ARP packet have seen");
NS_ASSERT(true);
}
else if(protocolType==Ipv4L3Protocol::PROT_NUMBER)
{
Ipv4Header ipv4Hdr;
pCopy->RemoveHeader(ipv4Hdr);
srcIpv4Addr = ipv4Hdr.GetSource();
dstIpv4Addr = ipv4Hdr.GetDestination();
uint8_t protocol = ipv4Hdr.GetProtocol();
if(protocol==TcpL4Protocol::PROT_NUMBER)
{
TcpHeader tcpHdr;
pCopy->RemoveHeader (tcpHdr);
srcPort=tcpHdr.GetSourcePort ();
dstPort=tcpHdr.GetDestinationPort ();
cnnType=HwmpRtable::CNN_TYPE_PKT_BASED;
}
else if(protocol==UdpL4Protocol::PROT_NUMBER)
{
UdpHeader udpHdr;
pCopy->RemoveHeader(udpHdr);
srcPort=udpHdr.GetSourcePort();
dstPort=udpHdr.GetDestinationPort();
cnnType=HwmpRtable::CNN_TYPE_IP_PORT;
// NS_LOG_HADI(m_address << " UDP packet have seen " << source << "->" << destination << " " << srcIpv4Addr << ":" << (int)srcPort << "=>" << dstIpv4Addr << ":" << (int)dstPort);
}
else
{
cnnType=HwmpRtable::CNN_TYPE_IP_ONLY;
// NS_LOG_HADI(m_address << " non TCP or UDP packet have seen");
NS_ASSERT(true);
}
}
else
{
cnnType=HwmpRtable::CNN_TYPE_MAC_ONLY;
// NS_LOG_HADI(m_address << " non IP packet have seen");
NS_ASSERT(true);
}
if((source==GetAddress())&&(cnnType==HwmpRtable::CNN_TYPE_IP_PORT)){
NS_LOG_ROUTING("hwmp forwardUnicast4mSource " << (int)packet->GetUid() << " " << srcIpv4Addr << ":" << (int)srcPort << "=>" << dstIpv4Addr << ":" << (int)dstPort << " " << (int)rsTag.GetRho () << " " << (int)rsTag.GetSigma () << " " << rsTag.GetStopTime ());
m_wannaTx4mSourceCallback();
}
NS_ASSERT (destination != Mac48Address::GetBroadcast ());
NS_ASSERT(cnnType==HwmpRtable::CNN_TYPE_IP_PORT);
CbrConnection connection;
connection.destination=destination;
connection.source=source;
connection.cnnType=cnnType;
connection.dstIpv4Addr=dstIpv4Addr;
connection.srcIpv4Addr=srcIpv4Addr;
connection.dstPort=dstPort;
connection.srcPort=srcPort;
if(cnnType==HwmpRtable::CNN_TYPE_IP_PORT){
CbrConnectionsVector::iterator nrccvi=std::find(m_notRoutedCbrConnections.begin(),m_notRoutedCbrConnections.end(),connection);
if(nrccvi!=m_notRoutedCbrConnections.end()){
if(source==GetAddress()){
NS_LOG_ROUTING("hwmp cnnRejectedDrop " << (int)packet->GetUid() << " " << srcIpv4Addr << ":" << (int)srcPort << "=>" << dstIpv4Addr << ":" << (int)dstPort);
}
return false;
}
}
HwmpRtable::CnnBasedLookupResult cnnBasedResult = m_rtable->LookupCnnBasedReactive(destination,source,cnnType,srcIpv4Addr,dstIpv4Addr,srcPort,dstPort);
NS_LOG_DEBUG ("Requested src = "<<source<<", dst = "<<destination<<", I am "<<GetAddress ()<<", RA = "<<cnnBasedResult.retransmitter);
HwmpTag tag;
tag.SetAddress (cnnBasedResult.retransmitter);
tag.SetTtl (ttl);
  // seqno and metric are not used
packet->AddPacketTag (tag);
if (cnnBasedResult.retransmitter != Mac48Address::GetBroadcast ())
{
if(source==GetAddress())
{
NS_LOG_ROUTING("tx4mSource " << (int)packet->GetUid());
NS_LOG_CAC("tx4mSource " << srcIpv4Addr << ":" << srcPort << "=>" << dstIpv4Addr << ":" << dstPort << " " << (int)packet->GetUid());
m_txed4mSourceCallback();
SourceCbrRouteExtend (destination,source,cnnType,srcIpv4Addr,dstIpv4Addr,srcPort,dstPort);
}
else
{
NS_LOG_CAC("forwardViaIntermediate " << srcIpv4Addr << ":" << srcPort << "=>" << dstIpv4Addr << ":" << dstPort << " " << (int)packet->GetUid());
CbrRouteExtend(destination,source,cnnType,srcIpv4Addr,dstIpv4Addr,srcPort,dstPort);
}
//reply immediately:
//routeReply (true, packet, source, destination, protocolType, cnnBasedResult.ifIndex);
NS_LOG_TB("queuing packet in TBVB queue for send " << (int)packet->GetUid ());
m_rtable->QueueCnnBasedPacket (destination,source,cnnType,srcIpv4Addr,dstIpv4Addr,srcPort,dstPort,packet,protocolType,cnnBasedResult.ifIndex,routeReply);
m_stats.txUnicast++;
m_stats.txBytes += packet->GetSize ();
return true;
}
if (sourceIface != GetMeshPoint ()->GetIfIndex ())
{
//Start path error procedure:
NS_LOG_DEBUG ("Must Send PERR");
m_stats.totalDropped++;
return false;
}
//Request a destination:
if (CnnBasedShouldSendPreq (rsTag, destination, source, cnnType, srcIpv4Addr, dstIpv4Addr, srcPort, dstPort))
{
NS_LOG_ROUTING("sendingPathRequest " << source << " " << destination);
uint32_t originator_seqno = GetNextHwmpSeqno ();
uint32_t dst_seqno = 0;
m_stats.initiatedPreq++;
for (HwmpProtocolMacMap::const_iterator i = m_interfaces.begin (); i != m_interfaces.end (); i++)
{
if(m_routingType==2)
i->second->RequestDestination (destination, originator_seqno, dst_seqno, cnnType, srcIpv4Addr, dstIpv4Addr, srcPort, dstPort,rsTag.GetRho (), rsTag.GetSigma (), rsTag.GetStopTime (), rsTag.delayBound (), rsTag.maxPktSize (), 0x7fffffff,0x7fffffff,0x7fffffff);
else
i->second->RequestDestination (destination, originator_seqno, dst_seqno, cnnType, srcIpv4Addr, dstIpv4Addr, srcPort, dstPort,rsTag.GetRho (), rsTag.GetSigma (), rsTag.GetStopTime (),rsTag.delayBound (), rsTag.maxPktSize (), 0,0,0);
}
}
QueuedPacket pkt;
pkt.pkt = packet;
pkt.dst = destination;
pkt.src = source;
pkt.protocol = protocolType;
pkt.reply = routeReply;
pkt.inInterface = sourceIface;
pkt.cnnType=cnnType;
pkt.srcIpv4Addr=srcIpv4Addr;
pkt.dstIpv4Addr=dstIpv4Addr;
pkt.srcPort=srcPort;
pkt.dstPort=dstPort;
if (QueuePacket (pkt))
{
if((source==GetAddress ())&&(cnnType==HwmpRtable::CNN_TYPE_IP_PORT))
m_packetBufferredAtSource(packet);
m_stats.totalQueued++;
return true;
}
else
{
m_stats.totalDropped++;
return false;
}
}
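//
// The forwarding decision above, in outline (a descriptive sketch of this
// function, not additional logic):
//
//   classify the frame (MAC-only / IP-only / IP+port) from its headers;
//   if the connection was previously rejected -> drop (return false);
//   if a connection-based route exists        -> tag the packet with the
//                                                retransmitter and queue it in
//                                                the routing table for
//                                                token-bucket transmission;
//   otherwise                                 -> issue a PREQ (subject to CAC)
//                                                and buffer the packet locally.
//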
void
HwmpProtocol::ReceivePreq (IePreq preq, Mac48Address from, uint32_t interface, Mac48Address fromMp, uint32_t metric)
{
preq.IncrementMetric (metric);
NS_LOG_ROUTING("receivePreq " << from << " " << (int)preq.GetGammaPrim () << " " << (int)preq.GetBPrim () << " " << (int)preq.GetTotalE () << " " << (int)preq.GetMetric ());
  //acceptance criteria:
// bool duplicatePreq=false;
//bool freshInfo (true);
for(std::vector<CnnBasedSeqnoMetricDatabase>::iterator i=m_hwmpSeqnoMetricDatabase.begin();i!=m_hwmpSeqnoMetricDatabase.end();i++)
{
if(
(i->originatorAddress==preq.GetOriginatorAddress()) &&
(i->cnnType==preq.GetCnnType()) &&
(i->srcIpv4Addr==preq.GetSrcIpv4Addr()) &&
(i->srcPort==preq.GetSrcPort()) &&
(i->dstIpv4Addr==preq.GetDstIpv4Addr()) &&
(i->dstPort==preq.GetDstPort())
)
{
// duplicatePreq=true;
NS_LOG_ROUTING("duplicatePreq " << (int)i->originatorSeqNumber << " " << (int)preq.GetOriginatorSeqNumber ());
if ((int32_t)(i->originatorSeqNumber - preq.GetOriginatorSeqNumber ()) > 0)
{
return;
}
if (i->originatorSeqNumber == preq.GetOriginatorSeqNumber ())
{<|fim▁hole|>
if((i->totalE+m_VBMetricMargin >= preq.GetTotalE ())&&(i->totalE <= preq.GetTotalE ()+m_VBMetricMargin))
{
if((i->metric+m_VBMetricMargin*10 >= preq.GetMetric ())&&(i->metric <= preq.GetMetric ()+m_VBMetricMargin*10))
{
if(m_routingType==1)
{
if(i->bPrim<=preq.GetBPrim ())
{
NS_LOG_ROUTING("b1 rejected " << (int)i->bPrim << " " << (int)i->gammaPrim << " " << (int)preq.GetBPrim () << " " << (int)preq.GetGammaPrim ());
return;
}
}
else
{
if(i->bPrim>=preq.GetBPrim ())
{
NS_LOG_ROUTING("b2 rejected " << (int)i->bPrim << " " << (int)i->gammaPrim << " " << (int)preq.GetBPrim () << " " << (int)preq.GetGammaPrim ());
return;
}
}
}
else if (i->metric <= preq.GetMetric ())
{
NS_LOG_ROUTING("metric rejected " << (int)i->metric << " " << (int)preq.GetMetric ());
return;
}
}
else
if(m_routingType==1)
{
if(i->totalE<=preq.GetTotalE ())
{
NS_LOG_ROUTING("totalE1 rejected " << (int)i->bPrim << " " << (int)i->gammaPrim << " " << (int)preq.GetBPrim () << " " << (int)preq.GetGammaPrim ());
return;
}
}
else
{
if(i->totalE>=preq.GetTotalE ())
{
NS_LOG_ROUTING("totalE2 rejected " << (int)i->bPrim << " " << (int)i->gammaPrim << " " << (int)preq.GetBPrim () << " " << (int)preq.GetGammaPrim ());
return;
}
}
/*NS_LOG_ROUTING("checking prev " << (int)i->metric << " " << (int)preq.GetMetric () << " " << (int)m_VBMetricMargin);
if ((i->metric+m_VBMetricMargin >= preq.GetMetric ())&&(i->metric <= preq.GetMetric ()+m_VBMetricMargin))
{
// check energy metric
NS_LOG_ROUTING("in margin with one prev preq " << (int)i->metric << " " << (int)preq.GetMetric () << " " << (int)m_VBMetricMargin);
if((i->bPrim+i->gammaPrim*(preq.GetStopTime ()-Simulator::Now ()).GetSeconds ())>=(preq.GetBPrim ()+preq.GetGammaPrim ()*(preq.GetStopTime ()-Simulator::Now ()).GetSeconds ()))
{
NS_LOG_ROUTING("bgamma rejected " << (int)i->bPrim << " " << (int)i->gammaPrim << " " << (int)preq.GetBPrim () << " " << (int)preq.GetGammaPrim ());
return;
}
}
else if (i->metric <= preq.GetMetric ())
{
NS_LOG_ROUTING("metric rejected " << (int)i->metric << " " << (int)preq.GetMetric ());
return;
}*/
}
else
{
if (i->metric <= preq.GetMetric ())
{
NS_LOG_ROUTING("metric rejected " << (int)i->metric << " " << (int)preq.GetMetric ());
return;
}
}
}
m_hwmpSeqnoMetricDatabase.erase (i);
break;
}
}
CnnBasedSeqnoMetricDatabase newDb;
newDb.originatorAddress=preq.GetOriginatorAddress();
newDb.originatorSeqNumber=preq.GetOriginatorSeqNumber();
newDb.metric=preq.GetMetric();
newDb.cnnType=preq.GetCnnType();
newDb.srcIpv4Addr=preq.GetSrcIpv4Addr();
newDb.dstIpv4Addr=preq.GetDstIpv4Addr();
newDb.srcPort=preq.GetSrcPort();
newDb.dstPort=preq.GetDstPort();
newDb.gammaPrim=preq.GetGammaPrim ();
newDb.bPrim=preq.GetBPrim ();
newDb.totalE=preq.GetTotalE ();
m_hwmpSeqnoMetricDatabase.push_back(newDb);
std::vector<Ptr<DestinationAddressUnit> > destinations = preq.GetDestinationList ();
//Add reverse path to originator:
m_rtable->AddCnnBasedReversePath (preq.GetOriginatorAddress(),from,interface,preq.GetCnnType(),preq.GetSrcIpv4Addr(),preq.GetDstIpv4Addr(),preq.GetSrcPort(),preq.GetDstPort(),Seconds(1),preq.GetOriginatorSeqNumber());
//Add reactive path to originator:
for (std::vector<Ptr<DestinationAddressUnit> >::const_iterator i = destinations.begin (); i != destinations.end (); i++)
{
NS_LOG_ROUTING("receivePReq " << preq.GetOriginatorAddress() << " " << from << " " << (*i)->GetDestinationAddress ());
std::vector<Ptr<DestinationAddressUnit> > preqDestinations = preq.GetDestinationList ();
Mac48Address preqDstMac;
if(preqDestinations.size ()==1){
std::vector<Ptr<DestinationAddressUnit> >::const_iterator preqDstMacIt =preqDestinations.begin ();
preqDstMac=(*preqDstMacIt)->GetDestinationAddress();
}else{
preqDstMac=GetAddress ();
}
if ((*i)->GetDestinationAddress () == GetAddress ())
{
// if(!duplicatePreq)
{
if(m_doCAC)
{
              // calculate the total energy needed over the whole connection lifetime and the energy needed for bursts
              double totalEnergyNeeded = (m_rtable->m_maxEnergyPerDataPacket+m_rtable->m_maxEnergyPerAckPacket)*(preq.GetStopTime ()-Simulator::Now ()).GetSeconds ()*(preq.GetRho ()/60.0)*m_rtable->m_energyAlpha; // 60.0: avoid integer truncation of the per-minute rate
double burstEnergyNeeded = (m_rtable->m_maxEnergyPerDataPacket+m_rtable->m_maxEnergyPerAckPacket)*preq.GetSigma ()*m_rtable->m_energyAlpha;
double energyUntilEndOfConnection = m_rtable->bPrim ()+ m_rtable->gammaPrim ()*(preq.GetStopTime ()-Simulator::Now ()).GetSeconds ();
NS_LOG_ROUTING("ReceivePreqCACdestination " << m_rtable->m_maxEnergyPerDataPacket << " " << m_rtable->m_maxEnergyPerAckPacket << " " << (int)preq.GetRho () << " " << (int)preq.GetSigma () << " " << preq.GetStopTime () << " ; " << totalEnergyNeeded << " " << burstEnergyNeeded << " " << energyUntilEndOfConnection << " " << m_rtable->bPrim ());
if( ( ( m_rtable->bPrim ()< burstEnergyNeeded ) || ( energyUntilEndOfConnection < totalEnergyNeeded ) ) || (!m_interfaces.begin()->second->HasEnoughCapacity4NewConnection(preq.GetOriginatorAddress (),preqDstMac,preq.GetHopCount (),from,preq.GetRho ()) ) )// CAC check
{
NS_LOG_ROUTING("cac rejected the connection " << totalEnergyNeeded << " " << burstEnergyNeeded << " " << energyUntilEndOfConnection << " " << m_rtable->bPrim ());
return;
}
}
else
{
if(m_rtable->bPrim ()<=0)
{
NS_LOG_ROUTING("bPrim()<=0_1 rejected the connection " << m_rtable->bPrim ());
return;
}
}
}
NS_LOG_ROUTING("schedule2sendPrep");
Schedule2sendPrep (
GetAddress (),
preq.GetOriginatorAddress (),
preq.GetMetric(),
preq.GetCnnType(),
preq.GetSrcIpv4Addr(),
preq.GetDstIpv4Addr(),
preq.GetSrcPort(),
preq.GetDstPort(),
preq.GetRho (),
preq.GetSigma (),
preq.GetStopTime (),
preq.GetDelayBound (),
preq.GetMaxPktSize (),
preq.GetOriginatorSeqNumber (),
GetNextHwmpSeqno (),
preq.GetLifetime (),
interface
);
//NS_ASSERT (m_rtable->LookupReactive (preq.GetOriginatorAddress ()).retransmitter != Mac48Address::GetBroadcast ());
preq.DelDestinationAddressElement ((*i)->GetDestinationAddress ());
continue;
}
else
{
// if(!duplicatePreq)
{
if(m_doCAC)
{
              // calculate the total energy needed over the whole connection lifetime and the energy needed for bursts
              double totalEnergyNeeded = 2 * (m_rtable->m_maxEnergyPerDataPacket+m_rtable->m_maxEnergyPerAckPacket)*(preq.GetStopTime ()-Simulator::Now ()).GetSeconds ()*(preq.GetRho ()/60.0)*m_rtable->m_energyAlpha; // 60.0: avoid integer truncation of the per-minute rate
double burstEnergyNeeded = 2 * (m_rtable->m_maxEnergyPerDataPacket+m_rtable->m_maxEnergyPerAckPacket)*preq.GetSigma ()*m_rtable->m_energyAlpha;
double energyUntilEndOfConnection = m_rtable->bPrim ()+ m_rtable->gammaPrim ()*(preq.GetStopTime ()-Simulator::Now ()).GetSeconds ();
NS_LOG_ROUTING("ReceivePreqCACintermediate " << m_rtable->m_maxEnergyPerDataPacket << " " << m_rtable->m_maxEnergyPerAckPacket << " " << (int)preq.GetRho () << " " << (int)preq.GetSigma () << " " << preq.GetStopTime () << " ; " << totalEnergyNeeded << " " << burstEnergyNeeded << " " << energyUntilEndOfConnection << " " << m_rtable->bPrim ());
if( ( ( m_rtable->bPrim ()< burstEnergyNeeded ) || ( energyUntilEndOfConnection < totalEnergyNeeded ) ) || (!m_interfaces.begin()->second->HasEnoughCapacity4NewConnection(preq.GetOriginatorAddress (),preqDstMac,preq.GetHopCount (),from,preq.GetRho ()) ) )// CAC check
{
NS_LOG_ROUTING("cac rejected the connection " << totalEnergyNeeded << " " << burstEnergyNeeded << " " << energyUntilEndOfConnection << " " << m_rtable->bPrim ());
return;
}
}
else
{
if(m_rtable->bPrim ()<=0)
{
NS_LOG_ROUTING("bPrim()<=0_2 rejected the connection " << m_rtable->bPrim ());
return;
}
}
}
if(m_routingType==1)
preq.UpdateVBMetricSum (m_rtable->gammaPrim (),m_rtable->bPrim ());
else if(m_routingType==2)
preq.UpdateVBMetricMin (m_rtable->gammaPrim (),m_rtable->bPrim ());
}
}
NS_LOG_DEBUG ("I am " << GetAddress () << "Accepted preq from address" << from << ", preq:" << preq);
//check if must retransmit:
if (preq.GetDestCount () == 0)
{
return;
}
//Forward PREQ to all interfaces:
NS_LOG_DEBUG ("I am " << GetAddress () << "retransmitting PREQ:" << preq);
NS_LOG_ROUTING("forwardPreq");
for (HwmpProtocolMacMap::const_iterator i = m_interfaces.begin (); i != m_interfaces.end (); i++)
{
i->second->SendPreq (preq);
}
}
void
HwmpProtocol::Schedule2sendPrep(
Mac48Address src,
Mac48Address dst,
uint32_t initMetric,
uint8_t cnnType,
Ipv4Address srcIpv4Addr,
Ipv4Address dstIpv4Addr,
uint16_t srcPort,
uint16_t dstPort,
uint16_t rho,
uint16_t sigma,
Time stopTime,
Time delayBound,
uint16_t maxPktSize,
uint32_t originatorDsn,
uint32_t destinationSN,
uint32_t lifetime,
uint32_t interface)
{
for(std::vector<DelayedPrepStruct>::iterator dpsi=m_delayedPrepStruct.begin ();dpsi!=m_delayedPrepStruct.end ();dpsi++)
{
if(
(dpsi->destination==dst) &&
(dpsi->source==src) &&
(dpsi->cnnType==cnnType) &&
(dpsi->srcIpv4Addr==srcIpv4Addr) &&
(dpsi->dstIpv4Addr==dstIpv4Addr) &&
(dpsi->srcPort==srcPort) &&
(dpsi->dstPort==dstPort)
)
{
NS_LOG_ROUTING("scheduledBefore");
return;
}
}
DelayedPrepStruct dps;
dps.destination=dst;
dps.source=src;
dps.cnnType=cnnType;
dps.srcIpv4Addr=srcIpv4Addr;
dps.dstIpv4Addr=dstIpv4Addr;
dps.srcPort=srcPort;
dps.dstPort=dstPort;
dps.rho=rho;
dps.sigma=sigma;
dps.stopTime=stopTime;
dps.delayBound=delayBound;
dps.maxPktSize=maxPktSize;
dps.initMetric=initMetric;
dps.originatorDsn=originatorDsn;
dps.destinationSN=destinationSN;
dps.lifetime=lifetime;
dps.interface=interface;
dps.whenScheduled=Simulator::Now();
dps.prepTimeout=Simulator::Schedule(Seconds (0.1),&HwmpProtocol::SendDelayedPrep,this,dps);
NS_LOG_ROUTING("scheduled for " << "1" << " seconds");
m_delayedPrepStruct.push_back (dps);
}
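//
// Duplicate scheduling is filtered above on the full connection tuple
// (src, dst, cnnType, IPs, ports), and entries are deliberately never erased
// (see the note at the end of SendDelayedPrep), so at most one delayed PREP
// is ever scheduled per connection.
//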
void
HwmpProtocol::SendDelayedPrep(DelayedPrepStruct dps)
{
NS_LOG_ROUTING("trying to send prep to " << dps.destination);
HwmpRtable::CnnBasedLookupResult result=m_rtable->LookupCnnBasedReverse(dps.destination,dps.cnnType,dps.srcIpv4Addr,dps.dstIpv4Addr,dps.srcPort,dps.dstPort);
if (result.retransmitter == Mac48Address::GetBroadcast ())
{
NS_LOG_ROUTING("cant find reverse path");
return;
}
//this is only for assigning a VB for this connection
if(!m_rtable->AddCnnBasedReactivePath (
dps.destination,
GetAddress (),
dps.source,
result.retransmitter,
dps.interface,
dps.cnnType,
dps.srcIpv4Addr,
dps.dstIpv4Addr,
dps.srcPort,
dps.dstPort,
dps.rho,
dps.sigma,
dps.stopTime,
dps.delayBound,
dps.maxPktSize,
Seconds (dps.lifetime),
dps.originatorDsn,
false,
m_doCAC))
{
return;
}
SendPrep (
GetAddress (),
dps.destination,
result.retransmitter,
dps.initMetric,
dps.cnnType,
dps.srcIpv4Addr,
dps.dstIpv4Addr,
dps.srcPort,
dps.dstPort,
dps.rho,
dps.sigma,
dps.stopTime,
dps.delayBound,
dps.maxPktSize,
dps.originatorDsn,
dps.destinationSN,
dps.lifetime,
dps.interface
);
NS_LOG_ROUTING("prep sent and AddCnnBasedReactivePath");
  //std::vector<DelayedPrepStruct>::iterator it=std::find(m_delayedPrepStruct.begin (),m_delayedPrepStruct.end (),dps);
  //if(it!=m_delayedPrepStruct.end ())
  //  m_delayedPrepStruct.erase (it); // the entry is deliberately kept in the vector so that the same PREP is not sent twice
}
void
HwmpProtocol::ReceivePrep (IePrep prep, Mac48Address from, uint32_t interface, Mac48Address fromMp, uint32_t metric)
{
NS_LOG_UNCOND( Simulator::Now ().GetSeconds () << " " << (int)Simulator::GetContext () << " prep received " << prep.GetSrcIpv4Addr() << ":" << (int)prep.GetSrcPort() << "=>" << prep.GetDstIpv4Addr() << ":" << (int)prep.GetDstPort());
NS_LOG_ROUTING("prep received");
if(prep.GetDestinationAddress () == GetAddress ()){
NS_LOG_ROUTING("prep received for me");
CbrConnection connection;
connection.cnnType=prep.GetCnnType ();
connection.dstIpv4Addr=prep.GetDstIpv4Addr ();
connection.srcIpv4Addr=prep.GetSrcIpv4Addr ();
connection.dstPort=prep.GetDstPort ();
connection.srcPort=prep.GetSrcPort ();
CbrConnectionsVector::iterator nrccvi=std::find(m_notRoutedCbrConnections.begin(),m_notRoutedCbrConnections.end(),connection);
if(nrccvi!=m_notRoutedCbrConnections.end()){
NS_LOG_ROUTING("sourceCnnHasDropped " << prep.GetSrcIpv4Addr() << ":" << (int)prep.GetSrcPort() << "=>" << prep.GetDstIpv4Addr() << ":" << (int)prep.GetDstPort() << " " << from);
return;
}
}
prep.IncrementMetric (metric);
  //acceptance criteria:
bool freshInfo (true);
std::vector<CnnBasedSeqnoMetricDatabase>::iterator dbit;
for(std::vector<CnnBasedSeqnoMetricDatabase>::iterator i=m_hwmpSeqnoMetricDatabase.begin();i!=m_hwmpSeqnoMetricDatabase.end();i++)
{
if(
(i->originatorAddress==prep.GetOriginatorAddress()) &&
(i->cnnType==prep.GetCnnType()) &&
(i->srcIpv4Addr==prep.GetSrcIpv4Addr()) &&
(i->srcPort==prep.GetSrcPort()) &&
(i->dstIpv4Addr==prep.GetDstIpv4Addr()) &&
(i->dstPort==prep.GetDstPort())
)
{
if ((int32_t)(i->destinationSeqNumber - prep.GetDestinationSeqNumber()) > 0)
{
/*BarghiTest 1392/08/02 add for get result start*/
//commented for hadireports std::cout << "t:" << Simulator::Now() << " ,Im " << m_address << " returning because of older preq" << std::endl;
/*BarghiTest 1392/08/02 add for get result end*/
NS_LOG_ROUTING("hwmp droppedCPREP seqnum " << prep.GetSrcIpv4Addr() << ":" << (int)prep.GetSrcPort() << "=>" << prep.GetDstIpv4Addr() << ":" << (int)prep.GetDstPort() << " " << from);
return;
}
dbit=i;
freshInfo=false;
break;
}
}
if(freshInfo)
{
CnnBasedSeqnoMetricDatabase newDb;
newDb.originatorAddress=prep.GetOriginatorAddress();
newDb.originatorSeqNumber=prep.GetOriginatorSeqNumber();
newDb.destinationAddress=prep.GetDestinationAddress();
newDb.destinationSeqNumber=prep.GetDestinationSeqNumber();
newDb.metric=prep.GetMetric();
newDb.cnnType=prep.GetCnnType();
newDb.srcIpv4Addr=prep.GetSrcIpv4Addr();
newDb.dstIpv4Addr=prep.GetDstIpv4Addr();
newDb.srcPort=prep.GetSrcPort();
newDb.dstPort=prep.GetDstPort();
m_hwmpSeqnoMetricDatabase.push_back(newDb);
if (prep.GetDestinationAddress () == GetAddress ())
{
if(!m_rtable->AddCnnBasedReactivePath (
prep.GetOriginatorAddress (),
from,
GetAddress (),
GetAddress (),
interface,
prep.GetCnnType (),
prep.GetSrcIpv4Addr (),
prep.GetDstIpv4Addr (),
prep.GetSrcPort (),
prep.GetDstPort (),
prep.GetRho (),
prep.GetSigma (),
prep.GetStopTime (),
prep.GetDelayBound (),
prep.GetMaxPktSize (),
Seconds (10000),
prep.GetOriginatorSeqNumber (),
false,
m_doCAC))
{
NS_LOG_ROUTING("cac rejected at sourceWhenPrepReceived the connection ");
CbrConnection connection;
connection.destination=prep.GetOriginatorAddress ();
connection.source=GetAddress ();
connection.cnnType=prep.GetCnnType ();
connection.dstIpv4Addr=prep.GetDstIpv4Addr ();
connection.srcIpv4Addr=prep.GetSrcIpv4Addr ();
connection.dstPort=prep.GetDstPort ();
connection.srcPort=prep.GetSrcPort ();
m_notRoutedCbrConnections.push_back (connection);
return;
}
m_rtable->AddPrecursor (prep.GetDestinationAddress (), interface, from,
MicroSeconds (prep.GetLifetime () * 1024));
/*if (result.retransmitter != Mac48Address::GetBroadcast ())
{
m_rtable->AddPrecursor (prep.GetOriginatorAddress (), interface, result.retransmitter,
result.lifetime);
}*/
//ReactivePathResolved (prep.GetOriginatorAddress ());
NS_LOG_ROUTING("hwmp routing pathResolved and AddCnnBasedReactivePath " << prep.GetOriginatorAddress ()<< " " << prep.GetSrcIpv4Addr() << ":" << (int)prep.GetSrcPort() << "=>" << prep.GetDstIpv4Addr() << ":" << (int)prep.GetDstPort() << " " << from);
CnnBasedReactivePathResolved(prep.GetOriginatorAddress (),GetAddress (),prep.GetCnnType (),prep.GetSrcIpv4Addr (),prep.GetDstIpv4Addr (),prep.GetSrcPort (),prep.GetDstPort ());
m_CbrCnnStateChanged(prep.GetSrcIpv4Addr(),prep.GetDstIpv4Addr(),prep.GetSrcPort(),prep.GetDstPort(),true);
InsertCbrCnnAtSourceIntoSourceCbrCnnsVector(prep.GetOriginatorAddress(),GetAddress (),prep.GetCnnType(),prep.GetSrcIpv4Addr(),prep.GetDstIpv4Addr(),prep.GetSrcPort(),prep.GetDstPort(),GetAddress(),from);
NS_LOG_DEBUG ("I am "<<GetAddress ()<<", resolved "<<prep.GetOriginatorAddress ());
return;
}
}else
{
NS_LOG_ROUTING("duplicate prep not allowed!");
NS_ASSERT(false);
}
//update routing info
//Now add a path to destination and add precursor to source
NS_LOG_DEBUG ("I am " << GetAddress () << ", received prep from " << prep.GetOriginatorAddress () << ", receiver was:" << from);
HwmpRtable::CnnBasedLookupResult result=m_rtable->LookupCnnBasedReverse(prep.GetDestinationAddress(),prep.GetCnnType(),prep.GetSrcIpv4Addr(),prep.GetDstIpv4Addr(),prep.GetSrcPort(),prep.GetDstPort());
if (result.retransmitter == Mac48Address::GetBroadcast ())
{
NS_LOG_ROUTING("cant find reverse path 2");
return;
}
if(!m_rtable->AddCnnBasedReactivePath ( prep.GetOriginatorAddress (),
from,
prep.GetDestinationAddress (),
result.retransmitter,
interface,
prep.GetCnnType (),
prep.GetSrcIpv4Addr (),
prep.GetDstIpv4Addr (),
prep.GetSrcPort (),
prep.GetDstPort (),
prep.GetRho (),
prep.GetSigma (),
prep.GetStopTime (),
prep.GetDelayBound (),
prep.GetMaxPktSize (),
Seconds (10000),
prep.GetOriginatorSeqNumber (),
true,
m_doCAC))
{
NS_LOG_ROUTING("cnnRejectedAtPrep " << prep.GetSrcIpv4Addr() << ":" << (int)prep.GetSrcPort() << "=>" << prep.GetDstIpv4Addr() << ":" << (int)prep.GetDstPort());
return;
}
InsertCbrCnnIntoCbrCnnsVector(prep.GetOriginatorAddress(),prep.GetDestinationAddress(),prep.GetCnnType(),prep.GetSrcIpv4Addr(),prep.GetDstIpv4Addr(),prep.GetSrcPort(),prep.GetDstPort(),result.retransmitter,from);
//Forward PREP
NS_LOG_ROUTING("hwmp routing pathSaved and AddCnnBasedReactivePath and SendPrep " << prep.GetOriginatorAddress () << " " << result.retransmitter << " " << prep.GetSrcIpv4Addr() << ":" << (int)prep.GetSrcPort() << "=>" << prep.GetDstIpv4Addr() << ":" << (int)prep.GetDstPort() << " " << from << " " << result.retransmitter);
HwmpProtocolMacMap::const_iterator prep_sender = m_interfaces.find (result.ifIndex);
NS_ASSERT (prep_sender != m_interfaces.end ());
prep_sender->second->SendPrep (prep, result.retransmitter);
}
void
HwmpProtocol::InsertCbrCnnAtSourceIntoSourceCbrCnnsVector(
Mac48Address destination,
Mac48Address source,
uint8_t cnnType,
Ipv4Address srcIpv4Addr,
Ipv4Address dstIpv4Addr,
uint16_t srcPort,
uint16_t dstPort,
Mac48Address prevHop,
Mac48Address nextHop
){
NS_LOG_ROUTING("hwmp inserting cnn into cnnsvector at source " << srcIpv4Addr << ":" << (int)srcPort << "=>" << dstIpv4Addr << ":" << (int)dstPort << " n " << nextHop << " p " << prevHop);
CbrConnection connection;
connection.destination=destination;
connection.source=source;
connection.cnnType=cnnType;
connection.dstIpv4Addr=dstIpv4Addr;
connection.srcIpv4Addr=srcIpv4Addr;
connection.dstPort=dstPort;
connection.srcPort=srcPort;
connection.prevMac=prevHop;
connection.nextMac=nextHop;
connection.whenExpires=Simulator::Now()+MilliSeconds(SOURCE_CBR_ROUTE_EXPIRE_MILLISECONDS);
CbrConnectionsVector::iterator ccvi=std::find(m_sourceCbrConnections.begin(),m_sourceCbrConnections.end(),connection);
if(ccvi==m_sourceCbrConnections.end()){
NS_LOG_ROUTING("hwmp new, inserted at source " << srcIpv4Addr << ":" << (int)srcPort << "=>" << dstIpv4Addr << ":" << (int)dstPort << " n " << nextHop << " p " << prevHop);
m_sourceCbrConnections.push_back(connection);
}else{
NS_LOG_ROUTING("hwmp exist, expiration extended at source " << srcIpv4Addr << ":" << (int)srcPort << "=>" << dstIpv4Addr << ":" << (int)dstPort << " n " << nextHop << " p " << prevHop);
    ccvi->whenExpires=Simulator::Now()+MilliSeconds(SOURCE_CBR_ROUTE_EXPIRE_MILLISECONDS); // MilliSeconds, not Seconds: the constant is in milliseconds
}
}
void
HwmpProtocol::SourceCbrRouteExtend(
Mac48Address destination,
Mac48Address source,
uint8_t cnnType,
Ipv4Address srcIpv4Addr,
Ipv4Address dstIpv4Addr,
uint16_t srcPort,
uint16_t dstPort
){
NS_LOG_ROUTING("hwmp cbr route extend at source " << srcIpv4Addr << ":" << (int)srcPort << "=>" << dstIpv4Addr << ":" << (int)dstPort);
CbrConnection connection;
connection.destination=destination;
connection.source=source;
connection.cnnType=cnnType;
connection.dstIpv4Addr=dstIpv4Addr;
connection.srcIpv4Addr=srcIpv4Addr;
connection.dstPort=dstPort;
connection.srcPort=srcPort;
CbrConnectionsVector::iterator ccvi=std::find(m_sourceCbrConnections.begin(),m_sourceCbrConnections.end(),connection);
if(ccvi!=m_sourceCbrConnections.end()){
NS_LOG_ROUTING("hwmp cbr route really found and extended at source " << srcIpv4Addr << ":" << (int)srcPort << "=>" << dstIpv4Addr << ":" << (int)dstPort << " n " << ccvi->nextMac << " p " << ccvi->prevMac);
ccvi->whenExpires=Simulator::Now()+MilliSeconds(SOURCE_CBR_ROUTE_EXPIRE_MILLISECONDS);
}else{
NS_LOG_ROUTING("hwmp cbr route not found and not extended at source " << srcIpv4Addr << ":" << (int)srcPort << "=>" << dstIpv4Addr << ":" << (int)dstPort);
}
}
void
HwmpProtocol::InsertCbrCnnIntoCbrCnnsVector(
Mac48Address destination,
Mac48Address source,
uint8_t cnnType,
Ipv4Address srcIpv4Addr,
Ipv4Address dstIpv4Addr,
uint16_t srcPort,
uint16_t dstPort,
Mac48Address prevHop,
Mac48Address nextHop
){
NS_LOG_ROUTING("hwmp inserting cnn into cnnsvector " << srcIpv4Addr << ":" << (int)srcPort << "=>" << dstIpv4Addr << ":" << (int)dstPort << " n " << nextHop << " p " << prevHop);
CbrConnection connection;
connection.destination=destination;
connection.source=source;
connection.cnnType=cnnType;
connection.dstIpv4Addr=dstIpv4Addr;
connection.srcIpv4Addr=srcIpv4Addr;
connection.dstPort=dstPort;
connection.srcPort=srcPort;
connection.prevMac=prevHop;
connection.nextMac=nextHop;
connection.whenExpires=Simulator::Now()+Seconds(CBR_ROUTE_EXPIRE_SECONDS);
//connection.routeExpireEvent=Simulator::Schedule(Seconds(CBR_ROUTE_EXPIRE_SECONDS),&HwmpProtocol::CbrRouteExpire,this,connection);
CbrConnectionsVector::iterator ccvi=std::find(m_cbrConnections.begin(),m_cbrConnections.end(),connection);
if(ccvi==m_cbrConnections.end()){
NS_LOG_ROUTING("hwmp new, inserted " << srcIpv4Addr << ":" << (int)srcPort << "=>" << dstIpv4Addr << ":" << (int)dstPort << " n " << nextHop << " p " << prevHop);
m_cbrConnections.push_back(connection);
}else{
NS_LOG_ROUTING("hwmp exist, expiration extended " << srcIpv4Addr << ":" << (int)srcPort << "=>" << dstIpv4Addr << ":" << (int)dstPort << " n " << nextHop << " p " << prevHop);
ccvi->whenExpires=Simulator::Now()+Seconds(CBR_ROUTE_EXPIRE_SECONDS);
ccvi->nextMac=nextHop;
ccvi->prevMac=prevHop;
//m_cbrConnections.erase(ccvi);
//m_cbrConnections.push_back(connection);
//ccvi->routeExpireEvent.Cancel();
//ccvi->routeExpireEvent=Simulator::Schedule(Seconds(CBR_ROUTE_EXPIRE_SECONDS),&HwmpProtocol::CbrRouteExpire,this,connection);
}
}
void
HwmpProtocol::CbrRouteExtend(
Mac48Address destination,
Mac48Address source,
uint8_t cnnType,
Ipv4Address srcIpv4Addr,
Ipv4Address dstIpv4Addr,
uint16_t srcPort,
uint16_t dstPort
){
NS_LOG_ROUTING("hwmp cbr route extend " << srcIpv4Addr << ":" << (int)srcPort << "=>" << dstIpv4Addr << ":" << (int)dstPort);
CbrConnection connection;
connection.destination=destination;
connection.source=source;
connection.cnnType=cnnType;
connection.dstIpv4Addr=dstIpv4Addr;
connection.srcIpv4Addr=srcIpv4Addr;
connection.dstPort=dstPort;
connection.srcPort=srcPort;
CbrConnectionsVector::iterator ccvi=std::find(m_cbrConnections.begin(),m_cbrConnections.end(),connection);
if(ccvi!=m_cbrConnections.end()){
NS_LOG_ROUTING("hwmp cbr route really found and extended " << srcIpv4Addr << ":" << (int)srcPort << "=>" << dstIpv4Addr << ":" << (int)dstPort << " n " << ccvi->nextMac << " p " << ccvi->prevMac);
ccvi->whenExpires=Simulator::Now()+Seconds(CBR_ROUTE_EXPIRE_SECONDS);
//ccvi->routeExpireEvent.Cancel();
//ccvi->routeExpireEvent=Simulator::Schedule(Seconds(CBR_ROUTE_EXPIRE_SECONDS),&HwmpProtocol::CbrRouteExpire,this,connection);
}else{
NS_LOG_ROUTING("hwmp cbr route not found and not extended " << srcIpv4Addr << ":" << (int)srcPort << "=>" << dstIpv4Addr << ":" << (int)dstPort);
}
}
void
HwmpProtocol::CbrRouteExpire(CbrConnection cbrCnn){
NS_LOG_ROUTING("hwmp cbr route expired " << cbrCnn.srcIpv4Addr << ":" << (int)cbrCnn.srcPort << "=>" << cbrCnn.dstIpv4Addr << ":" << (int)cbrCnn.dstPort << " n " << cbrCnn.nextMac << " p " << cbrCnn.prevMac);
CbrConnectionsVector::iterator ccvi=std::find(m_cbrConnections.begin(),m_cbrConnections.end(),cbrCnn);
if(ccvi!=m_cbrConnections.end()){
m_cbrConnections.erase(ccvi);
m_rtable->DeleteCnnBasedReactivePath(cbrCnn.destination,cbrCnn.source,cbrCnn.cnnType,cbrCnn.srcIpv4Addr,cbrCnn.dstIpv4Addr,cbrCnn.srcPort,cbrCnn.dstPort);
NS_LOG_ROUTING("hwmp cbr route deleted " << cbrCnn.srcIpv4Addr << ":" << (int)cbrCnn.srcPort << "=>" << cbrCnn.dstIpv4Addr << ":" << (int)cbrCnn.dstPort << " n " << cbrCnn.nextMac << " p " << cbrCnn.prevMac);
}
}
void
HwmpProtocol::CheckCbrRoutes4Expiration(){
CbrConnectionsVector tempvector;
bool changed=false;
for(CbrConnectionsVector::iterator ccvi=m_cbrConnections.begin();ccvi!=m_cbrConnections.end();ccvi++){
if(Simulator::Now()<ccvi->whenExpires){
tempvector.push_back(*ccvi);
}else{
changed = true;
m_rtable->DeleteCnnBasedReactivePath(ccvi->destination,ccvi->source,ccvi->cnnType,ccvi->srcIpv4Addr,ccvi->dstIpv4Addr,ccvi->srcPort,ccvi->dstPort);
NS_LOG_ROUTING("hwmp cbr route expired and deleted " << ccvi->srcIpv4Addr << ":" << (int)ccvi->srcPort << "=>" << ccvi->dstIpv4Addr << ":" << (int)ccvi->dstPort << " n " << ccvi->nextMac << " p " << ccvi->prevMac);
}
}
if(changed){
m_cbrConnections.clear();
m_cbrConnections=tempvector;
NS_LOG_ROUTING("hwmp num connections " << m_cbrConnections.size());
}
tempvector.clear();
for(CbrConnectionsVector::iterator ccvi=m_sourceCbrConnections.begin();ccvi!=m_sourceCbrConnections.end();ccvi++){
if(Simulator::Now()<ccvi->whenExpires){
tempvector.push_back(*ccvi);
}else{
changed = true;
m_CbrCnnStateChanged(ccvi->srcIpv4Addr,ccvi->dstIpv4Addr,ccvi->srcPort,ccvi->dstPort,false);
}
}
if(changed){
m_sourceCbrConnections.clear();
m_sourceCbrConnections=tempvector;
}
Simulator::Schedule(MilliSeconds(50),&HwmpProtocol::CheckCbrRoutes4Expiration,this);
}
void
HwmpProtocol::ReceivePerr (std::vector<FailedDestination> destinations, Mac48Address from, uint32_t interface, Mac48Address fromMp)
{
  //Acceptance criteria:
NS_LOG_DEBUG ("I am "<<GetAddress ()<<", received PERR from "<<from);
std::vector<FailedDestination> retval;
HwmpRtable::LookupResult result;
for (unsigned int i = 0; i < destinations.size (); i++)
{
result = m_rtable->LookupReactiveExpired (destinations[i].destination);
if (!(
(result.retransmitter != from) ||
(result.ifIndex != interface) ||
((int32_t)(result.seqnum - destinations[i].seqnum) > 0)
))
{
retval.push_back (destinations[i]);
}
}
if (retval.size () == 0)
{
return;
}
ForwardPathError (MakePathError (retval));
}
void
HwmpProtocol::SendPrep (Mac48Address src,
Mac48Address dst,
Mac48Address retransmitter,
uint32_t initMetric,
uint8_t cnnType,
Ipv4Address srcIpv4Addr,
Ipv4Address dstIpv4Addr,
uint16_t srcPort,
uint16_t dstPort,
uint16_t rho,
uint16_t sigma,
Time stopTime, Time delayBound, uint16_t maxPktSize,
uint32_t originatorDsn,
uint32_t destinationSN,
uint32_t lifetime,
uint32_t interface)
{
IePrep prep;
prep.SetHopcount (0);
prep.SetTtl (m_maxTtl);
prep.SetDestinationAddress (dst);
prep.SetDestinationSeqNumber (destinationSN);
prep.SetLifetime (lifetime);
prep.SetMetric (initMetric);
prep.SetCnnParams(cnnType,srcIpv4Addr,dstIpv4Addr,srcPort,dstPort);
prep.SetRho (rho);
prep.SetSigma (sigma);
prep.SetStopTime (stopTime);
prep.SetDelayBound (delayBound);
prep.SetMaxPktSize (maxPktSize);
prep.SetOriginatorAddress (src);
prep.SetOriginatorSeqNumber (originatorDsn);
HwmpProtocolMacMap::const_iterator prep_sender = m_interfaces.find (interface);
NS_ASSERT (prep_sender != m_interfaces.end ());
prep_sender->second->SendPrep (prep, retransmitter);
m_stats.initiatedPrep++;
}
bool
HwmpProtocol::Install (Ptr<MeshPointDevice> mp)
{
m_mp = mp;
std::vector<Ptr<NetDevice> > interfaces = mp->GetInterfaces ();
for (std::vector<Ptr<NetDevice> >::const_iterator i = interfaces.begin (); i != interfaces.end (); i++)
{
// Checking for compatible net device
Ptr<WifiNetDevice> wifiNetDev = (*i)->GetObject<WifiNetDevice> ();
if (wifiNetDev == 0)
{
return false;
}
Ptr<MeshWifiInterfaceMac> mac = wifiNetDev->GetMac ()->GetObject<MeshWifiInterfaceMac> ();
if (mac == 0)
{
return false;
}
// Installing plugins:
Ptr<HwmpProtocolMac> hwmpMac = Create<HwmpProtocolMac> (wifiNetDev->GetIfIndex (), this);
m_interfaces[wifiNetDev->GetIfIndex ()] = hwmpMac;
mac->InstallPlugin (hwmpMac);
//Installing airtime link metric:
Ptr<AirtimeLinkMetricCalculator> metric = CreateObject <AirtimeLinkMetricCalculator> ();
mac->SetLinkMetricCallback (MakeCallback (&AirtimeLinkMetricCalculator::CalculateMetric, metric));
}
mp->SetRoutingProtocol (this);
// Mesh point aggregates all installed protocols
mp->AggregateObject (this);
m_address = Mac48Address::ConvertFrom (mp->GetAddress ()); // address;
return true;
}
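//
// A minimal usage sketch for Install() (illustrative assumptions: a mesh
// point whose interfaces are WifiNetDevices carrying a MeshWifiInterfaceMac):
//
//   Ptr<MeshPointDevice> mp = ...;                 // hypothetical device
//   Ptr<HwmpProtocol> hwmp = CreateObject<HwmpProtocol> ();
//   if (!hwmp->Install (mp))
//     {
//       NS_FATAL_ERROR ("interfaces are not HWMP-compatible");
//     }
//
// Install() returns false as soon as any interface is not a WifiNetDevice
// with a mesh MAC, so setup is effectively all-or-nothing.
//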
void
HwmpProtocol::PeerLinkStatus (Mac48Address meshPointAddress, Mac48Address peerAddress, uint32_t interface, bool status)
{
if (status)
{
return;
}
std::vector<FailedDestination> destinations = m_rtable->GetUnreachableDestinations (peerAddress);
InitiatePathError (MakePathError (destinations));
}
void
HwmpProtocol::SetNeighboursCallback (Callback<std::vector<Mac48Address>, uint32_t> cb)
{
m_neighboursCallback = cb;
}
bool
HwmpProtocol::DropDataFrame (uint32_t seqno, Mac48Address source)
{
if (source == GetAddress ())
{
return true;
}
std::map<Mac48Address, uint32_t,std::less<Mac48Address> >::const_iterator i = m_lastDataSeqno.find (source);
if (i == m_lastDataSeqno.end ())
{
m_lastDataSeqno[source] = seqno;
}
else
{
if ((int32_t)(i->second - seqno) >= 0)
{
return true;
}
m_lastDataSeqno[source] = seqno;
}
return false;
}
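//
// The (int32_t)(last - seqno) >= 0 test above is a wraparound-safe
// ("serial number arithmetic") comparison. A small worked example:
//
//   uint32_t last = 0xFFFFFFFBu;                    // old, nearly wrapped seqno
//   uint32_t incoming = 5;                          // post-wrap seqno
//   bool stale = (int32_t)(last - incoming) >= 0;   // (int32_t)0xFFFFFFF6 = -10
//                                                   // -> false, frame accepted
//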
HwmpProtocol::PathError
HwmpProtocol::MakePathError (std::vector<FailedDestination> destinations)
{
PathError retval;
//HwmpRtable increments a sequence number as written in 11B.9.7.2
retval.receivers = GetPerrReceivers (destinations);
if (retval.receivers.size () == 0)
{
return retval;
}
m_stats.initiatedPerr++;
for (unsigned int i = 0; i < destinations.size (); i++)
{
retval.destinations.push_back (destinations[i]);
m_rtable->DeleteReactivePath (destinations[i].destination);
}
return retval;
}
void
HwmpProtocol::InitiatePathError (PathError perr)
{
for (HwmpProtocolMacMap::const_iterator i = m_interfaces.begin (); i != m_interfaces.end (); i++)
{
std::vector<Mac48Address> receivers_for_interface;
for (unsigned int j = 0; j < perr.receivers.size (); j++)
{
if (i->first == perr.receivers[j].first)
{
receivers_for_interface.push_back (perr.receivers[j].second);
}
}
i->second->InitiatePerr (perr.destinations, receivers_for_interface);
}
}
void
HwmpProtocol::ForwardPathError (PathError perr)
{
for (HwmpProtocolMacMap::const_iterator i = m_interfaces.begin (); i != m_interfaces.end (); i++)
{
std::vector<Mac48Address> receivers_for_interface;
for (unsigned int j = 0; j < perr.receivers.size (); j++)
{
if (i->first == perr.receivers[j].first)
{
receivers_for_interface.push_back (perr.receivers[j].second);
}
}
i->second->ForwardPerr (perr.destinations, receivers_for_interface);
}
}
std::vector<std::pair<uint32_t, Mac48Address> >
HwmpProtocol::GetPerrReceivers (std::vector<FailedDestination> failedDest)
{
HwmpRtable::PrecursorList retval;
for (unsigned int i = 0; i < failedDest.size (); i++)
{
HwmpRtable::PrecursorList precursors = m_rtable->GetPrecursors (failedDest[i].destination);
m_rtable->DeleteReactivePath (failedDest[i].destination);
m_rtable->DeleteProactivePath (failedDest[i].destination);
for (unsigned int j = 0; j < precursors.size (); j++)
{
retval.push_back (precursors[j]);
}
}
  //Check if we have duplicates in retval and precursors:
for (unsigned int i = 0; i < retval.size (); i++)
{
for (unsigned int j = i+1; j < retval.size (); j++)
{
      if (retval[i].second == retval[j].second)
        {
          retval.erase (retval.begin () + j);
          j--; // re-check the element that shifted into position j
        }
}
}
return retval;
}
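//
// A sketch of an equivalent duplicate-removal pass using a set, keyed on the
// receiver address like the loop above (an alternative formulation, not what
// this file does):
//
//   std::set<Mac48Address> seen;
//   HwmpRtable::PrecursorList unique;
//   for (size_t k = 0; k < retval.size (); k++)
//     {
//       if (seen.insert (retval[k].second).second)   // first occurrence only
//         {
//           unique.push_back (retval[k]);
//         }
//     }
//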
std::vector<Mac48Address>
HwmpProtocol::GetPreqReceivers (uint32_t interface)
{
std::vector<Mac48Address> retval;
if (!m_neighboursCallback.IsNull ())
{
retval = m_neighboursCallback (interface);
}
if ((retval.size () >= m_unicastPreqThreshold) || (retval.size () == 0))
{
retval.clear ();
retval.push_back (Mac48Address::GetBroadcast ());
}
return retval;
}
std::vector<Mac48Address>
HwmpProtocol::GetBroadcastReceivers (uint32_t interface)
{
std::vector<Mac48Address> retval;
if (!m_neighboursCallback.IsNull ())
{
retval = m_neighboursCallback (interface);
}
if ((retval.size () >= m_unicastDataThreshold) || (retval.size () == 0))
{
retval.clear ();
retval.push_back (Mac48Address::GetBroadcast ());
}
return retval;
}
bool
HwmpProtocol::QueuePacket (QueuedPacket packet)
{
if (m_rqueue.size () >= m_maxQueueSize)
{
NS_LOG_CAC("packetDroppedAtHwmp " << (int)packet.pkt->GetUid () << " " << m_rqueue.size ());
return false;
}
m_rqueue.push_back (packet);
return true;
}
HwmpProtocol::QueuedPacket
HwmpProtocol::DequeueFirstPacketByCnnParams (
Mac48Address dst,
Mac48Address src,
uint8_t cnnType,
Ipv4Address srcIpv4Addr,
Ipv4Address dstIpv4Addr,
uint16_t srcPort,
uint16_t dstPort
)
{
QueuedPacket retval;
retval.pkt = 0;
NS_LOG_ROUTING("hwmp DequeueFirstPacketByCnnParams " << (int)m_rqueue.size());
for (std::vector<QueuedPacket>::iterator i = m_rqueue.begin (); i != m_rqueue.end (); i++)
{
if (
((*i).dst == dst) &&
((*i).src == src) &&
((*i).cnnType == cnnType) &&
((*i).srcIpv4Addr == srcIpv4Addr) &&
((*i).dstIpv4Addr == dstIpv4Addr) &&
((*i).srcPort == srcPort) &&
((*i).dstPort == dstPort)
)
{
retval = (*i);
m_rqueue.erase (i);
break;
}
}
//std::cout << Simulator::Now().GetSeconds() << " " << m_address << " SourceQueueSize " << m_rqueue.size() << std::endl;
return retval;
}
HwmpProtocol::QueuedPacket
HwmpProtocol::DequeueFirstPacketByDst (Mac48Address dst)
{
QueuedPacket retval;
retval.pkt = 0;
for (std::vector<QueuedPacket>::iterator i = m_rqueue.begin (); i != m_rqueue.end (); i++)
{
if ((*i).dst == dst)
{
retval = (*i);
m_rqueue.erase (i);
break;
}
}
return retval;
}
HwmpProtocol::QueuedPacket
HwmpProtocol::DequeueFirstPacket ()
{
QueuedPacket retval;
retval.pkt = 0;
if (m_rqueue.size () != 0)
{
retval = m_rqueue[0];
m_rqueue.erase (m_rqueue.begin ());
}
return retval;
}
void
HwmpProtocol::ReactivePathResolved (Mac48Address dst)
{
std::map<Mac48Address, PreqEvent>::iterator i = m_preqTimeouts.find (dst);
if (i != m_preqTimeouts.end ())
{
m_routeDiscoveryTimeCallback (Simulator::Now () - i->second.whenScheduled);
}
HwmpRtable::LookupResult result = m_rtable->LookupReactive (dst);
NS_ASSERT (result.retransmitter != Mac48Address::GetBroadcast ());
//Send all packets stored for this destination
QueuedPacket packet = DequeueFirstPacketByDst (dst);
while (packet.pkt != 0)
{
if(packet.src==GetAddress()){
NS_LOG_ROUTING("tx4mSource2 " << (int)packet.pkt->GetUid());
}
//set RA tag for retransmitter:
HwmpTag tag;
packet.pkt->RemovePacketTag (tag);
tag.SetAddress (result.retransmitter);
packet.pkt->AddPacketTag (tag);
m_stats.txUnicast++;
m_stats.txBytes += packet.pkt->GetSize ();
packet.reply (true, packet.pkt, packet.src, packet.dst, packet.protocol, result.ifIndex);
packet = DequeueFirstPacketByDst (dst);
}
}
void
HwmpProtocol::CnnBasedReactivePathResolved (
Mac48Address dst,
Mac48Address src,
uint8_t cnnType,
Ipv4Address srcIpv4Addr,
Ipv4Address dstIpv4Addr,
uint16_t srcPort,
uint16_t dstPort
)
{
HwmpRtable::CnnBasedLookupResult result = m_rtable->LookupCnnBasedReactive(dst,src,cnnType,srcIpv4Addr,dstIpv4Addr,srcPort,dstPort);
NS_ASSERT (result.retransmitter != Mac48Address::GetBroadcast ());
//Send all packets stored for this destination
QueuedPacket packet = DequeueFirstPacketByCnnParams (dst,src,cnnType,srcIpv4Addr,dstIpv4Addr,srcPort,dstPort);
while (packet.pkt != 0)
{
if((packet.src==GetAddress())&&(cnnType==HwmpRtable::CNN_TYPE_IP_PORT)){
NS_LOG_ROUTING("tx4mSource2 " << (int)packet.pkt->GetUid());
NS_LOG_CAC("tx4mSource2 " << (int)packet.pkt->GetUid());
m_txed4mSourceCallback();
}
//set RA tag for retransmitter:
HwmpTag tag;
packet.pkt->RemovePacketTag (tag);
tag.SetAddress (result.retransmitter);
packet.pkt->AddPacketTag (tag);
m_stats.txUnicast++;
m_stats.txBytes += packet.pkt->GetSize ();
//packet.reply (true, packet.pkt, packet.src, packet.dst, packet.protocol, result.ifIndex);
// m_rtable->QueueCnnBasedPacket (packet.dst,packet.src,cnnType,srcIpv4Addr,dstIpv4Addr,srcPort,dstPort,packet.pkt,packet.protocol,result.ifIndex,packet.reply);
packet = DequeueFirstPacketByCnnParams (dst,src,cnnType,srcIpv4Addr,dstIpv4Addr,srcPort,dstPort);
}
}
void
HwmpProtocol::ProactivePathResolved ()
{
//send all packets to root
HwmpRtable::LookupResult result = m_rtable->LookupProactive ();
NS_ASSERT (result.retransmitter != Mac48Address::GetBroadcast ());
QueuedPacket packet = DequeueFirstPacket ();
while (packet.pkt != 0)
{
//set RA tag for retransmitter:
HwmpTag tag;
if (!packet.pkt->RemovePacketTag (tag))
{
NS_FATAL_ERROR ("HWMP tag must be present at this point");
}
tag.SetAddress (result.retransmitter);
packet.pkt->AddPacketTag (tag);
m_stats.txUnicast++;
m_stats.txBytes += packet.pkt->GetSize ();
packet.reply (true, packet.pkt, packet.src, packet.dst, packet.protocol, result.ifIndex);
packet = DequeueFirstPacket ();
}
}
bool
HwmpProtocol::ShouldSendPreq (Mac48Address dst)
{
std::map<Mac48Address, PreqEvent>::const_iterator i = m_preqTimeouts.find (dst);
if (i == m_preqTimeouts.end ())
{
m_preqTimeouts[dst].preqTimeout = Simulator::Schedule (
Time (m_dot11MeshHWMPnetDiameterTraversalTime * 2),
&HwmpProtocol::RetryPathDiscovery, this, dst, 1);
m_preqTimeouts[dst].whenScheduled = Simulator::Now ();
return true;
}
return false;
}
bool
HwmpProtocol::CnnBasedShouldSendPreq (
RhoSigmaTag rsTag,
Mac48Address dst,
Mac48Address src,
uint8_t cnnType,
Ipv4Address srcIpv4Addr,
Ipv4Address dstIpv4Addr,
uint16_t srcPort,
uint16_t dstPort
)
{
for(std::vector<CnnBasedPreqEvent>::iterator cbpei = m_cnnBasedPreqTimeouts.begin (); cbpei != m_cnnBasedPreqTimeouts.end (); cbpei++)
{
if(
(cbpei->destination==dst) &&
(cbpei->source==src) &&
(cbpei->cnnType==cnnType) &&
(cbpei->srcIpv4Addr==srcIpv4Addr) &&
(cbpei->dstIpv4Addr==dstIpv4Addr) &&
(cbpei->srcPort==srcPort) &&
(cbpei->dstPort==dstPort)
)
{
return false;
}
}
if(src==GetAddress ())
{
if(m_doCAC)
{
      // calculate the total energy needed over the whole connection lifetime and the energy needed for bursts
      double totalEnergyNeeded = (m_rtable->m_maxEnergyPerDataPacket+m_rtable->m_maxEnergyPerAckPacket)*(rsTag.GetStopTime ()-Simulator::Now ()).GetSeconds ()*(rsTag.GetRho ()/60.0)*m_rtable->m_energyAlpha; // 60.0: avoid integer truncation of the per-minute rate
double burstEnergyNeeded = (m_rtable->m_maxEnergyPerDataPacket+m_rtable->m_maxEnergyPerAckPacket)*rsTag.GetSigma ()*m_rtable->m_energyAlpha;
double energyUntilEndOfConnection = m_rtable->bPrim ()+ m_rtable->gammaPrim ()*(rsTag.GetStopTime ()-Simulator::Now ()).GetSeconds ();
NS_LOG_ROUTING("ReceiveFirstPacketCACCheck " << m_rtable->m_maxEnergyPerDataPacket << " " << m_rtable->m_maxEnergyPerAckPacket << " " << (int)rsTag.GetRho () << " " << (int)rsTag.GetSigma () << " " << rsTag.GetStopTime () << " ; " << totalEnergyNeeded << " " << burstEnergyNeeded << " " << energyUntilEndOfConnection << " " << m_rtable->bPrim ());
if( ( ( m_rtable->bPrim () < burstEnergyNeeded ) || ( energyUntilEndOfConnection < totalEnergyNeeded ) ) || (!m_interfaces.begin ()->second->HasEnoughCapacity4NewConnection (GetAddress (),dst,0,GetAddress (),rsTag.GetRho ())) )// CAC check
{
NS_LOG_ROUTING("cac rejected at source the connection " << totalEnergyNeeded << " " << burstEnergyNeeded << " " << energyUntilEndOfConnection << " " << m_rtable->bPrim ());
CbrConnection connection;
connection.destination=dst;
connection.source=src;
connection.cnnType=cnnType;
connection.dstIpv4Addr=dstIpv4Addr;
connection.srcIpv4Addr=srcIpv4Addr;
connection.dstPort=dstPort;
connection.srcPort=srcPort;
m_notRoutedCbrConnections.push_back (connection);
return false;
}
}
else
{
if(m_rtable->bPrim ()<=0)
{
NS_LOG_ROUTING("bPrim()<=0 rejected at source the connection " << m_rtable->bPrim ());
CbrConnection connection;
connection.destination=dst;
connection.source=src;
connection.cnnType=cnnType;
connection.dstIpv4Addr=dstIpv4Addr;
connection.srcIpv4Addr=srcIpv4Addr;
connection.dstPort=dstPort;
connection.srcPort=srcPort;
m_notRoutedCbrConnections.push_back (connection);
return false;
}
}
}
CnnBasedPreqEvent cbpe;
cbpe.destination=dst;
cbpe.source=src;
cbpe.cnnType=cnnType;
cbpe.srcIpv4Addr=srcIpv4Addr;
cbpe.dstIpv4Addr=dstIpv4Addr;
cbpe.srcPort=srcPort;
cbpe.dstPort=dstPort;
cbpe.rho=rsTag.GetRho ();
cbpe.sigma=rsTag.GetSigma ();
cbpe.stopTime=rsTag.GetStopTime ();
cbpe.delayBound=rsTag.delayBound ();
cbpe.maxPktSize=rsTag.maxPktSize ();
cbpe.whenScheduled=Simulator::Now();
cbpe.preqTimeout=Simulator::Schedule(
Time (m_dot11MeshHWMPnetDiameterTraversalTime * 2),
&HwmpProtocol::CnnBasedRetryPathDiscovery,this,cbpe,1);
m_cnnBasedPreqTimeouts.push_back(cbpe);
NS_LOG_ROUTING("need to send preq");
return true;
}
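//
// A worked example of the admission test above (all numbers are illustrative
// assumptions): with maxEnergyPerDataPacket + maxEnergyPerAckPacket = 2 mJ,
// rho = 60 packets/min (1 pkt/s), 100 s of connection lifetime left and
// energyAlpha = 1:
//
//   totalEnergyNeeded = 2 mJ * 100 s * 1 pkt/s = 200 mJ
//   burstEnergyNeeded = 2 mJ * sigma             (e.g. sigma = 5 -> 10 mJ)
//   energyUntilEnd    = bPrim() + gammaPrim() * 100 s
//
// The connection is admitted only if the burst fits in the current buffer
// (bPrim() >= 10 mJ), the lifetime budget covers the total
// (energyUntilEnd >= 200 mJ), and the MAC reports spare capacity.
//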
void
HwmpProtocol::RetryPathDiscovery (Mac48Address dst, uint8_t numOfRetry)
{
HwmpRtable::LookupResult result = m_rtable->LookupReactive (dst);
if (result.retransmitter == Mac48Address::GetBroadcast ())
{
result = m_rtable->LookupProactive ();
}
if (result.retransmitter != Mac48Address::GetBroadcast ())
{
std::map<Mac48Address, PreqEvent>::iterator i = m_preqTimeouts.find (dst);
NS_ASSERT (i != m_preqTimeouts.end ());
m_preqTimeouts.erase (i);
return;
}
if (numOfRetry > m_dot11MeshHWMPmaxPREQretries)
{
NS_LOG_ROUTING("givingUpPathRequest " << dst);
QueuedPacket packet = DequeueFirstPacketByDst (dst);
//purge queue and delete entry from retryDatabase
while (packet.pkt != 0)
{
m_stats.totalDropped++;
packet.reply (false, packet.pkt, packet.src, packet.dst, packet.protocol, HwmpRtable::MAX_METRIC);
packet = DequeueFirstPacketByDst (dst);
}
std::map<Mac48Address, PreqEvent>::iterator i = m_preqTimeouts.find (dst);
NS_ASSERT (i != m_preqTimeouts.end ());
m_routeDiscoveryTimeCallback (Simulator::Now () - i->second.whenScheduled);
m_preqTimeouts.erase (i);
return;
}
numOfRetry++;
uint32_t originator_seqno = GetNextHwmpSeqno ();
uint32_t dst_seqno = m_rtable->LookupReactiveExpired (dst).seqnum;
NS_LOG_ROUTING("retryPathRequest " << dst);
for (HwmpProtocolMacMap::const_iterator i = m_interfaces.begin (); i != m_interfaces.end (); i++)
{
Ipv4Address tempadd;
if(m_routingType==2)
i->second->RequestDestination (dst, originator_seqno, dst_seqno,HwmpRtable::CNN_TYPE_PKT_BASED,tempadd,tempadd,0,0,0,0,Seconds (0),Seconds (0),0,0x7fffffff,0x7fffffff,0x7fffffff);
else
i->second->RequestDestination (dst, originator_seqno, dst_seqno,HwmpRtable::CNN_TYPE_PKT_BASED,tempadd,tempadd,0,0,0,0,Seconds (0),Seconds (0),0,0,0,0);
}
m_preqTimeouts[dst].preqTimeout = Simulator::Schedule (
Time ((2 * (numOfRetry + 1)) * m_dot11MeshHWMPnetDiameterTraversalTime),
&HwmpProtocol::RetryPathDiscovery, this, dst, numOfRetry);
}
void
HwmpProtocol::CnnBasedRetryPathDiscovery (
CnnBasedPreqEvent preqEvent,
uint8_t numOfRetry
)
{
HwmpRtable::CnnBasedLookupResult result = m_rtable->LookupCnnBasedReactive(preqEvent.destination,preqEvent.source,preqEvent.cnnType,preqEvent.srcIpv4Addr,preqEvent.dstIpv4Addr,preqEvent.srcPort,preqEvent.dstPort);
if (result.retransmitter != Mac48Address::GetBroadcast ())
{
for(std::vector<CnnBasedPreqEvent>::iterator cbpei = m_cnnBasedPreqTimeouts.begin (); cbpei != m_cnnBasedPreqTimeouts.end (); cbpei++)
{
if(
(cbpei->destination==preqEvent.destination) &&
(cbpei->source==preqEvent.source) &&
(cbpei->cnnType==preqEvent.cnnType) &&
(cbpei->srcIpv4Addr==preqEvent.srcIpv4Addr) &&
(cbpei->dstIpv4Addr==preqEvent.dstIpv4Addr) &&
(cbpei->srcPort==preqEvent.srcPort) &&
(cbpei->dstPort==preqEvent.dstPort)
)
{
m_cnnBasedPreqTimeouts.erase(cbpei);
return;
}
}
NS_ASSERT (false);
return;
}
if (numOfRetry > m_dot11MeshHWMPmaxPREQretries)
{
//hadireport reject connection
NS_LOG_ROUTING("hwmp connectionRejected " << preqEvent.destination << " " << preqEvent.srcIpv4Addr << ":" << (int)preqEvent.srcPort << "=>" << preqEvent.dstIpv4Addr << ":" << (int)preqEvent.dstPort);
QueuedPacket packet = DequeueFirstPacketByCnnParams (preqEvent.destination,preqEvent.source,preqEvent.cnnType,preqEvent.srcIpv4Addr,preqEvent.dstIpv4Addr,preqEvent.srcPort,preqEvent.dstPort);
CbrConnection connection;
connection.destination=preqEvent.destination;
connection.source=preqEvent.source;
connection.cnnType=preqEvent.cnnType;
connection.dstIpv4Addr=preqEvent.dstIpv4Addr;
connection.srcIpv4Addr=preqEvent.srcIpv4Addr;
connection.dstPort=preqEvent.dstPort;
connection.srcPort=preqEvent.srcPort;
CbrConnectionsVector::iterator nrccvi=std::find(m_notRoutedCbrConnections.begin(),m_notRoutedCbrConnections.end(),connection);
if(nrccvi==m_notRoutedCbrConnections.end()){
m_notRoutedCbrConnections.push_back(connection);
}
//purge queue and delete entry from retryDatabase
while (packet.pkt != 0)
{
if(packet.src==GetAddress()){
NS_LOG_ROUTING("hwmp noRouteDrop2 " << (int)packet.pkt->GetUid() << " " << preqEvent.srcIpv4Addr << ":" << (int)preqEvent.srcPort << "=>" << preqEvent.dstIpv4Addr << ":" << (int)preqEvent.dstPort);
}
m_stats.totalDropped++;
packet.reply (false, packet.pkt, packet.src, packet.dst, packet.protocol, HwmpRtable::MAX_METRIC);
packet = DequeueFirstPacketByCnnParams (preqEvent.destination,preqEvent.source,preqEvent.cnnType,preqEvent.srcIpv4Addr,preqEvent.dstIpv4Addr,preqEvent.srcPort,preqEvent.dstPort);
}
for(std::vector<CnnBasedPreqEvent>::iterator cbpei = m_cnnBasedPreqTimeouts.begin (); cbpei != m_cnnBasedPreqTimeouts.end (); cbpei++)
{
if(
(cbpei->destination==preqEvent.destination) &&
(cbpei->cnnType==preqEvent.cnnType) &&
(cbpei->srcIpv4Addr==preqEvent.srcIpv4Addr) &&
(cbpei->dstIpv4Addr==preqEvent.dstIpv4Addr) &&
(cbpei->srcPort==preqEvent.srcPort) &&
(cbpei->dstPort==preqEvent.dstPort)
)
{
m_cnnBasedPreqTimeouts.erase(cbpei);
return;
}
}
NS_ASSERT (false);
return;
}
numOfRetry++;
uint32_t originator_seqno = GetNextHwmpSeqno ();
uint32_t dst_seqno = 0;
for (HwmpProtocolMacMap::const_iterator i = m_interfaces.begin (); i != m_interfaces.end (); i++)
{
if(m_routingType==2)
i->second->RequestDestination (preqEvent.destination, originator_seqno, dst_seqno, preqEvent.cnnType, preqEvent.srcIpv4Addr, preqEvent.dstIpv4Addr, preqEvent.srcPort, preqEvent.dstPort,preqEvent.rho,preqEvent.sigma,preqEvent.stopTime,preqEvent.delayBound,preqEvent.maxPktSize, 0x7fffffff,0x7fffffff,0x7fffffff);
else
i->second->RequestDestination (preqEvent.destination, originator_seqno, dst_seqno, preqEvent.cnnType, preqEvent.srcIpv4Addr, preqEvent.dstIpv4Addr, preqEvent.srcPort, preqEvent.dstPort,preqEvent.rho,preqEvent.sigma,preqEvent.stopTime,preqEvent.delayBound,preqEvent.maxPktSize, 0,0,0);
}
for(std::vector<CnnBasedPreqEvent>::iterator cbpei = m_cnnBasedPreqTimeouts.begin (); cbpei != m_cnnBasedPreqTimeouts.end (); cbpei++)
{
if(
(cbpei->destination==preqEvent.destination) &&
(cbpei->cnnType==preqEvent.cnnType) &&
(cbpei->srcIpv4Addr==preqEvent.srcIpv4Addr) &&
(cbpei->dstIpv4Addr==preqEvent.dstIpv4Addr) &&
(cbpei->srcPort==preqEvent.srcPort) &&
(cbpei->dstPort==preqEvent.dstPort)
)
{
cbpei->preqTimeout=Simulator::Schedule(
Time ((2 * (numOfRetry + 1)) * m_dot11MeshHWMPnetDiameterTraversalTime),
&HwmpProtocol::CnnBasedRetryPathDiscovery,this,(*cbpei),numOfRetry);
cbpei->whenScheduled=Simulator::Now();
return;
}
}
CnnBasedPreqEvent cbpe;
cbpe.destination=preqEvent.destination;
cbpe.cnnType=preqEvent.cnnType;
cbpe.srcIpv4Addr=preqEvent.srcIpv4Addr;
cbpe.dstIpv4Addr=preqEvent.dstIpv4Addr;
cbpe.srcPort=preqEvent.srcPort;
cbpe.dstPort=preqEvent.dstPort;
cbpe.whenScheduled=Simulator::Now();
cbpe.preqTimeout=Simulator::Schedule(
Time ((2 * (numOfRetry + 1)) * m_dot11MeshHWMPnetDiameterTraversalTime),
&HwmpProtocol::CnnBasedRetryPathDiscovery,this,cbpe,numOfRetry);
m_cnnBasedPreqTimeouts.push_back(cbpe);
}
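//
// Retry backoff: a retry with counter k is scheduled 2*(k+1) net-diameter
// traversal times out. With the assumed ns-3 default traversal time of
// 102.4 ms, successive retries fire roughly 0.61 s (k = 2), 0.82 s (k = 3),
// 1.02 s (k = 4), ... apart, until dot11MeshHWMPmaxPREQretries is exceeded
// and the connection is rejected.
//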
//Proactive PREQ routines:
void
HwmpProtocol::SetRoot ()
{
Time randomStart = Seconds (m_coefficient->GetValue ());
m_proactivePreqTimer = Simulator::Schedule (randomStart, &HwmpProtocol::SendProactivePreq, this);
NS_LOG_DEBUG ("ROOT IS: " << m_address);
m_isRoot = true;
}
void
HwmpProtocol::UnsetRoot ()
{
m_proactivePreqTimer.Cancel ();
}
void
HwmpProtocol::SendProactivePreq ()
{
IePreq preq;
//By default: must answer
preq.SetHopcount (0);
preq.SetTTL (m_maxTtl);
preq.SetLifetime (m_dot11MeshHWMPactiveRootTimeout.GetMicroSeconds () /1024);
//\attention: do not forget to set originator address, sequence
//number and preq ID in HWMP-MAC plugin
preq.AddDestinationAddressElement (true, true, Mac48Address::GetBroadcast (), 0);
preq.SetOriginatorAddress (GetAddress ());
preq.SetPreqID (GetNextPreqId ());
preq.SetOriginatorSeqNumber (GetNextHwmpSeqno ());
for (HwmpProtocolMacMap::const_iterator i = m_interfaces.begin (); i != m_interfaces.end (); i++)
{
i->second->SendPreq (preq);
}
m_proactivePreqTimer = Simulator::Schedule (m_dot11MeshHWMPpathToRootInterval, &HwmpProtocol::SendProactivePreq, this);
}
bool
HwmpProtocol::GetDoFlag ()
{
return m_doFlag;
}
bool
HwmpProtocol::GetRfFlag ()
{
return m_rfFlag;
}
Time
HwmpProtocol::GetPreqMinInterval ()
{
return m_dot11MeshHWMPpreqMinInterval;
}
Time
HwmpProtocol::GetPerrMinInterval ()
{
return m_dot11MeshHWMPperrMinInterval;
}
uint8_t
HwmpProtocol::GetMaxTtl ()
{
return m_maxTtl;
}
uint32_t
HwmpProtocol::GetNextPreqId ()
{
m_preqId++;
return m_preqId;
}
uint32_t
HwmpProtocol::GetNextHwmpSeqno ()
{
m_hwmpSeqno++;
return m_hwmpSeqno;
}
uint32_t
HwmpProtocol::GetActivePathLifetime ()
{
return m_dot11MeshHWMPactivePathTimeout.GetMicroSeconds () / 1024;
}
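//
// Path lifetimes are advertised in 802.11 time units (1 TU = 1024 us), hence
// the division by 1024 above; e.g. a 5.12 s timeout becomes 5000 TU.
//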
uint8_t
HwmpProtocol::GetUnicastPerrThreshold ()
{
return m_unicastPerrThreshold;
}
Mac48Address
HwmpProtocol::GetAddress ()
{
return m_address;
}
void
HwmpProtocol::EnergyChange (Ptr<Packet> packet,bool isAck, bool incDec, double energy, double remainedEnergy,uint32_t packetSize)
{
  uint8_t cnnType; // 1: MAC only, 2: IP only, 3: IP+port
Ipv4Address srcIpv4Addr;
Ipv4Address dstIpv4Addr;
uint16_t srcPort;
uint16_t dstPort;
if(packet!=0)
{
WifiMacHeader wmhdr;
packet->RemoveHeader (wmhdr);
WifiMacTrailer fcs;
packet->RemoveTrailer (fcs);
MeshHeader meshHdr;
packet->RemoveHeader (meshHdr);
LlcSnapHeader llc;
packet->RemoveHeader (llc);
NS_LOG_VB("rxtx packet llc protocol type " << llc.GetType ());
if(llc.GetType ()==Ipv4L3Protocol::PROT_NUMBER)
{
Ipv4Header ipv4Hdr;
packet->RemoveHeader(ipv4Hdr);
srcIpv4Addr = ipv4Hdr.GetSource();
dstIpv4Addr = ipv4Hdr.GetDestination();
uint8_t protocol = ipv4Hdr.GetProtocol();
if(protocol==TcpL4Protocol::PROT_NUMBER)
{
TcpHeader tcpHdr;
packet->RemoveHeader (tcpHdr);
srcPort=tcpHdr.GetSourcePort ();
dstPort=tcpHdr.GetDestinationPort ();
cnnType=HwmpRtable::CNN_TYPE_PKT_BASED;
}
else if(protocol==UdpL4Protocol::PROT_NUMBER)
{
UdpHeader udpHdr;
packet->RemoveHeader(udpHdr);
srcPort=udpHdr.GetSourcePort();
dstPort=udpHdr.GetDestinationPort();
cnnType=HwmpRtable::CNN_TYPE_IP_PORT;
}
else
{
cnnType=HwmpRtable::CNN_TYPE_MAC_ONLY;
}
}
else
{
cnnType=HwmpRtable::CNN_TYPE_MAC_ONLY;
}
}
double systemB=m_rtable->systemB ();
if(incDec)//increased
{
systemB+=energy;
if(systemB>m_rtable->systemBMax ())
systemB=m_rtable->systemBMax ();
if(packet==0)//increased by gamma or energyback
{
if(packetSize==0)//increased by gamma
{
m_rtable->TotalEnergyIncreasedByGamma (energy);
}
else//increased by collision energy back of other packets
{
m_rtable->ControlEnergyIncreasedByCollisionEnergyBack (energy);
}
}else//increased by collision energy back
{
m_rtable->ChangeEnergy4aConnection (cnnType,srcIpv4Addr,dstIpv4Addr,srcPort,dstPort,energy,true);
}
}else//decreased
{
systemB-=energy;
if(systemB<0)
systemB=0;
if(packet==0)
{
if(packetSize!=0)//decreased by other types of packets
{
if(m_noDataPacketYet)
{
m_energyPerByte = 0.7 * m_energyPerByte + 0.3 * energy/packetSize;
m_rtable->SetMaxEnergyPerAckPacket (m_energyPerByte*14);
m_rtable->SetMaxEnergyPerDataPacket (m_energyPerByte*260);
//NS_LOG_VB("energyPerAckByte " << m_energyPerByte*14);
//NS_LOG_VB("energyPerDataByte " << m_energyPerByte*260);
}
}
m_rtable->ControlPacketsEnergyDecreased (energy);
}
else//decreased by data or ack for data packets
{
m_noDataPacketYet=false;
if(isAck )
{
if(energy > m_rtable->GetMaxEnergyPerAckPacket ())
m_rtable->SetMaxEnergyPerAckPacket (energy);
//NS_LOG_VB("energyPerAck " << energy);
}
else if(cnnType==HwmpRtable::CNN_TYPE_IP_PORT)
{
if(energy > m_rtable->GetMaxEnergyPerDataPacket ())
m_rtable->SetMaxEnergyPerDataPacket (energy);
//NS_LOG_VB("energyPerData " << energy);
}
if(cnnType==HwmpRtable::CNN_TYPE_IP_PORT)
{
m_rtable->ChangeEnergy4aConnection (cnnType,srcIpv4Addr,dstIpv4Addr,srcPort,dstPort,energy,false);
}
else
{
m_rtable->ControlPacketsEnergyDecreased (energy);
}
}
}
if(std::abs(systemB-remainedEnergy)>1)
{
NS_LOG_VB("remainedEnergyError " << systemB << " " << remainedEnergy);
}
else
{
//NS_LOG_VB("remainedEnergy " << systemB << " " << remainedEnergy);
}
m_rtable->setSystemB (remainedEnergy);
}
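//
// While no data packet has been seen yet (m_noDataPacketYet), m_energyPerByte
// above is maintained as an exponentially weighted moving average,
// new = 0.7 * old + 0.3 * (energy / packetSize). A small numeric sketch
// (values are illustrative assumptions):
//
//   double perByte = 1.0e-6;                  // current estimate, J/byte
//   double sample  = 1.4e-6;                  // energy / packetSize
//   perByte = 0.7 * perByte + 0.3 * sample;   // -> 1.12e-6 J/byte
//
// The per-ACK and per-data upper bounds are then scaled from the 14-byte and
// 260-byte frame sizes used in the code above.
//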
void
HwmpProtocol::GammaChange(double gamma, double totalSimmTime)
{
m_totalSimulationTime=totalSimmTime;
double remainedSimulationTimeSeconds=m_totalSimulationTime-Simulator::Now ().GetSeconds ();
double remainedControlEnergyNeeded=remainedSimulationTimeSeconds*0.035;
//double remainedControlEnergyNeeded=remainedSimulationTimeSeconds*0;
double bPrim=m_rtable->bPrim ()+m_rtable->controlB ();
double gammaPrim=m_rtable->gammaPrim ()+m_rtable->controlGamma ();
double assignedGamma=m_rtable->assignedGamma ()-m_rtable->controlGamma ();
if(bPrim>=remainedControlEnergyNeeded)
{
m_rtable->setControlB (remainedControlEnergyNeeded);
m_rtable->setControlBMax (remainedControlEnergyNeeded);
m_rtable->setControlGamma (0);
bPrim-=remainedControlEnergyNeeded;
}
else
{
m_rtable->setControlB (bPrim);
m_rtable->setControlBMax (remainedControlEnergyNeeded);
double neededControlGamma=(remainedControlEnergyNeeded-bPrim)/remainedSimulationTimeSeconds;// compute the shortfall before zeroing bPrim; the original order zeroed it first, making the subtraction a no-op
bPrim=0;
if(gammaPrim>=neededControlGamma)
{
m_rtable->setControlGamma (neededControlGamma);
gammaPrim-=neededControlGamma;
assignedGamma+=neededControlGamma;
}
else
{
m_rtable->setControlGamma (gammaPrim);
assignedGamma+=gammaPrim;
gammaPrim=0;
}
}
m_rtable->setSystemGamma (gamma);
m_rtable->setBPrim (bPrim);
m_rtable->setGammaPrim (gammaPrim);
m_rtable->setAssignedGamma (assignedGamma);
NS_LOG_VB("GammaChange " << gamma << " " << totalSimmTime << " | " << m_rtable->systemGamma () << " " << m_rtable->bPrim () << " " << m_rtable->gammaPrim () << " " << m_rtable->assignedGamma () << " * " << m_rtable->controlGamma () << " " << m_rtable->controlB ());
}
//Statistics:
HwmpProtocol::Statistics::Statistics () :
txUnicast (0),
txBroadcast (0),
txBytes (0),
droppedTtl (0),
totalQueued (0),
totalDropped (0),
initiatedPreq (0),
initiatedPrep (0),
initiatedPerr (0)
{
}
void HwmpProtocol::Statistics::Print (std::ostream & os) const
{
os << "<Statistics "
"txUnicast=\"" << txUnicast << "\" "
"txBroadcast=\"" << txBroadcast << "\" "
"txBytes=\"" << txBytes << "\" "
"droppedTtl=\"" << droppedTtl << "\" "
"totalQueued=\"" << totalQueued << "\" "
"totalDropped=\"" << totalDropped << "\" "
"initiatedPreq=\"" << initiatedPreq << "\" "
"initiatedPrep=\"" << initiatedPrep << "\" "
"initiatedPerr=\"" << initiatedPerr << "\"/>" << std::endl;
}
void
HwmpProtocol::Report (std::ostream & os) const
{
os << "<Hwmp "
"address=\"" << m_address << "\"" << std::endl <<
"maxQueueSize=\"" << m_maxQueueSize << "\"" << std::endl <<
"Dot11MeshHWMPmaxPREQretries=\"" << (uint16_t)m_dot11MeshHWMPmaxPREQretries << "\"" << std::endl <<
"Dot11MeshHWMPnetDiameterTraversalTime=\"" << m_dot11MeshHWMPnetDiameterTraversalTime.GetSeconds () << "\"" << std::endl <<
"Dot11MeshHWMPpreqMinInterval=\"" << m_dot11MeshHWMPpreqMinInterval.GetSeconds () << "\"" << std::endl <<
"Dot11MeshHWMPperrMinInterval=\"" << m_dot11MeshHWMPperrMinInterval.GetSeconds () << "\"" << std::endl <<
"Dot11MeshHWMPactiveRootTimeout=\"" << m_dot11MeshHWMPactiveRootTimeout.GetSeconds () << "\"" << std::endl <<
"Dot11MeshHWMPactivePathTimeout=\"" << m_dot11MeshHWMPactivePathTimeout.GetSeconds () << "\"" << std::endl <<
"Dot11MeshHWMPpathToRootInterval=\"" << m_dot11MeshHWMPpathToRootInterval.GetSeconds () << "\"" << std::endl <<
"Dot11MeshHWMPrannInterval=\"" << m_dot11MeshHWMPrannInterval.GetSeconds () << "\"" << std::endl <<
"isRoot=\"" << m_isRoot << "\"" << std::endl <<
"maxTtl=\"" << (uint16_t)m_maxTtl << "\"" << std::endl <<
"unicastPerrThreshold=\"" << (uint16_t)m_unicastPerrThreshold << "\"" << std::endl <<
"unicastPreqThreshold=\"" << (uint16_t)m_unicastPreqThreshold << "\"" << std::endl <<
"unicastDataThreshold=\"" << (uint16_t)m_unicastDataThreshold << "\"" << std::endl <<
"doFlag=\"" << m_doFlag << "\"" << std::endl <<
"rfFlag=\"" << m_rfFlag << "\">" << std::endl;
m_stats.Print (os);
for (HwmpProtocolMacMap::const_iterator plugin = m_interfaces.begin (); plugin != m_interfaces.end (); plugin++)
{
plugin->second->Report (os);
}
os << "</Hwmp>" << std::endl;
}
void
HwmpProtocol::ResetStats ()
{
m_stats = Statistics ();
for (HwmpProtocolMacMap::const_iterator plugin = m_interfaces.begin (); plugin != m_interfaces.end (); plugin++)
{
plugin->second->ResetStats ();
}
}
int64_t
HwmpProtocol::AssignStreams (int64_t stream)
{
NS_LOG_FUNCTION (this << stream);
m_coefficient->SetStream (stream);
return 1;
}
double HwmpProtocol::GetSumRhoPps()
{
return m_rtable->GetSumRhoPps ();
}
double HwmpProtocol::GetSumGPps()
{
return m_rtable->GetSumGPps ();
}
HwmpProtocol::QueuedPacket::QueuedPacket () :
pkt (0),
protocol (0),
inInterface (0)
{
}
} // namespace dot11s
} // namespace ns3<|fim▁end|>
|
//freshInfo = false;
if((m_routingType==1)||(m_routingType==2))
{
NS_LOG_ROUTING("checking prev " << i->bPrim << " " << i->gammaPrim << " " << i->totalE << " " << preq.GetBPrim () << " " << preq.GetGammaPrim () << " " << (int)preq.GetTotalE () << " " << (int)m_VBMetricMargin);
|
<|file_name|>reactor_factory.cpp<|end_file_name|><|fim▁begin|>// Copyright (c) 2017, Baidu.com, Inc. All Rights Reserved
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing,
// software distributed under the License is distributed on an
// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
// KIND, either express or implied. See the License for the
// specific language governing permissions and limitations
// under the License.
#include "compat.h"
#include "handler_map.h"
#include "reactor_factory.h"
#include "reactor_runner.h"
#include <cassert>
extern "C" {
#include <signal.h>
}
namespace palo {
std::vector<ReactorPtr> ReactorFactory::ms_reactors;
boost::thread_group ReactorFactory::ms_threads;
std::default_random_engine ReactorFactory::rng {1};
std::mutex ReactorFactory::ms_mutex;
std::atomic<int> ReactorFactory::ms_next_reactor(0);
bool ReactorFactory::ms_epollet = true;
bool ReactorFactory::proxy_master = false;
void ReactorFactory::initialize(uint16_t reactor_count) {
std::lock_guard<std::mutex> lock(ms_mutex);<|fim▁hole|> ReactorRunner rrunner;
ReactorRunner::handler_map = std::make_shared<HandlerMap>();
signal(SIGPIPE, SIG_IGN);
assert(reactor_count > 0);
ms_reactors.reserve(reactor_count+2);
for (uint16_t i=0; i<reactor_count+2; i++) {
reactor = std::make_shared<Reactor>();
ms_reactors.push_back(reactor);
rrunner.set_reactor(reactor);
ms_threads.create_thread(rrunner);
}
}
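// Illustrative lifecycle (not from the original source): callers pair
// initialize()/destroy() around the process lifetime, e.g.
//   ReactorFactory::initialize(4); // 4 worker reactors; 2 extra are created internally
//   // ... run the application ...
//   ReactorFactory::destroy();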
void ReactorFactory::destroy() {
ReactorRunner::shutdown = true;
for (size_t i=0; i<ms_reactors.size(); i++) {
ms_reactors[i]->poll_loop_interrupt();
}
ms_threads.join_all();
ms_reactors.clear();
ReactorRunner::handler_map = nullptr;
}
void ReactorFactory::join() {
ms_threads.join_all();
}
} //namespace palo<|fim▁end|>
|
if (!ms_reactors.empty())
return;
ReactorPtr reactor;
|
<|file_name|>mysql_daemon.go<|end_file_name|><|fim▁begin|>/*
Copyright 2017 Google Inc.
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*/
package mysqlctl
import (
"fmt"
"reflect"
"strings"
"golang.org/x/net/context"
"github.com/youtube/vitess/go/mysql"
"github.com/youtube/vitess/go/mysql/fakesqldb"
"github.com/youtube/vitess/go/sqltypes"
"github.com/youtube/vitess/go/stats"
"github.com/youtube/vitess/go/vt/dbconnpool"
"github.com/youtube/vitess/go/vt/mysqlctl/tmutils"
tabletmanagerdatapb "github.com/youtube/vitess/go/vt/proto/tabletmanagerdata"
)
// MysqlDaemon is the interface we use for abstracting Mysqld.
type MysqlDaemon interface {
// Cnf returns the underlying mycnf
Cnf() *Mycnf
// TabletDir returns the tablet directory.
TabletDir() string
// methods related to mysql running or not
Start(ctx context.Context, mysqldArgs ...string) error
Shutdown(ctx context.Context, waitForMysqld bool) error
RunMysqlUpgrade() error
ReinitConfig(ctx context.Context) error
Wait(ctx context.Context) error
// GetMysqlPort returns the current port mysql is listening on.
GetMysqlPort() (int32, error)
// replication related methods
SlaveStatus() (Status, error)
SetSemiSyncEnabled(master, slave bool) error
SemiSyncEnabled() (master, slave bool)
SemiSyncSlaveStatus() (bool, error)
// reparenting related methods
ResetReplicationCommands() ([]string, error)
MasterPosition() (mysql.Position, error)
IsReadOnly() (bool, error)
SetReadOnly(on bool) error
SetSlavePositionCommands(pos mysql.Position) ([]string, error)
SetMasterCommands(masterHost string, masterPort int) ([]string, error)
WaitForReparentJournal(ctx context.Context, timeCreatedNS int64) error
// Used for backup restoration, to ensure we have a clean slate
ResetSlaveCommands() ([]string, error)
// DemoteMaster waits for all current transactions to finish,
// and returns the current replication position. It will not
// change the read_only state of the server.
DemoteMaster() (mysql.Position, error)
WaitMasterPos(context.Context, mysql.Position) error
// PromoteSlave makes the slave the new master. It will not change
// the read_only state of the server.
PromoteSlave(map[string]string) (mysql.Position, error)
// Schema related methods
GetSchema(dbName string, tables, excludeTables []string, includeViews bool) (*tabletmanagerdatapb.SchemaDefinition, error)
PreflightSchemaChange(dbName string, changes []string) ([]*tabletmanagerdatapb.SchemaChangeResult, error)
ApplySchemaChange(dbName string, change *tmutils.SchemaChange) (*tabletmanagerdatapb.SchemaChangeResult, error)
// GetAppConnection returns an app connection to be able to talk to the database.
GetAppConnection(ctx context.Context) (dbconnpool.PoolConnection, error)
// GetDbaConnection returns a dba connection.
GetDbaConnection() (*dbconnpool.DBConnection, error)
// GetAllPrivsConnection returns an allprivs connection (for user with all privileges except SUPER).
GetAllPrivsConnection() (*dbconnpool.DBConnection, error)
// ExecuteSuperQueryList executes a list of queries, no result
ExecuteSuperQueryList(ctx context.Context, queryList []string) error
// FetchSuperQuery executes one query, returns the result
FetchSuperQuery(ctx context.Context, query string) (*sqltypes.Result, error)
// NewSlaveConnection returns a SlaveConnection to the database.
NewSlaveConnection() (*SlaveConnection, error)
// EnableBinlogPlayback enables playback of binlog events
EnableBinlogPlayback() error
// DisableBinlogPlayback disables playback of binlog events
DisableBinlogPlayback() error
// Close will close this instance of Mysqld. It will wait for all dba
// queries to be finished.
Close()
}
// FakeMysqlDaemon implements MysqlDaemon and allows the user to fake
// everything.
type FakeMysqlDaemon struct {
// The fake SQL DB we may use for some queries
db *fakesqldb.DB
// Mycnf will be returned by Cnf()
Mycnf *Mycnf
// Running is used by Start / Shutdown
Running bool
// MysqlPort will be returned by GetMysqlPort(). Set to -1 to
// return an error.
MysqlPort int32
// Replicating is updated when the start/stop slave queries pass
// through ExecuteSuperQueryList, and SlaveStatus reports it back
// (SlaveIORunning / SlaveSQLRunning); it is the test owner's
// responsibility to keep these consistent.
Replicating bool
// ResetReplicationResult is returned by ResetReplication
ResetReplicationResult []string
// ResetReplicationError is returned by ResetReplication
ResetReplicationError error
// ResetSlaveResult is returned by ResetSlave
ResetSlaveResult []string
// ResetSlaveError is returned by ResetSlave
ResetSlaveError error<|fim▁hole|>
// CurrentMasterPosition is returned by MasterPosition
// and SlaveStatus
CurrentMasterPosition mysql.Position
// SlaveStatusError is used by SlaveStatus
SlaveStatusError error
// CurrentMasterHost is returned by SlaveStatus
CurrentMasterHost string
// CurrentMasterPort is returned by SlaveStatus
CurrentMasterPort int
// SecondsBehindMaster is returned by SlaveStatus
SecondsBehindMaster uint
// ReadOnly is the current value of the flag
ReadOnly bool
// SetSlavePositionCommandsPos is matched against the input
// of SetSlavePositionCommands. If it doesn't match,
// SetSlavePositionCommands will return an error.
SetSlavePositionCommandsPos mysql.Position
// SetSlavePositionCommandsResult is what
// SetSlavePositionCommands will return
SetSlavePositionCommandsResult []string
// SetMasterCommandsInput is matched against the input
// of SetMasterCommands (as "%v:%v"). If it doesn't match,
// SetMasterCommands will return an error.
SetMasterCommandsInput string
// SetMasterCommandsResult is what
// SetMasterCommands will return
SetMasterCommandsResult []string
// DemoteMasterPosition is returned by DemoteMaster
DemoteMasterPosition mysql.Position
// WaitMasterPosition is checked by WaitMasterPos, if the
// same it returns nil, if different it returns an error
WaitMasterPosition mysql.Position
// PromoteSlaveResult is returned by PromoteSlave
PromoteSlaveResult mysql.Position
// SchemaFunc provides the return value for GetSchema.
// If not defined, the "Schema" field will be used instead, see below.
SchemaFunc func() (*tabletmanagerdatapb.SchemaDefinition, error)
// Schema will be returned by GetSchema. If nil we'll
// return an error.
Schema *tabletmanagerdatapb.SchemaDefinition
// PreflightSchemaChangeResult will be returned by PreflightSchemaChange.
// If nil we'll return an error.
PreflightSchemaChangeResult []*tabletmanagerdatapb.SchemaChangeResult
// ApplySchemaChangeResult will be returned by ApplySchemaChange.
// If nil we'll return an error.
ApplySchemaChangeResult *tabletmanagerdatapb.SchemaChangeResult
// DbAppConnectionFactory is the factory for making fake db app connections
DbAppConnectionFactory func() (dbconnpool.PoolConnection, error)
// ExpectedExecuteSuperQueryList is what we expect
// ExecuteSuperQueryList to be called with. If it doesn't
// match, ExecuteSuperQueryList will return an error.
// Note each string is treated as a prefix match if it begins with SUB,
// so we support partial queries (useful when queries contain
// data fields like timestamps)
ExpectedExecuteSuperQueryList []string
// ExpectedExecuteSuperQueryCurrent is the current index of the queries
// we expect
ExpectedExecuteSuperQueryCurrent int
// FetchSuperQueryMap is used by FetchSuperQuery
FetchSuperQueryMap map[string]*sqltypes.Result
// BinlogPlayerEnabled is used by {Enable,Disable}BinlogPlayer
BinlogPlayerEnabled bool
// SemiSyncMasterEnabled represents the state of rpl_semi_sync_master_enabled.
SemiSyncMasterEnabled bool
// SemiSyncSlaveEnabled represents the state of rpl_semi_sync_slave_enabled.
SemiSyncSlaveEnabled bool
}
// NewFakeMysqlDaemon returns a FakeMysqlDaemon where mysqld appears
// to be running, based on a fakesqldb.DB.
// 'db' can be nil if the test doesn't use a database at all.
func NewFakeMysqlDaemon(db *fakesqldb.DB) *FakeMysqlDaemon {
return &FakeMysqlDaemon{
db: db,
Running: true,
}
}
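// Illustrative usage in a test (sketch, not from the original source):
//
//	fmd := NewFakeMysqlDaemon(db) // db: a *fakesqldb.DB, or nil if unused
//	fmd.ExpectedExecuteSuperQueryList = []string{"STOP SLAVE", "START SLAVE"}
//	// ... run the code under test, then verify every expected query was seen:
//	err := fmd.CheckSuperQueryList()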
// Cnf is part of the MysqlDaemon interface
func (fmd *FakeMysqlDaemon) Cnf() *Mycnf {
return fmd.Mycnf
}
// TabletDir is part of the MysqlDaemon interface.
func (fmd *FakeMysqlDaemon) TabletDir() string {
return ""
}
// Start is part of the MysqlDaemon interface
func (fmd *FakeMysqlDaemon) Start(ctx context.Context, mysqldArgs ...string) error {
if fmd.Running {
return fmt.Errorf("fake mysql daemon already running")
}
fmd.Running = true
return nil
}
// Shutdown is part of the MysqlDaemon interface
func (fmd *FakeMysqlDaemon) Shutdown(ctx context.Context, waitForMysqld bool) error {
if !fmd.Running {
return fmt.Errorf("fake mysql daemon not running")
}
fmd.Running = false
return nil
}
// RunMysqlUpgrade is part of the MysqlDaemon interface
func (fmd *FakeMysqlDaemon) RunMysqlUpgrade() error {
return nil
}
// ReinitConfig is part of the MysqlDaemon interface
func (fmd *FakeMysqlDaemon) ReinitConfig(ctx context.Context) error {
return nil
}
// Wait is part of the MysqlDaemon interface.
func (fmd *FakeMysqlDaemon) Wait(ctx context.Context) error {
return nil
}
// GetMysqlPort is part of the MysqlDaemon interface
func (fmd *FakeMysqlDaemon) GetMysqlPort() (int32, error) {
if fmd.MysqlPort == -1 {
return 0, fmt.Errorf("FakeMysqlDaemon.GetMysqlPort returns an error")
}
return fmd.MysqlPort, nil
}
// SlaveStatus is part of the MysqlDaemon interface
func (fmd *FakeMysqlDaemon) SlaveStatus() (Status, error) {
if fmd.SlaveStatusError != nil {
return Status{}, fmd.SlaveStatusError
}
return Status{
Position: fmd.CurrentMasterPosition,
SecondsBehindMaster: fmd.SecondsBehindMaster,
SlaveIORunning: fmd.Replicating,
SlaveSQLRunning: fmd.Replicating,
MasterHost: fmd.CurrentMasterHost,
MasterPort: fmd.CurrentMasterPort,
}, nil
}
// ResetReplicationCommands is part of the MysqlDaemon interface
func (fmd *FakeMysqlDaemon) ResetReplicationCommands() ([]string, error) {
return fmd.ResetReplicationResult, fmd.ResetReplicationError
}
// ResetSlaveCommands is part of the MysqlDaemon interface
func (fmd *FakeMysqlDaemon) ResetSlaveCommands() ([]string, error) {
return fmd.ResetSlaveResult, fmd.ResetSlaveError
}
// MasterPosition is part of the MysqlDaemon interface
func (fmd *FakeMysqlDaemon) MasterPosition() (mysql.Position, error) {
return fmd.CurrentMasterPosition, nil
}
// IsReadOnly is part of the MysqlDaemon interface
func (fmd *FakeMysqlDaemon) IsReadOnly() (bool, error) {
return fmd.ReadOnly, nil
}
// SetReadOnly is part of the MysqlDaemon interface
func (fmd *FakeMysqlDaemon) SetReadOnly(on bool) error {
fmd.ReadOnly = on
return nil
}
// SetSlavePositionCommands is part of the MysqlDaemon interface
func (fmd *FakeMysqlDaemon) SetSlavePositionCommands(pos mysql.Position) ([]string, error) {
if !reflect.DeepEqual(fmd.SetSlavePositionCommandsPos, pos) {
return nil, fmt.Errorf("wrong pos for SetSlavePositionCommands: expected %v got %v", fmd.SetSlavePositionCommandsPos, pos)
}
return fmd.SetSlavePositionCommandsResult, nil
}
// SetMasterCommands is part of the MysqlDaemon interface
func (fmd *FakeMysqlDaemon) SetMasterCommands(masterHost string, masterPort int) ([]string, error) {
input := fmt.Sprintf("%v:%v", masterHost, masterPort)
if fmd.SetMasterCommandsInput != input {
return nil, fmt.Errorf("wrong input for SetMasterCommands: expected %v got %v", fmd.SetMasterCommandsInput, input)
}
return fmd.SetMasterCommandsResult, nil
}
// WaitForReparentJournal is part of the MysqlDaemon interface
func (fmd *FakeMysqlDaemon) WaitForReparentJournal(ctx context.Context, timeCreatedNS int64) error {
return nil
}
// DemoteMaster is part of the MysqlDaemon interface
func (fmd *FakeMysqlDaemon) DemoteMaster() (mysql.Position, error) {
return fmd.DemoteMasterPosition, nil
}
// WaitMasterPos is part of the MysqlDaemon interface
func (fmd *FakeMysqlDaemon) WaitMasterPos(_ context.Context, pos mysql.Position) error {
if reflect.DeepEqual(fmd.WaitMasterPosition, pos) {
return nil
}
return fmt.Errorf("wrong input for WaitMasterPos: expected %v got %v", fmd.WaitMasterPosition, pos)
}
// PromoteSlave is part of the MysqlDaemon interface
func (fmd *FakeMysqlDaemon) PromoteSlave(hookExtraEnv map[string]string) (mysql.Position, error) {
return fmd.PromoteSlaveResult, nil
}
// ExecuteSuperQueryList is part of the MysqlDaemon interface
func (fmd *FakeMysqlDaemon) ExecuteSuperQueryList(ctx context.Context, queryList []string) error {
for _, query := range queryList {
// test we still have a query to compare
if fmd.ExpectedExecuteSuperQueryCurrent >= len(fmd.ExpectedExecuteSuperQueryList) {
return fmt.Errorf("unexpected extra query in ExecuteSuperQueryList: %v", query)
}
// compare the query
expected := fmd.ExpectedExecuteSuperQueryList[fmd.ExpectedExecuteSuperQueryCurrent]
fmd.ExpectedExecuteSuperQueryCurrent++
if strings.HasPrefix(expected, "SUB") {
// remove the SUB from the expected,
// and truncate the query to length(expected)
expected = expected[3:]
if len(query) > len(expected) {
query = query[:len(expected)]
}
}
if expected != query {
return fmt.Errorf("wrong query for ExecuteSuperQueryList: expected %v got %v", expected, query)
}
// intercept some queries to update our status
switch query {
case SQLStartSlave:
fmd.Replicating = true
case SQLStopSlave:
fmd.Replicating = false
}
}
return nil
}
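// For example (illustrative), an expectation of "SUBINSERT INTO _vt.reparent_journal"
// matches any query beginning with "INSERT INTO _vt.reparent_journal", whatever
// timestamp or position data follows.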
// FetchSuperQuery returns the results from the map, if any
func (fmd *FakeMysqlDaemon) FetchSuperQuery(ctx context.Context, query string) (*sqltypes.Result, error) {
if fmd.FetchSuperQueryMap == nil {
return nil, fmt.Errorf("unexpected query: %v", query)
}
qr, ok := fmd.FetchSuperQueryMap[query]
if !ok {
return nil, fmt.Errorf("unexpected query: %v", query)
}
return qr, nil
}
// NewSlaveConnection is part of the MysqlDaemon interface
func (fmd *FakeMysqlDaemon) NewSlaveConnection() (*SlaveConnection, error) {
panic(fmt.Errorf("not implemented on FakeMysqlDaemon"))
}
// EnableBinlogPlayback is part of the MysqlDaemon interface
func (fmd *FakeMysqlDaemon) EnableBinlogPlayback() error {
if fmd.BinlogPlayerEnabled {
return fmt.Errorf("binlog player already enabled")
}
fmd.BinlogPlayerEnabled = true
return nil
}
// DisableBinlogPlayback disables playback of binlog events
func (fmd *FakeMysqlDaemon) DisableBinlogPlayback() error {
if !fmd.BinlogPlayerEnabled {
return fmt.Errorf("binlog player already disabled")
}
fmd.BinlogPlayerEnabled = false
return nil
}
// Close is part of the MysqlDaemon interface
func (fmd *FakeMysqlDaemon) Close() {
}
// CheckSuperQueryList returns an error if all the queries we expected
// haven't been seen.
func (fmd *FakeMysqlDaemon) CheckSuperQueryList() error {
if fmd.ExpectedExecuteSuperQueryCurrent != len(fmd.ExpectedExecuteSuperQueryList) {
return fmt.Errorf("SuperQueryList wasn't consumed, saw %v queries, was expecting %v", fmd.ExpectedExecuteSuperQueryCurrent, len(fmd.ExpectedExecuteSuperQueryList))
}
return nil
}
// GetSchema is part of the MysqlDaemon interface
func (fmd *FakeMysqlDaemon) GetSchema(dbName string, tables, excludeTables []string, includeViews bool) (*tabletmanagerdatapb.SchemaDefinition, error) {
if fmd.SchemaFunc != nil {
return fmd.SchemaFunc()
}
if fmd.Schema == nil {
return nil, fmt.Errorf("no schema defined")
}
return tmutils.FilterTables(fmd.Schema, tables, excludeTables, includeViews)
}
// PreflightSchemaChange is part of the MysqlDaemon interface
func (fmd *FakeMysqlDaemon) PreflightSchemaChange(dbName string, changes []string) ([]*tabletmanagerdatapb.SchemaChangeResult, error) {
if fmd.PreflightSchemaChangeResult == nil {
return nil, fmt.Errorf("no preflight result defined")
}
return fmd.PreflightSchemaChangeResult, nil
}
// ApplySchemaChange is part of the MysqlDaemon interface
func (fmd *FakeMysqlDaemon) ApplySchemaChange(dbName string, change *tmutils.SchemaChange) (*tabletmanagerdatapb.SchemaChangeResult, error) {
if fmd.ApplySchemaChangeResult == nil {
return nil, fmt.Errorf("no apply schema defined")
}
return fmd.ApplySchemaChangeResult, nil
}
// GetAppConnection is part of the MysqlDaemon interface
func (fmd *FakeMysqlDaemon) GetAppConnection(ctx context.Context) (dbconnpool.PoolConnection, error) {
if fmd.DbAppConnectionFactory == nil {
return nil, fmt.Errorf("no DbAppConnectionFactory set in this FakeMysqlDaemon")
}
return fmd.DbAppConnectionFactory()
}
// GetDbaConnection is part of the MysqlDaemon interface.
func (fmd *FakeMysqlDaemon) GetDbaConnection() (*dbconnpool.DBConnection, error) {
return dbconnpool.NewDBConnection(fmd.db.ConnParams(), stats.NewTimings(""))
}
// GetAllPrivsConnection is part of the MysqlDaemon interface.
func (fmd *FakeMysqlDaemon) GetAllPrivsConnection() (*dbconnpool.DBConnection, error) {
return dbconnpool.NewDBConnection(fmd.db.ConnParams(), stats.NewTimings(""))
}
// SetSemiSyncEnabled is part of the MysqlDaemon interface.
func (fmd *FakeMysqlDaemon) SetSemiSyncEnabled(master, slave bool) error {
fmd.SemiSyncMasterEnabled = master
fmd.SemiSyncSlaveEnabled = slave
return nil
}
// SemiSyncEnabled is part of the MysqlDaemon interface.
func (fmd *FakeMysqlDaemon) SemiSyncEnabled() (master, slave bool) {
return fmd.SemiSyncMasterEnabled, fmd.SemiSyncSlaveEnabled
}
// SemiSyncSlaveStatus is part of the MysqlDaemon interface.
func (fmd *FakeMysqlDaemon) SemiSyncSlaveStatus() (bool, error) {
// The fake assumes the status worked.
return fmd.SemiSyncSlaveEnabled, nil
}<|fim▁end|>
| |
<|file_name|>urls.rs<|end_file_name|><|fim▁begin|>/* vim: set et: */
use url;
use hyper;
use types::FolderId;
use types::ProgramId;
use std::fmt;
pub enum EVUrl {
Login,
Folder(FolderId),
Program(ProgramId),
Move(ProgramId, FolderId)
}
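// Illustrative: EVUrl::Folder(FolderId::FolderId(123)).to_string() yields
// ".../ready.sl?folderid=123&ppos=0&ajax=true"; see the tests below for the full URLs.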
impl hyper::client::IntoUrl for EVUrl {
fn into_url(self) -> Result<url::Url, url::ParseError> {
// TODO: Implement Into<String> for EVUrl
let s: String = self.to_string();
url::Url::parse(&s)<|fim▁hole|> fn fmt(&self, fmt: &mut fmt::Formatter) -> fmt::Result {
match *self {
EVUrl::Login => write!(fmt, "https://api.elisaviihde.fi/etvrecorder/login.sl"),
EVUrl::Folder(ref id) => match *id {
FolderId::Root => write!(fmt, "https://api.elisaviihde.fi/etvrecorder/ready.sl?ajax=true"),
ref id => write!(fmt, "https://api.elisaviihde.fi/etvrecorder/ready.sl?folderid={}&ppos=0&ajax=true", id),
},
EVUrl::Program(ref id) => write!(fmt, "https://api.elisaviihde.fi/etvrecorder/program.sl?programid={}&ppos=0&ajax=true", id),
EVUrl::Move(ref pid, ref fid) => write!(fmt, "https://api.elisaviihde.fi/etvrecorder/ready.sl?ajax=true&move=true&destination={}&programviewid={}", fid, pid)
}
}
}
#[cfg(test)]
mod tests {
use super::EVUrl;
use types::FolderId;
use types::ProgramId;
#[test]
fn show_login_url() {
let url = EVUrl::Login;
assert_eq!(url.to_string(), "https://api.elisaviihde.fi/etvrecorder/login.sl");
}
#[test]
fn show_root_folder_url() {
let url = EVUrl::Folder(FolderId::Root);
assert_eq!(url.to_string(), "https://api.elisaviihde.fi/etvrecorder/ready.sl?ajax=true");
}
#[test]
fn show_non_root_folder_url() {
let url = EVUrl::Folder(FolderId::FolderId(123));
assert_eq!(url.to_string(), "https://api.elisaviihde.fi/etvrecorder/ready.sl?folderid=123&ppos=0&ajax=true");
}
#[test]
fn show_program_url() {
let url = EVUrl::Program(ProgramId::ProgramId(123));
assert_eq!(url.to_string(), "https://api.elisaviihde.fi/etvrecorder/program.sl?programid=123&ppos=0&ajax=true");
}
#[test]
fn show_move_url() {
let url = EVUrl::Move(ProgramId::ProgramId(123), FolderId::FolderId(321));
assert_eq!(url.to_string(), "https://api.elisaviihde.fi/etvrecorder/ready.sl?ajax=true&move=true&destination=321&programviewid=123");
}
}<|fim▁end|>
|
}
}
impl fmt::Display for EVUrl {
|