<|file_name|>pipette_config.py<|end_file_name|><|fim▁begin|>from __future__ import annotations
from dataclasses import dataclass
import logging
import json
import numbers
from typing import (Any, Dict, List, Optional, Union, Tuple,
                    Sequence, TYPE_CHECKING)
from opentrons import config
from opentrons.config import feature_flags as ff
from opentrons_shared_data.pipette import (
model_config, name_config, fuse_specs)
if TYPE_CHECKING:
from opentrons_shared_data.pipette.dev_types import (
PipetteName, PipetteModel, UlPerMm, Quirk, PipetteFusedSpec
)
log = logging.getLogger(__name__)
@dataclass
class PipetteConfig:
top: float
bottom: float
blow_out: float
drop_tip: float
pick_up_current: float
pick_up_distance: float
pick_up_increment: float
pick_up_presses: int
pick_up_speed: float
aspirate_flow_rate: float
dispense_flow_rate: float
channels: float
model_offset: Tuple[float, float, float]
plunger_current: float
drop_tip_current: float
drop_tip_speed: float
min_volume: float
max_volume: float
ul_per_mm: UlPerMm
quirks: List[Quirk]
tip_length: float # TODO(seth): remove
# TODO: Replace entirely with tip length calibration
tip_overlap: Dict[str, float]
display_name: str
name: PipetteName
back_compat_names: List[PipetteName]
return_tip_height: float
blow_out_flow_rate: float
max_travel: float
home_position: float
steps_per_mm: float
idle_current: float
default_blow_out_flow_rates: Dict[str, float]
default_aspirate_flow_rates: Dict[str, float]
default_dispense_flow_rates: Dict[str, float]
model: PipetteModel
# Notes:
# - multi-channel pipettes share the same dimensional offsets
# - single-channel pipettes have different lengths
# - default flow rates correspond to the number of seconds it takes to
#   aspirate/dispense a pipette's full volume; these times were chosen to
#   mimic normal human pipetting motions. However, accurate speeds depend
#   on the environment (e.g. liquid viscosity), so a pipette's flow rates
#   (ul/sec) should be set by the protocol writer
# Multi-channel Y offset calculations:
DISTANCE_BETWEEN_NOZZLES = 9
NUM_MULTI_CHANNEL_NOZZLES = 8
MULTI_LENGTH = (NUM_MULTI_CHANNEL_NOZZLES - 1) * DISTANCE_BETWEEN_NOZZLES
Y_OFFSET_MULTI = MULTI_LENGTH / 2
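# Worked example: an 8-channel head spans (8 - 1) * 9 = 63 mm of nozzles,
# so the centering offset Y_OFFSET_MULTI comes out to 63 / 2 = 31.5 mm.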
Z_OFFSET_MULTI = -25.8
Z_OFFSET_P10 = -13 # longest single-channel pipette
Z_OFFSET_P50 = 0
Z_OFFSET_P300 = 0
Z_OFFSET_P1000 = 20 # shortest single-channel pipette
LOW_CURRENT_DEFAULT = 0.05
config_models = list(model_config()['config'].keys())
config_names = list(name_config().keys())
configs = model_config()['config']
#: A list of pipette model names for which we have config entries
MUTABLE_CONFIGS = model_config()['mutableConfigs']
#: A list of mutable configs for pipettes
VALID_QUIRKS = model_config()['validQuirks']
#: A list of valid quirks for pipettes
def load(
pipette_model: PipetteModel,
        pipette_id: Optional[str] = None) -> PipetteConfig:
"""
Load pipette config data
This function loads from a combination of
- the pipetteModelSpecs.json file in the wheel (should never be edited)
- the pipetteNameSpecs.json file in the wheel (should never be edited)
- any config overrides found in
``opentrons.config.CONFIG['pipette_config_overrides_dir']``
This function reads from disk each time, so changes to the overrides
will be picked up in subsequent calls.
    :param str pipette_model: The pipette model name (e.g. "p10_single_v1.3")
for which to load configuration
:param pipette_id: An (optional) unique ID for the pipette to locate
config overrides. If the ID is not specified, the system
assumes this is a simulated pipette and does not
save settings. If the ID is specified but no overrides
corresponding to the ID are found, the system creates a
new overrides file for it.
:type pipette_id: str or None
:raises KeyError: if ``pipette_model`` is not in the top-level keys of
the pipetteModelSpecs.json file (and therefore not in
:py:attr:`configs`)
:returns PipetteConfig: The configuration, loaded and checked
"""
# Load the model config and update with the name config
cfg = fuse_specs(pipette_model)
# Load overrides if we have a pipette id
if pipette_id:
try:
override = load_overrides(pipette_id)
if 'quirks' in override.keys():
override['quirks'] = [
qname for qname, qval in override['quirks'].items()
if qval]
for legacy_key in (
'defaultAspirateFlowRate',
'defaultDispenseFlowRate',
'defaultBlowOutFlowRate'):
override.pop(legacy_key, None)
except FileNotFoundError:
save_overrides(pipette_id, {}, pipette_model)
log.info(
"Save defaults for pipette model {} and id {}".format(
pipette_model, pipette_id))
else:
cfg.update(override) # type: ignore
# the ulPerMm functions are structured in pipetteModelSpecs.json as
# a list sorted from oldest to newest. That means the latest functions
# are always the last element and, as of right now, the older ones are
# the first element (for models that only have one function, the first
# and last elements are the same, which is fine). If we add more in the
# future, we’ll have to change this code to select items more
# intelligently
if ff.use_old_aspiration_functions():
log.debug("Using old aspiration functions")
ul_per_mm = cfg['ulPerMm'][0]
else:
ul_per_mm = cfg['ulPerMm'][-1]
smoothie_configs = cfg['smoothieConfigs']
res = PipetteConfig(
top=ensure_value(
cfg, 'top', MUTABLE_CONFIGS),
bottom=ensure_value(
cfg, 'bottom', MUTABLE_CONFIGS),
blow_out=ensure_value(
cfg, 'blowout', MUTABLE_CONFIGS),
drop_tip=ensure_value(
cfg, 'dropTip', MUTABLE_CONFIGS),
pick_up_current=ensure_value(cfg, 'pickUpCurrent', MUTABLE_CONFIGS),
pick_up_distance=ensure_value(cfg, 'pickUpDistance', MUTABLE_CONFIGS),
pick_up_increment=ensure_value(
cfg, 'pickUpIncrement', MUTABLE_CONFIGS),
pick_up_presses=ensure_value(cfg, 'pickUpPresses', MUTABLE_CONFIGS),
pick_up_speed=ensure_value(cfg, 'pickUpSpeed', MUTABLE_CONFIGS),
aspirate_flow_rate=cfg['defaultAspirateFlowRate']['value'],
dispense_flow_rate=cfg['defaultDispenseFlowRate']['value'],
channels=ensure_value(cfg, 'channels', MUTABLE_CONFIGS),
model_offset=ensure_value(cfg, 'modelOffset', MUTABLE_CONFIGS),
plunger_current=ensure_value(cfg, 'plungerCurrent', MUTABLE_CONFIGS),
drop_tip_current=ensure_value(cfg, 'dropTipCurrent', MUTABLE_CONFIGS),
drop_tip_speed=ensure_value(cfg, 'dropTipSpeed', MUTABLE_CONFIGS),
min_volume=ensure_value(cfg, 'minVolume', MUTABLE_CONFIGS),
max_volume=ensure_value(cfg, 'maxVolume', MUTABLE_CONFIGS),
ul_per_mm=ul_per_mm,
quirks=validate_quirks(ensure_value(cfg, 'quirks', MUTABLE_CONFIGS)),
tip_overlap=cfg['tipOverlap'],
tip_length=ensure_value(cfg, 'tipLength', MUTABLE_CONFIGS),
display_name=ensure_value(cfg, 'displayName', MUTABLE_CONFIGS),
name=cfg['name'],
back_compat_names=cfg.get('backCompatNames', []),
return_tip_height=cfg.get('returnTipHeight', 0.5),
blow_out_flow_rate=cfg['defaultBlowOutFlowRate']['value'],
max_travel=smoothie_configs['travelDistance'],
home_position=smoothie_configs['homePosition'],
steps_per_mm=smoothie_configs['stepsPerMM'],
idle_current=cfg.get('idleCurrent', LOW_CURRENT_DEFAULT),
default_blow_out_flow_rates=cfg['defaultBlowOutFlowRate'].get(
'valuesByApiLevel',
{'2.0': cfg['defaultBlowOutFlowRate']['value']}),
default_dispense_flow_rates=cfg['defaultDispenseFlowRate'].get(
'valuesByApiLevel',
{'2.0': cfg['defaultDispenseFlowRate']['value']}),
default_aspirate_flow_rates=cfg['defaultAspirateFlowRate'].get(
'valuesByApiLevel',
{'2.0': cfg['defaultAspirateFlowRate']['value']}),
model=pipette_model,
)
return res
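# Illustrative usage sketch (the model string and serial number below are
# hypothetical; real values come from pipetteModelSpecs.json and the attached
# instrument):
#
#   cfg = load('p300_single_v2.0', 'P3HS12123041')
#   cfg.max_volume   # nominal capacity in ul for this model
#   cfg.quirks       # validated quirk names active for this model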
def piecewise_volume_conversion(
ul: float, sequence: List[List[float]]) -> float:
"""
Takes a volume in microliters and a sequence representing a piecewise
function for the slope and y-intercept of a ul/mm function, where each
sub-list in the sequence contains:
    - the max volume for the piece of the function (the minimum is implied
      by the max of the previous item, or 0 for the first piece)
- the slope of the segment
- the y-intercept of the segment
:return: the ul/mm value for the specified volume
"""
# pick the first item from the seq for which the target is less than
# the bracketing element
i = list(filter(lambda x: ul <= x[0], sequence))[0]
# use that element to calculate the movement distance in mm
return i[1]*ul + i[2]
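# Illustrative example (made-up calibration numbers): for a two-piece
# function sequence = [[10, 2.0, 0.5], [50, 1.5, 5.5]], a target of 5 ul
# falls in the first piece (5 <= 10), giving 2.0 * 5 + 0.5 = 10.5, while a
# target of 20 ul falls in the second piece, giving 1.5 * 20 + 5.5 = 35.5.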
TypeOverrides = Dict[str, Union[float, bool, None]]
def validate_overrides(data: TypeOverrides,
config_model: Dict) -> None:
"""
Check that override fields are valid.
:param data: a dict of field name to value
:param config_model: the configuration for the chosen model
:raises ValueError: If a value is invalid
"""
for key, value in data.items():
field_config = config_model.get(key)
is_quirk = key in config_model['quirks']
if is_quirk:
# If it's a quirk it must be a bool or None
if value is not None and not isinstance(value, bool):
raise ValueError(f'{value} is invalid for {key}')
elif not field_config:
# If it's not a quirk we must have a field config
raise ValueError(f'Unknown field {key}')
elif value is not None:
# If value is not None it must be numeric and between min and max
if not isinstance(value, numbers.Number):
raise ValueError(f'{value} is invalid for {key}')
elif value < field_config['min'] or value > field_config['max']:
raise ValueError(f'{key} out of range with {value}')
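# Illustrative sketch (field names and bounds here are hypothetical): given
# a config_model like
#   {'pickUpCurrent': {'min': 0.05, 'max': 2.0}, 'quirks': ['dropTipShake']}
# the call validate_overrides({'pickUpCurrent': 0.8, 'dropTipShake': False},
# config_model) passes, while {'pickUpCurrent': 5.0} raises ValueError for
# being out of range.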
def override(pipette_id: str, fields: TypeOverrides):
"""
Override configuration for pipette. Validate then save.
:param pipette_id: The pipette id
:param fields: Dict of field name to override value
"""
config_match = list_mutable_configs(pipette_id)
whole_config, model = load_config_dict(pipette_id)
validate_overrides(data=fields, config_model=config_match)
save_overrides(pipette_id, fields, model)
def save_overrides(pipette_id: str,
overrides: TypeOverrides,
model: PipetteModel):
"""
Save overrides for the pipette.
:param pipette_id: The pipette id
:param overrides: The incoming values
:param model: The model of pipette
:return: None
"""
override_dir = config.CONFIG['pipette_config_overrides_dir']
model_configs = configs[model]
model_configs_quirks = {key: True for key in model_configs['quirks']}
try:
existing = load_overrides(pipette_id)
# Add quirks setting for pipettes already with a pipette id file
if 'quirks' not in existing.keys():
existing['quirks'] = model_configs_quirks
except FileNotFoundError:
existing = model_configs_quirks # type: ignore
for key, value in overrides.items():
# If an existing override is saved as null from endpoint, remove from
# overrides file
if value is None:
            if key in existing:
del existing[key]
elif isinstance(value, bool):
existing, model_configs = change_quirks(
{key: value}, existing, model_configs)
else:
# type ignores are here because mypy needs typed dict accesses to
# be string literals sadly enough
model_config_value = model_configs[key] # type: ignore
if not model_config_value.get('default'):
model_config_value['default']\
= model_config_value['value']
model_config_value['value'] = value
existing[key] = model_config_value
assert model in config_models
existing['model'] = model
    with (override_dir/f'{pipette_id}.json').open('w') as file_to_write:
        json.dump(existing, file_to_write)
def change_quirks(override_quirks, existing, model_configs):
if not existing.get('quirks'):
# ensure quirk key exists
existing['quirks'] = override_quirks
for quirk, setting in override_quirks.items():
# setting values again if above case true, but<|fim▁hole|> # for one setting
existing['quirks'][quirk] = setting
        if setting and quirk not in model_configs['quirks']:
            model_configs['quirks'].append(quirk)
        elif not setting and quirk in model_configs['quirks']:
            model_configs['quirks'].remove(quirk)
return existing, model_configs
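# Illustrative sketch (the quirk name is hypothetical): calling
#   change_quirks({'dropTipShake': False}, existing, model_configs)
# records the override in existing['quirks'] and drops 'dropTipShake' from
# the model's active quirk list, so load() stops applying that quirk.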
def load_overrides(pipette_id: str) -> Dict[str, Any]:
    overrides = config.CONFIG['pipette_config_overrides_dir']
    override_path = overrides/f'{pipette_id}.json'
    try:
        with override_path.open() as fi:
            return json.load(fi)
    except json.JSONDecodeError as e:
        log.warning(f'pipette override for {pipette_id} is corrupt: {e}')
        override_path.unlink()
        raise FileNotFoundError(str(override_path))
def validate_quirks(quirks: List[str]):
valid_quirks = []
for quirk in quirks:
if quirk in VALID_QUIRKS:
valid_quirks.append(quirk)
else:
log.warning(f'{quirk} is not a valid quirk')
return valid_quirks
def ensure_value(
config: PipetteFusedSpec,
name: Union[str, Tuple[str, ...]],
mutable_config_list: List[str]):
"""
Pull value of config data from file. Shape can either be a dictionary with
a value key -- indicating that it can be changed -- or another
data structure such as an array.
"""
if not isinstance(name, tuple):
path: Tuple[str, ...] = (name,)
else:
path = name
for element in path[:-1]:
config = config[element] # type: ignore
value = config[path[-1]] # type: ignore
if path[-1] != 'quirks' and path[-1] in mutable_config_list:
value = value['value']
return value
def known_pipettes() -> Sequence[str]:
""" List pipette IDs for which we have known overrides """
return [fi.stem
for fi in config.CONFIG['pipette_config_overrides_dir'].iterdir()
if fi.is_file() and '.json' in fi.suffixes]
def add_default(cfg):
if isinstance(cfg, dict):
if 'value' in cfg.keys():
cfg['default'] = cfg['value']
else:
for top_level_key in cfg.keys():
add_default(cfg[top_level_key])
def load_config_dict(pipette_id: str) -> Tuple[
'PipetteFusedSpec', 'PipetteModel']:
""" Give updated config with overrides for a pipette. This will add
the default value for a mutable config before returning the modified
config value.
"""
override = load_overrides(pipette_id)
model = override['model']
config = fuse_specs(model)
if 'quirks' not in override.keys():
override['quirks'] = {key: True for key in config['quirks']}
for top_level_key in config.keys():
if top_level_key != 'quirks':
add_default(config[top_level_key]) # type: ignore
config.update(override) # type: ignore
return config, model
def list_mutable_configs(pipette_id: str) -> Dict[str, Any]:
"""
Returns dict of mutable configs only.
"""
cfg: Dict[str, Any] = {}
if pipette_id in known_pipettes():
config, model = load_config_dict(pipette_id)
else:
log.info(f'Pipette id {pipette_id} not found')
return cfg
for key in config:
if key in MUTABLE_CONFIGS:
cfg[key] = config[key] # type: ignore
return cfg<|fim▁end|> | # meant for use-cases where we may only be given an update |
<|file_name|>deployment.go<|end_file_name|><|fim▁begin|>/*
Copyright 2017 The Kubernetes Authors.
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*/
package v1beta1
import (
v1 "k8s.io/apimachinery/pkg/apis/meta/v1"
types "k8s.io/apimachinery/pkg/types"
watch "k8s.io/apimachinery/pkg/watch"
rest "k8s.io/client-go/rest"
v1beta1 "k8s.io/kubernetes/pkg/apis/apps/v1beta1"
scheme "k8s.io/kubernetes/pkg/client/clientset_generated/clientset/scheme"
)
// DeploymentsGetter has a method to return a DeploymentInterface.
// A group's client should implement this interface.
type DeploymentsGetter interface {
Deployments(namespace string) DeploymentInterface
}
// DeploymentInterface has methods to work with Deployment resources.
type DeploymentInterface interface {
Create(*v1beta1.Deployment) (*v1beta1.Deployment, error)
Update(*v1beta1.Deployment) (*v1beta1.Deployment, error)
UpdateStatus(*v1beta1.Deployment) (*v1beta1.Deployment, error)
Delete(name string, options *v1.DeleteOptions) error
DeleteCollection(options *v1.DeleteOptions, listOptions v1.ListOptions) error
Get(name string, options v1.GetOptions) (*v1beta1.Deployment, error)
List(opts v1.ListOptions) (*v1beta1.DeploymentList, error)
Watch(opts v1.ListOptions) (watch.Interface, error)
Patch(name string, pt types.PatchType, data []byte, subresources ...string) (result *v1beta1.Deployment, err error)
DeploymentExpansion
}
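// Illustrative usage sketch (not part of the generated client): how a caller
// might exercise DeploymentInterface. The `clientset` variable and the
// deployment name are hypothetical, and error handling is elided.
//
//	deployments := clientset.AppsV1beta1().Deployments("default")
//	d, err := deployments.Get("my-deployment", v1.GetOptions{})
//	if err == nil {
//		d.Labels["audited"] = "true"
//		_, err = deployments.Update(d)
//	}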
// deployments implements DeploymentInterface
type deployments struct {
client rest.Interface
ns string
}
// newDeployments returns a Deployments
func newDeployments(c *AppsV1beta1Client, namespace string) *deployments {
return &deployments{
client: c.RESTClient(),
ns: namespace,
}
}
// Get takes name of the deployment, and returns the corresponding deployment object, and an error if there is any.
func (c *deployments) Get(name string, options v1.GetOptions) (result *v1beta1.Deployment, err error) {
result = &v1beta1.Deployment{}
err = c.client.Get().
Namespace(c.ns).
Resource("deployments").
Name(name).
VersionedParams(&options, scheme.ParameterCodec).
Do().
Into(result)
return
}
// List takes label and field selectors, and returns the list of Deployments that match those selectors.
func (c *deployments) List(opts v1.ListOptions) (result *v1beta1.DeploymentList, err error) {
result = &v1beta1.DeploymentList{}
err = c.client.Get().
Namespace(c.ns).
Resource("deployments").
VersionedParams(&opts, scheme.ParameterCodec).
Do().
Into(result)
return
}
// Watch returns a watch.Interface that watches the requested deployments.
func (c *deployments) Watch(opts v1.ListOptions) (watch.Interface, error) {
opts.Watch = true
return c.client.Get().
Namespace(c.ns).
Resource("deployments").
VersionedParams(&opts, scheme.ParameterCodec).
Watch()
}<|fim▁hole|> err = c.client.Post().
Namespace(c.ns).
Resource("deployments").
Body(deployment).
Do().
Into(result)
return
}
// Update takes the representation of a deployment and updates it. Returns the server's representation of the deployment, and an error, if there is any.
func (c *deployments) Update(deployment *v1beta1.Deployment) (result *v1beta1.Deployment, err error) {
result = &v1beta1.Deployment{}
err = c.client.Put().
Namespace(c.ns).
Resource("deployments").
Name(deployment.Name).
Body(deployment).
Do().
Into(result)
return
}
// UpdateStatus was generated because the type contains a Status member.
// Add a +genclient:noStatus comment above the type to avoid generating UpdateStatus().
func (c *deployments) UpdateStatus(deployment *v1beta1.Deployment) (result *v1beta1.Deployment, err error) {
result = &v1beta1.Deployment{}
err = c.client.Put().
Namespace(c.ns).
Resource("deployments").
Name(deployment.Name).
SubResource("status").
Body(deployment).
Do().
Into(result)
return
}
// Delete takes name of the deployment and deletes it. Returns an error if one occurs.
func (c *deployments) Delete(name string, options *v1.DeleteOptions) error {
return c.client.Delete().
Namespace(c.ns).
Resource("deployments").
Name(name).
Body(options).
Do().
Error()
}
// DeleteCollection deletes a collection of objects.
func (c *deployments) DeleteCollection(options *v1.DeleteOptions, listOptions v1.ListOptions) error {
return c.client.Delete().
Namespace(c.ns).
Resource("deployments").
VersionedParams(&listOptions, scheme.ParameterCodec).
Body(options).
Do().
Error()
}
// Patch applies the patch and returns the patched deployment.
func (c *deployments) Patch(name string, pt types.PatchType, data []byte, subresources ...string) (result *v1beta1.Deployment, err error) {
result = &v1beta1.Deployment{}
err = c.client.Patch(pt).
Namespace(c.ns).
Resource("deployments").
SubResource(subresources...).
Name(name).
Body(data).
Do().
Into(result)
return
}<|fim▁end|> |
// Create takes the representation of a deployment and creates it. Returns the server's representation of the deployment, and an error, if there is any.
func (c *deployments) Create(deployment *v1beta1.Deployment) (result *v1beta1.Deployment, err error) {
result = &v1beta1.Deployment{} |
<|file_name|>swf.js<|end_file_name|><|fim▁begin|>YUI.add('swf', function (Y, NAME) {
/**
* Embed a Flash applications in a standard manner and communicate with it
* via External Interface.
* @module swf
*/
var Event = Y.Event,
SWFDetect = Y.SWFDetect,
Lang = Y.Lang,
uA = Y.UA,
Node = Y.Node,
Escape = Y.Escape,
// private
FLASH_CID = "clsid:d27cdb6e-ae6d-11cf-96b8-444553540000",
FLASH_TYPE = "application/x-shockwave-flash",
FLASH_VER = "10.0.22",
EXPRESS_INSTALL_URL = "http://fpdownload.macromedia.com/pub/flashplayer/update/current/swf/autoUpdater.swf?" + Math.random(),
EVENT_HANDLER = "SWF.eventHandler",
possibleAttributes = {align:"", allowFullScreen:"", allowNetworking:"", allowScriptAccess:"", base:"", bgcolor:"", loop:"", menu:"", name:"", play: "", quality:"", salign:"", scale:"", tabindex:"", wmode:""};
/**
* The SWF utility is a tool for embedding Flash applications in HTML pages.
* @module swf
* @title SWF Utility
* @requires event-custom, node, swfdetect
*/
/**
* Creates the SWF instance and keeps the configuration data
*
* @class SWF
* @augments Y.Event.Target
* @constructor
* @param {String|HTMLElement} id The id of the element, or the element itself that the SWF will be inserted into.
* The width and height of the SWF will be set to the width and height of this container element.
* @param {String} swfURL The URL of the SWF to be embedded into the page.
* @param {Object} p_oAttributes (optional) Configuration parameters for the Flash application and values for Flashvars
* to be passed to the SWF. The p_oAttributes object allows the following additional properties:
* <dl>
* <dt>version : String</dt>
* <dd>The minimum version of Flash required on the user's machine.</dd>
* <dt>fixedAttributes : Object</dt>
* <dd>An object literal containing one or more of the following String keys and their values: <code>align,
* allowFullScreen, allowNetworking, allowScriptAccess, base, bgcolor, menu, name, quality, salign, scale,
 * tabindex, wmode.</code></dd>
* </dl>
*/
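/*
 * Illustrative usage sketch (container id, SWF URL, and flashVars values
 * below are hypothetical):
 *
 *   var player = new Y.SWF("#flashbox", "assets/player.swf", {
 *       version: "10.0.22",
 *       fixedAttributes: { wmode: "transparent", allowScriptAccess: "always" },
 *       flashVars: { videoId: "abc123" }
 *   });
 *   player.on("swfReady", function () {
 *       player.callSWF("play", []);
 *   });
 */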
function SWF (p_oElement /*:String*/, swfURL /*:String*/, p_oAttributes /*:Object*/ ) {
this._id = Y.guid("yuiswf");
var _id = this._id;
var oElement = Node.one(p_oElement);
var p_oAttributes = p_oAttributes || {};
var flashVersion = p_oAttributes.version || FLASH_VER;
var flashVersionSplit = (flashVersion + '').split(".");
var isFlashVersionRight = SWFDetect.isFlashVersionAtLeast(parseInt(flashVersionSplit[0], 10), parseInt(flashVersionSplit[1], 10), parseInt(flashVersionSplit[2], 10));
var canExpressInstall = (SWFDetect.isFlashVersionAtLeast(8,0,0));
var shouldExpressInstall = canExpressInstall && !isFlashVersionRight && p_oAttributes.useExpressInstall;
var flashURL = (shouldExpressInstall)?EXPRESS_INSTALL_URL:swfURL;
var objstring = '<object ';
var w, h;
var flashvarstring = "yId=" + Y.id + "&YUISwfId=" + _id + "&YUIBridgeCallback=" + EVENT_HANDLER + "&allowedDomain=" + document.location.hostname;
Y.SWF._instances[_id] = this;
if (oElement && (isFlashVersionRight || shouldExpressInstall) && flashURL) {
objstring += 'id="' + _id + '" ';
if (uA.ie) {
objstring += 'classid="' + FLASH_CID + '" ';
} else {
objstring += 'type="' + FLASH_TYPE + '" data="' + Escape.html(flashURL) + '" ';
}
w = "100%";
h = "100%";
objstring += 'width="' + w + '" height="' + h + '">';
if (uA.ie) {
objstring += '<param name="movie" value="' + Escape.html(flashURL) + '"/>';
}
for (var attribute in p_oAttributes.fixedAttributes) {
if (possibleAttributes.hasOwnProperty(attribute)) {
objstring += '<param name="' + Escape.html(attribute) + '" value="' + Escape.html(p_oAttributes.fixedAttributes[attribute]) + '"/>';
}
}
for (var flashvar in p_oAttributes.flashVars) {
var fvar = p_oAttributes.flashVars[flashvar];
if (Lang.isString(fvar)) {
flashvarstring += "&" + Escape.html(flashvar) + "=" + Escape.html(encodeURIComponent(fvar));
}
}
if (flashvarstring) {
objstring += '<param name="flashVars" value="' + flashvarstring + '"/>';
}
objstring += "</object>";
//using innerHTML as setHTML/setContent causes some issues with ExternalInterface for IE versions of the player
oElement.set("innerHTML", objstring);
this._swf = Node.one("#" + _id);
} else {
/**
* Fired when the Flash player version on the user's machine is
* below the required value.
*
* @event wrongflashversion
*/
var event = {};
event.type = "wrongflashversion";
this.publish("wrongflashversion", {fireOnce:true});
this.fire("wrongflashversion", event);<|fim▁hole|> * @private
* The static collection of all instances of the SWFs on the page.
* @property _instances
* @type Object
*/
SWF._instances = SWF._instances || {};
/**
* @private
* Handles an event coming from within the SWF and delegate it
* to a specific instance of SWF.
* @method eventHandler
* @param swfid {String} the id of the SWF dispatching the event
* @param event {Object} the event being transmitted.
*/
SWF.eventHandler = function (swfid, event) {
SWF._instances[swfid]._eventHandler(event);
};
SWF.prototype = {
/**
* @private
* Propagates a specific event from Flash to JS.
* @method _eventHandler
* @param event {Object} The event to be propagated from Flash.
*/
_eventHandler: function(event) {
if (event.type === "swfReady") {
this.publish("swfReady", {fireOnce:true});
this.fire("swfReady", event);
} else if(event.type === "log") {
} else {
this.fire(event.type, event);
}
},
/**
* Calls a specific function exposed by the SWF's
* ExternalInterface.
* @method callSWF
* @param func {String} the name of the function to call
* @param args {Array} the set of arguments to pass to the function.
*/
callSWF: function (func, args)
{
if (!args) {
args= [];
}
if (this._swf._node[func]) {
return(this._swf._node[func].apply(this._swf._node, args));
} else {
return null;
}
},
/**
* Public accessor to the unique name of the SWF instance.
*
* @method toString
* @return {String} Unique name of the SWF instance.
*/
toString: function()
{
return "SWF " + this._id;
}
};
Y.augment(SWF, Y.EventTarget);
Y.SWF = SWF;
}, '@VERSION@', {"requires": ["event-custom", "node", "swfdetect", "escape"]});<|fim▁end|> | }
}
/** |
<|file_name|>test_itempage.py<|end_file_name|><|fim▁begin|>import unittest
import json
import os
from pywikibase import ItemPage, Claim
try:
unicode = unicode
except NameError:
basestring = (str, bytes)
class TestItemPage(unittest.TestCase):
def setUp(self):
with open(os.path.join(os.path.split(__file__)[0],
'data', 'Q7251.wd')) as f:
self._content = json.load(f)['entities']['Q7251']
self.item_page = ItemPage()
self.item_page.get(content=self._content)
<|fim▁hole|> def test_init_item(self):
self.assertEqual(self.item_page.getID(), 'Q7251')
self.assertRaises(RuntimeError, ItemPage, title='Null')
self.assertRaises(RuntimeError, ItemPage, title='P15')
def test_sitelinks(self):
self.assertEqual(len(self.item_page.sitelinks), 134)
self.assertIn('fawiki', self.item_page.sitelinks)
self.assertNotIn('fa', self.item_page.sitelinks)
self.assertIsInstance(self.item_page.sitelinks['enwiki'], basestring)
def test_add_claim(self):
claim = Claim('P17', datatype='wikibase-item')
claim.setTarget(ItemPage('Q91'))
self.item_page.addClaim(claim)
self.assertIn('P17', self.item_page.claims)
self.assertEqual(len(self.item_page.claims['P17']), 1)
self.assertIsInstance(self.item_page.claims['P17'][0], Claim)
def test_remove_claim(self):
claim = self.item_page.claims['P31'][0]
old_claims = self.item_page.claims.copy()
self.item_page.removeClaims(claim)
self.assertNotEqual(self.item_page.claims, old_claims)
self.assertNotIn('P31', self.item_page.claims)
def test_badges(self):
self.assertEqual(len(self.item_page.badges), 4)
self.assertEqual(self.item_page.badges['enwiki'], ['Q17437798'])
self.assertIn('enwiki', self.item_page.badges)
self.assertNotIn('fawiki', self.item_page.badges)
if __name__ == '__main__':
unittest.main()<|fim▁end|> | |
<|file_name|>nocompile_driver.py<|end_file_name|><|fim▁begin|>#!/usr/bin/env python
# Copyright (c) 2011 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
"""Implements a simple "negative compile" test for C++ on linux.
Sometimes a C++ API needs to ensure that various usages cannot compile. To
enable unittesting of these assertions, we use this python script to
invoke the compiler on a source file and assert that compilation fails.
For more info, see:
http://dev.chromium.org/developers/testing/no-compile-tests
"""
from __future__ import print_function
import StringIO
import ast
import os
import re
import select
import subprocess
import sys
import tempfile
import time
# Matches lines that start with #if and have the substring TEST in the
# conditional. Also extracts the comment. This allows us to search for
# lines like the following:
#
# #ifdef NCTEST_NAME_OF_TEST // [r'expected output']
# #if defined(NCTEST_NAME_OF_TEST) // [r'expected output']
# #if NCTEST_NAME_OF_TEST // [r'expected output']
# #elif NCTEST_NAME_OF_TEST // [r'expected output']
# #elif DISABLED_NCTEST_NAME_OF_TEST // [r'expected output']
#
# inside the unittest file.
NCTEST_CONFIG_RE = re.compile(r'^#(?:el)?if.*\s+(\S*NCTEST\S*)\s*(//.*)?')
# Matches and removes the defined() preprocessor predicate. This is useful
# for test cases that use the preprocessor if-statement form:
#
# #if defined(NCTEST_NAME_OF_TEST)
#
# Should be used to post-process the results found by NCTEST_CONFIG_RE.
STRIP_DEFINED_RE = re.compile(r'defined\((.*)\)')
# Used to grab the expectation from comment at the end of an #ifdef. See
# NCTEST_CONFIG_RE's comment for examples of what the format should look like.
#
# The extracted substring should be a python array of regular expressions.
EXTRACT_EXPECTATION_RE = re.compile(r'//\s*(\[.*\])')
# The header for the result file so that it can be compiled.
RESULT_FILE_HEADER = """
// This file is generated by the no compile test from:
// %s
#include "base/logging.h"
#include "testing/gtest/include/gtest/gtest.h"
"""
# The log message on a test completion.
LOG_TEMPLATE = """
TEST(%s, %s) took %f secs. Started at %f, ended at %f.
"""
# The GUnit test function to output for a successful or disabled test.
GUNIT_TEMPLATE = """
TEST(%s, %s) { }
"""
# Timeout constants.
NCTEST_TERMINATE_TIMEOUT_SEC = 120
NCTEST_KILL_TIMEOUT_SEC = NCTEST_TERMINATE_TIMEOUT_SEC + 2
BUSY_LOOP_MAX_TIME_SEC = NCTEST_KILL_TIMEOUT_SEC * 2
def ValidateInput(compiler, parallelism, sourcefile_path, cflags,
resultfile_path):
"""Make sure the arguments being passed in are sane."""
assert os.path.isfile(compiler)
assert parallelism >= 1
assert type(sourcefile_path) is str
assert type(cflags) is list
for flag in cflags:
assert type(flag) is str
assert type(resultfile_path) is str
def ParseExpectation(expectation_string):
"""Extracts expectation definition from the trailing comment on the ifdef.
See the comment on NCTEST_CONFIG_RE for examples of the format we are parsing.
Args:
expectation_string: A string like "// [r'some_regex']"
Returns:
A list of compiled regular expressions indicating all possible valid
compiler outputs. If the list is empty, all outputs are considered valid.
"""
assert expectation_string is not None
match = EXTRACT_EXPECTATION_RE.match(expectation_string)
assert match
raw_expectation = ast.literal_eval(match.group(1))
assert type(raw_expectation) is list
expectation = []
for regex_str in raw_expectation:
assert type(regex_str) is str
expectation.append(re.compile(regex_str))
return expectation
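# Illustrative example (the trailing comment is hypothetical):
#   ParseExpectation("// [r'invalid conversion']")
# returns [re.compile(r'invalid conversion')]; the empty-list form "// []"
# means any output from a failed compilation is accepted.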
def ExtractTestConfigs(sourcefile_path, suite_name):
"""Parses the source file for test configurations.
Each no-compile test in the file is separated by an ifdef macro. We scan
the source file with the NCTEST_CONFIG_RE to find all ifdefs that look like
they demark one no-compile test and try to extract the test configuration
from that.
Args:
sourcefile_path: The path to the source file.
suite_name: The name of the test suite.
Returns:
A list of test configurations. Each test configuration is a dictionary of
the form:
{ name: 'NCTEST_NAME'
suite_name: 'SOURCE_FILE_NAME'
expectations: [re.Pattern, re.Pattern] }
The |suite_name| is used to generate a pretty gtest output on successful
completion of the no compile test.
The compiled regexps in |expectations| define the valid outputs of the
compiler. If any one of the listed patterns matches either the stderr or
stdout from the compilation, and the compilation failed, then the test is
    considered to have succeeded. If the list is empty, then we ignore the
compiler output and just check for failed compilation. If |expectations|
is actually None, then this specifies a compiler sanity check test, which
should expect a SUCCESSFUL compilation.
"""
sourcefile = open(sourcefile_path, 'r')
# Start with at least the compiler sanity test. You need to always have one
# sanity test to show that compiler flags and configuration are not just
# wrong. Otherwise, having a misconfigured compiler, or an error in the
# shared portions of the .nc file would cause all tests to erroneously pass.
test_configs = []
for line in sourcefile:
match_result = NCTEST_CONFIG_RE.match(line)
if not match_result:
continue
groups = match_result.groups()
# Grab the name and remove the defined() predicate if there is one.
name = groups[0]
strip_result = STRIP_DEFINED_RE.match(name)
if strip_result:
name = strip_result.group(1)
# Read expectations if there are any.
test_configs.append({'name': name,
'suite_name': suite_name,
'expectations': ParseExpectation(groups[1])})
sourcefile.close()
return test_configs
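# Illustrative .nc source fragment this parser would match (test name and
# expectation are hypothetical):
#
#   #if defined(NCTEST_ASSIGN_READONLY)  // [r"assignment of read-only"]
#   void WontCompile() { const int x = 0; x = 1; }
#   #endif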
def StartTest(compiler, sourcefile_path, tempfile_dir, cflags, config):
"""Start one negative compile test.
Args:
sourcefile_path: The path to the source file.
tempfile_dir: A directory to store temporary data from tests.
cflags: An array of strings with all the CFLAGS to give to gcc.
config: A dictionary describing the test. See ExtractTestConfigs
for a description of the config format.
Returns:
A dictionary containing all the information about the started test. The
fields in the dictionary are as follows:
{ 'proc': A subprocess object representing the compiler run.
'cmdline': The executed command line.
'name': The name of the test.
'suite_name': The suite name to use when generating the gunit test
result.
'terminate_timeout': The timestamp in seconds since the epoch after
which the test should be terminated.
'kill_timeout': The timestamp in seconds since the epoch after which
the test should be given a hard kill signal.
'started_at': A timestamp in seconds since the epoch for when this test
was started.
'aborted_at': A timestamp in seconds since the epoch for when this test
was aborted. If the test completed successfully,
this value is 0.
'finished_at': A timestamp in seconds since the epoch for when this
test was successfully complete. If the test is aborted,
or running, this value is 0.
'expectations': A dictionary with the test expectations. See
ParseExpectation() for the structure.
}
"""
cmdline = [compiler]
cmdline.extend(cflags)
name = config['name']
expectations = config['expectations']
if expectations is not None:
cmdline.append('-D%s' % name)
cmdline.extend(['-o', '/dev/null', '-c', '-x', 'c++',
sourcefile_path])
test_stdout = tempfile.TemporaryFile(dir=tempfile_dir)
test_stderr = tempfile.TemporaryFile(dir=tempfile_dir)
process = subprocess.Popen(cmdline, stdout=test_stdout, stderr=test_stderr)
now = time.time()
return {'proc': process,
'cmdline': ' '.join(cmdline),
'stdout': test_stdout,
'stderr': test_stderr,
'name': name,<|fim▁hole|> 'kill_timeout': now + NCTEST_KILL_TIMEOUT_SEC,
'started_at': now,
'aborted_at': 0,
'finished_at': 0,
'expectations': expectations}
def PassTest(resultfile, resultlog, test):
"""Logs the result of a test started by StartTest(), or a disabled test
configuration.
Args:
resultfile: File object for .cc file that results are written to.
resultlog: File object for the log file.
test: An instance of the dictionary returned by StartTest(), a
configuration from ExtractTestConfigs().
"""
resultfile.write(GUNIT_TEMPLATE % (
test['suite_name'], test['name']))
# The 'started_at' key is only added if a test has been started.
if 'started_at' in test:
resultlog.write(LOG_TEMPLATE % (
test['suite_name'], test['name'],
test['finished_at'] - test['started_at'],
test['started_at'], test['finished_at']))
def FailTest(resultfile, test, error, stdout=None, stderr=None):
"""Logs the result of a test started by StartTest()
Args:
resultfile: File object for .cc file that results are written to.
test: An instance of the dictionary returned by StartTest()
error: The printable reason for the failure.
stdout: The test's output to stdout.
stderr: The test's output to stderr.
"""
resultfile.write('#error "%s Failed: %s"\n' % (test['name'], error))
resultfile.write('#error "compile line: %s"\n' % test['cmdline'])
if stdout and len(stdout) != 0:
resultfile.write('#error "%s stdout:"\n' % test['name'])
for line in stdout.split('\n'):
resultfile.write('#error " %s:"\n' % line)
if stderr and len(stderr) != 0:
resultfile.write('#error "%s stderr:"\n' % test['name'])
for line in stderr.split('\n'):
resultfile.write('#error " %s"\n' % line)
resultfile.write('\n')
def WriteStats(resultlog, suite_name, timings):
"""Logs the peformance timings for each stage of the script.
Args:
resultlog: File object for the log file.
suite_name: The name of the GUnit suite this test belongs to.
timings: Dictionary with timestamps for each stage of the script run.
"""
stats_template = """
TEST(%s): Started %f, Ended %f, Total %fs, Extract %fs, Compile %fs, Process %fs
"""
total_secs = timings['results_processed'] - timings['started']
extract_secs = timings['extract_done'] - timings['started']
compile_secs = timings['compile_done'] - timings['extract_done']
process_secs = timings['results_processed'] - timings['compile_done']
resultlog.write(stats_template % (
suite_name, timings['started'], timings['results_processed'], total_secs,
extract_secs, compile_secs, process_secs))
def ExtractTestOutputAndCleanup(test):
"""Test output is in temp files. Read those and delete them.
Returns: A tuple (stderr, stdout).
"""
outputs = [None, None]
for i, stream_name in ((0, "stdout"), (1, "stderr")):
stream = test[stream_name]
stream.seek(0)
outputs[i] = stream.read()
stream.close()
return outputs
def ProcessTestResult(resultfile, resultlog, test):
"""Interprets and logs the result of a test started by StartTest()
Args:
resultfile: File object for .cc file that results are written to.
resultlog: File object for the log file.
test: The dictionary from StartTest() to process.
"""
proc = test['proc']
proc.wait()
(stdout, stderr) = ExtractTestOutputAndCleanup(test)
if test['aborted_at'] != 0:
FailTest(resultfile, test, "Compile timed out. Started %f ended %f." %
(test['started_at'], test['aborted_at']))
return
if proc.poll() == 0:
# Handle failure due to successful compile.
FailTest(resultfile, test,
'Unexpected successful compilation.',
stdout, stderr)
return
else:
# Check the output has the right expectations. If there are no
# expectations, then we just consider the output "matched" by default.
if len(test['expectations']) == 0:
PassTest(resultfile, resultlog, test)
return
# Otherwise test against all expectations.
for regexp in test['expectations']:
if (regexp.search(stdout) is not None or
regexp.search(stderr) is not None):
PassTest(resultfile, resultlog, test)
return
expectation_str = ', '.join(
["r'%s'" % regexp.pattern for regexp in test['expectations']])
FailTest(resultfile, test,
'Expectations [%s] did not match output.' % expectation_str,
stdout, stderr)
return
def CompleteAtLeastOneTest(executing_tests):
"""Blocks until at least one task is removed from executing_tests.
This function removes completed tests from executing_tests, logging failures
and output. If no tests can be removed, it will enter a poll-loop until one
test finishes or times out. On a timeout, this function is responsible for
terminating the process in the appropriate fashion.
Args:
executing_tests: A dict mapping a string containing the test name to the
test dict return from StartTest().
Returns:
A list of tests that have finished.
"""
finished_tests = []
busy_loop_timeout = time.time() + BUSY_LOOP_MAX_TIME_SEC
while len(finished_tests) == 0:
# If we don't make progress for too long, assume the code is just dead.
assert busy_loop_timeout > time.time()
# Select on the output files to block until we have something to
# do. We ignore the return value from select and just poll all
# processes.
read_set = []
for test in executing_tests.values():
read_set.extend([test['stdout'], test['stderr']])
select.select(read_set, [], read_set, NCTEST_TERMINATE_TIMEOUT_SEC)
# Now attempt to process results.
now = time.time()
for test in executing_tests.values():
proc = test['proc']
if proc.poll() is not None:
test['finished_at'] = now
finished_tests.append(test)
elif test['terminate_timeout'] < now:
proc.terminate()
test['aborted_at'] = now
elif test['kill_timeout'] < now:
proc.kill()
test['aborted_at'] = now
if len(finished_tests) == 0:
# We had output from some process but no process had
# finished. To avoid busy looping while waiting for a process to
# finish, insert a small 100 ms delay here.
time.sleep(0.1)
for test in finished_tests:
del executing_tests[test['name']]
return finished_tests
def main():
if len(sys.argv) < 6 or sys.argv[5] != '--':
print('Usage: %s <compiler> <parallelism> <sourcefile> <resultfile> '
'-- <cflags...>' % sys.argv[0])
sys.exit(1)
# Force us into the "C" locale so the compiler doesn't localize its output.
# In particular, this stops gcc from using smart quotes when in english UTF-8
# locales. This makes the expectation writing much easier.
os.environ['LC_ALL'] = 'C'
compiler = sys.argv[1]
parallelism = int(sys.argv[2])
sourcefile_path = sys.argv[3]
resultfile_path = sys.argv[4]
cflags = sys.argv[6:]
timings = {'started': time.time()}
ValidateInput(compiler, parallelism, sourcefile_path, cflags, resultfile_path)
# Convert filename from underscores to CamelCase.
words = os.path.splitext(os.path.basename(sourcefile_path))[0].split('_')
words = [w.capitalize() for w in words]
suite_name = 'NoCompile' + ''.join(words)
test_configs = ExtractTestConfigs(sourcefile_path, suite_name)
timings['extract_done'] = time.time()
resultfile = StringIO.StringIO()
resultlog = StringIO.StringIO()
resultfile.write(RESULT_FILE_HEADER % sourcefile_path)
# Run the no-compile tests, but ensure we do not run more than |parallelism|
# tests at once.
timings['header_written'] = time.time()
executing_tests = {}
finished_tests = []
cflags.extend(['-MMD', '-MF', resultfile_path + '.d', '-MT', resultfile_path])
test = StartTest(
compiler,
sourcefile_path,
os.path.dirname(resultfile_path),
cflags,
{ 'name': 'NCTEST_SANITY',
'suite_name': suite_name,
'expectations': None,
})
executing_tests[test['name']] = test
for config in test_configs:
# CompleteAtLeastOneTest blocks until at least one test finishes. Thus, this
# acts as a semaphore. We cannot use threads + a real semaphore because
# subprocess forks, which can cause all sorts of hilarity with threads.
if len(executing_tests) >= parallelism:
finished_tests.extend(CompleteAtLeastOneTest(executing_tests))
if config['name'].startswith('DISABLED_'):
PassTest(resultfile, resultlog, config)
else:
test = StartTest(compiler, sourcefile_path,
os.path.dirname(resultfile_path), cflags, config)
assert test['name'] not in executing_tests
executing_tests[test['name']] = test
# If there are no more test to start, we still need to drain the running
# ones.
while len(executing_tests) > 0:
finished_tests.extend(CompleteAtLeastOneTest(executing_tests))
timings['compile_done'] = time.time()
finished_tests = sorted(finished_tests, key=lambda test: test['name'])
for test in finished_tests:
if test['name'] == 'NCTEST_SANITY':
test['proc'].wait()
(stdout, stderr) = ExtractTestOutputAndCleanup(test)
return_code = test['proc'].returncode
if return_code != 0:
sys.stdout.write(stdout)
sys.stderr.write(stderr)
continue
ProcessTestResult(resultfile, resultlog, test)
timings['results_processed'] = time.time()
WriteStats(resultlog, suite_name, timings)
with open(resultfile_path + '.log', 'w') as fd:
fd.write(resultlog.getvalue())
if return_code == 0:
with open(resultfile_path, 'w') as fd:
fd.write(resultfile.getvalue())
resultfile.close()
if return_code != 0:
print("No-compile driver failure with return_code %d. Result log:" %
return_code)
print(resultlog.getvalue())
sys.exit(return_code)
if __name__ == '__main__':
main()<|fim▁end|> | 'suite_name': config['suite_name'],
'terminate_timeout': now + NCTEST_TERMINATE_TIMEOUT_SEC, |
<|file_name|>drop.rs<|end_file_name|><|fim▁begin|>struct Droppable {
name: &'static str,
}
// This trivial implementation of `drop` adds a print to console.
impl Drop for Droppable {
fn drop(&mut self) {
println!("> Dropping {}", self.name);
}
}
fn main() {
let _a = Droppable { name: "a" };
// block A
{
let _b = Droppable { name: "b" };
// block B<|fim▁hole|> let _c = Droppable { name: "c" };
let _d = Droppable { name: "d" };
println!("Exiting block B");
}
println!("Just exited block B");
println!("Exiting block A");
}
println!("Just exited block A");
// Variable can be manually dropped using the `drop` function
drop(_a);
// TODO ^ Try commenting this line
println!("end of the main function");
// `_a` *won't* be `drop`ed again here, because it already has been
// (manually) `drop`ed
}<|fim▁end|> | { |
<|file_name|>S163_L641_MissingRange.py<|end_file_name|><|fim▁begin|>'''
Given a sorted integer array whose elements are in the inclusive range [lower, upper], return its missing ranges.
Have you met this question in a real interview?
Example
Example 1
Input:
nums = [0, 1, 3, 50, 75], lower = 0 and upper = 99
Output:
["2", "4->49", "51->74", "76->99"]
Explanation:
in range [0, 99], the missing ranges are: range[2,2], range[4,49], range[51,74] and range[76,99]
Example 2
Input:
nums = [0, 1, 2, 3, 7], lower = 0 and upper = 7
Output:
["4->6"]
Explanation:
in range [0, 7], the missing range is range[4,6]
'''
class Solution:
"""
@param: nums: a sorted integer array
@param: lower: An integer
@param: upper: An integer
@return: a list of its missing ranges
"""
def findMissingRanges(self, nums, lower, upper):
# write your code here
        r = []
        prev = lower
        # Append a sentinel one past `upper` so the loop also emits the
        # final gap between the last element and `upper`.
        nums += [upper + 1]
for cur in nums:
if cur - prev >= 2:
r += [str(prev) + '->' + str(cur-1)]
elif cur - prev == 1:
r += [str(cur-1)]
prev = cur + 1<|fim▁hole|><|fim▁end|> | return r |
<|file_name|>DataControl.js<|end_file_name|><|fim▁begin|>class DataControl {
constructor() {
        this.appData = null
        this.updateData()
}
updateData() {
        this.appData = this.fetcherama()<|fim▁hole|> lib.fetch(`http://localhost:8080/api/class/getNearbyClasses/${coor.long}/${coor.lat}`, opt, data => {
if (data.success === true) {
return data.classes
}
})
}
}
export default DataControl<|fim▁end|> | }
fetcherama() { |
<|file_name|>extern.rs<|end_file_name|><|fim▁begin|>#[repr(C)]
struct Normal {
x: i32,
y: f32,
}<|fim▁hole|>
fn bar(a: Normal);
}<|fim▁end|> |
extern "C" {
fn foo() -> i32; |
<|file_name|>test_freebase_support_query.py<|end_file_name|><|fim▁begin|>#
# Copyright 2012 Markus Pielmeier
#
# This file is part of tagfs.
#<|fim▁hole|># it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# tagfs is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with tagfs. If not, see <http://www.gnu.org/licenses/>.
import unittest
import tagfs.freebase_support as freebase_support
class WhenQueryWithOneFilerAndOneSelector(unittest.TestCase):
def setUp(self):
super(WhenQueryWithOneFilerAndOneSelector, self).setUp()
self.query = freebase_support.Query({'filter': 'filterValue', 'selector': None, })
def testThenSelectedKeysIsSelector(self):
self.assertEqual(list(self.query.selectedKeys), ['selector',])
def testThenQueryStringIs(self):
self.assertEqual(self.query.queryString, '{"filter":"filterValue","selector":[]}')<|fim▁end|> | # tagfs is free software: you can redistribute it and/or modify |
<|file_name|>storageprovisioner_test.go<|end_file_name|><|fim▁begin|>// Copyright 2015 Canonical Ltd.
// Licensed under the AGPLv3, see LICENCE file for details.
package storageprovisioner_test
import (
"sort"
"github.com/juju/errors"
"github.com/juju/names"
jc "github.com/juju/testing/checkers"
gc "gopkg.in/check.v1"
"github.com/juju/juju/apiserver/common"
"github.com/juju/juju/apiserver/params"
"github.com/juju/juju/apiserver/storageprovisioner"
apiservertesting "github.com/juju/juju/apiserver/testing"
"github.com/juju/juju/environs/tags"
"github.com/juju/juju/instance"
jujutesting "github.com/juju/juju/juju/testing"
"github.com/juju/juju/state"
statetesting "github.com/juju/juju/state/testing"
"github.com/juju/juju/storage"
"github.com/juju/juju/storage/provider/dummy"
"github.com/juju/juju/storage/provider/registry"
"github.com/juju/juju/testing"
"github.com/juju/juju/testing/factory"
)
var _ = gc.Suite(&provisionerSuite{})
type provisionerSuite struct {
// TODO(wallyworld) remove JujuConnSuite
jujutesting.JujuConnSuite
factory *factory.Factory
resources *common.Resources
authorizer *apiservertesting.FakeAuthorizer
api *storageprovisioner.StorageProvisionerAPI
}
func (s *provisionerSuite) SetUpSuite(c *gc.C) {
s.JujuConnSuite.SetUpSuite(c)
registry.RegisterProvider("environscoped", &dummy.StorageProvider{
StorageScope: storage.ScopeEnviron,
})
registry.RegisterProvider("machinescoped", &dummy.StorageProvider{
StorageScope: storage.ScopeMachine,
})
registry.RegisterEnvironStorageProviders(
"dummy", "environscoped", "machinescoped",
)
s.AddCleanup(func(c *gc.C) {
registry.RegisterProvider("environscoped", nil)
registry.RegisterProvider("machinescoped", nil)
})
}
func (s *provisionerSuite) SetUpTest(c *gc.C) {
s.JujuConnSuite.SetUpTest(c)
s.factory = factory.NewFactory(s.State)
s.resources = common.NewResources()
// Create the resource registry separately to track invocations to
// Register.
s.resources = common.NewResources()
s.AddCleanup(func(_ *gc.C) { s.resources.StopAll() })
var err error
s.authorizer = &apiservertesting.FakeAuthorizer{
Tag: names.NewMachineTag("0"),
EnvironManager: true,
}
s.api, err = storageprovisioner.NewStorageProvisionerAPI(s.State, s.resources, s.authorizer)
c.Assert(err, jc.ErrorIsNil)
}
func (s *provisionerSuite) TestNewStorageProvisionerAPINonMachine(c *gc.C) {
tag := names.NewUnitTag("mysql/0")
authorizer := &apiservertesting.FakeAuthorizer{Tag: tag}
_, err := storageprovisioner.NewStorageProvisionerAPI(s.State, common.NewResources(), authorizer)
c.Assert(err, gc.ErrorMatches, "permission denied")
}
func (s *provisionerSuite) setupVolumes(c *gc.C) {
s.factory.MakeMachine(c, &factory.MachineParams{
InstanceId: instance.Id("inst-id"),
Volumes: []state.MachineVolumeParams{
{Volume: state.VolumeParams{Pool: "machinescoped", Size: 1024}},
{Volume: state.VolumeParams{Pool: "environscoped", Size: 2048}},
{Volume: state.VolumeParams{Pool: "environscoped", Size: 4096}},
{
Volume: state.VolumeParams{Pool: "environscoped", Size: 4096},
Attachment: state.VolumeAttachmentParams{
ReadOnly: true,
},
},
},
})
// Only provision the first and third volumes.
err := s.State.SetVolumeInfo(names.NewVolumeTag("0/0"), state.VolumeInfo{
HardwareId: "123",
VolumeId: "abc",
Size: 1024,
Persistent: true,
})
c.Assert(err, jc.ErrorIsNil)
err = s.State.SetVolumeInfo(names.NewVolumeTag("2"), state.VolumeInfo{
HardwareId: "456",
VolumeId: "def",
Size: 4096,
})
c.Assert(err, jc.ErrorIsNil)
// Make a machine without storage for tests to use.
s.factory.MakeMachine(c, nil)
// Make an unprovisioned machine with storage for tests to use.
// TODO(axw) extend testing/factory to allow creating unprovisioned
// machines.
_, err = s.State.AddOneMachine(state.MachineTemplate{
Series: "quantal",
Jobs: []state.MachineJob{state.JobHostUnits},
Volumes: []state.MachineVolumeParams{
{Volume: state.VolumeParams{Pool: "environscoped", Size: 2048}},
},
})
c.Assert(err, jc.ErrorIsNil)
}
func (s *provisionerSuite) setupFilesystems(c *gc.C) {
s.factory.MakeMachine(c, &factory.MachineParams{
InstanceId: instance.Id("inst-id"),
Filesystems: []state.MachineFilesystemParams{{
Filesystem: state.FilesystemParams{Pool: "machinescoped", Size: 1024},
Attachment: state.FilesystemAttachmentParams{
Location: "/srv",
ReadOnly: true,
},
}, {
Filesystem: state.FilesystemParams{Pool: "environscoped", Size: 2048},
}, {
Filesystem: state.FilesystemParams{Pool: "environscoped", Size: 4096},
}},
})
// Only provision the first and third filesystems.
err := s.State.SetFilesystemInfo(names.NewFilesystemTag("0/0"), state.FilesystemInfo{
FilesystemId: "abc",
Size: 1024,
})
c.Assert(err, jc.ErrorIsNil)
err = s.State.SetFilesystemInfo(names.NewFilesystemTag("2"), state.FilesystemInfo{
FilesystemId: "def",
Size: 4096,
})
c.Assert(err, jc.ErrorIsNil)
// Make a machine without storage for tests to use.
s.factory.MakeMachine(c, nil)
// Make an unprovisioned machine with storage for tests to use.
// TODO(axw) extend testing/factory to allow creating unprovisioned
// machines.
_, err = s.State.AddOneMachine(state.MachineTemplate{
Series: "quantal",
Jobs: []state.MachineJob{state.JobHostUnits},
Filesystems: []state.MachineFilesystemParams{{
Filesystem: state.FilesystemParams{Pool: "environscoped", Size: 2048},
}},
})
c.Assert(err, jc.ErrorIsNil)
}
func (s *provisionerSuite) TestVolumesMachine(c *gc.C) {
s.setupVolumes(c)
s.authorizer.EnvironManager = false
results, err := s.api.Volumes(params.Entities{
Entities: []params.Entity{{"volume-0-0"}, {"volume-1"}, {"volume-42"}},
})
c.Assert(err, jc.ErrorIsNil)
c.Assert(results, gc.DeepEquals, params.VolumeResults{
Results: []params.VolumeResult{
{Result: params.Volume{
VolumeTag: "volume-0-0",
Info: params.VolumeInfo{
VolumeId: "abc",
HardwareId: "123",
Size: 1024,
Persistent: true,
},
}},
{Error: ¶ms.Error{Message: "permission denied", Code: "unauthorized access"}},
{Error: ¶ms.Error{Message: "permission denied", Code: "unauthorized access"}},
},
})
}
func (s *provisionerSuite) TestVolumesEnviron(c *gc.C) {
s.setupVolumes(c)
s.authorizer.Tag = names.NewMachineTag("2") // neither 0 nor 1
results, err := s.api.Volumes(params.Entities{
Entities: []params.Entity{
{"volume-0-0"},
{"volume-1"},
{"volume-2"},
{"volume-42"},
},
})
c.Assert(err, jc.ErrorIsNil)
c.Assert(results, gc.DeepEquals, params.VolumeResults{
Results: []params.VolumeResult{
{Error: ¶ms.Error{Message: "permission denied", Code: "unauthorized access"}},
{Error: common.ServerError(errors.NotProvisionedf(`volume "1"`))},
{Result: params.Volume{
VolumeTag: "volume-2",
Info: params.VolumeInfo{
VolumeId: "def",
HardwareId: "456",
Size: 4096,
},
}},
{Error: ¶ms.Error{Message: "permission denied", Code: "unauthorized access"}},
},
})
}
func (s *provisionerSuite) TestVolumesEmptyArgs(c *gc.C) {
results, err := s.api.Volumes(params.Entities{})
c.Assert(err, jc.ErrorIsNil)
c.Assert(results.Results, gc.HasLen, 0)
}
func (s *provisionerSuite) TestFilesystems(c *gc.C) {
s.setupFilesystems(c)
s.authorizer.Tag = names.NewMachineTag("2") // neither 0 nor 1
results, err := s.api.Filesystems(params.Entities{
Entities: []params.Entity{
{"filesystem-0-0"},
{"filesystem-1"},
{"filesystem-2"},
{"filesystem-42"},
},
})
c.Assert(err, jc.ErrorIsNil)
c.Assert(results, jc.DeepEquals, params.FilesystemResults{
Results: []params.FilesystemResult{
{Error: ¶ms.Error{Message: "permission denied", Code: "unauthorized access"}},
{Error: common.ServerError(errors.NotProvisionedf(`filesystem "1"`))},
{Result: params.Filesystem{
FilesystemTag: "filesystem-2",
Info: params.FilesystemInfo{
FilesystemId: "def",
Size: 4096,
},
}},
{Error: ¶ms.Error{Message: "permission denied", Code: "unauthorized access"}},
},
})
}
func (s *provisionerSuite) TestVolumeAttachments(c *gc.C) {
s.setupVolumes(c)
s.authorizer.EnvironManager = false
err := s.State.SetVolumeAttachmentInfo(
names.NewMachineTag("0"),
names.NewVolumeTag("0/0"),
state.VolumeAttachmentInfo{DeviceName: "xvdf1"},
)
c.Assert(err, jc.ErrorIsNil)
results, err := s.api.VolumeAttachments(params.MachineStorageIds{
Ids: []params.MachineStorageId{{
MachineTag: "machine-0",
AttachmentTag: "volume-0-0",
}, {
MachineTag: "machine-0",
AttachmentTag: "volume-2", // volume attachment not provisioned
}, {
MachineTag: "machine-0",
AttachmentTag: "volume-42",
}},
})
c.Assert(err, jc.ErrorIsNil)
c.Assert(results, jc.DeepEquals, params.VolumeAttachmentResults{
Results: []params.VolumeAttachmentResult{
{Result: params.VolumeAttachment{
VolumeTag: "volume-0-0",
MachineTag: "machine-0",
Info: params.VolumeAttachmentInfo{
DeviceName: "xvdf1",
},
}},
{Error: ¶ms.Error{
Code: params.CodeNotProvisioned,
Message: `volume attachment "2" on "0" not provisioned`,
}},
{Error: ¶ms.Error{Message: "permission denied", Code: "unauthorized access"}},
},
})
}
func (s *provisionerSuite) TestFilesystemAttachments(c *gc.C) {
s.setupFilesystems(c)
s.authorizer.EnvironManager = false
err := s.State.SetFilesystemAttachmentInfo(
names.NewMachineTag("0"),
names.NewFilesystemTag("0/0"),
state.FilesystemAttachmentInfo{
MountPoint: "/srv",
ReadOnly: true,
},
)
c.Assert(err, jc.ErrorIsNil)
results, err := s.api.FilesystemAttachments(params.MachineStorageIds{
Ids: []params.MachineStorageId{{
MachineTag: "machine-0",
AttachmentTag: "filesystem-0-0",
}, {
MachineTag: "machine-0",
AttachmentTag: "filesystem-2", // filesystem attachment not provisioned
}, {
MachineTag: "machine-0",
AttachmentTag: "filesystem-42",
}},
})
c.Assert(err, jc.ErrorIsNil)
c.Assert(results, jc.DeepEquals, params.FilesystemAttachmentResults{
Results: []params.FilesystemAttachmentResult{
{Result: params.FilesystemAttachment{
FilesystemTag: "filesystem-0-0",
MachineTag: "machine-0",
Info: params.FilesystemAttachmentInfo{
MountPoint: "/srv",
ReadOnly: true,
},
}},
{Error: &params.Error{
Code: params.CodeNotProvisioned,<|fim▁hole|> }},
{Error: &params.Error{Message: "permission denied", Code: "unauthorized access"}},
},
})
}
func (s *provisionerSuite) TestVolumeParams(c *gc.C) {
s.setupVolumes(c)
results, err := s.api.VolumeParams(params.Entities{
Entities: []params.Entity{
{"volume-0-0"},
{"volume-1"},
{"volume-3"},
{"volume-42"},
},
})
c.Assert(err, jc.ErrorIsNil)
c.Assert(results, jc.DeepEquals, params.VolumeParamsResults{
Results: []params.VolumeParamsResult{
{Result: params.VolumeParams{
VolumeTag: "volume-0-0",
Size: 1024,
Provider: "machinescoped",
Tags: map[string]string{
tags.JujuController: testing.ModelTag.Id(),
tags.JujuModel: testing.ModelTag.Id(),
},
Attachment: &params.VolumeAttachmentParams{
MachineTag: "machine-0",
VolumeTag: "volume-0-0",
Provider: "machinescoped",
InstanceId: "inst-id",
},
}},
{Result: params.VolumeParams{
VolumeTag: "volume-1",
Size: 2048,
Provider: "environscoped",
Tags: map[string]string{
tags.JujuController: testing.ModelTag.Id(),
tags.JujuModel: testing.ModelTag.Id(),
},
Attachment: &params.VolumeAttachmentParams{
MachineTag: "machine-0",
VolumeTag: "volume-1",
Provider: "environscoped",
InstanceId: "inst-id",
},
}},
{Result: params.VolumeParams{
VolumeTag: "volume-3",
Size: 4096,
Provider: "environscoped",
Tags: map[string]string{
tags.JujuController: testing.ModelTag.Id(),
tags.JujuModel: testing.ModelTag.Id(),
},
Attachment: &params.VolumeAttachmentParams{
MachineTag: "machine-0",
VolumeTag: "volume-3",
Provider: "environscoped",
InstanceId: "inst-id",
ReadOnly: true,
},
}},
{Error: &params.Error{Message: "permission denied", Code: "unauthorized access"}},
},
})
}
func (s *provisionerSuite) TestVolumeParamsEmptyArgs(c *gc.C) {
results, err := s.api.VolumeParams(params.Entities{})
c.Assert(err, jc.ErrorIsNil)
c.Assert(results.Results, gc.HasLen, 0)
}
func (s *provisionerSuite) TestFilesystemParams(c *gc.C) {
s.setupFilesystems(c)
results, err := s.api.FilesystemParams(params.Entities{
Entities: []params.Entity{{"filesystem-0-0"}, {"filesystem-1"}, {"filesystem-42"}},
})
c.Assert(err, jc.ErrorIsNil)
c.Assert(results, jc.DeepEquals, params.FilesystemParamsResults{
Results: []params.FilesystemParamsResult{
{Result: params.FilesystemParams{
FilesystemTag: "filesystem-0-0",
Size: 1024,
Provider: "machinescoped",
Tags: map[string]string{
tags.JujuController: testing.ModelTag.Id(),
tags.JujuModel: testing.ModelTag.Id(),
},
}},
{Result: params.FilesystemParams{
FilesystemTag: "filesystem-1",
Size: 2048,
Provider: "environscoped",
Tags: map[string]string{
tags.JujuController: testing.ModelTag.Id(),
tags.JujuModel: testing.ModelTag.Id(),
},
}},
{Error: &params.Error{Message: "permission denied", Code: "unauthorized access"}},
},
})
}
func (s *provisionerSuite) TestVolumeAttachmentParams(c *gc.C) {
s.setupVolumes(c)
err := s.State.SetVolumeInfo(names.NewVolumeTag("3"), state.VolumeInfo{
HardwareId: "123",
VolumeId: "xyz",
Size: 1024,
})
c.Assert(err, jc.ErrorIsNil)
err = s.State.SetVolumeAttachmentInfo(
names.NewMachineTag("0"),
names.NewVolumeTag("3"),
state.VolumeAttachmentInfo{
DeviceName: "xvdf1",
ReadOnly: true,
},
)
c.Assert(err, jc.ErrorIsNil)
results, err := s.api.VolumeAttachmentParams(params.MachineStorageIds{
Ids: []params.MachineStorageId{{
MachineTag: "machine-0",
AttachmentTag: "volume-0-0",
}, {
MachineTag: "machine-0",
AttachmentTag: "volume-1",
}, {
MachineTag: "machine-0",
AttachmentTag: "volume-3",
}, {
MachineTag: "machine-2",
AttachmentTag: "volume-4",
}, {
MachineTag: "machine-0",
AttachmentTag: "volume-42",
}},
})
c.Assert(err, jc.ErrorIsNil)
c.Assert(results, jc.DeepEquals, params.VolumeAttachmentParamsResults{
Results: []params.VolumeAttachmentParamsResult{
{Result: params.VolumeAttachmentParams{
MachineTag: "machine-0",
VolumeTag: "volume-0-0",
InstanceId: "inst-id",
VolumeId: "abc",
Provider: "machinescoped",
}},
{Result: params.VolumeAttachmentParams{
MachineTag: "machine-0",
VolumeTag: "volume-1",
InstanceId: "inst-id",
Provider: "environscoped",
}},
{Result: params.VolumeAttachmentParams{
MachineTag: "machine-0",
VolumeTag: "volume-3",
InstanceId: "inst-id",
VolumeId: "xyz",
Provider: "environscoped",
ReadOnly: true,
}},
{Result: params.VolumeAttachmentParams{
MachineTag: "machine-2",
VolumeTag: "volume-4",
Provider: "environscoped",
}},
{Error: &params.Error{Message: "permission denied", Code: "unauthorized access"}},
},
})
}
func (s *provisionerSuite) TestFilesystemAttachmentParams(c *gc.C) {
s.setupFilesystems(c)
err := s.State.SetFilesystemInfo(names.NewFilesystemTag("1"), state.FilesystemInfo{
FilesystemId: "fsid",
Size: 1024,
})
c.Assert(err, jc.ErrorIsNil)
err = s.State.SetFilesystemAttachmentInfo(
names.NewMachineTag("0"),
names.NewFilesystemTag("1"),
state.FilesystemAttachmentInfo{
MountPoint: "/in/the/place",
},
)
c.Assert(err, jc.ErrorIsNil)
results, err := s.api.FilesystemAttachmentParams(params.MachineStorageIds{
Ids: []params.MachineStorageId{{
MachineTag: "machine-0",
AttachmentTag: "filesystem-0-0",
}, {
MachineTag: "machine-0",
AttachmentTag: "filesystem-1",
}, {
MachineTag: "machine-2",
AttachmentTag: "filesystem-3",
}, {
MachineTag: "machine-0",
AttachmentTag: "filesystem-42",
}},
})
c.Assert(err, jc.ErrorIsNil)
c.Assert(results, jc.DeepEquals, params.FilesystemAttachmentParamsResults{
Results: []params.FilesystemAttachmentParamsResult{
{Result: params.FilesystemAttachmentParams{
MachineTag: "machine-0",
FilesystemTag: "filesystem-0-0",
InstanceId: "inst-id",
FilesystemId: "abc",
Provider: "machinescoped",
MountPoint: "/srv",
ReadOnly: true,
}},
{Result: params.FilesystemAttachmentParams{
MachineTag: "machine-0",
FilesystemTag: "filesystem-1",
InstanceId: "inst-id",
FilesystemId: "fsid",
Provider: "environscoped",
MountPoint: "/in/the/place",
}},
{Result: params.FilesystemAttachmentParams{
MachineTag: "machine-2",
FilesystemTag: "filesystem-3",
Provider: "environscoped",
}},
{Error: &params.Error{Message: "permission denied", Code: "unauthorized access"}},
},
})
}
func (s *provisionerSuite) TestSetVolumeAttachmentInfo(c *gc.C) {
s.setupVolumes(c)
err := s.State.SetVolumeInfo(names.NewVolumeTag("4"), state.VolumeInfo{
VolumeId: "whatever",
Size: 1024,
})
c.Assert(err, jc.ErrorIsNil)
results, err := s.api.SetVolumeAttachmentInfo(params.VolumeAttachments{
VolumeAttachments: []params.VolumeAttachment{{
MachineTag: "machine-0",
VolumeTag: "volume-0-0",
Info: params.VolumeAttachmentInfo{
DeviceName: "sda",
ReadOnly: true,
},
}, {
MachineTag: "machine-0",
VolumeTag: "volume-1",
Info: params.VolumeAttachmentInfo{
DeviceName: "sdb",
},
}, {
MachineTag: "machine-2",
VolumeTag: "volume-4",
Info: params.VolumeAttachmentInfo{
DeviceName: "sdc",
},
}, {
MachineTag: "machine-0",
VolumeTag: "volume-42",
Info: params.VolumeAttachmentInfo{
DeviceName: "sdd",
},
}},
})
c.Assert(err, jc.ErrorIsNil)
c.Assert(results, jc.DeepEquals, params.ErrorResults{
Results: []params.ErrorResult{
{},
{Error: &params.Error{Message: `cannot set info for volume attachment 1:0: volume "1" not provisioned`, Code: "not provisioned"}},
{Error: &params.Error{Message: `cannot set info for volume attachment 4:2: machine 2 not provisioned`, Code: "not provisioned"}},
{Error: &params.Error{Message: "permission denied", Code: "unauthorized access"}},
},
})
}
func (s *provisionerSuite) TestSetFilesystemAttachmentInfo(c *gc.C) {
s.setupFilesystems(c)
err := s.State.SetFilesystemInfo(names.NewFilesystemTag("3"), state.FilesystemInfo{
FilesystemId: "whatever",
Size: 1024,
})
c.Assert(err, jc.ErrorIsNil)
results, err := s.api.SetFilesystemAttachmentInfo(params.FilesystemAttachments{
FilesystemAttachments: []params.FilesystemAttachment{{
MachineTag: "machine-0",
FilesystemTag: "filesystem-0-0",
Info: params.FilesystemAttachmentInfo{
MountPoint: "/srv/a",
ReadOnly: true,
},
}, {
MachineTag: "machine-0",
FilesystemTag: "filesystem-1",
Info: params.FilesystemAttachmentInfo{
MountPoint: "/srv/b",
},
}, {
MachineTag: "machine-2",
FilesystemTag: "filesystem-3",
Info: params.FilesystemAttachmentInfo{
MountPoint: "/srv/c",
},
}, {
MachineTag: "machine-0",
FilesystemTag: "filesystem-42",
Info: params.FilesystemAttachmentInfo{
MountPoint: "/srv/d",
},
}},
})
c.Assert(err, jc.ErrorIsNil)
c.Assert(results, jc.DeepEquals, params.ErrorResults{
Results: []params.ErrorResult{
{},
{Error: &params.Error{Message: `cannot set info for filesystem attachment 1:0: filesystem "1" not provisioned`, Code: "not provisioned"}},
{Error: &params.Error{Message: `cannot set info for filesystem attachment 3:2: machine 2 not provisioned`, Code: "not provisioned"}},
{Error: &params.Error{Message: "permission denied", Code: "unauthorized access"}},
},
})
}
func (s *provisionerSuite) TestWatchVolumes(c *gc.C) {
s.setupVolumes(c)
s.factory.MakeMachine(c, nil)
c.Assert(s.resources.Count(), gc.Equals, 0)
args := params.Entities{Entities: []params.Entity{
{"machine-0"},
{s.State.ModelTag().String()},
{"environ-adb650da-b77b-4ee8-9cbb-d57a9a592847"},
{"machine-1"},
{"machine-42"}},
}
result, err := s.api.WatchVolumes(args)
c.Assert(err, jc.ErrorIsNil)
sort.Strings(result.Results[1].Changes)
c.Assert(result, jc.DeepEquals, params.StringsWatchResults{
Results: []params.StringsWatchResult{
{StringsWatcherId: "1", Changes: []string{"0/0"}},
{StringsWatcherId: "2", Changes: []string{"1", "2", "3", "4"}},
{Error: apiservertesting.ErrUnauthorized},
{Error: apiservertesting.ErrUnauthorized},
{Error: apiservertesting.ErrUnauthorized},
},
})
// Verify the resources were registered and stop them when done.
c.Assert(s.resources.Count(), gc.Equals, 2)
v0Watcher := s.resources.Get("1")
defer statetesting.AssertStop(c, v0Watcher)
v1Watcher := s.resources.Get("2")
defer statetesting.AssertStop(c, v1Watcher)
// Check that the Watch has consumed the initial events ("returned" in
// the Watch call)
wc := statetesting.NewStringsWatcherC(c, s.State, v0Watcher.(state.StringsWatcher))
wc.AssertNoChange()
wc = statetesting.NewStringsWatcherC(c, s.State, v1Watcher.(state.StringsWatcher))
wc.AssertNoChange()
}
func (s *provisionerSuite) TestWatchVolumeAttachments(c *gc.C) {
s.setupVolumes(c)
s.factory.MakeMachine(c, nil)
c.Assert(s.resources.Count(), gc.Equals, 0)
args := params.Entities{Entities: []params.Entity{
{"machine-0"},
{s.State.ModelTag().String()},
{"environ-adb650da-b77b-4ee8-9cbb-d57a9a592847"},
{"machine-1"},
{"machine-42"}},
}
result, err := s.api.WatchVolumeAttachments(args)
c.Assert(err, jc.ErrorIsNil)
sort.Sort(byMachineAndEntity(result.Results[0].Changes))
sort.Sort(byMachineAndEntity(result.Results[1].Changes))
c.Assert(result, jc.DeepEquals, params.MachineStorageIdsWatchResults{
Results: []params.MachineStorageIdsWatchResult{
{
MachineStorageIdsWatcherId: "1",
Changes: []params.MachineStorageId{{
MachineTag: "machine-0",
AttachmentTag: "volume-0-0",
}},
},
{
MachineStorageIdsWatcherId: "2",
Changes: []params.MachineStorageId{{
MachineTag: "machine-0",
AttachmentTag: "volume-1",
}, {
MachineTag: "machine-0",
AttachmentTag: "volume-2",
}, {
MachineTag: "machine-0",
AttachmentTag: "volume-3",
}, {
MachineTag: "machine-2",
AttachmentTag: "volume-4",
}},
},
{Error: apiservertesting.ErrUnauthorized},
{Error: apiservertesting.ErrUnauthorized},
{Error: apiservertesting.ErrUnauthorized},
},
})
// Verify the resources were registered and stop them when done.
c.Assert(s.resources.Count(), gc.Equals, 2)
v0Watcher := s.resources.Get("1")
defer statetesting.AssertStop(c, v0Watcher)
v1Watcher := s.resources.Get("2")
defer statetesting.AssertStop(c, v1Watcher)
// Check that the Watch has consumed the initial events ("returned" in
// the Watch call)
wc := statetesting.NewStringsWatcherC(c, s.State, v0Watcher.(state.StringsWatcher))
wc.AssertNoChange()
wc = statetesting.NewStringsWatcherC(c, s.State, v1Watcher.(state.StringsWatcher))
wc.AssertNoChange()
}
func (s *provisionerSuite) TestWatchFilesystems(c *gc.C) {
s.setupFilesystems(c)
c.Assert(s.resources.Count(), gc.Equals, 0)
args := params.Entities{Entities: []params.Entity{
{"machine-0"},
{s.State.ModelTag().String()},
{"environ-adb650da-b77b-4ee8-9cbb-d57a9a592847"},
{"machine-1"},
{"machine-42"}},
}
result, err := s.api.WatchFilesystems(args)
c.Assert(err, jc.ErrorIsNil)
sort.Strings(result.Results[1].Changes)
c.Assert(result, jc.DeepEquals, params.StringsWatchResults{
Results: []params.StringsWatchResult{
{
StringsWatcherId: "1",
Changes: []string{"0/0"},
},
{
StringsWatcherId: "2",
Changes: []string{"1", "2", "3"},
},
{Error: apiservertesting.ErrUnauthorized},
{Error: apiservertesting.ErrUnauthorized},
{Error: apiservertesting.ErrUnauthorized},
},
})
// Verify the resources were registered and stop them when done.
c.Assert(s.resources.Count(), gc.Equals, 2)
v0Watcher := s.resources.Get("1")
defer statetesting.AssertStop(c, v0Watcher)
v1Watcher := s.resources.Get("2")
defer statetesting.AssertStop(c, v1Watcher)
// Check that the Watch has consumed the initial events ("returned" in
// the Watch call)
wc := statetesting.NewStringsWatcherC(c, s.State, v0Watcher.(state.StringsWatcher))
wc.AssertNoChange()
wc = statetesting.NewStringsWatcherC(c, s.State, v1Watcher.(state.StringsWatcher))
wc.AssertNoChange()
}
func (s *provisionerSuite) TestWatchFilesystemAttachments(c *gc.C) {
s.setupFilesystems(c)
c.Assert(s.resources.Count(), gc.Equals, 0)
args := params.Entities{Entities: []params.Entity{
{"machine-0"},
{s.State.ModelTag().String()},
{"environ-adb650da-b77b-4ee8-9cbb-d57a9a592847"},
{"machine-1"},
{"machine-42"}},
}
result, err := s.api.WatchFilesystemAttachments(args)
c.Assert(err, jc.ErrorIsNil)
sort.Sort(byMachineAndEntity(result.Results[0].Changes))
sort.Sort(byMachineAndEntity(result.Results[1].Changes))
c.Assert(result, jc.DeepEquals, params.MachineStorageIdsWatchResults{
Results: []params.MachineStorageIdsWatchResult{
{
MachineStorageIdsWatcherId: "1",
Changes: []params.MachineStorageId{{
MachineTag: "machine-0",
AttachmentTag: "filesystem-0-0",
}},
},
{
MachineStorageIdsWatcherId: "2",
Changes: []params.MachineStorageId{{
MachineTag: "machine-0",
AttachmentTag: "filesystem-1",
}, {
MachineTag: "machine-0",
AttachmentTag: "filesystem-2",
}, {
MachineTag: "machine-2",
AttachmentTag: "filesystem-3",
}},
},
{Error: apiservertesting.ErrUnauthorized},
{Error: apiservertesting.ErrUnauthorized},
{Error: apiservertesting.ErrUnauthorized},
},
})
// Verify the resources were registered and stop them when done.
c.Assert(s.resources.Count(), gc.Equals, 2)
v0Watcher := s.resources.Get("1")
defer statetesting.AssertStop(c, v0Watcher)
v1Watcher := s.resources.Get("2")
defer statetesting.AssertStop(c, v1Watcher)
// Check that the Watch has consumed the initial events ("returned" in
// the Watch call)
wc := statetesting.NewStringsWatcherC(c, s.State, v0Watcher.(state.StringsWatcher))
wc.AssertNoChange()
wc = statetesting.NewStringsWatcherC(c, s.State, v1Watcher.(state.StringsWatcher))
wc.AssertNoChange()
}
func (s *provisionerSuite) TestWatchBlockDevices(c *gc.C) {
s.factory.MakeMachine(c, nil)
c.Assert(s.resources.Count(), gc.Equals, 0)
args := params.Entities{Entities: []params.Entity{
{"machine-0"},
{"service-mysql"},
{"machine-1"},
{"machine-42"}},
}
results, err := s.api.WatchBlockDevices(args)
c.Assert(err, jc.ErrorIsNil)
c.Assert(results, jc.DeepEquals, params.NotifyWatchResults{
Results: []params.NotifyWatchResult{
{NotifyWatcherId: "1"},
{Error: &params.Error{Message: `"service-mysql" is not a valid machine tag`}},
{Error: apiservertesting.ErrUnauthorized},
{Error: apiservertesting.ErrUnauthorized},
},
})
// Verify the resources were registered and stop them when done.
c.Assert(s.resources.Count(), gc.Equals, 1)
watcher := s.resources.Get("1")
defer statetesting.AssertStop(c, watcher)
// Check that the Watch has consumed the initial event.
wc := statetesting.NewNotifyWatcherC(c, s.State, watcher.(state.NotifyWatcher))
wc.AssertNoChange()
m, err := s.State.Machine("0")
c.Assert(err, jc.ErrorIsNil)
err = m.SetMachineBlockDevices(state.BlockDeviceInfo{
DeviceName: "sda",
Size: 123,
})
c.Assert(err, jc.ErrorIsNil)
wc.AssertOneChange()
}
func (s *provisionerSuite) TestVolumeBlockDevices(c *gc.C) {
s.setupVolumes(c)
s.factory.MakeMachine(c, nil)
err := s.State.SetVolumeAttachmentInfo(
names.NewMachineTag("0"),
names.NewVolumeTag("0/0"),
state.VolumeAttachmentInfo{},
)
c.Assert(err, jc.ErrorIsNil)
machine0, err := s.State.Machine("0")
c.Assert(err, jc.ErrorIsNil)
err = machine0.SetMachineBlockDevices(state.BlockDeviceInfo{
DeviceName: "sda",
Size: 123,
HardwareId: "123", // matches volume-0/0
})
c.Assert(err, jc.ErrorIsNil)
args := params.MachineStorageIds{Ids: []params.MachineStorageId{
{MachineTag: "machine-0", AttachmentTag: "volume-0-0"},
{MachineTag: "machine-0", AttachmentTag: "volume-0-1"},
{MachineTag: "machine-0", AttachmentTag: "volume-0-2"},
{MachineTag: "machine-1", AttachmentTag: "volume-1"},
{MachineTag: "machine-42", AttachmentTag: "volume-42"},
{MachineTag: "service-mysql", AttachmentTag: "volume-1"},
}}
results, err := s.api.VolumeBlockDevices(args)
c.Assert(err, jc.ErrorIsNil)
c.Assert(results, jc.DeepEquals, params.BlockDeviceResults{
Results: []params.BlockDeviceResult{
{Result: storage.BlockDevice{
DeviceName: "sda",
Size: 123,
HardwareId: "123",
}},
{Error: apiservertesting.ErrUnauthorized},
{Error: apiservertesting.ErrUnauthorized},
{Error: apiservertesting.ErrUnauthorized},
{Error: apiservertesting.ErrUnauthorized},
{Error: &params.Error{Message: `"service-mysql" is not a valid machine tag`}},
},
})
}
func (s *provisionerSuite) TestLife(c *gc.C) {
s.setupVolumes(c)
args := params.Entities{Entities: []params.Entity{{"volume-0-0"}, {"volume-1"}, {"volume-42"}}}
result, err := s.api.Life(args)
c.Assert(err, jc.ErrorIsNil)
c.Assert(result, gc.DeepEquals, params.LifeResults{
Results: []params.LifeResult{
{Life: params.Alive},
{Life: params.Alive},
{Error: common.ServerError(errors.NotFoundf(`volume "42"`))},
},
})
}
func (s *provisionerSuite) TestAttachmentLife(c *gc.C) {
s.setupVolumes(c)
// TODO(axw) test filesystem attachment life
// TODO(axw) test Dying
results, err := s.api.AttachmentLife(params.MachineStorageIds{
Ids: []params.MachineStorageId{{
MachineTag: "machine-0",
AttachmentTag: "volume-0-0",
}, {
MachineTag: "machine-0",
AttachmentTag: "volume-1",
}, {
MachineTag: "machine-2",
AttachmentTag: "volume-4",
}, {
MachineTag: "machine-0",
AttachmentTag: "volume-42",
}},
})
c.Assert(err, jc.ErrorIsNil)
c.Assert(results, jc.DeepEquals, params.LifeResults{
Results: []params.LifeResult{
{Life: params.Alive},
{Life: params.Alive},
{Life: params.Alive},
{Error: &params.Error{Message: "permission denied", Code: "unauthorized access"}},
},
})
}
func (s *provisionerSuite) TestEnsureDead(c *gc.C) {
s.setupVolumes(c)
args := params.Entities{Entities: []params.Entity{{"volume-0-0"}, {"volume-1"}, {"volume-42"}}}
result, err := s.api.EnsureDead(args)
c.Assert(err, jc.ErrorIsNil)
// TODO(wallyworld) - this test will be updated when EnsureDead is supported
c.Assert(result, gc.DeepEquals, params.ErrorResults{
Results: []params.ErrorResult{
{Error: common.ServerError(common.NotSupportedError(names.NewVolumeTag("0/0"), "ensuring death"))},
{Error: common.ServerError(common.NotSupportedError(names.NewVolumeTag("1"), "ensuring death"))},
{Error: common.ServerError(errors.NotFoundf(`volume "42"`))},
},
})
}
func (s *provisionerSuite) TestWatchForModelConfigChanges(c *gc.C) {
result, err := s.api.WatchForModelConfigChanges()
c.Assert(err, jc.ErrorIsNil)
c.Assert(result.NotifyWatcherId, gc.Equals, "1")
// Verify the resource was registered and stop it when done.
c.Assert(s.resources.Count(), gc.Equals, 1)
watcher := s.resources.Get("1")
defer statetesting.AssertStop(c, watcher)
// Check that the Watch has consumed the initial events ("returned" in
// the Watch call)
wc := statetesting.NewNotifyWatcherC(c, s.State, watcher.(state.NotifyWatcher))
wc.AssertNoChange()
// Updating config should trigger the watcher.
err = s.State.UpdateModelConfig(map[string]interface{}{"what": "ever"}, nil, nil)
c.Assert(err, jc.ErrorIsNil)
wc.AssertOneChange()
}
func (s *provisionerSuite) TestModelConfig(c *gc.C) {
stateModelConfig, err := s.State.ModelConfig()
c.Assert(err, jc.ErrorIsNil)
result, err := s.api.ModelConfig()
c.Assert(err, jc.ErrorIsNil)
c.Assert(result.Config, jc.DeepEquals, params.ModelConfig(stateModelConfig.AllAttrs()))
}
func (s *provisionerSuite) TestRemoveVolumesEnvironManager(c *gc.C) {
s.setupVolumes(c)
args := params.Entities{Entities: []params.Entity{
{"volume-1-0"}, {"volume-1"}, {"volume-2"}, {"volume-42"},
{"volume-invalid"}, {"machine-0"},
}}
err := s.State.DetachVolume(names.NewMachineTag("0"), names.NewVolumeTag("1"))
c.Assert(err, jc.ErrorIsNil)
err = s.State.RemoveVolumeAttachment(names.NewMachineTag("0"), names.NewVolumeTag("1"))
c.Assert(err, jc.ErrorIsNil)
err = s.State.DestroyVolume(names.NewVolumeTag("1"))
c.Assert(err, jc.ErrorIsNil)
result, err := s.api.Remove(args)
c.Assert(err, jc.ErrorIsNil)
c.Assert(result, gc.DeepEquals, params.ErrorResults{
Results: []params.ErrorResult{
{Error: &params.Error{Message: "permission denied", Code: "unauthorized access"}},
{Error: nil},
{Error: &params.Error{Message: "removing volume 2: volume is not dead"}},
{Error: nil},
{Error: &params.Error{Message: `"volume-invalid" is not a valid volume tag`}},
{Error: &params.Error{Message: "permission denied", Code: "unauthorized access"}},
},
})
}
func (s *provisionerSuite) TestRemoveFilesystemsEnvironManager(c *gc.C) {
s.setupFilesystems(c)
args := params.Entities{Entities: []params.Entity{
{"filesystem-1-0"}, {"filesystem-1"}, {"filesystem-2"}, {"filesystem-42"},
{"filesystem-invalid"}, {"machine-0"},
}}
err := s.State.DetachFilesystem(names.NewMachineTag("0"), names.NewFilesystemTag("1"))
c.Assert(err, jc.ErrorIsNil)
err = s.State.RemoveFilesystemAttachment(names.NewMachineTag("0"), names.NewFilesystemTag("1"))
c.Assert(err, jc.ErrorIsNil)
err = s.State.DestroyFilesystem(names.NewFilesystemTag("1"))
c.Assert(err, jc.ErrorIsNil)
result, err := s.api.Remove(args)
c.Assert(err, jc.ErrorIsNil)
c.Assert(result, gc.DeepEquals, params.ErrorResults{
Results: []params.ErrorResult{
{Error: &params.Error{Message: "permission denied", Code: "unauthorized access"}},
{Error: nil},
{Error: &params.Error{Message: "removing filesystem 2: filesystem is not dead"}},
{Error: nil},
{Error: &params.Error{Message: `"filesystem-invalid" is not a valid filesystem tag`}},
{Error: &params.Error{Message: "permission denied", Code: "unauthorized access"}},
},
})
}
func (s *provisionerSuite) TestRemoveVolumesMachineAgent(c *gc.C) {
s.setupVolumes(c)
s.authorizer.EnvironManager = false
args := params.Entities{Entities: []params.Entity{
{"volume-0-0"}, {"volume-0-42"}, {"volume-42"},
{"volume-invalid"}, {"machine-0"},
}}
err := s.State.DetachVolume(names.NewMachineTag("0"), names.NewVolumeTag("0/0"))
c.Assert(err, jc.ErrorIsNil)
err = s.State.RemoveVolumeAttachment(names.NewMachineTag("0"), names.NewVolumeTag("0/0"))
c.Assert(err, jc.ErrorIsNil)
err = s.State.DestroyVolume(names.NewVolumeTag("0/0"))
c.Assert(err, jc.ErrorIsNil)
result, err := s.api.Remove(args)
c.Assert(err, jc.ErrorIsNil)
c.Assert(result, gc.DeepEquals, params.ErrorResults{
Results: []params.ErrorResult{
{Error: nil},
{Error: nil},
{Error: &params.Error{Message: "permission denied", Code: "unauthorized access"}},
{Error: &params.Error{Message: `"volume-invalid" is not a valid volume tag`}},
{Error: &params.Error{Message: "permission denied", Code: "unauthorized access"}},
},
})
}
func (s *provisionerSuite) TestRemoveFilesystemsMachineAgent(c *gc.C) {
s.setupFilesystems(c)
s.authorizer.EnvironManager = false
args := params.Entities{Entities: []params.Entity{
{"filesystem-0-0"}, {"filesystem-0-42"}, {"filesystem-42"},
{"filesystem-invalid"}, {"machine-0"},
}}
err := s.State.DetachFilesystem(names.NewMachineTag("0"), names.NewFilesystemTag("0/0"))
c.Assert(err, jc.ErrorIsNil)
err = s.State.RemoveFilesystemAttachment(names.NewMachineTag("0"), names.NewFilesystemTag("0/0"))
c.Assert(err, jc.ErrorIsNil)
err = s.State.DestroyFilesystem(names.NewFilesystemTag("0/0"))
c.Assert(err, jc.ErrorIsNil)
result, err := s.api.Remove(args)
c.Assert(err, jc.ErrorIsNil)
c.Assert(result, gc.DeepEquals, params.ErrorResults{
Results: []params.ErrorResult{
{Error: nil},
{Error: nil},
{Error: &params.Error{Message: "permission denied", Code: "unauthorized access"}},
{Error: &params.Error{Message: `"filesystem-invalid" is not a valid filesystem tag`}},
{Error: &params.Error{Message: "permission denied", Code: "unauthorized access"}},
},
})
}
func (s *provisionerSuite) TestRemoveVolumeAttachments(c *gc.C) {
s.setupVolumes(c)
s.authorizer.EnvironManager = false
err := s.State.DetachVolume(names.NewMachineTag("0"), names.NewVolumeTag("1"))
c.Assert(err, jc.ErrorIsNil)
results, err := s.api.RemoveAttachment(params.MachineStorageIds{
Ids: []params.MachineStorageId{{
MachineTag: "machine-0",
AttachmentTag: "volume-0-0",
}, {
MachineTag: "machine-0",
AttachmentTag: "volume-1",
}, {
MachineTag: "machine-2",
AttachmentTag: "volume-4",
}, {
MachineTag: "machine-0",
AttachmentTag: "volume-42",
}},
})
c.Assert(err, jc.ErrorIsNil)
c.Assert(results, jc.DeepEquals, params.ErrorResults{
Results: []params.ErrorResult{
{Error: &params.Error{Message: "removing attachment of volume 0/0 from machine 0: volume attachment is not dying"}},
{Error: nil},
{Error: &params.Error{Message: "permission denied", Code: "unauthorized access"}},
{Error: &params.Error{Message: `removing attachment of volume 42 from machine 0: volume "42" on machine "0" not found`, Code: "not found"}},
},
})
}
func (s *provisionerSuite) TestRemoveFilesystemAttachments(c *gc.C) {
s.setupFilesystems(c)
s.authorizer.EnvironManager = false
err := s.State.DetachFilesystem(names.NewMachineTag("0"), names.NewFilesystemTag("1"))
c.Assert(err, jc.ErrorIsNil)
results, err := s.api.RemoveAttachment(params.MachineStorageIds{
Ids: []params.MachineStorageId{{
MachineTag: "machine-0",
AttachmentTag: "filesystem-0-0",
}, {
MachineTag: "machine-0",
AttachmentTag: "filesystem-1",
}, {
MachineTag: "machine-2",
AttachmentTag: "filesystem-4",
}, {
MachineTag: "machine-0",
AttachmentTag: "filesystem-42",
}},
})
c.Assert(err, jc.ErrorIsNil)
c.Assert(results, jc.DeepEquals, params.ErrorResults{
Results: []params.ErrorResult{
{Error: &params.Error{Message: "removing attachment of filesystem 0/0 from machine 0: filesystem attachment is not dying"}},
{Error: nil},
{Error: &params.Error{Message: "permission denied", Code: "unauthorized access"}},
{Error: &params.Error{Message: `removing attachment of filesystem 42 from machine 0: filesystem "42" on machine "0" not found`, Code: "not found"}},
},
})
}
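// byMachineAndEntity sorts MachineStorageIds by machine tag, then by attachment
// tag, so watcher change lists can be compared deterministically in the tests above.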
type byMachineAndEntity []params.MachineStorageId
func (b byMachineAndEntity) Len() int {
return len(b)
}
func (b byMachineAndEntity) Less(i, j int) bool {
if b[i].MachineTag == b[j].MachineTag {
return b[i].AttachmentTag < b[j].AttachmentTag
}
return b[i].MachineTag < b[j].MachineTag
}
func (b byMachineAndEntity) Swap(i, j int) {
b[i], b[j] = b[j], b[i]
}<|fim▁end|> | Message: `filesystem attachment "2" on "0" not provisioned`, |
<|file_name|>module.js<|end_file_name|><|fim▁begin|>/**
* @module ngeo.routing.module
*/
import ngeoRoutingRoutingComponent from 'ngeo/routing/RoutingComponent.js';
import './routing.less';
/**
* @type {angular.Module}
*/
const exports = angular.module('ngeoRoutingModule', [
ngeoRoutingRoutingComponent.module.name
]);
<|fim▁hole|><|fim▁end|> | export default exports; |
<|file_name|>AnalyticsMetadataTypeJsonUnmarshaller.java<|end_file_name|><|fim▁begin|>/*
* Copyright 2014-2019 Amazon.com, Inc. or its affiliates. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License"). You may not use this file except in compliance with
* the License. A copy of the License is located at
*
* http://aws.amazon.com/apache2.0
*
* or in the "license" file accompanying this file. This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR
* CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions
* and limitations under the License.
*/
package com.amazonaws.services.cognitoidp.model.transform;
import java.math.*;
import javax.annotation.Generated;
import com.amazonaws.services.cognitoidp.model.*;
import com.amazonaws.transform.SimpleTypeJsonUnmarshallers.*;
import com.amazonaws.transform.*;
import com.fasterxml.jackson.core.JsonToken;
import static com.fasterxml.jackson.core.JsonToken.*;
/**
* AnalyticsMetadataType JSON Unmarshaller
*/
@Generated("com.amazonaws:aws-java-sdk-code-generator")
public class AnalyticsMetadataTypeJsonUnmarshaller implements Unmarshaller<AnalyticsMetadataType, JsonUnmarshallerContext> {
public AnalyticsMetadataType unmarshall(JsonUnmarshallerContext context) throws Exception {
AnalyticsMetadataType analyticsMetadataType = new AnalyticsMetadataType();
int originalDepth = context.getCurrentDepth();
String currentParentElement = context.getCurrentParentElement();
int targetDepth = originalDepth + 1;<|fim▁hole|> JsonToken token = context.getCurrentToken();
if (token == null)
token = context.nextToken();
if (token == VALUE_NULL) {
return null;
}
while (true) {
if (token == null)
break;
if (token == FIELD_NAME || token == START_OBJECT) {
if (context.testExpression("AnalyticsEndpointId", targetDepth)) {
context.nextToken();
analyticsMetadataType.setAnalyticsEndpointId(context.getUnmarshaller(String.class).unmarshall(context));
}
} else if (token == END_ARRAY || token == END_OBJECT) {
if (context.getLastParsedParentElement() == null || context.getLastParsedParentElement().equals(currentParentElement)) {
if (context.getCurrentDepth() <= originalDepth)
break;
}
}
token = context.nextToken();
}
return analyticsMetadataType;
}
private static AnalyticsMetadataTypeJsonUnmarshaller instance;
public static AnalyticsMetadataTypeJsonUnmarshaller getInstance() {
if (instance == null)
instance = new AnalyticsMetadataTypeJsonUnmarshaller();
return instance;
}
}<|fim▁end|> | |
<|file_name|>sessionQuizView.tsx<|end_file_name|><|fim▁begin|>import * as React from "react";
import { connect } from "react-redux";
import { Link } from "react-router"
import * as MediaQuery from "react-responsive"
import DashboardContainer from "../../containers/dashboard/dashboardContainer"
import PresentationContainer from "../../containers/dashboard/presentationContainer"
import RemoteContainer from "../../containers/quiz/remoteContainer"
export interface StateProps {
rooms: { id: number, teacher: string}[]
isTeacher: boolean
roomOwner: string
}
export interface ActionProps {
updateRooms()
joinRoom(roomId: number)
leaveRoom()
closeRoom()
openRoom()
}<|fim▁hole|> props: Props
render() {
const {
rooms,
isTeacher,
roomOwner,
updateRooms,
joinRoom,
leaveRoom,
closeRoom,
openRoom
} = this.props
let dashboard: boolean = (this.props as any).location.pathname == "/dashboard",
presentation: boolean = (this.props as any).location.pathname == "/presentation",
remote: boolean = (this.props as any).location.pathname == "/remote"
return (
<div>
<div>
<button onClick={ updateRooms }>Update rooms</button>
<button onClick={ leaveRoom }>Leave room</button>
{ roomOwner != null && <button onClick={ closeRoom }>Close room</button> }
{ isTeacher && <button onClick={ openRoom }>Open room</button> }
Current room: { roomOwner != null ? roomOwner : "none" }
<br/>
{ rooms.map(room => {
return <button key={ room.id } onClick={ () => joinRoom(room.id) }>
Join Room { room.teacher }
</button>
})}
</div>
{ remote && <RemoteContainer/>}
{ dashboard && <DashboardContainer/>}
{ presentation && <PresentationContainer/>}
</div>
)
}
}<|fim▁end|> |
type Props = StateProps & ActionProps;
export class View extends React.Component<Props, any> { |
<|file_name|>native.go<|end_file_name|><|fim▁begin|>// Copyright 2019 Jeremy Edwards
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
package filesystem
import (
"net/http"
"path/filepath"
)<|fim▁hole|>func newNative(directory string) (http.Handler, error) {
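// Resolve the directory to an absolute path so the file server root is unambiguous.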
dir, err := filepath.Abs(directory)
if err != nil {
return nil, err
}
return http.FileServer(http.Dir(dirPath(dir))), nil
}<|fim▁end|> | |
<|file_name|>package.py<|end_file_name|><|fim▁begin|># Copyright 2013-2020 Lawrence Livermore National Security, LLC and other
# Spack Project Developers. See the top-level COPYRIGHT file for details.
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
from spack import *
class RGdsfmt(RPackage):
"""R Interface to CoreArray Genomic Data Structure (GDS) Files.
This package provides a high-level R interface to CoreArray Genomic Data
Structure (GDS) data files, which are portable across platforms with
hierarchical structure to store multiple scalable array-oriented data
sets with metadata information. It is suited for large-scale datasets,
especially for data which are much larger than the available random-
access memory. The gdsfmt package offers the efficient operations
specifically designed for integers of less than 8 bits, since a diploid
genotype, like single-nucleotide polymorphism (SNP), usually occupies
fewer bits than a byte. Data compression and decompression are available
with relatively efficient random access. It also allows reading a
GDS file in parallel with multiple R processes supported by the package
parallel."""
homepage = "https://bioconductor.org/packages/gdsfmt"
git = "https://git.bioconductor.org/packages/gdsfmt.git"
version('1.20.0', commit='b1fbaba0a5ace3dc45daecc85168651cd85dce00')<|fim▁hole|> version('1.18.1', commit='b911b953e9db7988e93ec2010b0ab1e384d073c9')
version('1.16.0', commit='49b011452585e432b983b68466a230c9b71d8a95')
version('1.14.1', commit='15743647b7eea5b82d3284858b4591fb6e59959d')
version('1.12.0', commit='d705a95b0bea7be2a2b37e939f45017337ba0fb6')
depends_on('[email protected]:', type=('build', 'run'))<|fim▁end|> | |
<|file_name|>trace_macros.rs<|end_file_name|><|fim▁begin|>// Copyright 2012 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
use ast;
use codemap::span;
use ext::base::ExtCtxt;
use ext::base;
use parse::lexer::{new_tt_reader, reader};
use parse::parser::Parser;
use parse::token::keywords;
pub fn expand_trace_macros(cx: @ExtCtxt,
sp: span,
tt: &[ast::token_tree])
-> base::MacResult {
let sess = cx.parse_sess();
let cfg = cx.cfg();
let tt_rdr = new_tt_reader(
copy cx.parse_sess().span_diagnostic,
None,
tt.to_owned()
);
let rdr = tt_rdr as @reader;<|fim▁hole|> let rust_parser = Parser(
sess,
copy cfg,
rdr.dup()
);
if rust_parser.is_keyword(keywords::True) {
cx.set_trace_macros(true);
} else if rust_parser.is_keyword(keywords::False) {
cx.set_trace_macros(false);
} else {
cx.span_fatal(sp, "trace_macros! only accepts `true` or `false`")
}
rust_parser.bump();
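// Re-parse the same tokens as an expression so the macro expands to the
// `true`/`false` literal it was given.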
let rust_parser = Parser(sess, cfg, rdr.dup());
let result = rust_parser.parse_expr();
base::MRExpr(result)
}<|fim▁end|> | |
<|file_name|>activity.py<|end_file_name|><|fim▁begin|>"""
The MIT License (MIT)
Copyright (c) 2015-present Rapptz
Permission is hereby granted, free of charge, to any person obtaining a
copy of this software and associated documentation files (the "Software"),
to deal in the Software without restriction, including without limitation
the rights to use, copy, modify, merge, publish, distribute, sublicense,
and/or sell copies of the Software, and to permit persons to whom the
Software is furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in
all copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS
OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER<|fim▁hole|>DEALINGS IN THE SOFTWARE.
"""
from __future__ import annotations
from typing import List, Literal, Optional, TypedDict
from .user import User
from .snowflake import Snowflake
StatusType = Literal['idle', 'dnd', 'online', 'offline']
class PartialPresenceUpdate(TypedDict):
user: User
guild_id: Snowflake
status: StatusType
activities: List[Activity]
client_status: ClientStatus
class ClientStatus(TypedDict, total=False):
desktop: StatusType
mobile: StatusType
web: StatusType
class ActivityTimestamps(TypedDict, total=False):
start: int
end: int
class ActivityParty(TypedDict, total=False):
id: str
size: List[int]
class ActivityAssets(TypedDict, total=False):
large_image: str
large_text: str
small_image: str
small_text: str
class ActivitySecrets(TypedDict, total=False):
join: str
spectate: str
match: str
class _ActivityEmojiOptional(TypedDict, total=False):
id: Snowflake
animated: bool
class ActivityEmoji(_ActivityEmojiOptional):
name: str
class ActivityButton(TypedDict):
label: str
url: str
class _SendableActivityOptional(TypedDict, total=False):
url: Optional[str]
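# Sendable activity types: 0 = playing, 1 = streaming, 2 = listening,
# 4 = custom status, 5 = competing (3, watching, cannot be sent by bots).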
ActivityType = Literal[0, 1, 2, 4, 5]
class SendableActivity(_SendableActivityOptional):
name: str
type: ActivityType
class _BaseActivity(SendableActivity):
created_at: int
class Activity(_BaseActivity, total=False):
state: Optional[str]
details: Optional[str]
timestamps: ActivityTimestamps
assets: ActivityAssets
party: ActivityParty
application_id: Snowflake
flags: int
emoji: Optional[ActivityEmoji]
secrets: ActivitySecrets
session_id: Optional[str]
instance: bool
buttons: List[ActivityButton]<|fim▁end|> | |
<|file_name|>flat-map-observable-scalar.js<|end_file_name|><|fim▁begin|>var RxOld = require("rx");
var RxNew = require("../../../../index");
module.exports = function (suite) {
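// Build each pipeline once up front so the benchmark measures subscription
// cost, not Observable construction.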
var oldFlatMapWithCurrentThreadScheduler = RxOld.Observable.range(0, 25, RxOld.Scheduler.currentThread).flatMap(RxOld.Observable.return(0, RxOld.Scheduler.currentThread));
var newFlatMapWithCurrentThreadScheduler = RxNew.Observable.range(0, 25, RxNew.Scheduler.immediate).flatMapTo(RxNew.Observable.return(0, RxNew.Scheduler.immediate));
return suite
.add('old flatMap (scalar Observable) with current thread scheduler', function () {
oldFlatMapWithCurrentThreadScheduler.subscribe(_next, _error, _complete);<|fim▁hole|> newFlatMapWithCurrentThreadScheduler.subscribe(_next, _error, _complete);
});
function _next(x) { }
function _error(e){ }
function _complete(){ }
};<|fim▁end|> | })
.add('new flatMap (scalar Observable) with current thread scheduler', function () { |
<|file_name|>TestController.py<|end_file_name|><|fim▁begin|>#!/usr/bin/env python
# **********************************************************************
#
# Copyright (c) 2003-2015 ZeroC, Inc. All rights reserved.
#
# This copy of Ice is licensed to you under the terms described in the
# ICE_LICENSE file included in this distribution.
#
# **********************************************************************
import os, sys, threading, subprocess, getopt, signal
path = [ ".", "..", "../..", "../../..", "../../../.." ]
head = os.path.dirname(sys.argv[0])
if len(head) > 0:
path = [os.path.join(head, p) for p in path]
path = [os.path.abspath(p) for p in path if os.path.exists(os.path.join(p, "scripts", "TestUtil.py")) ]
if len(path) == 0:
raise RuntimeError("can't find toplevel directory!")
sys.path.append(os.path.join(path[0], "scripts"))
import TestUtil
def removeTrustSettings():
serverCert = os.path.join(path[0], "certs", "server.pem")
if os.system("security verify-cert -c " + serverCert + " >& /dev/null") == 0:<|fim▁hole|> if os.system("security remove-trusted-cert " + serverCert) != 0:
print("\nerror: couldn't remove trust settings for the HTTP server certificate")
else:
print("ok")
else:
print("trust settings already removed")
#
# On OS X, provide an option to allow removing the trust settings
#
if TestUtil.isDarwin():
try:
opts, args = getopt.getopt(sys.argv[1:], "", ["clean"])
if ("--clean", "") in opts:
removeTrustSettings()
sys.exit(0)
except getopt.GetoptError:
pass
version = "3.6.0"
jar = os.path.join(os.path.dirname(os.path.abspath(__file__)), "..",
"java/test/controller/build/libs/testController-%(version)s.jar" % {"version": version})
javaHome = os.environ.get("JAVA_HOME", "")
javaCmd = '%s' % os.path.join(javaHome, "bin", "java") if javaHome else "java"
command = [javaCmd, "-jar", jar]
if len(sys.argv) > 1:
command += sys.argv[1:]
p = subprocess.Popen(command, shell = False, stdin = subprocess.PIPE, stdout = subprocess.PIPE,
stderr = subprocess.STDOUT, bufsize = 0)
def signal_handler(signal, frame):
if p:
p.terminate()
sys.exit(0)
signal.signal(signal.SIGINT, signal_handler)
signal.signal(signal.SIGTERM, signal_handler)
if TestUtil.isDarwin():
#
# On OS X, we set the trust settings on the certificate to prevent
# the Web browsers from prompting the user about the untrusted
# certificate. Some browsers, such as Chrome, don't provide the
# option to set these trust settings.
#
serverCert = os.path.join(TestUtil.toplevel, "certs", "server.pem")
if os.system("security verify-cert -c " + serverCert + " >& /dev/null") != 0:
sys.stdout.write("adding trust settings for the HTTP server certificate... ")
sys.stdout.flush()
if os.system("security add-trusted-cert -r trustAsRoot " + serverCert) != 0:
print("error: couldn't add trust settings for the HTTP server certificate")
print("ok")
print("run " + sys.argv[0] + " --clean to remove the trust setting")
while(True):
c = p.stdout.read(1)
if not c: break
if c == '\r': continue
# Depending on Python version and platform, the value c could be a
# string or a bytes object.
if type(c) != str:
c = c.decode()
sys.stdout.write(c)
sys.stdout.flush()<|fim▁end|> | sys.stdout.write("removing trust settings for the HTTP server certificate... ")
sys.stdout.flush() |
<|file_name|>httpd_test.go<|end_file_name|><|fim▁begin|>// Copyright 2020 The gVisor Authors.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
package network
import (
"os"
"strconv"
"testing"
"gvisor.dev/gvisor/pkg/test/dockerutil"
"gvisor.dev/gvisor/test/benchmarks/harness"
"gvisor.dev/gvisor/test/benchmarks/tools"
)
// see Dockerfile '//images/benchmarks/httpd'.
var httpdDocs = map[string]string{
"notfound": "notfound",
"1Kb": "latin1k.txt",
"10Kb": "latin10k.txt",
"100Kb": "latin100k.txt",
"1Mb": "latin1024k.txt",
"10Mb": "latin10240k.txt",
}
// BenchmarkHttpd iterates over different sized payloads and concurrency, testing
// how well the runtime handles sending different payload sizes.
func BenchmarkHttpd(b *testing.B) {
benchmarkHttpdDocSize(b)
}
// BenchmarkContinuousHttpd runs specific benchmarks for continuous jobs.
// The runtime under test is the server serving a runc client.
func BenchmarkContinuousHttpd(b *testing.B) {
sizes := []string{"10Kb", "100Kb", "1Mb"}
threads := []int{1, 25, 100, 1000}
benchmarkHttpdContinuous(b, threads, sizes)
}
// benchmarkHttpdDocSize iterates through all doc sizes, running subbenchmarks
// for each size.
func benchmarkHttpdDocSize(b *testing.B) {
b.Helper()
for size, filename := range httpdDocs {
concurrency := []int{1, 25, 50, 100, 1000}
for _, c := range concurrency {
fsize := tools.Parameter{
Name: "filesize",
Value: size,
}
concurrency := tools.Parameter{
Name: "concurrency",
Value: strconv.Itoa(c),
}
name, err := tools.ParametersToName(fsize, concurrency)
if err != nil {
b.Fatalf("Failed to parse parameters: %v", err)
}
b.Run(name, func(b *testing.B) {
hey := &tools.Hey{
Requests: b.N,
Concurrency: c,
Doc: filename,
}
runHttpd(b, hey)
})
}
}
}
// benchmarkHttpdContinuous iterates through given sizes and concurrencies.
func benchmarkHttpdContinuous(b *testing.B, concurrency []int, sizes []string) {
for _, size := range sizes {
filename := httpdDocs[size]
for _, c := range concurrency {
fsize := tools.Parameter{
Name: "filesize",
Value: size,
}
threads := tools.Parameter{<|fim▁hole|> Value: strconv.Itoa(c),
}
name, err := tools.ParametersToName(fsize, threads)
if err != nil {
b.Fatalf("Failed to parse parameters: %v", err)
}
b.Run(name, func(b *testing.B) {
hey := &tools.Hey{
Requests: b.N,
Concurrency: c,
Doc: filename,
}
runHttpd(b, hey)
})
}
}
}
// runHttpd configures the static serving methods to run httpd.
func runHttpd(b *testing.B, hey *tools.Hey) {
// httpd runs on port 80.
port := 80
httpdRunOpts := dockerutil.RunOpts{
Image: "benchmarks/httpd",
Ports: []int{port},
Env: []string{
// Standard environmental variables for httpd.
"APACHE_RUN_DIR=/tmp",
"APACHE_RUN_USER=nobody",
"APACHE_RUN_GROUP=nogroup",
"APACHE_LOG_DIR=/tmp",
"APACHE_PID_FILE=/tmp/apache.pid",
},
}
httpdCmd := []string{"sh", "-c", "mkdir -p /tmp/html; cp -r /local/* /tmp/html/.; apache2 -X"}
runStaticServer(b, httpdRunOpts, httpdCmd, port, hey)
}
func TestMain(m *testing.M) {
harness.Init()
os.Exit(m.Run())
}<|fim▁end|> | Name: "concurrency", |
<|file_name|>run_deploy_jar_intergration_test.py<|end_file_name|><|fim▁begin|># Copyright 2020 Pants project contributors (see CONTRIBUTORS.md).
# Licensed under the Apache License, Version 2.0 (see LICENSE).
import textwrap
from textwrap import dedent
from pants.engine.internals.native_engine import FileDigest
from pants.jvm.resolve.common import ArtifactRequirement, Coordinate, Coordinates
from pants.jvm.resolve.coursier_fetch import CoursierLockfileEntry, CoursierResolvedLockfile
from pants.jvm.resolve.coursier_test_util import TestCoursierWrapper
from pants.testutil.pants_integration_test import run_pants, setup_tmpdir
EMPTY_RESOLVE = """
# --- BEGIN PANTS LOCKFILE METADATA: DO NOT EDIT OR REMOVE ---
# {{
# "version": 1,
# "generated_with_requirements": [
# ]
# }}
# --- END PANTS LOCKFILE METADATA ---
"""
DEFAULT_LOCKFILE = (
TestCoursierWrapper(
CoursierResolvedLockfile(
(
CoursierLockfileEntry(
coord=Coordinate(
group="org.scala-lang", artifact="scala-library", version="2.13.6"
),
file_name="org.scala-lang_scala-library_2.13.6.jar",
direct_dependencies=Coordinates(),
dependencies=Coordinates(),
file_digest=FileDigest(
"f19ed732e150d3537794fd3fe42ee18470a3f707efd499ecd05a99e727ff6c8a", 5955737
),
),
)
)
)
.serialize(
[
ArtifactRequirement(
coordinate=Coordinate(
group="org.scala-lang", artifact="scala-library", version="2.13.6"
)
)
]
)
.replace("{", "{{")
.replace("}", "}}")
)
DEFAULT_SCALA_LIBRARY_TARGET = textwrap.dedent(
"""\
jvm_artifact(
name="org.scala-lang_scala-library_2.13.6",
group="org.scala-lang",
artifact="scala-library",
version="2.13.6",
)
"""
)
def test_java() -> None:
sources = {
"src/org/pantsbuild/test/Hello.java": dedent(
"""\
package org.pantsbuild.test;
public class Hello {{
public static void main(String[] args) {{
System.out.println("Hello, World!");
}}
}}
"""
),
"src/org/pantsbuild/test/BUILD": dedent(
"""\
java_sources()
deploy_jar(
name="test_deploy_jar",
main="org.pantsbuild.test.Hello",
dependencies=[":test"],
)
"""
),
"lockfile": EMPTY_RESOLVE,
}
with setup_tmpdir(sources) as tmpdir:
args = [
"--backend-packages=pants.backend.experimental.java",
f"--source-root-patterns=['{tmpdir}/src']",
"--pants-ignore=__pycache__",
f'--jvm-resolves={{"empty": "{tmpdir}/lockfile"}}',
"--jvm-default-resolve=empty",
"run",
f"{tmpdir}/src/org/pantsbuild/test:test_deploy_jar",
]
result = run_pants(args)
assert result.stdout.strip() == "Hello, World!"
def test_scala() -> None:
sources = {
"src/org/pantsbuild/test/Hello.scala": dedent(
"""\
package org.pantsbuild.test;
object Hello {{
def main(args: Array[String]): Unit = {{
println("Hello, World!")
}}
}}
"""
),
"src/org/pantsbuild/test/BUILD": dedent(
"""\
scala_sources()
deploy_jar(
name="test_deploy_jar",
main="org.pantsbuild.test.Hello",
dependencies=[":test"],
)
"""
),
"BUILD": DEFAULT_SCALA_LIBRARY_TARGET,
"lockfile": DEFAULT_LOCKFILE,
}
with setup_tmpdir(sources) as tmpdir:
args = [
"--backend-packages=pants.backend.experimental.scala",
f"--source-root-patterns=['{tmpdir}/src']",
"--pants-ignore=__pycache__",
f'--jvm-resolves={{"jvm-default": "{tmpdir}/lockfile"}}',
"--jvm-default-resolve=jvm-default",
"run",
f"{tmpdir}/src/org/pantsbuild/test:test_deploy_jar",
]<|fim▁hole|> assert result.stdout.strip() == "Hello, World!"<|fim▁end|> | result = run_pants(args) |
<|file_name|>InternalFilter.java<|end_file_name|><|fim▁begin|>/*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.search.aggregations.bucket.filter;
import org.elasticsearch.common.io.stream.StreamInput;
import org.elasticsearch.search.aggregations.AggregationStreams;
import org.elasticsearch.search.aggregations.InternalAggregations;
import org.elasticsearch.search.aggregations.bucket.InternalSingleBucketAggregation;
import java.io.IOException;
/**
*
*/
public class InternalFilter extends InternalSingleBucketAggregation implements Filter {
public final static Type TYPE = new Type("filter");
public final static AggregationStreams.Stream STREAM = new AggregationStreams.Stream() {
@Override
public InternalFilter readResult(StreamInput in) throws IOException {
InternalFilter result = new InternalFilter();
result.readFrom(in);
return result;
}
};
public static void registerStreams() {
AggregationStreams.registerStream(STREAM, TYPE.stream());
}
InternalFilter() {} // for serialization
InternalFilter(String name, long docCount, InternalAggregations subAggregations) {
super(name, docCount, subAggregations);
}
@Override
public Type type() {
return TYPE;
}
@Override
protected InternalSingleBucketAggregation newAggregation(String name, long docCount, InternalAggregations subAggregations) {<|fim▁hole|>}<|fim▁end|> | return new InternalFilter(name, docCount, subAggregations);
} |
<|file_name|>test_models.py<|end_file_name|><|fim▁begin|>from django.test import TestCase<|fim▁hole|>from morelia.decorators import tags
from smarttest.decorators import no_db_testcase
from tasks.factories import TaskFactory, UserFactory
@no_db_testcase
@tags(['unit'])
class TaskGetAbsoluteUrlTest(TestCase):
''' :py:meth:`tasks.models.Task.get_absolute_url` '''
def test_should_return_task_absolute_url(self):
# Arrange
owner = UserFactory.build(pk=1)
task = TaskFactory.build(owner=owner, author=owner)
# Act
url = task.get_absolute_url()
# Assert
self.assertEqual(url, '/%s/' % owner.username)<|fim▁end|> | |
<|file_name|>MemoDao.java<|end_file_name|><|fim▁begin|>/*******************************************************************************
* This file is part of OpenNMS(R).
*
* Copyright (C) 2012 The OpenNMS Group, Inc.
* OpenNMS(R) is Copyright (C) 1999-2012 The OpenNMS Group, Inc.
*
* OpenNMS(R) is a registered trademark of The OpenNMS Group, Inc.
*
* OpenNMS(R) is free software: you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published
* by the Free Software Foundation, either version 3 of the License,
* or (at your option) any later version.
*<|fim▁hole|> * but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
* You should have received a copy of the GNU General Public License
* along with OpenNMS(R). If not, see:
* http://www.gnu.org/licenses/
*
* For more information contact:
* OpenNMS(R) Licensing <[email protected]>
* http://www.opennms.org/
* http://www.opennms.com/
*******************************************************************************/
package org.opennms.netmgt.dao.api;
import org.opennms.netmgt.model.OnmsMemo;
/**
* @author <a href="mailto:[email protected]">Markus Neumann</a>
*/
public interface MemoDao extends OnmsDao<OnmsMemo, Integer> {
}<|fim▁end|> | * OpenNMS(R) is distributed in the hope that it will be useful, |
<|file_name|>app.js<|end_file_name|><|fim▁begin|>(function() {
function config($stateProvider, $locationProvider) {
$locationProvider
.html5Mode({
enabled: true,
requireBase: false
});
$stateProvider
.state('landing', {
url: '/',
controller: 'LandingCtrl as landing',
templateUrl: '/templates/landing.html'
})
.state('newroom', {<|fim▁hole|> url: '/',
controller: 'NewRoomCtrl as newroom',
templateUrl: '/templates/newroom.html'
})
.state('login', {
url: '/',
controller: 'LoginCtrl as login',
templateUrl: '/templates/login.html'
});
}
function BlocChatCookies($cookies,$uibModal) {
if (!$cookies.blocChatCurrentUser || $cookies.blocChatCurrentUser === '') {
this.animationsEnabled = true;
$uibModal.open({
animation: this.animationsEnabled,
backdrop: 'static',
templateUrl: '/templates/login.html',
size: "sm",
controller: "LoginCtrl",
controllerAs: "login",
});
}
}
angular
.module('blocChat',['ui.router','ui.bootstrap','firebase','ngCookies'])
.config(config)
.run(['$cookies','$uibModal', BlocChatCookies]);
})();<|fim▁end|> | |
<|file_name|>client_json.rs<|end_file_name|><|fim▁begin|>#![deny(warnings)]
extern crate hyper;
#[macro_use]
extern crate serde_derive;
extern crate serde;
extern crate serde_json;
use hyper::Client;
use hyper::rt::{self, Future, Stream};
fn main() {
let url = "http://jsonplaceholder.typicode.com/users".parse().unwrap();
let fut = fetch_json(url)
// use the parsed vector
.map(|users| {
// print users
println!("users: {:#?}", users);
// print the sum of ids
let sum = users.iter().fold(0, |acc, user| acc + user.id);
println!("sum of ids: {}", sum);
})
// if there was an error print it
.map_err(|e| {
match e {
FetchError::Http(e) => eprintln!("http error: {}", e),
FetchError::Json(e) => eprintln!("json parsing error: {}", e),
}
});
// Run the runtime with the future trying to fetch, parse and print json.
//
// Note that in more complicated use cases, the runtime should probably
// run on its own, and futures should just be spawned into it.
rt::run(fut);
}
fn fetch_json(url: hyper::Uri) -> impl Future<Item=Vec<User>, Error=FetchError> {
let client = Client::new();
client
// Fetch the url...
.get(url)
// And then, if we get a response back...
.and_then(|res| {
// asynchronously concatenate chunks of the body
res.into_body().concat2()
})
.from_err::<FetchError>()
// use the body after concatenation
.and_then(|body| {
// try to parse as json with serde_json
let users = serde_json::from_slice(&body)?;
Ok(users)
})
.from_err()
}
#[derive(Deserialize, Debug)]
struct User {
id: i32,
name: String,
}
// Define a type so we can return multiple types of errors
enum FetchError {
Http(hyper::Error),
Json(serde_json::Error),
}
impl From<hyper::Error> for FetchError {
fn from(err: hyper::Error) -> FetchError {
FetchError::Http(err)
}
}<|fim▁hole|>
impl From<serde_json::Error> for FetchError {
fn from(err: serde_json::Error) -> FetchError {
FetchError::Json(err)
}
}<|fim▁end|> | |
<|file_name|>arithmetic.rs<|end_file_name|><|fim▁begin|>#[macro_use]
extern crate nom;
use nom::{IResult,digit};
// Parser definition
use std::str;
use std::str::FromStr;
// We parse any expr surrounded by parens, ignoring all whitespaces around those
named!(parens<i64>, ws!(delimited!( tag!("("), expr, tag!(")") )) );
// We transform an integer string into a i64, ignoring surrounding whitespaces
// We look for a digit suite, and try to convert it.
// If either str::from_utf8 or FromStr::from_str fail,
// we fallback to the parens parser defined above
named!(factor<i64>, alt!(
map_res!(
map_res!(
ws!(digit),
str::from_utf8
),
FromStr::from_str
)
| parens
)
);<|fim▁hole|>named!(term <i64>, do_parse!(
init: factor >>
res: fold_many0!(
pair!(alt!(tag!("*") | tag!("/")), factor),
init,
|acc, (op, val): (&[u8], i64)| {
if (op[0] as char) == '*' { acc * val } else { acc / val }
}
) >>
(res)
)
);
named!(expr <i64>, do_parse!(
init: term >>
res: fold_many0!(
pair!(alt!(tag!("+") | tag!("-")), term),
init,
|acc, (op, val): (&[u8], i64)| {
if (op[0] as char) == '+' { acc + val } else { acc - val }
}
) >>
(res)
)
);
#[test]
fn factor_test() {
assert_eq!(factor(&b"3"[..]), IResult::Done(&b""[..], 3));
assert_eq!(factor(&b" 12"[..]), IResult::Done(&b""[..], 12));
assert_eq!(factor(&b"537 "[..]), IResult::Done(&b""[..], 537));
assert_eq!(factor(&b" 24 "[..]), IResult::Done(&b""[..], 24));
}
#[test]
fn term_test() {
assert_eq!(term(&b" 12 *2 / 3"[..]), IResult::Done(&b""[..], 8));
assert_eq!(term(&b" 2* 3 *2 *2 / 3"[..]), IResult::Done(&b""[..], 8));
assert_eq!(term(&b" 48 / 3/2"[..]), IResult::Done(&b""[..], 8));
}
#[test]
fn expr_test() {
assert_eq!(expr(&b" 1 + 2 "[..]), IResult::Done(&b""[..], 3));
assert_eq!(expr(&b" 12 + 6 - 4+ 3"[..]), IResult::Done(&b""[..], 17));
assert_eq!(expr(&b" 1 + 2*3 + 4"[..]), IResult::Done(&b""[..], 11));
}
#[test]
fn parens_test() {
assert_eq!(expr(&b" ( 2 )"[..]), IResult::Done(&b""[..], 2));
assert_eq!(expr(&b" 2* ( 3 + 4 ) "[..]), IResult::Done(&b""[..], 14));
assert_eq!(expr(&b" 2*2 / ( 5 - 1) + 3"[..]), IResult::Done(&b""[..], 4));
}<|fim▁end|> |
// We read an initial factor and for each time we find
// a * or / operator followed by another factor, we do
// the math by folding everything |
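A rough Python sketch of the same fold-based evaluation strategy, for readers less familiar with fold_many0 (the function names and token list are illustrative, not part of this crate):

# Sketch: parse one operand, then fold (operator, operand) pairs left-to-right
# into an accumulator -- the same shape as fold_many0 above.
def eval_term(tokens, pos):
    acc, pos = int(tokens[pos]), pos + 1
    while pos + 1 < len(tokens) and tokens[pos] in ('*', '/'):
        op, val = tokens[pos], int(tokens[pos + 1])
        acc = acc * val if op == '*' else acc // val
        pos += 2
    return acc, pos

def eval_expr(tokens):
    acc, pos = eval_term(tokens, 0)
    while pos + 1 < len(tokens) and tokens[pos] in ('+', '-'):
        op = tokens[pos]
        val, pos = eval_term(tokens, pos + 1)
        acc = acc + val if op == '+' else acc - val
    return acc

assert eval_expr(['1', '+', '2', '*', '3']) == 7  # mirrors expr_test above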
<|file_name|>arffio.py<|end_file_name|><|fim▁begin|>#! /usr/bin/env python
'''
Arff loader for categorical and numerical attributes, based
on scipy.io.arff.arffloader With minor changes for this
project (eg. categorical attributes are mapped onto integers
and whole dataset is returned as numpy array of floats)
If any unsupported data types appear or if arff is malformed,
ParseArffError with info about error is raised.
@author Miroslav Hlavacek <[email protected]>
'''
from __future__ import division, absolute_import
from functools import partial
import numpy as np
from ..dataio.dataio_const import DataIOError
from ..dataio.dataio_const import NUMERIC_ATT
from ..dataio.dataio_const import NOMINAL_ATT
class ParseArffError(DataIOError):
""" Error while parsing arff file - either
malformed arff or unsupported arff functionality
"""
pass
def loadarff(f):
"""Read an arff file.
Retrieves name of relation, attribute names and types, possible values
of nominal attributes and data. The data is returned as a numpy array of
floats.\n
It can read files with numeric and nominal attributes. All nominal
attribute values are converted to integers (but stored as floats -
because of numpy).\n
Not implemented functionality:\n
* date type attributes\n
* string type attributes\n
* relational type attributes\n
* sparse files reading\n
* missing values handling\n
@param f : file-like or str - object to read from, or filename to open.
@returns Tuple (relation, ls_attributes, d_nominal_values, data)
where:\n
\b relation is string name of relation in arff\n
\b ls_attributes is list with all attribute names\n
\b d_nominal_values is dictionary containing lists with all
possible values for each nominal attribute. Key to this
list is integer - position of attribute in ls_attributes.
\b data is numpy array of float type, where shape is
(n_samples, n_attributes)
@throws ParseArffError This is raised if the given file is not
ARFF-formatted or some values are missing
or some values are of bad type or if some
data type is unsupported.
"""
if hasattr(f, 'read'):
ofile = f
else:
ofile = open(f, 'rt')
try:
return _loadarff(ofile)
finally:
if ofile is not f: # only close what we opened
ofile.close()
def _loadarff(in_file):
# Parse the header file
try:
relation, ls_atts, d_nom_vals = read_header(in_file)
except ValueError as e:
raise ParseArffError("Error while parsing header, error was: "
+ str(e))
#prepare convertors and parse data
convertors = []
idx = 0
for name, att_type in ls_atts:
if att_type == NUMERIC_ATT:
convertors.append(safe_float)
elif att_type == NOMINAL_ATT:
convertors.append(partial(safe_nominal, ls_values=d_nom_vals[idx]))
idx += 1
n_columns = len(convertors)
def generator(row_iter):
# skip comments and empty lines
raw = row_iter.next()
while len(raw.strip()) == 0 or raw[0] == '%':
raw = row_iter.next()
try:
# retrieve delimiter of data from first data field
delim = get_delim(raw)
rows = raw.split(delim)
if len(rows) != n_columns:
raise ParseArffError('Wrong number of attributes on line: '
+ raw.strip())
# 'compiling' the range since it does not change
elems = list(range(n_columns))
for i in elems:
yield convertors[i](rows[i])
except ValueError as e:
raise ParseArffError('Error while parsing data: "%s" on line "%s"'
% (str(e), raw.strip()))
for raw in row_iter:
rows = raw.split(delim)<|fim▁hole|> if len(rows) != n_columns:
raise ParseArffError('Wrong number of attributes on line: '
+ raw)
try:
for i in elems:
yield convertors[i](rows[i])
except ValueError as e:
raise ParseArffError('Type error or missing value while '
'parsing data: "%s" on line:"%s"'
% (str(e), raw))
gen = generator(in_file)
data = np.fromiter(gen, float)
# reshape array appropriately
data = data.reshape(data.shape[0] // n_columns, n_columns)
return relation, ls_atts, d_nom_vals, data
def read_header(in_file):
"""Read the header of the iterable in_file.
Parse all attribute names, types and store
possible values for any encountered nominal attribute.
@param in_file File opened for textual reading
@returns Tuple (relation, ls_attributes, d_nominal_values)
where:\n
\b relation is string name of relation in arff\n
\b ls_attributes is list with all attribute names\n
\b d_nominal_values is dictionary containing lists with all
possible values for each nominal attribute. Key to this
list is integer - position of attribute in ls_attributes.
"""
# Header is everything up to DATA attribute
relation = "Unknown relation"
ls_attributes = []
d_nominal_vals = {}
num_attributes = 0
keyword = ''
while keyword != '@data':
line = next(in_file)
chunks = line.rstrip('\n').split()
# ignore blank lines and comments
if not chunks or chunks[0][0] != '@':
continue
try:
keyword = chunks[0].lower()
if keyword == '@attribute':
name = chunks[1]
att_type = parse_type(chunks[2])
val_names = None
if att_type == NOMINAL_ATT:
val_names = chunks[2].strip('{}').split(',')
ls_attributes.append((name, att_type))
if not val_names is None:
d_nominal_vals[num_attributes] = val_names
num_attributes += 1
elif keyword == '@relation':
relation = chunks[1]
elif keyword != '@data':
raise ParseArffError("Error parsing line %s" % line)
except (KeyError, IndexError) as e:
raise ParseArffError('Malformed arff attribute: %s on line %s '
% (str(e), line))
return relation, ls_attributes, d_nominal_vals
def parse_type(attrtype):
"""Given an arff attribute type description returns
whether is attribute nominal or numeric, for other
data types, ParseArffError is raised.
@param String representing value of attribute
@return String with either for given type defined in dataio...
either NUMERIC_ATT or NOMINAL_ATT
@throw ParseArffError If the type is unknown or unsupported
"""
atype = attrtype.lower().strip()
if atype[0] == '{':
return NOMINAL_ATT
elif atype[:len('real')] == 'real':
return NUMERIC_ATT
elif atype[:len('integer')] == 'integer':
return NUMERIC_ATT
elif atype[:len('numeric')] == 'numeric':
return NUMERIC_ATT
else:
raise ParseArffError("Unknown or unsupported attribute %s" % atype)
def safe_float(data):
""" float convertor """
if data.strip()[0] == '{':
raise ValueError("This looks like a sparse ARFF: not supported yet")
return float(data)
def safe_nominal(data, ls_values):
""" nominal convertor """
svalue = data.strip()
if svalue[0] == '{':
raise ValueError("This looks like a sparse ARFF: not supported yet")
if svalue in ls_values:
return ls_values.index(svalue)
else:
raise ValueError('Not defined value of nominal attribute')
def get_delim(line):
"""Given a string representing a line of data, check whether the
delimiter is ',' or space.
"""
if ',' in line:
return ','
if ' ' in line:
return ' '
raise ValueError("delimiter not understood: " + line)<|fim▁end|> | while not rows or rows[0][0] == '%':
raw = row_iter.next()
rows = raw.split(delim)
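A minimal usage sketch for loadarff(); the file name and the values shown in the comments are hypothetical, not shipped with this module:

# Hypothetical usage of loadarff() -- 'iris.arff' is an illustrative path.
relation, ls_atts, d_nom_vals, data = loadarff('iris.arff')
print(relation)        # e.g. 'iris'
print(ls_atts[0])      # e.g. ('sepallength', NUMERIC_ATT)
print(d_nom_vals)      # e.g. {4: ['setosa', 'versicolor', 'virginica']}
print(data.shape)      # (n_samples, n_attributes), float array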
|
<|file_name|>typescript.js<|end_file_name|><|fim▁begin|>define("ace/snippets/typescript",["require","exports","module"],function(e,t,n){"use strict";t.snippetText="",t.scope="typescript"});
(function() {
window.require(["ace/snippets/typescript"], function(m) {
if (typeof module == "object") {<|fim▁hole|><|fim▁end|> | module.exports = m;
}
});
})(); |
<|file_name|>makeseeds.py<|end_file_name|><|fim▁begin|>#!/usr/bin/env python
#
# Generate pnSeed[] from Pieter's DNS seeder
#
NSEEDS=600
import re
import sys
from subprocess import check_output
def main():
lines = sys.stdin.readlines()
ips = []
pattern = re.compile(r"^(\d{1,3})\.(\d{1,3})\.(\d{1,3})\.(\d{1,3}):11000")
for line in lines:
m = pattern.match(line)
if m is None:
continue
ip = 0<|fim▁hole|> for i in range(0,4):
ip = ip + (int(m.group(i+1)) << (8*(i)))
if ip == 0:
continue
ips.append(ip)
for row in range(0, min(NSEEDS,len(ips)), 8):
print " " + ", ".join([ "0x%08x"%i for i in ips[row:row+8] ]) + ","
if __name__ == '__main__':
main()<|fim▁end|> | |
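A worked example of the octet-packing arithmetic in main(): each octet of "1.2.3.4" is shifted by 0, 8, 16 and 24 bits, producing the little-endian word emitted into pnSeed[].

# Worked example (standalone, illustrative):
octets = [1, 2, 3, 4]          # from "1.2.3.4"
ip = 0
for i in range(0, 4):
    ip = ip + (octets[i] << (8 * i))
print("0x%08x" % ip)           # -> 0x04030201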
<|file_name|>ensurePositiveDelayProps.js<|end_file_name|><|fim▁begin|>/**
* Copyright (c) Facebook, Inc. and its affiliates.
*<|fim▁hole|> *
*/
import invariant from 'fbjs/lib/invariant';
var ensurePositiveDelayProps = function ensurePositiveDelayProps(props) {
invariant(!(props.delayPressIn < 0 || props.delayPressOut < 0 || props.delayLongPress < 0), 'Touchable components cannot have negative delay properties');
};
export default ensurePositiveDelayProps;<|fim▁end|> | * This source code is licensed under the MIT license found in the
* LICENSE file in the root directory of this source tree.
* |
<|file_name|>65.test.js<|end_file_name|><|fim▁begin|>import test from 'ava';
import Server from '../../src/server';
import IO from '../../src/socket-io';
test.cb('mock socket invokes each handler with unique reference', t => {
const socketUrl = 'ws://roomy';
const server = new Server(socketUrl);
const socket = new IO(socketUrl);
let handlerInvoked = 0;
const handler3 = function handlerFunc() {
t.true(true);
handlerInvoked += 1;
};
// Same functions but different scopes/contexts
socket.on('custom-event', handler3.bind(Object.create(null)));
socket.on('custom-event', handler3.bind(Object.create(null)));
// Same functions with same scope/context (only one should be added)
socket.on('custom-event', handler3);<|fim▁hole|> socket.join('room');
server.to('room').emit('custom-event');
});
setTimeout(() => {
t.is(handlerInvoked, 3, 'handler invoked too many times');
server.close();
t.end();
}, 500);
});
test.cb('mock socket invokes each handler per socket', t => {
const socketUrl = 'ws://roomy';
const server = new Server(socketUrl);
const socketA = new IO(socketUrl);
const socketB = new IO(socketUrl);
let handlerInvoked = 0;
const handler3 = function handlerFunc() {
t.true(true);
handlerInvoked += 1;
};
// Same functions but different scopes/contexts
socketA.on('custom-event', handler3.bind(socketA));
socketB.on('custom-event', handler3.bind(socketB));
// Same functions with same scope/context (only one should be added)
socketA.on('custom-event', handler3);
socketA.on('custom-event', handler3); // not expected
socketB.on('custom-event', handler3.bind(socketB)); // expected because bind creates a new method
socketA.on('connect', () => {
socketA.join('room');
socketB.join('room');
server.to('room').emit('custom-event');
});
setTimeout(() => {
t.is(handlerInvoked, 4, 'handler invoked too many times');
server.close();
t.end();
}, 500);
});<|fim▁end|> | socket.on('custom-event', handler3); // not expected
socket.on('connect', () => { |
<|file_name|>run_tests.py<|end_file_name|><|fim▁begin|>#!/usr/bin/env python
import os
import re
import subprocess
import sys
import tempfile
CC = "gcc"
CFLAGS = "-fmax-errors=4 -std=c99 -pipe -D_POSIX_C_SOURCE=200809L -W -Wall -Wno-unused-variable -Wno-unused-parameter -Wno-unused-label -Wno-unused-value -Wno-unused-but-set-variable -Wno-unused-function -Wno-main".split(" ")
class Table():
pass
class TestMode():
pass_ = 0
fail_compile_parse = 1
fail_compile_sem = 2
fail_compile_ice = 3<|fim▁hole|> fail_c = 4
fail_run = 5
fail_output = 6
fail_other = 7
disable = 8
test_modes = [TestMode.pass_, TestMode.fail_compile_parse,
TestMode.fail_compile_sem, TestMode.fail_compile_ice,
TestMode.fail_c, TestMode.fail_run, TestMode.fail_output,
TestMode.fail_other, TestMode.disable]
test_mode_names = {
TestMode.pass_: ("pass", "Passed"),
TestMode.fail_compile_parse: ("fail_compile_parse", "Compilation failed (parsing)"),
TestMode.fail_compile_sem: ("fail_compile_sem", "Compilation failed (semantics)"),
TestMode.fail_compile_ice: ("fail_compile_ice", "Compilation failed (ICE)"),
TestMode.fail_c: ("fail_c", "C compilation/linking failed"),
TestMode.fail_run: ("fail_run", "Run failed"),
TestMode.fail_output: ("fail_output", "Output mismatched"),
TestMode.fail_other: ("fail_other", "Expected failure didn't happen"),
TestMode.disable: ("disable", "Disabled"),
}
test_stats = dict([(m, 0) for m in test_modes])
test_mode_values = {}
for m, (s, _) in test_mode_names.iteritems():
test_mode_values[s] = m
def pick(v, m):
if v not in m:
raise Exception("Unknown value '%s'" % v)
return m[v]
def run_test(filename):
testname = os.path.basename(filename)
print("Test '%s'..." % testname)
workdir = tempfile.mkdtemp(prefix="boringtest")
tempfiles = []
src = open(filename)
headers = Table()
headers.mode = TestMode.pass_
headers.is_expr = False
headers.stdout = None
while True:
hline = src.readline()
if not hline:
break
m = re.match("(?://|/\*) ([A-Z]+):(.*)", hline)
if not m:
break
name, value = m.group(1), m.group(2)
value = value.strip()
if name == "TEST":
headers.mode = pick(value, test_mode_values)
elif name == "TYPE":
headers.is_expr = pick(value, {"normal": False, "expr": True})
elif name == "STDOUT":
term = value + "*/"
stdout = ""
while True:
line = src.readline()
if not line:
raise Exception("unterminated STDOUT header")
if line.strip() == term:
break
stdout += line
headers.stdout = stdout
else:
raise Exception("Unknown header '%s'" % name)
src.close()
def do_run():
if headers.mode == TestMode.disable:
return TestMode.disable
# no makefile here: compile the generated C directly
tc = os.path.join(workdir, "t.c")
tcf = open(tc, "w")
tempfiles.append(tc)
res = subprocess.call(["./main", "cg_c", filename], stdout=tcf)
tcf.close()
if res != 0:
if res == 1:
return TestMode.fail_compile_parse
if res == 2:
return TestMode.fail_compile_sem
return TestMode.fail_compile_ice
t = os.path.join(workdir, "t")
tempfiles.append(t)
res = subprocess.call([CC] + CFLAGS + [tc, "-o", t])
if res != 0:
return TestMode.fail_c
p = subprocess.Popen([t], stdout=subprocess.PIPE)
output, _ = p.communicate()
res = p.wait()
if res != 0:
return TestMode.fail_run
if headers.stdout is not None and headers.stdout != output:
print("Program output: >\n%s<\nExpected: >\n%s<" % (output,
headers.stdout))
return TestMode.fail_output
return TestMode.pass_
actual_res = do_run()
for f in tempfiles:
try:
os.unlink(f)
except OSError:
pass
os.rmdir(workdir)
res = actual_res
if res == TestMode.disable:
pass
elif res == headers.mode:
res = TestMode.pass_
else:
if headers.mode != TestMode.pass_:
res = TestMode.fail_other
test_stats[res] += 1
print("Test '%s': %s (expected %s, got %s)" % (testname,
test_mode_names[res][0], test_mode_names[headers.mode][0],
test_mode_names[actual_res][0]))
def run_tests(list_file_name):
base = os.path.dirname(list_file_name)
for f in [x.strip() for x in open(list_file_name)]:
run_test(os.path.join(base, f))
print("SUMMARY:")
test_sum = 0
for m in test_modes:
print(" %s: %d" % (test_mode_names[m][1], test_stats[m]))
test_sum += test_stats[m]
passed_tests = test_stats[TestMode.pass_]
failed_tests = test_sum - passed_tests - test_stats[TestMode.disable]
print("Passed/failed: %s/%d" % (passed_tests, failed_tests))
if failed_tests:
print("OMG OMG OMG ------- Some tests have failed ------- OMG OMG OMG")
sys.exit(1)
if __name__ == "__main__":
argv = sys.argv
if len(argv) != 2:
print("Usage: %s tests.lst" % argv[0])
sys.exit(1)
#subprocess.check_call(["make", "main"])
run_tests(argv[1])<|fim▁end|> | |
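An illustration of the header grammar run_test() parses; the sample test file below is hypothetical:

# A test source may begin with headers such as:
#   // TEST:fail_compile_sem
#   // TYPE:normal
#   /* STDOUT:x
#   expected output lines...
#   x*/
# The same regex as in run_test(), applied to one sample header line:
import re
m = re.match("(?://|/\*) ([A-Z]+):(.*)", "// TEST:fail_compile_sem")
assert m.group(1) == "TEST" and m.group(2).strip() == "fail_compile_sem"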
<|file_name|>__init__.py<|end_file_name|><|fim▁begin|>#!/usr/bin/env python
from __future__ import with_statement
#-------------------------------------------------------------------------------
import unittest
from xml.etree.ElementTree import fromstring
#-------------------------------------------------------------------------------
from xmlbuilder import XMLBuilder
#-------------------------------------------------------------------------------
def xmlStructureEqual(xml1,xml2):
tree1 = fromstring(xml1)
tree2 = fromstring(xml2)
return _xmlStructureEqual(tree1,tree2)
#-------------------------------------------------------------------------------
def _xmlStructureEqual(tree1,tree2):
if tree1.tag != tree2.tag:
return False
attr1 = list(tree1.attrib.items())
attr1.sort()
attr2 = list(tree2.attrib.items())
attr2.sort()
if attr1 != attr2:
return False
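    # note: Elements compare by identity in the child-list check below, so two
    # independently parsed trees only match when both are childless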
return tree1.getchildren() == tree2.getchildren()
#-------------------------------------------------------------------------------
result1 = \
"""
<root>
<array />
<array len="10">
<el val="0" />
<el val="1">xyz</el>
<el val="2">abc</el>
<el val="3" />
<el val="4" />
<el val="5" />
<sup-el val="23">test </sup-el>
</array>
</root>
""".strip()
#-------------------------------------------------------------------------------
class TestXMLBuilder(unittest.TestCase):
def testShift(self):
xml = (XMLBuilder() << ('root',))
self.assertEqual(str(xml),"<root />")
xml = XMLBuilder()
xml << ('root',"some text")
self.assertEqual(str(xml),"<root>some text</root>")
xml = XMLBuilder()
xml << ('root',{'x':1,'y':'2'})
self.assert_(xmlStructureEqual(str(xml),"<root x='1' y='2'>some text</root>"))
xml = XMLBuilder()
xml << ('root',{'x':1,'y':'2'})
self.assert_(xmlStructureEqual(str(xml),"<root x='1' y='2'></root>"))
xml = XMLBuilder()
xml << ('root',{'x':1,'y':'2'})
self.assert_(not xmlStructureEqual(str(xml),"<root x='2' y='2'></root>"))
xml = XMLBuilder()
xml << ('root',"gonduras.ua",{'x':1,'y':'2'})
self.assert_(xmlStructureEqual(str(xml),"<root x='1' y='2'>gonduras.ua</root>"))
xml = XMLBuilder()
xml << ('root',"gonduras.ua",{'x':1,'y':'2'})
self.assert_(xmlStructureEqual(str(xml),"<root x='1' y='2'>gonduras.com</root>"))
#---------------------------------------------------------------------------
def testWith(self):
xml = XMLBuilder()
with xml.root(lenght = 12):
pass
self.assertEqual(str(xml),'<root lenght="12" />')
xml = XMLBuilder()
with xml.root():
xml << "text1" << "text2" << ('some_node',)
self.assertEqual(str(xml),"<root>text1text2<some_node /></root>")
#---------------------------------------------------------------------------
def testFormat(self):
<|fim▁hole|> x << ('array',)
with x.array(len = 10):
with x.el(val = 0):
pass
with x.el('xyz',val = 1):
pass
x << ("el","abc",{'val':2}) << ('el',dict(val=3))
x << ('el',dict(val=4)) << ('el',dict(val='5'))
with x('sup-el',val = 23):
x << "test "
self.assertEqual(str(x),result1)
#-------------------------------------------------------------------------------
if __name__ == '__main__':
unittest.main()
#-------------------------------------------------------------------------------<|fim▁end|> | x = XMLBuilder('utf-8',format = True)
with x.root():
|
<|file_name|>personResult.js<|end_file_name|><|fim▁begin|>/*
* Copyright (c) Microsoft Corporation. All rights reserved.
* Licensed under the MIT License. See License.txt in the project root for
* license information.
*
* Code generated by Microsoft (R) AutoRest Code Generator.
* Changes may cause incorrect behavior and will be lost if the code is
* regenerated.
*/
'use strict';
/**
* Person object.
*
*/
class PersonResult {
/**
* Create a PersonResult.
* @member {string} personId personId of the target face list.
* @member {array} [persistedFaceIds] persistedFaceIds of registered faces in
* the person. These persistedFaceIds are returned from Person - Add a Person
* Face, and will not expire.
* @member {string} [name] Person's display name.
* @member {string} [userData] User-provided data attached to this person.
*/
constructor() {
}
/**
* Defines the metadata of PersonResult
*
* @returns {object} metadata of PersonResult
*
*/
mapper() {
return {
required: false,
serializedName: 'PersonResult',
type: {
name: 'Composite',
className: 'PersonResult',
modelProperties: {
personId: {
required: true,
serializedName: 'personId',
type: {
name: 'String'
}
},
persistedFaceIds: {
required: false,<|fim▁hole|> element: {
required: false,
serializedName: 'StringElementType',
type: {
name: 'String'
}
}
}
},
name: {
required: false,
serializedName: 'name',
type: {
name: 'String'
}
},
userData: {
required: false,
serializedName: 'userData',
type: {
name: 'String'
}
}
}
}
};
}
}
module.exports = PersonResult;<|fim▁end|> | serializedName: 'persistedFaceIds',
type: {
name: 'Sequence', |
<|file_name|>cache.rs<|end_file_name|><|fim▁begin|>#[serde(rename_all = "camelCase")]
#[derive(Debug, Deserialize, Serialize)]
pub struct PurgeOptions {
pub max_age: Option<i32>,<|fim▁hole|>pub struct GetCacheOptions {
pub no_cache: Option<bool>, // Skip usage of cache,
pub no_update: Option<bool>, // Use only cached data, do not try to update.
pub no_store: Option<bool>, // Skip storing fresh data if updated
pub min_fresh: Option<i32>, // Return cached data if not older than this many seconds. -1 means do not check age.
}<|fim▁end|> | }
#[serde(rename_all = "camelCase")]
#[derive(Debug, Deserialize, Serialize, Clone)] |
<|file_name|>ooc.tsx<|end_file_name|><|fim▁begin|>import { FeatureColorInput, Feature } from "../base";
<|fim▁hole|> description: "The color of your OOC messages.",
component: FeatureColorInput,
};<|fim▁end|> | export const ooccolor: Feature<string> = {
name: "OOC color",
category: "CHAT", |
<|file_name|>mongodb_data_test.go<|end_file_name|><|fim▁begin|>package mongodb
import (
"testing"
"time"
"github.com/influxdata/telegraf/testutil"
"github.com/stretchr/testify/assert"<|fim▁hole|>
func TestAddNonReplStats(t *testing.T) {
d := NewMongodbData(
&StatLine{
StorageEngine: "",
Time: time.Now(),
Insert: 0,
Query: 0,
Update: 0,
Delete: 0,
GetMore: 0,
Command: 0,
Flushes: 0,
Virtual: 0,
Resident: 0,
QueuedReaders: 0,
QueuedWriters: 0,
ActiveReaders: 0,
ActiveWriters: 0,
NetIn: 0,
NetOut: 0,
NumConnections: 0,
Passes: 0,
DeletedDocuments: 0,
},
tags,
)
var acc testutil.Accumulator
d.AddDefaultStats()
d.flush(&acc)
for key := range DefaultStats {
assert.True(t, acc.HasInt64Field("mongodb", key))
}
}
func TestAddReplStats(t *testing.T) {
d := NewMongodbData(
&StatLine{
StorageEngine: "mmapv1",
Mapped: 0,
NonMapped: 0,
Faults: 0,
},
tags,
)
var acc testutil.Accumulator
d.AddDefaultStats()
d.flush(&acc)
for key := range MmapStats {
assert.True(t, acc.HasInt64Field("mongodb", key))
}
}
func TestAddWiredTigerStats(t *testing.T) {
d := NewMongodbData(
&StatLine{
StorageEngine: "wiredTiger",
CacheDirtyPercent: 0,
CacheUsedPercent: 0,
},
tags,
)
var acc testutil.Accumulator
d.AddDefaultStats()
d.flush(&acc)
for key := range WiredTigerStats {
assert.True(t, acc.HasFloatField("mongodb", key))
}
}
func TestStateTag(t *testing.T) {
d := NewMongodbData(
&StatLine{
StorageEngine: "",
Time: time.Now(),
Insert: 0,
Query: 0,
NodeType: "PRI",
NodeState: "PRIMARY",
},
tags,
)
stateTags := make(map[string]string)
var acc testutil.Accumulator
d.AddDefaultStats()
d.flush(&acc)
fields := map[string]interface{}{
"active_reads": int64(0),
"active_writes": int64(0),
"commands_per_sec": int64(0),
"deletes_per_sec": int64(0),
"flushes_per_sec": int64(0),
"getmores_per_sec": int64(0),
"inserts_per_sec": int64(0),
"member_status": "PRI",
"state": "PRIMARY",
"net_in_bytes": int64(0),
"net_out_bytes": int64(0),
"open_connections": int64(0),
"queries_per_sec": int64(0),
"queued_reads": int64(0),
"queued_writes": int64(0),
"repl_commands_per_sec": int64(0),
"repl_deletes_per_sec": int64(0),
"repl_getmores_per_sec": int64(0),
"repl_inserts_per_sec": int64(0),
"repl_queries_per_sec": int64(0),
"repl_updates_per_sec": int64(0),
"repl_lag": int64(0),
"resident_megabytes": int64(0),
"updates_per_sec": int64(0),
"vsize_megabytes": int64(0),
"ttl_deletes_per_sec": int64(0),
"ttl_passes_per_sec": int64(0),
"jumbo_chunks": int64(0),
}
acc.AssertContainsTaggedFields(t, "mongodb", fields, stateTags)
}<|fim▁end|> | )
var tags = make(map[string]string) |
<|file_name|>grpc_version.py<|end_file_name|><|fim▁begin|># Copyright 2016, Google Inc.
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are<|fim▁hole|>#
# * Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
# * Redistributions in binary form must reproduce the above
# copyright notice, this list of conditions and the following disclaimer
# in the documentation and/or other materials provided with the
# distribution.
# * Neither the name of Google Inc. nor the names of its
# contributors may be used to endorse or promote products derived from
# this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
# AUTO-GENERATED FROM `$REPO_ROOT/templates/src/python/grpcio_tests/grpc_version.py.template`!!!
VERSION='1.0.1'<|fim▁end|> | # met: |
<|file_name|>hosts_test.go<|end_file_name|><|fim▁begin|>package opts
import (
"fmt"
"testing"
)
func TestParseHost(t *testing.T) {
invalid := []string{
"anything",
"something with spaces",
"://",
"unknown://",
"tcp://:port",
"tcp://invalid",
"tcp://invalid:port",
}
valid := map[string]string{
"": DefaultHost,
" ": DefaultHost,
" ": DefaultHost,
"fd://": "fd://",
"fd://something": "fd://something",
"tcp://host:": fmt.Sprintf("tcp://host:%d", DefaultHTTPPort),
"tcp://": DefaultTCPHost,
"tcp://:2375": fmt.Sprintf("tcp://%s:2375", DefaultHTTPHost),
"tcp://:2376": fmt.Sprintf("tcp://%s:2376", DefaultHTTPHost),
"tcp://0.0.0.0:8080": "tcp://0.0.0.0:8080",
"tcp://192.168.0.0:12000": "tcp://192.168.0.0:12000",
"tcp://192.168:8080": "tcp://192.168:8080",
"tcp://0.0.0.0:1234567890": "tcp://0.0.0.0:1234567890", // yeah it's valid :P
" tcp://:7777/path ": fmt.Sprintf("tcp://%s:7777/path", DefaultHTTPHost),
"tcp://docker.com:2375": "tcp://docker.com:2375",
"unix://": "unix://" + DefaultUnixSocket,
"unix://path/to/socket": "unix://path/to/socket",
"npipe://": "npipe://" + DefaultNamedPipe,
"npipe:////./pipe/foo": "npipe:////./pipe/foo",
}
for _, value := range invalid {
if _, err := ParseHost(false, value); err == nil {
t.Errorf("Expected an error for %v, got [nil]", value)
}
}
for value, expected := range valid {
if actual, err := ParseHost(false, value); err != nil || actual != expected {
t.Errorf("Expected for %v [%v], got [%v, %v]", value, expected, actual, err)
}
}
}
func TestParseDockerDaemonHost(t *testing.T) {
invalids := map[string]string{<|fim▁hole|> "udp://127.0.0.1": "Invalid bind address format: udp://127.0.0.1",
"udp://127.0.0.1:2375": "Invalid bind address format: udp://127.0.0.1:2375",
"tcp://unix:///run/docker.sock": "Invalid bind address format: unix",
" tcp://:7777/path ": "Invalid bind address format: tcp://:7777/path ",
"tcp": "Invalid bind address format: tcp",
"unix": "Invalid bind address format: unix",
"fd": "Invalid bind address format: fd",
"": "Invalid bind address format: ",
}
valids := map[string]string{
"0.0.0.1:": "tcp://0.0.0.1:2375",
"0.0.0.1:5555": "tcp://0.0.0.1:5555",
"0.0.0.1:5555/path": "tcp://0.0.0.1:5555/path",
"[::1]:": "tcp://[::1]:2375",
"[::1]:5555/path": "tcp://[::1]:5555/path",
"[0:0:0:0:0:0:0:1]:": "tcp://[0:0:0:0:0:0:0:1]:2375",
"[0:0:0:0:0:0:0:1]:5555/path": "tcp://[0:0:0:0:0:0:0:1]:5555/path",
":6666": fmt.Sprintf("tcp://%s:6666", DefaultHTTPHost),
":6666/path": fmt.Sprintf("tcp://%s:6666/path", DefaultHTTPHost),
"tcp://": DefaultTCPHost,
"tcp://:7777": fmt.Sprintf("tcp://%s:7777", DefaultHTTPHost),
"tcp://:7777/path": fmt.Sprintf("tcp://%s:7777/path", DefaultHTTPHost),
"unix:///run/docker.sock": "unix:///run/docker.sock",
"unix://": "unix://" + DefaultUnixSocket,
"fd://": "fd://",
"fd://something": "fd://something",
"localhost:": "tcp://localhost:2375",
"localhost:5555": "tcp://localhost:5555",
"localhost:5555/path": "tcp://localhost:5555/path",
}
for invalidAddr, expectedError := range invalids {
if addr, err := parseDockerDaemonHost(invalidAddr); err == nil || err.Error() != expectedError {
t.Errorf("tcp %v address expected error %v return, got %s and addr %v", invalidAddr, expectedError, err, addr)
}
}
for validAddr, expectedAddr := range valids {
if addr, err := parseDockerDaemonHost(validAddr); err != nil || addr != expectedAddr {
t.Errorf("%v -> expected %v, got (%v) addr (%v)", validAddr, expectedAddr, err, addr)
}
}
}
func TestParseTCP(t *testing.T) {
var (
defaultHTTPHost = "tcp://127.0.0.1:2376"
)
invalids := map[string]string{
"0.0.0.0": "Invalid bind address format: 0.0.0.0",
"tcp:a.b.c.d": "Invalid bind address format: tcp:a.b.c.d",
"tcp:a.b.c.d/path": "Invalid bind address format: tcp:a.b.c.d/path",
"udp://127.0.0.1": "Invalid proto, expected tcp: udp://127.0.0.1",
"udp://127.0.0.1:2375": "Invalid proto, expected tcp: udp://127.0.0.1:2375",
}
valids := map[string]string{
"": defaultHTTPHost,
"tcp://": defaultHTTPHost,
"0.0.0.1:": "tcp://0.0.0.1:2376",
"0.0.0.1:5555": "tcp://0.0.0.1:5555",
"0.0.0.1:5555/path": "tcp://0.0.0.1:5555/path",
":6666": "tcp://127.0.0.1:6666",
":6666/path": "tcp://127.0.0.1:6666/path",
"tcp://:7777": "tcp://127.0.0.1:7777",
"tcp://:7777/path": "tcp://127.0.0.1:7777/path",
"[::1]:": "tcp://[::1]:2376",
"[::1]:5555": "tcp://[::1]:5555",
"[::1]:5555/path": "tcp://[::1]:5555/path",
"[0:0:0:0:0:0:0:1]:": "tcp://[0:0:0:0:0:0:0:1]:2376",
"[0:0:0:0:0:0:0:1]:5555": "tcp://[0:0:0:0:0:0:0:1]:5555",
"[0:0:0:0:0:0:0:1]:5555/path": "tcp://[0:0:0:0:0:0:0:1]:5555/path",
"localhost:": "tcp://localhost:2376",
"localhost:5555": "tcp://localhost:5555",
"localhost:5555/path": "tcp://localhost:5555/path",
}
for invalidAddr, expectedError := range invalids {
if addr, err := parseTCPAddr(invalidAddr, defaultHTTPHost); err == nil || err.Error() != expectedError {
t.Errorf("tcp %v address expected error %v return, got %s and addr %v", invalidAddr, expectedError, err, addr)
}
}
for validAddr, expectedAddr := range valids {
if addr, err := parseTCPAddr(validAddr, defaultHTTPHost); err != nil || addr != expectedAddr {
t.Errorf("%v -> expected %v, got %v and addr %v", validAddr, expectedAddr, err, addr)
}
}
}
func TestParseInvalidUnixAddrInvalid(t *testing.T) {
if _, err := parseSimpleProtoAddr("unix", "tcp://127.0.0.1", "unix:///var/run/docker.sock"); err == nil || err.Error() != "Invalid proto, expected unix: tcp://127.0.0.1" {
t.Fatalf("Expected an error, got %v", err)
}
if _, err := parseSimpleProtoAddr("unix", "unix://tcp://127.0.0.1", "/var/run/docker.sock"); err == nil || err.Error() != "Invalid proto, expected unix: tcp://127.0.0.1" {
t.Fatalf("Expected an error, got %v", err)
}
if v, err := parseSimpleProtoAddr("unix", "", "/var/run/docker.sock"); err != nil || v != "unix:///var/run/docker.sock" {
t.Fatalf("Expected an %v, got %v", v, "unix:///var/run/docker.sock")
}
}<|fim▁end|> | "0.0.0.0": "Invalid bind address format: 0.0.0.0",
"tcp:a.b.c.d": "Invalid bind address format: tcp:a.b.c.d",
"tcp:a.b.c.d/path": "Invalid bind address format: tcp:a.b.c.d/path", |
<|file_name|>MIEApplication.java<|end_file_name|><|fim▁begin|>package com.github.gilz688.mifeditor;
import com.github.gilz688.mifeditor.proto.MIEView;
import javafx.application.Application;
import javafx.fxml.FXMLLoader;
import javafx.scene.Scene;
import javafx.scene.layout.AnchorPane;
import javafx.stage.Stage;
public class MIEApplication extends Application {
private Stage primaryStage;
private AnchorPane rootLayout;
public static final String APPLICATION_NAME = "MIF Image Editor";
@Override
public void start(Stage primaryStage) {
this.primaryStage = primaryStage;
try {
// Load root layout from fxml file.
FXMLLoader loader = new FXMLLoader();
loader.setLocation(MIEApplication.class
.getResource("view/MIE.fxml"));
rootLayout = (AnchorPane) loader.load();
// Show the scene containing the root layout.
Scene scene = new Scene(rootLayout);
scene.getStylesheets().add(
getClass().getResource("application.css").toExternalForm());
primaryStage.setTitle(APPLICATION_NAME);
primaryStage.setScene(scene);
primaryStage.show();
<|fim▁hole|> view.setStage(primaryStage);
} catch (Exception e) {
e.printStackTrace();
}
}
public static void main(String[] args) {
launch(args);
}
}<|fim▁end|> | final MIEView view = (MIEView) loader.getController();
|
<|file_name|>xprod.test.ts<|end_file_name|><|fim▁begin|><|fim▁hole|>
describe('xprod', () => {
describe('[a] -> [b]', () => {
it('-> [ (a, b) ]', () => {
const xs = [ 1, 2, 3 ]
const ys = [ 'a', 'b', 'c' ]
assert.deepEqual(
xprod(xs, ys),
[ [ 1, 'a' ], [ 1, 'b' ], [ 1, 'c' ], [ 2, 'a' ], [ 2, 'b' ], [ 2, 'c' ], [ 3, 'a' ], [ 3, 'b' ], [ 3, 'c' ] ],
)
})
})
})<|fim▁end|> | import * as assert from 'assert'
import { xprod } from './xprod' |
<|file_name|>extension.rs<|end_file_name|><|fim▁begin|>/* This Source Code Form is subject to the terms of the Mozilla Public
* License, v. 2.0. If a copy of the MPL was not distributed with this
* file, You can obtain one at http://mozilla.org/MPL/2.0/. */
use super::WebGLExtensions;
use canvas_traits::webgl::WebGLVersion;
use crate::dom::bindings::reflector::DomObject;
use crate::dom::bindings::root::DomRoot;
use crate::dom::bindings::trace::JSTraceable;
use crate::dom::webglrenderingcontext::WebGLRenderingContext;
/// Trait implemented by WebGL extensions.
pub trait WebGLExtension: Sized
where
Self::Extension: DomObject + JSTraceable,
{
type Extension;<|fim▁hole|> /// Returns which WebGL spec is this extension written against.
fn spec() -> WebGLExtensionSpec;
/// Checks if the extension is supported.
fn is_supported(ext: &WebGLExtensions) -> bool;
/// Enable the extension.
fn enable(ext: &WebGLExtensions);
/// Name of the WebGL Extension.
fn name() -> &'static str;
}
pub enum WebGLExtensionSpec {
/// Extensions written against both WebGL and WebGL2 specs.
All,
/// Extensions writen against a specific WebGL version spec.
Specific(WebGLVersion),
}<|fim▁end|> |
/// Creates the DOM object of the WebGL extension.
fn new(ctx: &WebGLRenderingContext) -> DomRoot<Self::Extension>;
|
<|file_name|>LoginActivity.java<|end_file_name|><|fim▁begin|>/*
* Licensed to Cloudkick, Inc ('Cloudkick') under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* Cloudkick licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.cloudkick;
import java.io.BufferedReader;
import java.io.InputStream;
import java.io.InputStreamReader;
import java.util.ArrayList;
import org.apache.http.HttpResponse;
import org.apache.http.NameValuePair;
import org.apache.http.client.HttpClient;
import org.apache.http.client.entity.UrlEncodedFormEntity;
import org.apache.http.client.methods.HttpPost;
import org.apache.http.impl.client.DefaultHttpClient;
import org.apache.http.message.BasicNameValuePair;
import android.app.Activity;
import android.app.AlertDialog;
import android.app.ProgressDialog;
import android.content.DialogInterface;
import android.content.Intent;
import android.content.SharedPreferences;
import android.net.Uri;
import android.os.AsyncTask;
import android.os.Bundle;
import android.preference.PreferenceManager;
import android.util.Log;
import android.view.View;
import android.widget.EditText;
import android.widget.RelativeLayout;
import android.widget.Toast;
public class LoginActivity extends Activity {
private static final int SETTINGS_ACTIVITY_ID = 0;
RelativeLayout loginView = null;
private String user = null;
private String pass = null;
private ProgressDialog progress = null;
@Override
public void onCreate(Bundle savedInstanceState) {
super.onCreate(savedInstanceState);
setContentView(R.layout.login);
setTitle("Cloudkick for Android");
findViewById(R.id.button_login).setOnClickListener(new LoginClickListener());
findViewById(R.id.button_signup).setOnClickListener(new SignupClickListener());
}
@Override
protected void onActivityResult(int requestCode, int resultCode, Intent data) {
if (requestCode == SETTINGS_ACTIVITY_ID) {
SharedPreferences prefs = PreferenceManager.getDefaultSharedPreferences(LoginActivity.this);
if (prefs.getString("editKey", "").equals("") && prefs.getString("editSecret", "").equals("")) {
finish();
}
else {
Intent result = new Intent();
result.putExtra("login", true);
setResult(Activity.RESULT_OK, result);
finish();
}
}
}
private class LoginClickListener implements View.OnClickListener {
public void onClick(View v) {
new AccountLister().execute();
}
}
private class SignupClickListener implements View.OnClickListener {
public void onClick(View v) {
startActivity(new Intent(Intent.ACTION_VIEW, Uri.parse("https://www.cloudkick.com/pricing/")));
}
}
private class AccountLister extends AsyncTask<Void, Void, ArrayList<String>>{
private Integer statusCode = null;
@Override
protected void onPreExecute() {
user = ((EditText) findViewById(R.id.input_email)).getText().toString();
pass = ((EditText) findViewById(R.id.input_password)).getText().toString();
progress = ProgressDialog.show(LoginActivity.this, "", "Logging In...", true);
}
@Override
protected ArrayList<String> doInBackground(Void...voids) {
ArrayList<String> accounts = new ArrayList<String>();
try {
HttpClient client = new DefaultHttpClient();
HttpPost post = new HttpPost("https://www.cloudkick.com/oauth/list_accounts/");
ArrayList<NameValuePair> values = new ArrayList<NameValuePair>(2);
values.add(new BasicNameValuePair("user", user));
values.add(new BasicNameValuePair("password", pass));
post.setEntity(new UrlEncodedFormEntity(values));
HttpResponse response = client.execute(post);
statusCode = response.getStatusLine().getStatusCode();
InputStream is = response.getEntity().getContent();
BufferedReader rd = new BufferedReader(new InputStreamReader(is));
String line;
while ((line = rd.readLine()) != null) {
accounts.add(line);
Log.i("LoginActivity", line);
}
}
catch (Exception e) {
e.printStackTrace();
statusCode = 0;
}
return accounts;
}
@Override
protected void onPostExecute(ArrayList<String> accounts) {
switch (statusCode) {
case 200:
if (accounts.size() == 1) {
new KeyRetriever().execute(accounts.get(0));
}
else {
String[] tmpAccountArray = new String[accounts.size()];
final String[] accountArray = accounts.toArray(tmpAccountArray);
AlertDialog.Builder builder = new AlertDialog.Builder(LoginActivity.this);
builder.setTitle("Select an Account");
builder.setItems(accountArray, new DialogInterface.OnClickListener() {
public void onClick(DialogInterface dialog, int item) {
new KeyRetriever().execute(accountArray[item]);
}
});
AlertDialog selectAccount = builder.create();
selectAccount.show();
}
break;
case 400:
progress.dismiss();
if (accounts.get(0).equals("You have enabled multi factor authentication for this account. To access the API key list, please visit the website.")) {
AlertDialog.Builder builder = new AlertDialog.Builder(LoginActivity.this);
builder.setTitle("MFA is Enabled");
String mfaMessage = ("You appear to have multi-factor authentication enabled on your account. "
+ "You will need to manually create an API key with read permissions in the "
+ "web interface, then enter it directly in the settings panel.");
builder.setMessage(mfaMessage);
builder.setPositiveButton("Settings", new DialogInterface.OnClickListener() {
public void onClick(DialogInterface dialog, int id) {
Intent settingsActivity = new Intent(getBaseContext(), Preferences.class);
startActivityForResult(settingsActivity, SETTINGS_ACTIVITY_ID);
}
});
AlertDialog mfaDialog = builder.create();
mfaDialog.show();
}
else {
Toast.makeText(LoginActivity.this, "Invalid Username or Password", Toast.LENGTH_LONG).show();
}
break;<|fim▁hole|> }
}
private class KeyRetriever extends AsyncTask<String, Void, String[]>{
private Integer statusCode = null;
@Override
protected String[] doInBackground(String...accts) {
Log.i("LoginActivity", "Selected Account: " + accts[0]);
String[] creds = new String[2];
try {
HttpClient client = new DefaultHttpClient();
HttpPost post = new HttpPost("https://www.cloudkick.com/oauth/create_consumer/");
ArrayList<NameValuePair> values = new ArrayList<NameValuePair>(2);
values.add(new BasicNameValuePair("user", user));
values.add(new BasicNameValuePair("password", pass));
values.add(new BasicNameValuePair("account", accts[0]));
values.add(new BasicNameValuePair("system", "Cloudkick for Android"));
values.add(new BasicNameValuePair("perm_read", "True"));
values.add(new BasicNameValuePair("perm_write", "False"));
values.add(new BasicNameValuePair("perm_execute", "False"));
post.setEntity(new UrlEncodedFormEntity(values));
HttpResponse response = client.execute(post);
statusCode = response.getStatusLine().getStatusCode();
Log.i("LoginActivity", "Return Code: " + statusCode);
InputStream is = response.getEntity().getContent();
BufferedReader rd = new BufferedReader(new InputStreamReader(is));
String line;
for (int i = 0; i < 2; i++) {
line = rd.readLine();
if (line == null) {
return creds;
}
creds[i] = line;
}
}
catch (Exception e) {
statusCode = 0;
}
return creds;
}
@Override
protected void onPostExecute(String[] creds) {
progress.dismiss();
if (statusCode != 200) {
// Show short error messages - this is a dirty hack
if (creds[0] != null && creds[0].startsWith("User with role")) {
Toast.makeText(LoginActivity.this, creds[0], Toast.LENGTH_LONG).show();
}
else {
Toast.makeText(LoginActivity.this, "An Error Occurred on Login", Toast.LENGTH_LONG).show();
return;
}
}
SharedPreferences prefs = PreferenceManager.getDefaultSharedPreferences(LoginActivity.this);
SharedPreferences.Editor editor = prefs.edit();
editor.putString("editKey", creds[0]);
editor.putString("editSecret", creds[1]);
editor.commit();
Intent result = new Intent();
result.putExtra("login", true);
setResult(Activity.RESULT_OK, result);
LoginActivity.this.finish();
}
}
}<|fim▁end|> | default:
progress.dismiss();
Toast.makeText(LoginActivity.this, "An Error Occurred Retrieving Your Accounts", Toast.LENGTH_LONG).show();
}; |
<|file_name|>test_telemetry_full.py<|end_file_name|><|fim▁begin|>"""
telemetry full tests.
"""
import platform
import sys
from unittest import mock
import pytest
import wandb
def test_telemetry_finish(runner, live_mock_server, parse_ctx):
with runner.isolated_filesystem():
run = wandb.init()
run.finish()
ctx_util = parse_ctx(live_mock_server.get_ctx())
telemetry = ctx_util.telemetry
assert telemetry and 2 in telemetry.get("3", [])
def test_telemetry_imports_hf(runner, live_mock_server, parse_ctx):
with runner.isolated_filesystem():
run = wandb.init()
with mock.patch.dict("sys.modules", {"transformers": mock.Mock()}):
import transformers
run.finish()
ctx_util = parse_ctx(live_mock_server.get_ctx())
telemetry = ctx_util.telemetry<|fim▁hole|> assert telemetry and 11 in telemetry.get("2", [])
def test_telemetry_imports_catboost(runner, live_mock_server, parse_ctx):
with runner.isolated_filesystem():
with mock.patch.dict("sys.modules", {"catboost": mock.Mock()}):
import catboost
run = wandb.init()
run.finish()
ctx_util = parse_ctx(live_mock_server.get_ctx())
telemetry = ctx_util.telemetry
# catboost in both init and finish modules
assert telemetry and 7 in telemetry.get("1", [])
assert telemetry and 7 in telemetry.get("2", [])
@pytest.mark.skipif(
platform.system() == "Windows", reason="test suite does not build jaxlib on windows"
)
@pytest.mark.skipif(sys.version_info >= (3, 10), reason="jax has no py3.10 wheel")
def test_telemetry_imports_jax(runner, live_mock_server, parse_ctx):
with runner.isolated_filesystem():
import jax
wandb.init()
wandb.finish()
ctx_util = parse_ctx(live_mock_server.get_ctx())
telemetry = ctx_util.telemetry
# jax in finish modules but not in init modules
assert telemetry and 12 in telemetry.get("1", [])
assert telemetry and 12 in telemetry.get("2", [])
def test_telemetry_run_organizing_init(runner, live_mock_server, parse_ctx):
with runner.isolated_filesystem():
wandb.init(name="test_name", tags=["my-tag"], config={"abc": 123}, id="mynewid")
wandb.finish()
ctx_util = parse_ctx(live_mock_server.get_ctx())
telemetry = ctx_util.telemetry
assert telemetry and 13 in telemetry.get("3", []) # name
assert telemetry and 14 in telemetry.get("3", []) # id
assert telemetry and 15 in telemetry.get("3", []) # tags
assert telemetry and 16 in telemetry.get("3", []) # config
def test_telemetry_run_organizing_set(runner, live_mock_server, parse_ctx):
with runner.isolated_filesystem():
run = wandb.init()
run.name = "test-name"
run.tags = ["tag1"]
wandb.config.update = True
run.finish()
ctx_util = parse_ctx(live_mock_server.get_ctx())
telemetry = ctx_util.telemetry
assert telemetry and 17 in telemetry.get("3", []) # name
assert telemetry and 18 in telemetry.get("3", []) # tags
assert telemetry and 19 in telemetry.get("3", []) # config update<|fim▁end|> |
# hf in finish modules but not in init modules
assert telemetry and 11 not in telemetry.get("1", []) |
<|file_name|>models.py<|end_file_name|><|fim▁begin|># -*- coding: utf-8 -*-
from django.contrib.auth.models import AbstractUser
from django.db import models<|fim▁hole|>
@python_2_unicode_compatible
class User(AbstractUser):
# First Name and Last Name do not cover name patterns
# around the globe.
name = models.CharField(_("Name of User"), blank=True, max_length=255)
def __str__(self):
return self.username<|fim▁end|> | from django.utils.encoding import python_2_unicode_compatible
from django.utils.translation import ugettext_lazy as _
|
<|file_name|>trait-resolution-breakage.rs<|end_file_name|><|fim▁begin|>// check-pass
trait Trait<T> {
const ASSOC_CONST: usize = 0;
}
impl Trait<()> for u8 {}
// `u8::ASSOC_CONST` is resolved today, but will be ambiguous
// under lazy normalization.<|fim▁hole|> todo!()
}
fn main() {}<|fim▁end|> | fn foo<T, U>() -> [(T, U); u8::ASSOC_CONST]
where
u8: Trait<T> + Trait<U>,
{ |
<|file_name|>lastfm.js<|end_file_name|><|fim▁begin|>//require last.fm api client
var LastFmNode = require('lastfm').LastFmNode;
//get api keys from config file
var config = require('../config.js');
//save.js to save json
var save = require('../save.js');
// fs to open json
var fs = require('fs');
//initialize api client
var lastfm = new LastFmNode({
api_key: config.lastfm.key, // sign-up for a key at http://www.last.fm/api
secret: config.lastfm.secret,
useragent: 'api.lukemil.es' // optional. defaults to lastfm-node.
});
// This job returns weekly last.fm play data.
job('music-weekly', function(done) {
//get user's weekly artist chart to do weekly play count
var request = lastfm.request("user.getWeeklyArtistChart", {
user: config.lastfm.username,
handlers: {
success: function(data) {
//create object to later save
var weeklySongs = new Object;
weeklySongs.interval = 7;
//eliminate unneeded data
data = data.weeklyartistchart.artist;
//get list of keys
var artistkeys = Object.keys(data);
// initialize plays variable
weeklySongs.plays = 0;
//initialize top artist variable
weeklySongs.topArtist = new Object;
// add number of unique artists to object
weeklySongs.uniqueArtists = artistkeys.length;
// iterate through keys
for( var i = 0, length = artistkeys.length; i < length; i++ ) {
//we have to do parseInt() because the JSON has the number as a string
weeklySongs.plays = parseInt(data[artistkeys[ i ] ].playcount) + weeklySongs.plays;
// save artist which is number 1
if (parseInt(data[artistkeys[i]]['@attr'].rank) === 1) {
weeklySongs.topArtist.name = data[artistkeys[i]].name;
weeklySongs.topArtist.count = parseInt(data[artistkeys[i]].playcount);
}
}
save.file("music-weekly", weeklySongs);
console.log('Weekly last.fm data updated.');
},
error: function(error) {
return;
}
}
});
}).every('1h');
// gets recent last fm data
job('music-recent', function(done) {
var request = lastfm.request("user.getRecentTracks", {
user: config.lastfm.username,
handlers: {
success: function(data) {
//create object to later save
var recentSongs = new Object;
// create object of just songs
recentSongs.songs = [];
recentSongs.interval = 1;
//eliminate unneeded data
recentSongs.nowPlaying = (data.recenttracks.track[0]["@attr"]) ? true : false;
data = data.recenttracks.track;
//iterate through artist data...
//get list of keys
var keys = Object.keys(data);
// iterate through keys
for(var i = 0, length = keys.length; i < length; i++) {
//create temport object for song
song = new Object;
// create temporary object for working song from api
lastSong = data[keys[i]];
//scoop up the data we want
song.artist = lastSong.artist["#text"];
song.name = lastSong.name;
song.album = lastSong.album["#text"];
song.image = lastSong.image[lastSong.image.length - 1]["#text"];
song.url = lastSong.url;
// convert spaces to plusses and construct URL
song.artistUrl = "http://www.last.fm/music/" + lastSong.artist["#text"].replace(/\s+/g, '+');
// cannot figure out why this line creates the error
// [TypeError: Cannot read property '#time' of undefined]
// it worked at first during my testing and stopped
// song["time"] = lastSong["date"]["#time"];
// song.date_unix = lastSong["date"].uts
//store data in main object
recentSongs.songs[keys[i]] = song;
}
save.file("music-recent", recentSongs);
console.log('Recent last.fm data updated.');
done(recentSongs);
},
error: function(error) {
console.log(error);
return;
}
}
});
}).every('90s');
job('music-combine', function(done, musicRecent) {
// only combine file if music-weekly exists
path = "json/music-weekly.json";
fs.exists(path, function(exists) {
if (exists) {
// synchronously open the weekly file
var musicWeekly = JSON.parse(fs.readFileSync(path).toString());
// create new object to dump data in<|fim▁hole|> // merge files into one object
music.nowPlaying = musicRecent.nowPlaying;
music.recent = musicRecent.songs;
music.weeklyPlays = musicWeekly.plays;
music.weeklyTopArtist = musicWeekly.topArtist;
music.weeklyUniqueArtists = musicWeekly.uniqueArtists;
// save to music.json
console.log('music.json updated');
save.file("music", music);
}
});
}).after('music-recent');<|fim▁end|> | var music = new Object;
|
<|file_name|>reading_scalar_data.tests.ts<|end_file_name|><|fim▁begin|>import { expect } from 'chai';
import { ModelApiManager } from '../../api/manager';
import * as models from '../__fixtures__/models';
import { graphql, GraphQLSchema } from 'graphql';
import { ModelManager, fields } from 'rev-models';
import { createData, IModelTestData } from '../__fixtures__/modeldata';
import { GraphQLApi } from '../api';
describe('GraphQL query type - scalar model data', () => {
describe('When model has no data', () => {
let apiManager: ModelApiManager;
let api: GraphQLApi;
let schema: GraphQLSchema;
let modelManager: ModelManager;
before(() => {
modelManager = models.getModelManager();
apiManager = new ModelApiManager(modelManager);
apiManager.register(models.Post, { operations: ['read'] });
apiManager.register(models.User, { operations: ['read'] });<|fim▁hole|> schema = api.getSchema();
});
it('a query returns an empty array', async () => {
const query = `
query {
Post {
results {
id,
title,
body,
published,
post_date
}
}
}
`;
const result = await graphql(schema, query);
expect(result.data!.Post.results).to.deep.equal([]);
});
});
describe('When model has data', () => {
let apiManager: ModelApiManager;
let api: GraphQLApi;
let schema: GraphQLSchema;
let modelManager: ModelManager;
let expectedData: IModelTestData;
beforeEach(async () => {
modelManager = models.getModelManager();
apiManager = new ModelApiManager(modelManager);
apiManager.register(models.Post, { operations: ['read'] });
apiManager.register(models.User, { operations: ['read'] });
apiManager.register(models.Comment, { operations: ['read'] });
api = new GraphQLApi(apiManager);
expectedData = await createData(modelManager);
schema = api.getSchema();
});
it('a query returns the expected data', async () => {
const query = `
query {
Post {
results {
id,
title,
body,
published,
post_date
}
}
}
`;
const result = await graphql(schema, query);
expect(result.data!.Post.results).to.have.length(expectedData.posts.length);
for (let i = 0; i < expectedData.posts.length; i++) {
expect(result.data!.Post.results[i]).to.deep.equal({
id: expectedData.posts[i].id,
title: expectedData.posts[i].title,
body: expectedData.posts[i].body,
published: expectedData.posts[i].published,
post_date: expectedData.posts[i].post_date,
});
}
});
});
describe('Can read all default supported scalar field types', () => {
let apiManager: ModelApiManager;
let schema: GraphQLSchema;
let modelManager: ModelManager;
let expectedData: models.ModelWithAllScalarFields;
beforeEach(async () => {
modelManager = models.getModelManager();
apiManager = new ModelApiManager(modelManager);
apiManager.register(models.ModelWithAllScalarFields, { operations: ['read'] });
expectedData = new models.ModelWithAllScalarFields({
autoNumberField: 1,
integerField: 2,
numberField: 3.456,
textField: 'A test model with all default field types',
emailField: '[email protected]',
urlField: 'http://www.test.com',
passwordField: 'password123',
booleanField: true,
selectField: 'Y',
multiSelectField: ['A', 'B'],
dateField: '2017-12-25',
timeField: '12:13:14',
dateTimeField: '2017-12-25T12:13:14'
});
await modelManager.create(expectedData);
schema = apiManager.getGraphQLSchema();
});
it('a query returns the expected data', async () => {
const query = `
query {
ModelWithAllScalarFields {
results {
autoNumberField
integerField
numberField
textField
emailField
urlField
passwordField
booleanField
selectField
multiSelectField
dateField
timeField
dateTimeField
}
}
}
`;
const result = await graphql(schema, query);
expect(result.data!.ModelWithAllScalarFields.results).to.have.length(1);
expect(result.data!.ModelWithAllScalarFields.results[0]).to.deep.equal({
autoNumberField: 1,
integerField: expectedData.integerField,
numberField: expectedData.numberField,
textField: expectedData.textField,
emailField: expectedData.emailField,
urlField: expectedData.urlField,
passwordField: expectedData.passwordField,
booleanField: expectedData.booleanField,
selectField: expectedData.selectField,
multiSelectField: expectedData.multiSelectField,
dateField: expectedData.dateField,
timeField: expectedData.timeField,
dateTimeField: expectedData.dateTimeField
});
});
});
describe('Can override Model -> GraphQL field conversion', () => {
let apiManager: ModelApiManager;
let api: GraphQLApi;
let schema: GraphQLSchema;
let modelManager: ModelManager;
let data: models.ModelWithAllScalarFields;
beforeEach(async () => {
modelManager = models.getModelManager();
apiManager = new ModelApiManager(modelManager);
apiManager.register(models.ModelWithAllScalarFields, { operations: ['read'] });
api = new GraphQLApi(apiManager);
api.fieldMappings.forEach((converter) => {
if (converter[0] === fields.TextFieldBase) {
converter[1].converter = (model, fieldName) => {
return 'I am a custom converter!';
};
}
});
data = new models.ModelWithAllScalarFields({
integerField: 2,
numberField: 3.456,
textField: 'I should be overridden...',
emailField: '[email protected]',
urlField: 'http://www.test.com',
passwordField: 'password123',
booleanField: true,
selectField: 'Y',
multiSelectField: ['A', 'B'],
dateField: '2017-12-25',
timeField: '12:13:14',
dateTimeField: '2017-12-25T12:13:14'
});
await modelManager.create(data);
schema = api.getSchema();
});
it('a query returns the expected data', async () => {
const query = `
query {
ModelWithAllScalarFields {
results {
autoNumberField
integerField
numberField
textField
emailField
urlField
passwordField
booleanField
selectField
multiSelectField
dateField
timeField
dateTimeField
}
}
}
`;
const result = await graphql(schema, query);
expect(result.data!.ModelWithAllScalarFields.results).to.have.length(1);
expect(result.data!.ModelWithAllScalarFields.results[0].textField).to.equal('I am a custom converter!');
});
});
});<|fim▁end|> | apiManager.register(models.Comment, { operations: ['read'] });
api = new GraphQLApi(apiManager);
|
<|file_name|>setup.py<|end_file_name|><|fim▁begin|>import os
from setuptools import setup, find_packages
with open(os.path.join(os.path.dirname(__file__), 'README.md')) as readme:
README = readme.read()
# allow setup.py to be run from any path
os.chdir(os.path.normpath(os.path.join(os.path.abspath(__file__), os.pardir)))
setup(
name='poloniex',<|fim▁hole|> 'poloniex.wamp',
'poloniex.api'
],
include_package_data=True,
description='Python Poloniex API',
long_description=README,
url='https://github.com/absortium/poloniex.git',
author='Andrey Samokhvalov',
license='MIT',
author_email='[email protected]',
install_requires=[
'asyncio',
'aiohttp',
'autobahn',
'pp-ez',
'requests'
],
classifiers=[
'Operating System :: OS Independent',
'Programming Language :: Python :: 3.5',
],
)<|fim▁end|> | version='0.1',
packages=[
'poloniex', |
<|file_name|>line_plot.py<|end_file_name|><|fim▁begin|>#
# Copyright (C) 2000-2005 by Yasushi Saito ([email protected])
#
# Jockey is free software; you can redistribute it and/or modify it
# under the terms of the GNU General Public License as published by the
# Free Software Foundation; either version 2, or (at your option) any
# later version.
#
# Jockey is distributed in the hope that it will be useful, but WITHOUT
# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
# FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
# for more details.
#
import tick_mark
import line_style
import pychart_util
import error_bar
import chart_object
import legend
import object_set
import line_plot_doc
import theme
from pychart_types import *
from types import *
default_width = 1.2
line_style_itr = None
_keys = {
'data' : (AnyType, None, pychart_util.data_desc),
'label': (StringType, '???', pychart_util.label_desc),
'data_label_offset': (CoordType, (0, 5),
"""The location of data labels relative to the sample point. Meaningful only when data_label_format != None."""),
'data_label_format': (FormatType, None,
"""The format string for the label printed
beside a sample point.
It can be a `printf' style format string, or
a two-parameter function that takes the (x, y)
values and returns a string. """
+ pychart_util.string_desc),
'xcol' : (IntType, 0, pychart_util.xcol_desc),
'ycol': (IntType, 1, pychart_util.ycol_desc),
'y_error_minus_col': (IntType, 2,
"""The column (within "data") from which the depth of the errorbar is extracted. Meaningful only when error_bar != None. <<error_bar>>"""),
'y_error_plus_col': (IntType, -1,
"""The column (within "data") from which the height of the errorbar is extracted. Meaningful only when error_bar != None. <<error_bar>>"""),
'y_qerror_minus_col': (IntType, -1, '<<error_bar>>'),
'y_qerror_plus_col': (IntType, -1, '<<error_bar>>'),
'line_style': (line_style.T, lambda: line_style_itr.next(), pychart_util.line_desc,
"By default, a style is picked from standard styles round-robin. <<line_style>>"),
'tick_mark': (tick_mark.T, None, pychart_util.tick_mark_desc),
'error_bar': (error_bar.T, None,
'The style of the error bar. <<error_bar>>'),
}
class T(chart_object.T):
__doc__ = line_plot_doc.doc
keys = _keys
def check_integrity(self):
assert chart_object.T.check_integrity(self)
##AUTOMATICALLY GENERATED
##END AUTOMATICALLY GENERATED
def get_data_range(self, which):
if which == 'X':
return pychart_util.get_data_range(self.data, self.xcol)
else:
return pychart_util.get_data_range(self.data, self.ycol)
def get_legend_entry(self):
if self.label:
line_style = self.line_style
if not line_style and self.error_bar:
line_style = getattr(self.error_bar, 'line_style', None) or \
getattr(self.error_bar, 'hline_style', None) or \
getattr(self.error_bar, 'vline_style', None)
if not line_style:
raise Exception, 'Line plot has label, but an empty line style and error bar.'
return legend.Entry(line_style=line_style,
tick_mark=self.tick_mark,
fill_style=None,
label=self.label)
return None
def draw(self, ar, can):
# Draw the line
clipbox = theme.adjust_bounding_box([ar.loc[0], ar.loc[1],
ar.loc[0] + ar.size[0],
ar.loc[1] + ar.size[1]]);
can.clip(clipbox[0],clipbox[1],clipbox[2],clipbox[3])
if self.line_style:
points = []
for pair in self.data:
yval = pychart_util.get_sample_val(pair, self.ycol)
xval = pair[self.xcol]
if None not in (xval, yval):
points.append((ar.x_pos(xval), ar.y_pos(yval)))
can.lines(self.line_style, points)
can.endclip()
# Draw tick marks and error bars
can.clip(ar.loc[0] - 10, ar.loc[1] - 10,
ar.loc[0] + ar.size[0] + 10,
ar.loc[1] + ar.size[1] + 10)<|fim▁hole|> x = pair[self.xcol]
y = pychart_util.get_sample_val(pair, self.ycol)
if None in (x, y): continue
x_pos = ar.x_pos(x)
y_pos = ar.y_pos(y)
if self.error_bar:
plus = pair[self.y_error_plus_col or self.y_error_minus_col]
minus = pair[self.y_error_minus_col or self.y_error_plus_col]
if self.y_qerror_minus_col or self.y_qerror_plus_col:
q_plus = pair[self.y_qerror_plus_col or self.y_qerror_minus_col]
q_minus = pair[self.y_qerror_minus_col or self.y_qerror_plus_col]
if None not in (minus,plus,q_minus,q_plus):
self.error_bar.draw(can, (x_pos, y_pos),
ar.y_pos(y - minus),
ar.y_pos(y + plus),
ar.y_pos(y - q_minus),
ar.y_pos(y + q_plus))
else:
if None not in (minus,plus): #PDS
self.error_bar.draw(can, (x_pos, y_pos),
ar.y_pos(y - minus),
ar.y_pos(y + plus))
if self.tick_mark:
self.tick_mark.draw(can, x_pos, y_pos)
if self.data_label_format:
can.show(x_pos + self.data_label_offset[0],
y_pos + self.data_label_offset[1],
'/hC' + pychart_util.apply_format(self.data_label_format, (x, y), 1))
can.endclip()
def init():
global line_style_itr
line_styles = object_set.T()
for org_style in line_style.standards.list():
style = line_style.T(width = default_width, color = org_style.color,
dash = org_style.dash)
line_styles.add(style)
line_style_itr = line_styles.iterate()
theme.add_reinitialization_hook(init)<|fim▁end|> | for pair in self.data: |
<|file_name|>webhosting.py<|end_file_name|><|fim▁begin|>"""
Implementation of the WebhostingService API endpoint
"""
from transip.client import MODE_RW, Client
class WebhostingService(Client):
"""
Transip_WebhostingService
"""
def __init__(self, *args, **kwargs):
super().__init__('WebhostingService', *args, **kwargs)
def get_webhosting_domain_names(self):
"""
Transip_WebhostingService::getWebhostingDomainNames
"""
return self._simple_request('getWebhostingDomainNames')
def get_available_packages(self):
"""
Transip_WebhostingService::getAvailablePackages
"""
return self._simple_request('getAvailablePackages')
def get_info(self, domain):
"""
Transip_WebhostingService::getInfo
"""
return self._simple_request('getInfo', domain)
def get_available_upgrades(self, domain):
"""
Transip_WebhostingService::getAvailableUpgrades
"""
return self._simple_request('getAvailableUpgrades', domain)
def create_mailbox(self, domain, mailbox):
"""
Transip_WebhostingService::createMailBox
"""
return self._simple_request('createMailBox', domain, mailbox, mode=MODE_RW)
def set_mailbox_password(self, domain, mailbox, password):
"""
Transip_WebhostingService::setMailBoxPassword
"""
return self._simple_request('setMailBoxPassword', domain, mailbox, password, mode=MODE_RW)
def update_mailbox(self, domain, mailbox):
"""
Transip_WebhostingService::modifyMailBox
"""<|fim▁hole|> def delete_mailbox(self, domain, mailbox):
"""
Transip_WebhostingService::deleteMailBox
"""
return self._simple_request('deleteMailBox', domain, mailbox, mode=MODE_RW)
def create_mail_forward(self, domain, mailforward):
"""
Transip_WebhostingService::createMailForward
"""
return self._simple_request('createMailForward', domain, mailforward, mode=MODE_RW)
def update_mail_forward(self, domain, mailforward):
"""
Transip_WebhostingService::modifyMailForward
"""
return self._simple_request('modifyMailForward', domain, mailforward, mode=MODE_RW)
def delete_mail_forward(self, domain, mailforward):
"""
Transip_WebhostingService::deleteMailForward
"""
return self._simple_request('deleteMailForward', domain, mailforward, mode=MODE_RW)<|fim▁end|> | return self._simple_request('modifyMailBox', domain, mailbox, mode=MODE_RW)
|
<|file_name|>widget.js<|end_file_name|><|fim▁begin|>WAF.define('WakendoColorPicker', ['waf-core/widget', 'wakendoCore'], function(widget, $) {
'use strict';
var KendoColorPicker = widget.create('WakendoColorPicker', {
value: widget.property({
type: 'string'
}),
flat: widget.property({
type: 'boolean',
defaultValue: false
}),
init: function() {
var self = this;
self.valueChangeSubscriber = self.value.onChange(function(newValue) {
self.kendoWidget.value(newValue);
});
self.flat.onChange(self.render);
self.render();<|fim▁hole|> render: function() {
var self = this;
$(self.node).empty();
var options = {
change: function(event) {
self.valueChangeSubscriber.pause();
self.value(event.value);
self.valueChangeSubscriber.resume();
}
};
if (self.flat()) {
var $el = $(self.node);
$el.kendoFlatColorPicker(options);
self.kendoWidget = $el.data("kendoFlatColorPicker");
} else {
var $el = $('<input />').appendTo(self.node);
$el.kendoColorPicker(options);
self.kendoWidget = $el.data("kendoColorPicker");
}
},
open: function() {
this.kendoWidget.open();
},
close: function() {
this.kendoWidget.close();
},
enable: function() {
this.kendoWidget.enable();
},
disable: function() {
this.kendoWidget.enable(false);
}
});
return KendoColorPicker;
});<|fim▁end|> | },
|
<|file_name|>pe572-idempotent-matrices.py<|end_file_name|><|fim▁begin|>#!/usr/bin/env python
# coding=utf-8
"""572. Idempotent matrices
https://projecteuler.net/problem=572
A matrix $M$ is called idempotent if $M^2 = M$.
Let $M$ be a three by three matrix: $M=\begin{pmatrix} a & b & c \\ d & e & f \\ g & h & i \end{pmatrix}$.
Let C(n) be the number of idempotent three by three matrices $M$ with integer
elements such that <|fim▁hole|>$ -n \le a,b,c,d,e,f,g,h,i \le n$.
C(1)=164 and C(2)=848.
Find C(200).
"""<|fim▁end|> | |
<|file_name|>mismatch.hpp<|end_file_name|><|fim▁begin|>// $Author: benine $
// $Date$
// $Log$
// Contains the mismatch class for afin
#ifndef MISMATCH_H
#define MISMATCH_H
//////////////////////////////////////////////\
// Mismatch Class: ////////////////////////////>
//////////////////////////////////////////////
//
// Mismatch object, contains all classes, methods, data, and data references necessary for processing mismatches for contig fusion
// There will be only one Process object needed per iteration of this program
class Mismatch{
private:
double score;
int length;
int index_i;
int index_j;
int end_i;
int end_j;
public:
Mismatch();
Mismatch( double score, int length, int index_i, int index_j, int end_i, int end_j );
// set mismatch score
void set_score( double score );
// set length
void set_length( int length );
// set index_i
void set_index_i( int index );
// set index_j
void set_index_j( int index );
// set index
void set_indices( int index_i, int index_j );
// set end_i
void set_end_i( int end_i );
// set end_j
void set_end_j( int end_j );
// return mismatch score
double get_score();
<|fim▁hole|>
// return index i
int get_index_i();
// return index j
int get_index_j();
// return end_i
int get_end_i();
// return end_j
int get_end_j();
};
#endif<|fim▁end|> | // return length
int get_length(); |
<|file_name|>specs.py<|end_file_name|><|fim▁begin|># -*- coding: utf-8 -*-
import re
import copy
import humanfriendly
import json
import jsonpickle
from lain_sdk.yaml.parser import ProcType, resource_instance_name
from .utils import get_system_volumes_from_etcd
class AppType:
Normal = 'app'
Service = 'service'
Resource = 'resource'
ResourceInstance = 'resource-instance'
class RestartPolicy:
Never = 0
Always = 1
OnFail = 2
class DependencyPolicy:
NamespaceLevel = 0
NodeLevel = 1
class Dependency:
PodName = ''
Policy = DependencyPolicy.NamespaceLevel
def clone(self):
d = Dependency()
d.PodName = self.PodName
d.Policy = self.Policy
return d
def equals(self, d):
return \
d.PodName == self.PodName and \
d.Policy == self.Policy
class ImSpec:
Name = ''
Namespace = ''
Version = 0
CreateAt = None
UpdateAt = None
class CloudVolumeSpec:
Type = ''
Dirs = []
def clone(self):
cv = CloudVolumeSpec()
cv.Type = self.Type
cv.Dirs = self.Dirs
return cv
def verify_params(self):
return \
isinstance(self.Type, str) and \
isinstance(self.Dirs, list)
def equals(self, cv):
if not isinstance(cv, CloudVolumeSpec):
return False
return \
cv.Type == self.Type and \
cv.Dirs == self.Dirs
class LogConfigSpec:
Type = ''
Config = {}
def clone(self):
lc = LogConfigSpec()
lc.Type = self.Type
lc.Config = copy.deepcopy(self.Config)
return lc
def verify_params(self):
return \
isinstance(self.Type, str) and \
isinstance(self.Config, dict)
def equals(self, s):
if not isinstance(s, LogConfigSpec):
return False
return \
s.Type == self.Type and \
s.Config == self.Config
class ContainerSpec(ImSpec):
Image = ''
Env = []
User = ''
WorkingDir = ''<|fim▁hole|> DnsSearch = []
Volumes = []
SystemVolumes = []
CloudVolumes = []
Command = None
Entrypoint = None
CpuLimit = 0
MemoryLimit = 0
Expose = 0
LogConfig = None
def clone(self):
s = ContainerSpec()
s.Name = self.Name
s.Namespace = self.Namespace
s.Version = self.Version
s.CreateAt = self.CreateAt
s.UpdateAt = self.UpdateAt
s.Image = self.Image
s.Env = copy.deepcopy(self.Env)
s.User = self.User
s.WorkingDir = self.WorkingDir
s.DnsSearch = copy.deepcopy(self.DnsSearch)
s.Volumes = copy.deepcopy(self.Volumes)
s.SystemVolumes = copy.deepcopy(self.SystemVolumes)
s.CloudVolumes = copy.deepcopy(self.CloudVolumes)
s.Command = copy.deepcopy(self.Command)
s.Entrypoint = copy.deepcopy(self.Entrypoint)
s.CpuLimit = self.CpuLimit
s.MemoryLimit = self.MemoryLimit
s.Expose = self.Expose
if isinstance(self.LogConfig, LogConfigSpec):
s.LogConfig = self.LogConfig.clone()
return s
def verify_params(self):
logconfig_flag = True if self.LogConfig is None else self.LogConfig.verify_params()
return \
self.Image != "" and \
self.CpuLimit >= 0 and \
self.MemoryLimit >= 0 and \
self.Expose >= 0 and \
logconfig_flag
def equals(self, s):
if not isinstance(s, ContainerSpec):
return False
if self.LogConfig is None and s.LogConfig is None:
logconfig_flag = True
else:
logconfig_flag = s.LogConfig.equals(self.LogConfig)
return \
s.Name == self.Name and \
s.Namespace == self.Namespace and \
s.CreateAt == self.CreateAt and \
s.UpdateAt == self.UpdateAt and \
s.Image == self.Image and \
s.Env == self.Env and \
s.User == self.User and \
s.WorkingDir == self.WorkingDir and \
s.DnsSearch == self.DnsSearch and \
s.Volumes == self.Volumes and \
s.SystemVolumes == self.SystemVolumes and \
s.CloudVolumes == self.CloudVolumes and \
s.Command == self.Command and \
s.Entrypoint == self.Entrypoint and \
s.CpuLimit == self.CpuLimit and \
s.MemoryLimit == self.MemoryLimit and \
s.Expose == self.Expose and \
logconfig_flag
def set_env(self, env_key, env_value):
for i in self.Env:
if re.match("%s\s*=" % env_key, i):
self.Env.remove(i)
self.Env.append("%s=%s" % (env_key, env_value))
class PodSpec(ImSpec):
Containers = []
Filters = []
Labels = {}
Dependencies = []
Annotation = ''
Stateful = False
SetupTime = 0
KillTimeout = 10
HealthConfig = {}
def clone(self):
s = PodSpec()
s.Name = self.Name
s.Namespace = self.Namespace
s.Version = self.Version
s.CreateAt = self.CreateAt
s.UpdateAt = self.UpdateAt
s.Containers = [c.clone() for c in self.Containers]
s.Labels = copy.deepcopy(self.Labels)
s.Filters = copy.deepcopy(self.Filters)
s.HealthConfig = copy.deepcopy(self.HealthConfig)
s.Dependencies = [d.clone() for d in self.Dependencies]
s.Annotation = self.Annotation
s.Stateful = self.Stateful
s.SetupTime = self.SetupTime
s.KillTimeout = self.KillTimeout
return s
def verify_params(self):
verify = \
self.Name != "" and \
self.Namespace != "" and \
isinstance(self.Stateful, bool) and \
len(self.Containers) > 0
if not verify:
return False
for c in self.Containers:
if isinstance(c, ContainerSpec) and c.verify_params():
continue
else:
return False
return True
def equals(self, s):
if not isinstance(s, PodSpec):
return False
if len(s.Containers) != len(self.Containers):
return False
for i in range(0, len(s.Containers)):
if not s.Containers[i].equals(self.Containers[i]):
return False
if len(s.Dependencies) != len(self.Dependencies):
return False
for i in range(0, len(s.Dependencies)):
if not s.Dependencies[i].equals(self.Dependencies[i]):
return False
return \
s.Name == self.Name and \
s.Namespace == self.Namespace and \
s.Annotation == self.Annotation and \
s.Stateful == self.Stateful and \
s.Filters == self.Filters and \
s.SetupTime == self.SetupTime and \
s.KillTimeout == self.KillTimeout and \
s.Labels == self.Labels and \
s.HealthConfig == self.HealthConfig
class PodGroupSpec(ImSpec):
Pod = None
NumInstances = 0
RestartPolicy = RestartPolicy.Never
def clone(self):
s = PodGroupSpec()
s.Name = self.Name
s.Namespace = self.Namespace
s.Pod = self.Pod.clone()
s.NumInstances = self.NumInstances
s.RestartPolicy = self.RestartPolicy
return s
def verify_params(self):
return \
self.Name != "" and \
self.Namespace != "" and \
self.NumInstances >= 0 and \
isinstance(self.Pod, PodSpec) and \
self.Pod.verify_params()
def equals(self, s):
return \
s.Name == self.Name and \
s.Namespace == self.Namespace and \
s.NumInstances == self.NumInstances and \
s.RestartPolicy == self.RestartPolicy and \
s.Pod.equals(self.Pod)
class AppSpec:
AppName = ''
PodGroups = []
def clone(self):
s = AppSpec()
s.AppName = self.AppName
s.PodGroups = [pg.clone() for pg in self.PodGroups]
return s
def verify_params(self):
verify = self.AppName != ""
if not verify:
return False
for pg in self.PodGroups:
if isinstance(pg, PodGroupSpec) and pg.verify_params():
continue
else:
return False
return True
def equals(self, s):
if not isinstance(s, AppSpec):
return False
if s.AppName != self.AppName:
return False
if len(s.PodGroups) != len(self.PodGroups):
return False
for i in range(0, len(s.PodGroups)):
if not s.PodGroups[i].equals(self.PodGroups[i]):
return False
return True
def render_app_spec(lain_config):
app = AppSpec()
app.AppName = lain_config.appname
app.PodGroups = [render_podgroup_spec(app.AppName, proc, lain_config.use_services, lain_config.use_resources)
for proc in lain_config.procs.values() if proc.type != ProcType.portal]
app.Portals = [render_pod_spec(app.AppName, proc, lain_config.use_services, lain_config.use_resources)
for proc in lain_config.procs.values() if proc.type == ProcType.portal]
return app
def render_podgroup_spec(app_name, proc, use_services, use_resources):
pod_group = PodGroupSpec()
pod_group.Name = "%s.%s.%s" % (
app_name, proc.type.name, proc.name
)
pod_group.Namespace = app_name
pod_group.NumInstances = proc.num_instances
pod_group.RestartPolicy = RestartPolicy.Always # TODO: allow user definition
pod_group.Pod = render_pod_spec(
app_name, proc, use_services, use_resources)
return pod_group
def render_pod_spec(app_name, proc, use_services, use_resources):
pod = PodSpec()
pod.Name = "%s.%s.%s" % (
app_name, proc.type.name, proc.name
)
pod.Namespace = app_name
pod.Containers = [render_container_spec(app_name, proc)]
pod.Dependencies = []
for service_app, service_list in use_services.iteritems():
for service in service_list:
pod.Dependencies.append(render_dependency(service_app, service))
if use_resources:
for resource_name, resource_props in use_resources.iteritems():
resource_service_names = resource_props['services']
for resouce_service_proc_name in resource_service_names:
pod.Dependencies.append(render_dependency(resource_instance_name(
resource_name, app_name), resouce_service_proc_name))
pod.Annotation = proc.annotation
pod.Stateful = proc.stateful
pod.SetupTime = proc.setup_time
pod.KillTimeout = proc.kill_timeout
pod.Labels = {} if not proc.labels else proc.labels
pod.Filters = [] if not proc.filters else proc.filters
pod.HealthConfig = {} if not proc.container_healthcheck else proc.container_healthcheck
return pod
def render_container_spec(app_name, proc):
c = ContainerSpec()
c.Image = proc.image
c.Env = copy.deepcopy(proc.env)
c.set_env("TZ", 'Asia/Shanghai')
c.User = '' if not hasattr(proc, 'user') else proc.user
c.WorkingDir = '' if not hasattr(proc, 'working_dir') else proc.working_dir
c.DnsSearch = [] if not hasattr(
proc, 'dns_search') else copy.deepcopy(proc.dns_search)
c.Volumes = copy.deepcopy(proc.volumes)
c.SystemVolumes = copy.deepcopy(
proc.system_volumes) + get_system_volumes_from_etcd(app_name)
c.CloudVolumes = render_cloud_volumes(proc.cloud_volumes)
c.Command = proc.cmd
c.Entrypoint = proc.entrypoint
c.CpuLimit = proc.cpu
c.MemoryLimit = humanfriendly.parse_size(proc.memory)
c.Expose = 0 if not proc.port else proc.port.keys()[0]
c.LogConfig = None
return c
def render_dependency(service_app, service):
from apis.models import App
d = Dependency()
d.PodName = "%s.portal.%s" % (
service_app,
App.get_portal_name_from_service_name(
App.get_or_none(service_app), service)
)
d.Policy = DependencyPolicy.NamespaceLevel # TODO: allow user definition
return d
def render_cloud_volumes(cloud_volumes):
volumes = []
for vol_type, vol_dirs in cloud_volumes.iteritems():
cv = CloudVolumeSpec()
cv.Type = vol_type
cv.Dirs = vol_dirs
volumes.append(cv)
return volumes
def json_of_spec(spec):
return json.loads(jsonpickle.encode(spec, unpicklable=False))
def render_podgroup_spec_from_json(spec_json):
pod_group = PodGroupSpec()
pod_group.Name = spec_json['Name']
pod_group.Namespace = spec_json['Namespace']
pod_group.NumInstances = spec_json['NumInstances']
pod_group.RestartPolicy = spec_json['RestartPolicy']
pod_group.Pod = render_pod_spec_from_json(spec_json['Pod'])
return pod_group
def render_pod_spec_from_json(spec_json):
pod = PodSpec()
pod.Name = spec_json['Name']
pod.Namespace = spec_json['Namespace']
containers = spec_json.get('Containers')
if not isinstance(containers, list):
containers = []
pod.Containers = [render_container_spec_from_json(
pod.Name, c) for c in containers]
dependencies = spec_json.get('Dependencies')
if not isinstance(dependencies, list):
dependencies = []
pod.Dependencies = [render_dependency_from_json(d) for d in dependencies]
pod.Annotation = spec_json['Annotation']
pod.Stateful = spec_json.get('Stateful', False)
pod.SetupTime = spec_json.get('SetupTime', 0)
pod.KillTimeout = spec_json.get('KillTimeout', 10)
pod.Version = spec_json['Version']
filters = spec_json.get('Filters')
if not isinstance(filters, list):
filters = []
pod.Filters = copy.deepcopy(filters)
return pod
def render_container_spec_from_json(app_name, spec_json):
c = ContainerSpec()
c.Image = spec_json['Image']
c.Env = copy.deepcopy(spec_json['Env'])
c.User = spec_json['User']
c.WorkingDir = spec_json['WorkingDir']
c.DnsSearch = copy.deepcopy(
spec_json['DnsSearch']) if spec_json.get('DnsSearch') else []
c.Volumes = copy.deepcopy(spec_json['Volumes'])
c.SystemVolumes = copy.deepcopy(spec_json['SystemVolumes'])
cloud_volumes = spec_json.get('CloudVolumes')
if not isinstance(cloud_volumes, list):
cloud_volumes = []
c.CloudVolumes = [render_cloud_volumes_spec_from_json(
cv) for cv in cloud_volumes]
c.Command = spec_json.get('Command')
c.Entrypoint = spec_json.get('Entrypoint')
c.CpuLimit = spec_json['CpuLimit']
c.MemoryLimit = spec_json['MemoryLimit']
c.Expose = spec_json['Expose'] if spec_json['Expose'] else 0
json_logconfig = spec_json.get('LogConfig', {})
c.LogConfig = LogConfigSpec()
c.LogConfig.Type = json_logconfig.get('Type', '')
c.LogConfig.Config = copy.deepcopy(
json_logconfig['Config']) if json_logconfig.get('Config') else {}
return c
def render_dependency_from_json(spec_json):
d = Dependency()
d.PodName = spec_json['PodName']
d.Policy = spec_json['Policy']
return d
def render_cloud_volumes_spec_from_json(spec_json):
cv = CloudVolumeSpec()
cv.Type = spec_json['Type']
cv.Dirs = spec_json['Dirs']
return cv<|fim▁end|> | |
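# Editor's usage sketch: the typical render pipeline exposed by this module.
# `lain_config` is assumed to be a parsed lain_sdk config object exposing
# `appname`, `procs`, `use_services` and `use_resources`, as consumed above.
def render_and_serialize(lain_config):
    app_spec = render_app_spec(lain_config)
    assert app_spec.verify_params()
    return json_of_spec(app_spec)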
<|file_name|>render.rs<|end_file_name|><|fim▁begin|>use std::{cmp, fmt, io};
#[cfg(feature = "termcolor")]
use termcolor::{ColorSpec, WriteColor};
use crate::{Doc, DocPtr};
/// Trait representing the operations necessary to render a document
pub trait Render {
type Error;
fn write_str(&mut self, s: &str) -> Result<usize, Self::Error>;
fn write_str_all(&mut self, mut s: &str) -> Result<(), Self::Error> {
while !s.is_empty() {
let count = self.write_str(s)?;
s = &s[count..];
}
Ok(())
}
fn fail_doc(&self) -> Self::Error;
}
/// Writes to something implementing `std::io::Write`
pub struct IoWrite<W> {
upstream: W,
}
impl<W> IoWrite<W> {
pub fn new(upstream: W) -> IoWrite<W> {
IoWrite { upstream }
}
}
impl<W> Render for IoWrite<W>
where
W: io::Write,
{
type Error = io::Error;
fn write_str(&mut self, s: &str) -> io::Result<usize> {
self.upstream.write(s.as_bytes())
}
fn write_str_all(&mut self, s: &str) -> io::Result<()> {
self.upstream.write_all(s.as_bytes())
}
fn fail_doc(&self) -> Self::Error {
io::Error::new(io::ErrorKind::Other, "Document failed to render")
}
}
/// Writes to something implementing `std::fmt::Write`
pub struct FmtWrite<W> {
upstream: W,
}
impl<W> FmtWrite<W> {
pub fn new(upstream: W) -> FmtWrite<W> {
FmtWrite { upstream }
}
}
impl<W> Render for FmtWrite<W>
where
W: fmt::Write,
{
type Error = fmt::Error;
fn write_str(&mut self, s: &str) -> Result<usize, fmt::Error> {
self.write_str_all(s).map(|_| s.len())
}
fn write_str_all(&mut self, s: &str) -> fmt::Result {
self.upstream.write_str(s)
}
fn fail_doc(&self) -> Self::Error {
fmt::Error
}
}
/// Trait representing the operations necessary to write an annotated document.
pub trait RenderAnnotated<'a, A>: Render {
fn push_annotation(&mut self, annotation: &'a A) -> Result<(), Self::Error>;
fn pop_annotation(&mut self) -> Result<(), Self::Error>;
}
impl<A, W> RenderAnnotated<'_, A> for IoWrite<W>
where
W: io::Write,
{
fn push_annotation(&mut self, _: &A) -> Result<(), Self::Error> {
Ok(())
}
fn pop_annotation(&mut self) -> Result<(), Self::Error> {
Ok(())
}
}
impl<A, W> RenderAnnotated<'_, A> for FmtWrite<W>
where
W: fmt::Write,
{
fn push_annotation(&mut self, _: &A) -> Result<(), Self::Error> {
Ok(())
}
fn pop_annotation(&mut self) -> Result<(), Self::Error> {
Ok(())
}
}
#[cfg(feature = "termcolor")]
pub struct TermColored<W> {
color_stack: Vec<ColorSpec>,
upstream: W,
}
#[cfg(feature = "termcolor")]
impl<W> TermColored<W> {
pub fn new(upstream: W) -> TermColored<W> {
TermColored {
color_stack: Vec::new(),
upstream,
}
}
}
#[cfg(feature = "termcolor")]
impl<W> Render for TermColored<W>
where
W: io::Write,
{
type Error = io::Error;
fn write_str(&mut self, s: &str) -> io::Result<usize> {
self.upstream.write(s.as_bytes())
}
fn write_str_all(&mut self, s: &str) -> io::Result<()> {
self.upstream.write_all(s.as_bytes())
}
fn fail_doc(&self) -> Self::Error {
io::Error::new(io::ErrorKind::Other, "Document failed to render")
}
}
#[cfg(feature = "termcolor")]
impl<W> RenderAnnotated<'_, ColorSpec> for TermColored<W>
where
W: WriteColor,
{
fn push_annotation(&mut self, color: &ColorSpec) -> Result<(), Self::Error> {
self.color_stack.push(color.clone());
self.upstream.set_color(color)
}
fn pop_annotation(&mut self) -> Result<(), Self::Error> {
self.color_stack.pop();
match self.color_stack.last() {
Some(previous) => self.upstream.set_color(previous),
None => self.upstream.reset(),
}
}
}
enum Annotation<'a, A> {
Push(&'a A),
Pop,
}
struct BufferWrite<'a, A> {
buffer: String,
annotations: Vec<(usize, Annotation<'a, A>)>,
}
impl<'a, A> BufferWrite<'a, A> {
fn new() -> Self {
BufferWrite {
buffer: String::new(),
annotations: Vec::new(),
}
}
fn render<W>(&mut self, render: &mut W) -> Result<(), W::Error>
where
W: RenderAnnotated<'a, A>,
W: ?Sized,
{
let mut start = 0;
for (end, annotation) in &self.annotations {
let s = &self.buffer[start..*end];
if !s.is_empty() {
render.write_str_all(s)?;
}
start = *end;
match annotation {
Annotation::Push(a) => render.push_annotation(a)?,
Annotation::Pop => render.pop_annotation()?,
}
}
let s = &self.buffer[start..];
if !s.is_empty() {
render.write_str_all(s)?;
}
Ok(())
}
}
impl<A> Render for BufferWrite<'_, A> {
type Error = ();
fn write_str(&mut self, s: &str) -> Result<usize, Self::Error> {
self.buffer.push_str(s);
Ok(s.len())
}
fn write_str_all(&mut self, s: &str) -> Result<(), Self::Error> {
self.buffer.push_str(s);
Ok(())
}
fn fail_doc(&self) -> Self::Error {}
}
impl<'a, A> RenderAnnotated<'a, A> for BufferWrite<'a, A> {
fn push_annotation(&mut self, a: &'a A) -> Result<(), Self::Error> {
self.annotations
.push((self.buffer.len(), Annotation::Push(a)));
Ok(())
}
fn pop_annotation(&mut self) -> Result<(), Self::Error> {
self.annotations.push((self.buffer.len(), Annotation::Pop));
Ok(())
}
}
macro_rules! make_spaces {
() => { "" };
($s: tt $($t: tt)*) => { concat!(" ", make_spaces!($($t)*)) };
}
pub(crate) const SPACES: &str = make_spaces!(,,,,,,,,,,);
fn append_docs2<'a, 'd, T, A>(
ldoc: &'d Doc<'a, T, A>,
rdoc: &'d Doc<'a, T, A>,
mut consumer: impl FnMut(&'d Doc<'a, T, A>),
) -> &'d Doc<'a, T, A>
where
T: DocPtr<'a, A>,
{
let d = append_docs(rdoc, &mut consumer);
consumer(d);
append_docs(ldoc, &mut consumer)
}
fn append_docs<'a, 'd, T, A>(
mut doc: &'d Doc<'a, T, A>,
consumer: &mut impl FnMut(&'d Doc<'a, T, A>),
) -> &'d Doc<'a, T, A>
where
T: DocPtr<'a, A>,
{
loop {
// Since appended documents often appear in sequence on the left side, we
// gain a slight performance increase by batching these pushes (avoiding
// pushing and then immediately popping `Append` documents)
match doc {
Doc::Append(l, r) => {
let d = append_docs(r, consumer);
consumer(d);
doc = l;
}
_ => return doc,
}
}
}
pub fn best<'a, W, T, A>(doc: &Doc<'a, T, A>, width: usize, out: &mut W) -> Result<(), W::Error>
where
T: DocPtr<'a, A> + 'a,
for<'b> W: RenderAnnotated<'b, A>,
W: ?Sized,
{
let temp_arena = &typed_arena::Arena::new();
Best {
pos: 0,
bcmds: vec![(0, Mode::Break, doc)],
fcmds: vec![],
annotation_levels: vec![],
width,
temp_arena,
}
.best(0, out)?;
Ok(())
}
#[derive(Clone, Copy, Debug, Eq, Ord, PartialEq, PartialOrd)]
enum Mode {
Break,
Flat,
}
type Cmd<'d, 'a, T, A> = (usize, Mode, &'d Doc<'a, T, A>);
fn write_newline<W>(ind: usize, out: &mut W) -> Result<(), W::Error>
where
W: ?Sized + Render,
{
out.write_str_all("\n")?;
write_spaces(ind, out)
}
fn write_spaces<W>(spaces: usize, out: &mut W) -> Result<(), W::Error>
where
W: ?Sized + Render,
{
let mut inserted = 0;
while inserted < spaces {
let insert = cmp::min(SPACES.len(), spaces - inserted);
inserted += out.write_str(&SPACES[..insert])?;
}
Ok(())
}
struct Best<'d, 'a, T, A>
where
T: DocPtr<'a, A> + 'a,
{
pos: usize,
bcmds: Vec<Cmd<'d, 'a, T, A>>,
fcmds: Vec<&'d Doc<'a, T, A>>,
annotation_levels: Vec<usize>,
width: usize,
temp_arena: &'d typed_arena::Arena<T>,
}
impl<'d, 'a, T, A> Best<'d, 'a, T, A>
where
T: DocPtr<'a, A> + 'a,
{
fn fitting(&mut self, next: &'d Doc<'a, T, A>, mut pos: usize, ind: usize) -> bool
where
T: DocPtr<'a, A>,
{
let mut bidx = self.bcmds.len();
self.fcmds.clear(); // clear from previous calls from best
self.fcmds.push(next);
let mut mode = Mode::Flat;
loop {
let mut doc = match self.fcmds.pop() {
None => {
if bidx == 0 {
// All commands have been processed
return true;
} else {
bidx -= 1;
mode = Mode::Break;
self.bcmds[bidx].2
}
}
Some(cmd) => cmd,
};
loop {
match *doc {
Doc::Nil => {}
Doc::Append(ref ldoc, ref rdoc) => {
doc = append_docs2(ldoc, rdoc, |doc| self.fcmds.push(doc));
continue;
}
// Newlines inside the group makes it not fit, but those outside lets it
// fit on the current line
Doc::Hardline => return mode == Mode::Break,
Doc::RenderLen(len, _) => {
pos += len;
if pos > self.width {
return false;
}
}
Doc::BorrowedText(ref str) => {
pos += str.len();
if pos > self.width {
return false;
}
}
Doc::OwnedText(ref str) => {
pos += str.len();
if pos > self.width {
return false;
}
}
Doc::SmallText(ref str) => {
pos += str.len();
if pos > self.width {
return false;
}
}
Doc::FlatAlt(ref b, ref f) => {
doc = match mode {
Mode::Break => b,
Mode::Flat => f,
};
continue;
}
Doc::Column(ref f) => {
doc = self.temp_arena.alloc(f(pos));
continue;
}
Doc::Nesting(ref f) => {
doc = self.temp_arena.alloc(f(ind));
continue;
}
Doc::Nest(_, ref next)
| Doc::Group(ref next)
| Doc::Annotated(_, ref next)
| Doc::Union(_, ref next) => {
doc = next;
continue;
}
Doc::Fail => return false,
}
break;
}
}
}
fn best<W>(&mut self, top: usize, out: &mut W) -> Result<bool, W::Error>
where
W: RenderAnnotated<'d, A>,
W: ?Sized,
{
let mut fits = true;
while top < self.bcmds.len() {
let mut cmd = self.bcmds.pop().unwrap();
loop {
let (ind, mode, doc) = cmd;
match *doc {
Doc::Nil => {}
Doc::Append(ref ldoc, ref rdoc) => {
cmd.2 = append_docs2(ldoc, rdoc, |doc| self.bcmds.push((ind, mode, doc)));
continue;
}
Doc::FlatAlt(ref b, ref f) => {
cmd.2 = match mode {
Mode::Break => b,
Mode::Flat => f,
};
continue;
}
Doc::Group(ref doc) => {
if let Mode::Break = mode {
if self.fitting(doc, self.pos, ind) {
cmd.1 = Mode::Flat;
}
}
cmd.2 = doc;
continue;
}
Doc::Nest(off, ref doc) => {
cmd = ((ind as isize).saturating_add(off) as usize, mode, doc);
continue;
}
Doc::Hardline => {
write_newline(ind, out)?;
self.pos = ind;
}
Doc::RenderLen(len, ref doc) => match **doc {
Doc::OwnedText(ref s) => {
out.write_str_all(s)?;
self.pos += len;
fits &= self.pos <= self.width;
}
Doc::BorrowedText(ref s) => {
out.write_str_all(s)?;
self.pos += len;
fits &= self.pos <= self.width;
}
Doc::SmallText(ref s) => {
out.write_str_all(s)?;
self.pos += len;<|fim▁hole|> fits &= self.pos <= self.width;
}
_ => unreachable!(),
},
Doc::OwnedText(ref s) => {
out.write_str_all(s)?;
self.pos += s.len();
fits &= self.pos <= self.width;
}
Doc::BorrowedText(ref s) => {
out.write_str_all(s)?;
self.pos += s.len();
fits &= self.pos <= self.width;
}
Doc::SmallText(ref s) => {
out.write_str_all(s)?;
self.pos += s.len();
fits &= self.pos <= self.width;
}
Doc::Annotated(ref ann, ref doc) => {
out.push_annotation(ann)?;
self.annotation_levels.push(self.bcmds.len());
cmd.2 = doc;
continue;
}
Doc::Union(ref l, ref r) => {
let pos = self.pos;
let annotation_levels = self.annotation_levels.len();
let bcmds = self.bcmds.len();
self.bcmds.push((ind, mode, l));
let mut buffer = BufferWrite::new();
match self.best(bcmds, &mut buffer) {
Ok(true) => buffer.render(out)?,
Ok(false) | Err(()) => {
self.pos = pos;
self.bcmds.truncate(bcmds);
self.annotation_levels.truncate(annotation_levels);
cmd.2 = r;
continue;
}
}
}
Doc::Column(ref f) => {
cmd.2 = self.temp_arena.alloc(f(self.pos));
continue;
}
Doc::Nesting(ref f) => {
cmd.2 = self.temp_arena.alloc(f(ind));
continue;
}
Doc::Fail => return Err(out.fail_doc()),
}
break;
}
while self.annotation_levels.last() == Some(&self.bcmds.len()) {
self.annotation_levels.pop();
out.pop_annotation()?;
}
}
Ok(fits)
}
}<|fim▁end|> | |
<|file_name|>find_moves_faster.rs<|end_file_name|><|fim▁begin|>/// This is an attempt to port the Edax move generation function to Rust.
#[allow(non_snake_case)]
#[cfg(any(target_arch = "x86", target_arch = "x86_64"))]
pub fn fast_find_moves(P : u64, O : u64) -> u64 {
const mask_7e : u64 = 0x7e7e7e7e7e7e7e7eu64;
let mut moves : u64 = 0;
unsafe{
asm!("
movl $3, %esi
movq $1, %mm7
movl $4, %edi
movq $2, %mm6
movl %esi, %eax
movq %mm7, %mm0
movq $5, %mm5
shrl $$1, %eax
psrlq $$8, %mm0
andl $$2122219134, %edi
pand %mm6, %mm5
andl %edi, %eax
pand %mm6, %mm0
movl %eax, %edx
movq %mm0, %mm1
shrl $$1, %eax
psrlq $$8, %mm0
movl %edi, %ecx
movq %mm6, %mm3
andl %edi, %eax<|fim▁hole|>
psrlq $$8, %mm3
orl %edx, %eax
por %mm1, %mm0
andl %edi, %ecx
pand %mm6, %mm3
movl %eax, %edx
movq %mm0, %mm4
shrl $$2, %eax
psrlq $$16, %mm0
andl %ecx, %eax
pand %mm3, %mm0
orl %eax, %edx
por %mm0, %mm4
shrl $$2, %eax
psrlq $$16, %mm0
andl %ecx, %eax
pand %mm3, %mm0
orl %edx, %eax
por %mm0, %mm4
shrl $$1, %eax
psrlq $$8, %mm4
movq %mm7, %mm0
addl %esi, %esi
psllq $$8, %mm0
andl %edi, %esi
pand %mm6, %mm0
movl %esi, %edx
movq %mm0, %mm1
addl %esi, %esi
psllq $$8, %mm0
andl %edi, %esi
pand %mm6, %mm0
orl %esi, %edx
por %mm1, %mm0
addl %ecx, %ecx
psllq $$8, %mm3
movq %mm0, %mm1
leal (,%edx,4), %esi
psllq $$16, %mm0
andl %ecx, %esi
pand %mm3, %mm0
orl %esi, %edx
por %mm0, %mm1
shll $$2, %esi
psllq $$16, %mm0
andl %ecx, %esi
pand %mm3, %mm0
orl %edx, %esi
por %mm1, %mm0
addl %esi, %esi
psllq $$8, %mm0
orl %eax, %esi
por %mm0, %mm4
movq %mm7, %mm0
movd %esi, %mm1
psrlq $$7, %mm0
psllq $$32, %mm1
pand %mm5, %mm0
por %mm1, %mm4
movq %mm0, %mm1
psrlq $$7, %mm0
pand %mm5, %mm0
movq %mm5, %mm3
por %mm1, %mm0
psrlq $$7, %mm3
movq %mm0, %mm1
pand %mm5, %mm3
psrlq $$14, %mm0
pand %mm3, %mm0
movl $1, %esi
por %mm0, %mm1
movl $2, %edi
psrlq $$14, %mm0
andl $$2122219134, %edi
pand %mm3, %mm0
movl %edi, %ecx
por %mm1, %mm0
shrl $$1, %ecx
psrlq $$7, %mm0
andl %edi, %ecx
por %mm0, %mm4
movl %esi, %eax
movq %mm7, %mm0
shrl $$1, %eax
psllq $$7, %mm0
andl %edi, %eax
pand %mm5, %mm0
movl %eax, %edx
movq %mm0, %mm1
shrl $$1, %eax
psllq $$7, %mm0
andl %edi, %eax
pand %mm5, %mm0
orl %edx, %eax
por %mm1, %mm0
psllq $$7, %mm3
movl %eax, %edx
movq %mm0, %mm1
shrl $$2, %eax
psllq $$14, %mm0
andl %ecx, %eax
pand %mm3, %mm0
orl %eax, %edx
por %mm0, %mm1
shrl $$2, %eax
psllq $$14, %mm0
andl %ecx, %eax
pand %mm3, %mm0
orl %edx, %eax
por %mm1, %mm0
shrl $$1, %eax
psllq $$7, %mm0
por %mm0, %mm4
movq %mm7, %mm0
addl %esi, %esi
psrlq $$9, %mm0
andl %edi, %esi
pand %mm5, %mm0
movl %esi, %edx
movq %mm0, %mm1
addl %esi, %esi
psrlq $$9, %mm0
andl %edi, %esi
pand %mm5, %mm0
movq %mm5, %mm3
orl %esi, %edx
por %mm1, %mm0
psrlq $$9, %mm3
movq %mm0, %mm1
addl %ecx, %ecx
pand %mm5, %mm3
leal (,%edx,4), %esi
psrlq $$18, %mm0
andl %ecx, %esi
pand %mm3, %mm0
orl %esi, %edx
por %mm0, %mm1
shll $$2, %esi
psrlq $$18, %mm0
andl %ecx, %esi
pand %mm3, %mm0
orl %edx, %esi
por %mm1, %mm0
addl %esi, %esi
psrlq $$9, %mm0
orl %eax, %esi
por %mm0, %mm4
movq %mm7, %mm0
movd %esi, %mm1
psllq $$9, %mm0
por %mm1, %mm4
pand %mm5, %mm0
movq %mm0, %mm1
psllq $$9, %mm0
pand %mm5, %mm0
por %mm1, %mm0
psllq $$9, %mm3
movq %mm0, %mm1
psllq $$18, %mm0
pand %mm3, %mm0
por %mm0, %mm1
psllq $$18, %mm0
pand %mm3, %mm0
por %mm1, %mm0
psllq $$9, %mm0
por %mm0, %mm4
por %mm6, %mm7
pandn %mm4, %mm7
movq %mm7, $0
emms
"
: "=r" (moves) : "m" (P), "m" (O), "m" ((P >> 32) as u32), "m" ((O >> 32) as u32), "m" (mask_7e) : "eax", "edx", "ecx", "esi", "edi" : "volatile");
}
moves
}<|fim▁end|> | pand %mm6, %mm0
shrl $1, %ecx |
<|file_name|>mod.rs<|end_file_name|><|fim▁begin|>use super::{Block, ListItem, Span};
trait JoinHelper<I>
where
I: Iterator,
{
fn j(self, sep: &'static str) -> String;
}
impl<I> JoinHelper<I> for I
where
I: Iterator<Item = String>,
{
fn j(self, sep: &'static str) -> String {
self.collect::<Vec<String>>().join(sep)
}
}
fn gen_block(b: Block) -> String {
use Block::*;
match b {
Header(s, level) => format!(
"{} {}",
::std::iter::repeat("#".to_string()).take(level).j(""),
generate_from_spans(s)
),
Paragraph(s) => generate_from_spans(s),
Blockquote(bb) => generate(bb).lines().map(|x| format!("> {}", x)).j("\n"),
CodeBlock(lang, x) => {
if lang.is_none() {
x.lines().map(|x| format!(" {}", x)).j("\n")
} else {
format!("```{}\n{}```", lang.unwrap(), x)
}
}
// [TODO]: Ordered list generation - 2017-12-10 10:12pm
OrderedList(_x, _num_type) => unimplemented!("Generate ordered list"),
UnorderedList(x) => generate_from_li(x),
LinkReference(id, url, None) => format!("[{}]: {}", id, url),
LinkReference(id, url, Some(title)) => format!("[{}]: {} \"{}\"", id, url, title),
Raw(x) => x,
Hr => "===".to_owned(),
}
}
fn gen_span(s: Span) -> String {
use Span::*;
match s {
Break => " \n".to_string(),
Text(x) => x,
Literal(x) => format!("\\{}", x),
Code(x) => format!("`{}`", x),
Link(a, b, None) => format!("[{}]({})", generate_from_spans(a), b),
Link(a, b, Some(c)) => format!("[{}]({} \"{}\")", generate_from_spans(a), b, c),
RefLink(_, _, raw) => raw,
Image(a, b, None) => format!("![{}]({})", a, b),
Image(a, b, Some(c)) => format!("![{}]({} \"{}\")", a, b, c),
Emphasis(x) => format!("*{}*", generate_from_spans(x)),
Strong(x) => format!("**{}**", generate_from_spans(x)),
}
}
fn generate_from_li(data: Vec<ListItem>) -> String {
use ListItem::*;
data.into_iter()
.map(|x| {<|fim▁hole|> "* {}",
match x {
Simple(x) => generate_from_spans(x),
Paragraph(x) => format!(
"{}\n",
generate(x)
.lines()
.enumerate()
.map(|(i, x)| if i == 0 {
x.to_string()
} else {
format!(" {}", x)
})
.j("\n")
),
}
)
})
.j("\n")
}
fn generate_from_spans(data: Vec<Span>) -> String {
data.into_iter().map(gen_span).j("")
}
pub fn generate(data: Vec<Block>) -> String {
data.into_iter().map(gen_block).j("\n\n")
}<|fim▁end|> | format!( |
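// Editor's usage sketch: the variant shapes are inferred from the match arms
// above (`Header(Vec<Span>, usize)`, `Paragraph(Vec<Span>)`); the enum
// definitions themselves live in the parent module, so this is an assumption,
// not part of the original file.
#[cfg(test)]
mod generate_sketch {
    use super::*;

    #[test]
    fn renders_a_tiny_document() {
        let doc = vec![
            Block::Header(vec![Span::Text("Title".to_string())], 2),
            Block::Paragraph(vec![Span::Text("Hello".to_string())]),
        ];
        assert_eq!(generate(doc), "## Title\n\nHello");
    }
}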
<|file_name|>addAudios.component.ts<|end_file_name|><|fim▁begin|>import { Component, OnInit, ViewChild, Inject } from '@angular/core';
import { FormGroup, FormBuilder, Validators } from '@angular/forms';
import { MatDialogRef, MAT_DIALOG_DATA } from '@angular/material';
import { environment } from '../../../../../../environments/environment';
import { AudioDataService } from '../../../../../../app/core/services/user/audioData';
@Component({
selector: 'app-addAudios',
templateUrl: './addAudios.component.html'
})
export class MainNewPublicationAddAudiosComponent implements OnInit {
public sessionData: any;
public translations: any;
public environment: any = environment;
public noData: boolean;
public loadingData: boolean;
public loadMoreData: boolean;
public loadingMoreData: boolean;
constructor(
@Inject(MAT_DIALOG_DATA) public data: any,
public dialogRef: MatDialogRef<MainNewPublicationAddAudiosComponent>,
private audioDataService: AudioDataService
) { }
ngOnInit() {
this.sessionData = this.data.sessionData;
this.translations = this.data.translations;
this.data.rowsDefault = 0;
this.data.list = this.data.list ? this.data.list : [];
this.data.arrayAddedItems = [];
this.data.arrayAddedItems = Object.assign([], this.data.array);
this.data.arrayAddedItemsCopy = [];
this.data.arrayAddedItemsCopy = Object.assign([], this.data.array);
if (this.data.list.length > 0) {
for (let i in this.data.list)
this.data.list[i].selected = false;
for (let i in this.data.list)
for (let e in this.data.array)
if (this.data.list[i].id == this.data.array[e].id)
this.data.list[i].selected = true;
this.data.list = this.data.list ? this.data.list : [];
this.data.rowsDefault = this.data.rows;
this.loadMoreData = this.data.loadMore;
} else {
this.default(this.data.sessionData.current.username);
}
}
// Default
default(user) {
this.data.rowsDefault = 0;
this.loadingData = true;
this.data.list = [];
this.noData = false;
this.loadMoreData = false;
this.loadingMoreData = false;
let data = {
user: user,
type: 'default',
rows: this.data.rowsDefault,
cuantity: environment.cuantity
}
this.audioDataService.default(data)
.subscribe(res => {
setTimeout(() => {
this.loadingData = false;
if (res.length == 0) {
this.noData = true;
} else {
this.loadMoreData = (res.length < environment.cuantity) ? false : true;
this.noData = false;
this.data.list = res;
}
}, 600);
});
}
// Load more
loadMore() {
this.loadingMoreData = true;
this.data.rowsDefault++;
let data = {
user: this.sessionData.current.id,
type: 'default',
rows: this.data.rowsDefault,
cuantity: environment.cuantity
}
this.audioDataService.default(data)
.subscribe(res => {
setTimeout(() => {
this.loadMoreData = (res.length < environment.cuantity) ? false : true;
this.loadingMoreData = false;
for (let i in res)
this.data.list.push(res[i]);
}, 600);
});
}
// Select/unselect
toggleItem(item){
if (item.selected) {
for (let i in this.data.arrayAddedItems)
if (this.data.arrayAddedItems[i].id == item.id)
this.data.arrayAddedItems.splice(i, 1);
item.selected = false;
} else {
this.data.arrayAddedItems.push(item);
item.selected = true;
}
}
// Save
submit(event: Event){
let data = {
array: this.data.arrayAddedItems,
list: this.data.list,
rows: this.data.rowsDefault,
loadMore: this.loadMoreData
}
this.dialogRef.close(data);
}
// Cancel
close(event: Event){
let data = {
array: this.data.arrayAddedItemsCopy,
list: this.data.list,
rows: this.data.rowsDefault,
loadMore: this.loadMoreData
}
this.dialogRef.close(data);<|fim▁hole|><|fim▁end|> | }
} |
<|file_name|>vtopo.py<|end_file_name|><|fim▁begin|>'''
Virtual topology
'''
class VTopo(object):
'''
Attributes:
- switches : virtual switch list
- links : virtual links
'''
def __init__(self):
super(VTopo, self).__init__()
self.isStart = False
self.switches = []
self.links = []
def addSwitch(self, vswitch):
'''
Add new virtual switch
Mapping between physical and virtual automatically
'''
pass
def addLink(self, vlink):
'''
Add new virtual link
Mapping between physical and virtual automatically
'''<|fim▁hole|> def getVPSwitchMapping(self, vswitch):
'''
get virtual to physical mapping
'''
pass
def getPVSwitchMapping(self, pswitch):
'''
get physical to virtual mapping
'''
pass
def start(self):
pass<|fim▁end|> | pass
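# Editor's usage sketch: how the stub API above is intended to be driven once
# the mapping logic is filled in. VSwitch/VLink construction is an assumption;
# only VTopo itself is defined in this file.
def build_example_topology(vswitch, vlink):
    topo = VTopo()
    topo.addSwitch(vswitch)  # would also create the physical-virtual mapping
    topo.addLink(vlink)      # would also map the link onto physical paths
    topo.start()
    return topo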
|
<|file_name|>img_to_queue.py<|end_file_name|><|fim▁begin|>import socket
import random
from PIL import Image
import json
import sys, getopt
import math
import pika
# Screen VARS
offset_x = 80
offset_y = 24
screen_width = 240
screen_height = 240
# Internal options
queueAddress = ''
fileName = ''
workers = 36
Matrix = []
def main(argv):
global fileName, workers
inputFile = ''
try:
opts, args = getopt.getopt(argv, "hi:w:", ["file=", "workers="])
except getopt.GetoptError:
print('img_to_queue.py -i <inputfile> -w workers')
sys.exit(2)
for opt, arg in opts:
if opt == '-h':
print('img_to_queue.py -i <inputfile> -w workers')
sys.exit()
elif opt in ("-i", "--file"):
fileName = arg
print("File to process: " + fileName)
elif opt in ("-w", "--workers"):
workers = int(arg)
if (math.sqrt(float(workers)) - int(math.sqrt(float(workers))) > 0):
print('The square root of the number of workers is not a whole number. GTFO!')
sys.exit()
print("Amount of available workers: " + str(workers))
pompImage()
def addPixelToWorkFile(x, y, r, g, b, index_x, index_y, Matrix):
#print("Current index x:" + str(index_x) + " y: " + str(index_y))
Matrix[index_x][index_y].append({'x': x, 'y': y, 'rgb': "%0.2X" % r + '' + "%0.2X" % g + '' + "%0.2X" % b})
def pompImage():
print("Processiong image to JSON")
im = Image.open(fileName).convert('RGB')
im.thumbnail((240, 240), Image.ANTIALIAS)
_, _, width, height = im.getbbox()
# slice the image into a sqrt(workers) x sqrt(workers) grid of cells (x/y indices start at 0)
slice_size = int(screen_width / int(math.sqrt(workers)))
amount_of_keys = int(screen_width / slice_size)
print(amount_of_keys)
w, h = amount_of_keys, amount_of_keys
Matrix = [[[] for x in range(w)] for y in range(h)]
# workFile = [[0 for x in range(amount_of_keys)] for y in range(amount_of_keys)]
for x in range(width):
index_x = int((x / slice_size))
for y in range(height):
r, g, b = im.getpixel((x, y))
index_y = int((y / slice_size))<|fim▁hole|> addPixelToWorkFile(x + offset_x, y + offset_y, r, g, b, index_x, index_y, Matrix)
# print("Current index x:"+str(index_x)+" y: "+str(index_y)+" WORKER:"+str(index_y*index_x))
sendToQueue(Matrix)
def sendToQueue(arrayOfWorkers):
connection = pika.BlockingConnection(pika.ConnectionParameters(host='localhost',
credentials=pika.PlainCredentials(username='pomper',
password='pomper')))
channel = connection.channel()
channel.queue_declare(queue='pomper', durable=False,)
channel.queue_purge(queue='pomper')
for worker in arrayOfWorkers:
for pixels in worker:
channel.basic_publish(exchange='',
routing_key='pomper',
body=json.dumps(pixels))
if __name__ == "__main__":
main(sys.argv[1:])<|fim▁end|> | |
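# Editor's consumer sketch: the worker side of the queue filled by
# sendToQueue() above. It assumes the same local broker, the pomper/pomper
# credentials, the 'pomper' queue, and pika's pre-1.0 basic_consume signature
# (matching the publisher code in this file).
def consume():
    connection = pika.BlockingConnection(pika.ConnectionParameters(
        host='localhost',
        credentials=pika.PlainCredentials(username='pomper', password='pomper')))
    channel = connection.channel()
    channel.queue_declare(queue='pomper', durable=False)

    def on_message(ch, method, properties, body):
        pixels = json.loads(body)  # one grid cell: a list of {x, y, rgb} dicts
        for pixel in pixels:
            print(pixel['x'], pixel['y'], pixel['rgb'])

    channel.basic_consume(on_message, queue='pomper', no_ack=True)
    channel.start_consuming()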
<|file_name|>underline.icon.js<|end_file_name|><|fim▁begin|>(function ()<|fim▁hole|> window.AgidoMockups = window.AgidoMockups || {};
AgidoMockups.icons = AgidoMockups.icons || {};
AgidoMockups.icons.underline = new Kinetic.Group({name: "underlineIcon", width: 18, height: 20});
AgidoMockups.icons.underline.add(new Kinetic.Text({text: "U", fill: '#000', fontSize: 20, fontStyle: 'normal'}));
AgidoMockups.icons.underline.add(new Kinetic.Line({
points: [1, 19, 13, 19],
stroke: '#000',
strokeWidth: 1
}));
})();<|fim▁end|> | { |
<|file_name|>private_client_test.go<|end_file_name|><|fim▁begin|>package clients
import (
"gopkg.in/jarcoal/httpmock.v1"
"reflect"
"testing"
"time"
)
//
//
//
func Test_GetAccountReportStatus(t *testing.T) {
// Setup the mocks
httpmock.Activate()
defer httpmock.DeactivateAndReset()
// Mock the time request
httpmock.RegisterResponder(
"GET",
"https://mock-api.gdax.com/reports/0428b97b-bec1-429e-a94c-59232926778d",
httpmock.NewStringResponder(
200,
`
{
"id": "0428b97b-bec1-429e-a94c-59232926778d",
"type": "fills",
"status": "creating",
"created_at": "2015-01-06T10:34:47.000Z",
"expires_at": "2015-01-13T10:35:47.000Z",
"params": {
"start_date": "2014-11-01T00:00:00.000Z",<|fim▁hole|> }
}
`,
),
)
client := NewMockClient()
output, err := GetAccountReportStatus(client, "0428b97b-bec1-429e-a94c-59232926778d")
expected := AccountReportStatus{
ID: "0428b97b-bec1-429e-a94c-59232926778d",
Type: "fills",
Status: "creating",
CreatedAt: time.Date(2015, 01, 06, 10, 34, 47, 0, time.UTC),
CompletedAt: time.Date(0001, 01, 01, 00, 00, 00, 0, time.UTC),
ExpiresAt: time.Date(2015, 01, 13, 10, 35, 47, 0, time.UTC),
FileURL: "",
}
if err != nil {
t.Fatalf("Error should be nil, %v", err)
}
if output == nil {
t.Fatalf("Expected output to not be nil, actual = %v", output)
}
if !reflect.DeepEqual(output.ID, expected.ID) {
t.Fatalf("Expected output.ID %v to match expected.ID %v", output.ID, expected.ID)
}
if !reflect.DeepEqual(output.Type, expected.Type) {
t.Fatalf("Expected output.Type %v to match expected.Type %v", output.Type, expected.Type)
}
if !reflect.DeepEqual(output.Status, expected.Status) {
t.Fatalf("Expected output.Status %v to match expected.Status %v", output.Status, expected.Status)
}
if !reflect.DeepEqual(output.CreatedAt.UTC().Unix(), expected.CreatedAt.UTC().Unix()) {
t.Fatalf("Expected output.CreatedAt %v to match expected.CreatedAt %v", output.CreatedAt, expected.CreatedAt)
}
if !reflect.DeepEqual(output.CompletedAt.UTC().Unix(), expected.CompletedAt.UTC().Unix()) {
t.Fatalf("Expected output.CompletedAt %v to match expected.CompletedAt %v", output.CompletedAt, expected.CompletedAt)
}
if !reflect.DeepEqual(output.FileURL, expected.FileURL) {
t.Fatalf("Expected output.FileURL %v to match expected.FileURL %v", output.FileURL, expected.FileURL)
}
}
//
//
//
func Test_mock_GetAccountTrailingVolume(t *testing.T) {
// Setup the mocks
httpmock.Activate()
defer httpmock.DeactivateAndReset()
// Mock the time request
httpmock.RegisterResponder(
"GET",
"https://mock-api.gdax.com/users/self/trailing-volume",
httpmock.NewStringResponder(
200,
`
[
{
"product_id": "BTC-USD",
"exchange_volume": "11800.00000000",
"volume": "100.00000000",
"recorded_at": "1973-11-29T00:05:01.123456Z"
},
{
"product_id": "LTC-USD",
"exchange_volume": "51010.04100000",
"volume": "2010.04100000",
"recorded_at": "1973-11-29T00:05:02.123456Z"
}
]
`,
),
)
client := NewMockClient()
expected := AccountTrailingVolume{
ProductID: "BTC-USD",
ExchangeVolume: 11800.00000000,
Volume: 100.00000000,
RecordedAt: time.Date(1973, 11, 29, 00, 05, 01, 123456*1000, time.UTC),
}
output, err := GetAccountTrailingVolume(client)
if err != nil {
t.Fatalf("Error should be nil, %v", err)
}
if len(output) != 2 {
t.Fatalf("Expected output.length = 2, actual = %v", len(output))
}
if !reflect.DeepEqual(output[0].RecordedAt, expected.RecordedAt) {
t.Fatalf("Expected output.RecordedAt %v to match expected.RecordedAt %v", output[0].RecordedAt, expected.RecordedAt)
}
if output[0].ProductID != expected.ProductID {
t.Fatalf("Expected output.ProductID %v to match expected %v", output[0].ProductID, expected.ProductID)
}
if output[0].ExchangeVolume != expected.ExchangeVolume {
t.Fatalf("Expected output.ExchangeVolume %v to match expected %v", output[0].ExchangeVolume, expected.ExchangeVolume)
}
if output[0].Volume != expected.Volume {
t.Fatalf("Expected output.Volume %v to match expected %v", output[0].Volume, expected.Volume)
}
}<|fim▁end|> | "end_date": "2014-11-30T23:59:59.000Z" |
<|file_name|>drivers-list.js<|end_file_name|><|fim▁begin|>/*
* Copyright (C) 2015 Stratio (http://stratio.com)
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
(function () {
'use strict';
angular
.module('webApp')
.controller('DriversListCtrl', DriversListCtrl);
DriversListCtrl.$inject = ['$scope', 'EntityFactory', 'ModalService', 'UtilsService', '$state'];
function DriversListCtrl($scope, EntityFactory, ModalService, UtilsService, $state) {
/*jshint validthis: true*/
var vm = this;
vm.deleteDriver = deleteDriver;
vm.getAllDrivers = getAllDrivers;
vm.createDriver = createDriver;
vm.sortDrivers = sortDrivers;
vm.tableReverse = false;
vm.sortField = 'fileName';
vm.errorMessage = {
type: 'error',
text: '',
internalTrace: ''
};
vm.successMessage = {
type: 'success',
text: '',
internalTrace: ''
};
init();
/////////////////////////////////
function init() {
getAllDrivers();
}
function getAllDrivers() {
EntityFactory.getAllDrivers().then(function (drivers) {
vm.driversData = drivers;
});
}
function createDriver() {
var controller = 'CreateEntityModalCtrl';
var templateUrl = "templates/modal/entity-creation-modal.tpl.html";
var resolve = {
type: function () {
return "DRIVER";
},
title: function () {
return "_ENTITY_._CREATE_DRIVER_TITLE_";
},
info: function () {
return "_DRIVER_INFO_";
},
text: function () {
return "_DRIVER_TEXT_";
},
};
var modalInstance = ModalService.openModal(controller, templateUrl, resolve, '', 'lg');
return modalInstance.result.then(function () {
getAllDrivers();
vm.successMessage.text = '_DRIVER_CREATE_OK_';
});
}
function deleteDriver(fileName) {
return deleteDriverConfirm('lg', fileName);
}
function deleteDriverConfirm(size, fileName) {
var controller = 'DeleteEntityModalCtrl';
var templateUrl = "templates/modal/entity-delete-modal.tpl.html";
var resolve = {
item: function () {
return fileName;
},
type: function () {
return "DRIVER";
},
title: function () {
return "_ENTITY_._DELETE_DRIVER_TITLE_";
}
};
var modalInstance = ModalService.openModal(controller, templateUrl, resolve, '', size);
return modalInstance.result.then(function (fileName) {
var index = UtilsService.getArrayElementPosition(vm.driversData, 'fileName', fileName);<|fim▁hole|> });
}
function sortDrivers(fieldName) {
if (fieldName == vm.sortField) {
vm.tableReverse = !vm.tableReverse;
} else {
vm.tableReverse = false;
vm.sortField = fieldName;
}
}
}
})();<|fim▁end|> | vm.driversData.splice(index, 1);
vm.successMessage.text = '_DRIVER_DELETE_OK_'; |
<|file_name|>add.py<|end_file_name|><|fim▁begin|>import wx
import eos.db
import gui.mainFrame
from gui import globalEvents as GE
from gui.fitCommands.calc.module.projectedAdd import CalcAddProjectedModuleCommand
from gui.fitCommands.helpers import InternalCommandHistory, ModuleInfo<|fim▁hole|>
def __init__(self, fitID, itemID):
wx.Command.__init__(self, True, 'Add Projected Module')
self.internalHistory = InternalCommandHistory()
self.fitID = fitID
self.itemID = itemID
def Do(self):
cmd = CalcAddProjectedModuleCommand(fitID=self.fitID, modInfo=ModuleInfo(itemID=self.itemID))
success = self.internalHistory.submit(cmd)
sFit = Fit.getInstance()
if cmd.needsGuiRecalc:
eos.db.flush()
sFit.recalc(self.fitID)
sFit.fill(self.fitID)
eos.db.commit()
wx.PostEvent(gui.mainFrame.MainFrame.getInstance(), GE.FitChanged(fitIDs=(self.fitID,)))
return success
def Undo(self):
success = self.internalHistory.undoAll()
eos.db.flush()
sFit = Fit.getInstance()
sFit.recalc(self.fitID)
sFit.fill(self.fitID)
eos.db.commit()
wx.PostEvent(gui.mainFrame.MainFrame.getInstance(), GE.FitChanged(fitIDs=(self.fitID,)))
        return success
// File: stats.js.src.js
/* global define */
define([
'jquery',
'marionette'
], function($, Marionette) {
var CountItem = Marionette.ItemView.extend({
tagName: 'tr',
template: 'stats/count-item'
});
var CountList = Marionette.CompositeView.extend({
template: 'stats/count-list',
itemView: CountItem,
itemViewContainer: 'tbody',
ui: {
'statsTable': 'table',
'loader': '.loading-message'
},
events: {
'click thead th': 'handleSort'
},
collectionEvents: {
'sort': '_renderChildren',
'request': 'showLoader',
'reset': 'hideLoader'
},
hideLoader: function() {
this.ui.loader.hide();
this.ui.statsTable.show();
},
showLoader: function() {
this.ui.loader.show();
this.ui.statsTable.hide();
},
handleSort: function(event) {
if (!this.collection.length) return;
this.applySort($(event.target).data('sort'));
},
applySort: function(attr) {
var dir = 'asc';
// Already sorted by the attribute, cycle direction.
if (this.collection._sortAttr === attr) {
                dir = this.collection._sortDir === 'asc' ? 'desc' : 'asc';
            }
            this.$('[data-sort=' + this.collection._sortAttr + ']')
                .removeClass(this.collection._sortDir);
            this.$('[data-sort=' + attr + ']').addClass(dir);
// Reference for cycling.
this.collection._sortAttr = attr;
this.collection._sortDir = dir;
// Parse function for handling the sort attributes.
var parse = function(v) {
return v;
};
this.collection.comparator = function(m1, m2) {
var v1 = parse(m1.get(attr)),
v2 = parse(m2.get(attr));
if (v1 < v2) return (dir === 'asc' ? -1 : 1);
if (v1 > v2) return (dir === 'asc' ? 1 : -1);
return 0;
};
this.collection.sort();
}
});
return {
CountList: CountList
};
});
// File: baseLine.d.ts
import "core-js/fn/object/assign";
declare function configCreate(winWidth: number, customStyles: any): any;
export default configCreate;
// File: ex14_49_TEST.cpp
#include "ex14_49.h"
int main()
{
    Date date(12, 4, 2015);
    if (static_cast<bool>(date))
        std::cout << date << std::endl;
}
// File: issue-17431-6.rs
use std::sync::Mutex;
enum Foo { X(Mutex<Option<Foo>>) }
//~^ ERROR recursive type `Foo` has infinite size
impl Foo { fn bar(self) {} }

fn main() {}
# File: grove_sound_sensor.py
#!/usr/bin/env python
#
# Jetduino Example for using the Grove Sound Sensor and the Grove LED
#
# The Jetduino connects the Jetson and Grove sensors. You can learn more about the Jetduino here: http://www.NeuroRoboticTech.com/Projects/Jetduino
#
# Modules:
# http://www.seeedstudio.com/wiki/Grove_-_Sound_Sensor
# http://www.seeedstudio.com/wiki/Grove_-_LED_Socket_Kit
#
# Have a question about this example? Ask on the forums here: http://www.NeuroRoboticTech.com/Forum
#
'''
## License
The MIT License (MIT)
GrovePi for the Raspberry Pi: an open source platform for connecting Grove Sensors to the Raspberry Pi.
Copyright (C) 2015 Dexter Industries
Jetduino for the Jetson TK1/TX1: an open source platform for connecting
Grove Sensors to the Jetson embedded supercomputers.
Copyright (C) 2016 NeuroRobotic Technologies
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:

The above copyright notice and this permission notice shall be included in
all copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
THE SOFTWARE.
'''
import time
import jetduino
from jetduino_pins import *
# Connect the Grove Sound Sensor to analog port A0
# SIG,NC,VCC,GND
sound_sensor = ARD_A0
# Connect the Grove LED to digital port D4
# SIG,NC,VCC,GND
led = LED_D4
jetduino.pinMode(led, OUTPUT_PIN)
# The threshold to turn the led on: 600 * 5 / 1024 = 2.93v
threshold_value = 600
while True:
try:
# Read the sound level
sensor_value = jetduino.analogRead(sound_sensor)
# If loud, illuminate LED, otherwise dim
if sensor_value > threshold_value:
jetduino.digitalWrite(led, HIGH)
else:
jetduino.digitalWrite(led, LOW)
print ("sensor_value =", sensor_value)
time.sleep(.5)
except IOError:
print ("Error")<|fim▁end|> | |
// File: coyoneda.js
// Video: https://www.youtube.com/watch?v=WH5BrkzGgQY
const daggy = require('daggy')
const compose = (f, g) => x => f(g(x))
const id = x => x
//===============Define Coyoneda=========
// create constructor with props 'x' and 'f'
// 'x' is our value, 'f' is a function
const Coyoneda = daggy.tagged('x', 'f')
// map composes the function
Coyoneda.prototype.map = function(f) {
return Coyoneda(this.x, compose(f, this.f))
}
Coyoneda.prototype.lower = function() {
return this.x.map(this.f)
}
// lift starts off Coyoneda with the 'id' function
Coyoneda.lift = x => Coyoneda(x, id)
//===============Map over a non-Functor - Set =========
// Set does not have a 'map' method
const set = new Set([1, 1, 2, 3, 3, 4])
console.log("Set([1, 1, 2, 3, 3, 4]) : ", set)
// Wrap set into Coyoneda with 'id' function
const coyoResult = Coyoneda.lift(set)
.map(x => x + 1)
.map(x => `${x}!`)
console.log(
"Coyoneda.lift(set).map(x => x + 1).map(x => `${x}!`): ",
coyoResult
)
// equivalent to buildUpFn = coyoResult.f, ourSet = coyoResult.x
const {f: builtUpFn, x: ourSet} = coyoResult
console.log("builtUpFn is: ", builtUpFn, "; ourSet is: ", ourSet)
ourSet
.forEach(n => console.log(builtUpFn(n)))
// 2!
// 3!
// 4!
// 5!

//===============Lift a functor in (Array) and achieve Loop fusion=========
console.log(
  `Coyoneda.lift([1,2,3]).map(x => x * 2).map(x => x - 1).lower() : `,
  Coyoneda.lift([1,2,3])
    .map(x => x * 2)
.map(x => x - 1)
.lower()
)
// [ 1, 3, 5 ]
//===============Make Any Type a Functor=========
// Any object becomes a functor when placed in Coyoneda
const Container = daggy.tagged('x')
const tunacan = Container("tuna")
const res = Coyoneda.lift(tunacan)
.map(x => x.toUpperCase())
.map(x => x + '!')
const {f: fn, x: can} = res
console.log(fn(can.x))
// TUNA!
// File: util.js
function char2int(c) { return c.charCodeAt(0); }
var hexD = [ '1', '2', '3', '4', '5',
'6', '7', '8', '9', 'a', 'b', 'c', 'd', 'e', 'f' ];
hexD = ['0'].concat(hexD);
function hex(number) {
var str = "";
  str = hexD[number&0xf] + str ;
str = hexD[(number>>=4)&0xf] + str ;
str = hexD[(number>>=4)&0xf] + str ;
str = hexD[(number>>=4)&0xf] + str ;
return str;
}
function hex2(number) {
var str = "";
  str = hexD[number&0xf] + str ;
str = hexD[(number>>=4)&0xf] + str ;
return str;
}
function fromRunLenCodes(runLenArray, bitm) {
bitm = bitm || [];
var bit = runLenArray[0];
var runLenIdx = 1, bitIdx = 0;
var runLen = 0;
while (runLenIdx < runLenArray.length) {
runLen = runLenArray[runLenIdx];
while (runLen--) {
while ((INTBITLEN * (bitm.length)) < bitIdx) bitm.push(0);
if (bit) bitm[bitIdx >> D_INTBITLEN] |= (1 << (M_INTBITLEN & bitIdx));
bitIdx++ ;
}
runLenIdx++ ;
bit ^= 1;
}
return (bitm);
}
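// Illustrative sketch (not from the original source): assuming the globals
// INTBITLEN = 32, D_INTBITLEN = 5 and M_INTBITLEN = 31 are defined elsewhere,
// the run-length array alternates runs of the starting bit value, so
// [0, 3, 2] means "3 zero bits, then 2 one bits":
//
//   fromRunLenCodes([0, 3, 2]);   // -> [24], i.e. bits 3 and 4 set (0b11000)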
function arguments_or_eval(l) {
switch ( l ) {
case 'arguments':
case 'eval':
return true;
  }

  return false;
};
var has = Object.prototype.hasOwnProperty;
function fromcode(codePoint ) {
if ( codePoint <= 0xFFFF)
return String.fromCharCode(codePoint) ;
return String.fromCharCode(((codePoint-0x10000 )>>10)+0x0D800,
((codePoint-0x10000 )&(1024-1))+0x0DC00);
}
function core(n) { return n.type === PAREN ? n.expr : n; };
function toNum (n) {
return (n >= CH_0 && n <= CH_9) ? n - CH_0 :
(n <= CH_f && n >= CH_a) ? 10 + n - CH_a :
(n >= CH_A && n <= CH_F) ? 10 + n - CH_A : -1;
};
# File: options.py
from collections import OrderedDict
import copy
import operator
from functools import partial, reduce, update_wrapper
import warnings
from django import forms
from django.conf import settings
from django.contrib import messages
from django.contrib.admin import widgets, helpers
from django.contrib.admin import validation
from django.contrib.admin.checks import (BaseModelAdminChecks, ModelAdminChecks,
InlineModelAdminChecks)
from django.contrib.admin.exceptions import DisallowedModelAdminToField
from django.contrib.admin.utils import (quote, unquote, flatten_fieldsets,
get_deleted_objects, model_format_dict, NestedObjects,
lookup_needs_distinct)
from django.contrib.admin.templatetags.admin_static import static
from django.contrib.admin.templatetags.admin_urls import add_preserved_filters
from django.contrib.auth import get_permission_codename
from django.core import checks
from django.core.exceptions import (PermissionDenied, ValidationError,
FieldError, ImproperlyConfigured)
from django.core.paginator import Paginator
from django.core.urlresolvers import reverse
from django.db import models, transaction, router
from django.db.models.constants import LOOKUP_SEP
from django.db.models.related import RelatedObject
from django.db.models.fields import BLANK_CHOICE_DASH, FieldDoesNotExist
from django.db.models.sql.constants import QUERY_TERMS
from django.forms.formsets import all_valid, DELETION_FIELD_NAME
from django.forms.models import (modelform_factory, modelformset_factory,
inlineformset_factory, BaseInlineFormSet, modelform_defines_fields)
from django.http import Http404, HttpResponseRedirect
from django.http.response import HttpResponseBase
from django.shortcuts import get_object_or_404
from django.template.response import SimpleTemplateResponse, TemplateResponse
from django.utils import six
from django.utils.decorators import method_decorator
from django.utils.deprecation import (RenameMethodsBase,
RemovedInDjango18Warning, RemovedInDjango19Warning)
from django.utils.encoding import force_text, python_2_unicode_compatible
from django.utils.html import escape, escapejs
from django.utils.http import urlencode
from django.utils.text import capfirst, get_text_list
from django.utils.translation import ugettext as _
from django.utils.translation import ungettext
from django.utils.safestring import mark_safe
from django.views.decorators.csrf import csrf_protect
IS_POPUP_VAR = '_popup'
TO_FIELD_VAR = '_to_field'
HORIZONTAL, VERTICAL = 1, 2
def get_content_type_for_model(obj):
# Since this module gets imported in the application's root package,
# it cannot import models from other applications at the module level.
from django.contrib.contenttypes.models import ContentType
return ContentType.objects.get_for_model(obj, for_concrete_model=False)
def get_ul_class(radio_style):
return 'radiolist' if radio_style == VERTICAL else 'radiolist inline'
class IncorrectLookupParameters(Exception):
pass
# Defaults for formfield_overrides. ModelAdmin subclasses can change this
# by adding to ModelAdmin.formfield_overrides.
FORMFIELD_FOR_DBFIELD_DEFAULTS = {
models.DateTimeField: {
'form_class': forms.SplitDateTimeField,
'widget': widgets.AdminSplitDateTime
},
models.DateField: {'widget': widgets.AdminDateWidget},
models.TimeField: {'widget': widgets.AdminTimeWidget},
models.TextField: {'widget': widgets.AdminTextareaWidget},
models.URLField: {'widget': widgets.AdminURLFieldWidget},
models.IntegerField: {'widget': widgets.AdminIntegerFieldWidget},
models.BigIntegerField: {'widget': widgets.AdminBigIntegerFieldWidget},
models.CharField: {'widget': widgets.AdminTextInputWidget},
models.ImageField: {'widget': widgets.AdminFileWidget},
models.FileField: {'widget': widgets.AdminFileWidget},
models.EmailField: {'widget': widgets.AdminEmailInputWidget},
}
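# Illustrative sketch (not part of this module): a ModelAdmin subclass can
# extend these defaults through ``formfield_overrides``; the widget below is
# only an example of the pattern.
#
#     class ArticleAdmin(admin.ModelAdmin):
#         formfield_overrides = {
#             models.TextField: {'widget': forms.Textarea(attrs={'rows': 4})},
#         }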
csrf_protect_m = method_decorator(csrf_protect)
class RenameBaseModelAdminMethods(forms.MediaDefiningClass, RenameMethodsBase):
renamed_methods = (
('queryset', 'get_queryset', RemovedInDjango18Warning),
)
class BaseModelAdmin(six.with_metaclass(RenameBaseModelAdminMethods)):
"""Functionality common to both ModelAdmin and InlineAdmin."""
raw_id_fields = ()
fields = None
exclude = None
fieldsets = None
form = forms.ModelForm
filter_vertical = ()
filter_horizontal = ()
radio_fields = {}
prepopulated_fields = {}
formfield_overrides = {}
readonly_fields = ()
ordering = None
view_on_site = True
# Validation of ModelAdmin definitions
# Old, deprecated style:
validator_class = None
default_validator_class = validation.BaseValidator
# New style:
checks_class = BaseModelAdminChecks
@classmethod
def validate(cls, model):
warnings.warn(
'ModelAdmin.validate() is deprecated. Use "check()" instead.',
RemovedInDjango19Warning)
if cls.validator_class:
validator = cls.validator_class()
else:
validator = cls.default_validator_class()
validator.validate(cls, model)
@classmethod
def check(cls, model, **kwargs):
if cls.validator_class:
warnings.warn(
'ModelAdmin.validator_class is deprecated. '
'ModeAdmin validators must be converted to use '
'the system check framework.',
RemovedInDjango19Warning)
validator = cls.validator_class()
try:
validator.validate(cls, model)
except ImproperlyConfigured as e:
return [checks.Error(e.args[0], hint=None, obj=cls)]
else:
return []
else:
return cls.checks_class().check(cls, model, **kwargs)
def __init__(self):
overrides = FORMFIELD_FOR_DBFIELD_DEFAULTS.copy()
overrides.update(self.formfield_overrides)
self.formfield_overrides = overrides
def formfield_for_dbfield(self, db_field, **kwargs):
"""
Hook for specifying the form Field instance for a given database Field
instance.
If kwargs are given, they're passed to the form Field's constructor.
"""
request = kwargs.pop("request", None)
# If the field specifies choices, we don't need to look for special
# admin widgets - we just need to use a select widget of some kind.
if db_field.choices:
return self.formfield_for_choice_field(db_field, request, **kwargs)
# ForeignKey or ManyToManyFields
if isinstance(db_field, (models.ForeignKey, models.ManyToManyField)):
# Combine the field kwargs with any options for formfield_overrides.
# Make sure the passed in **kwargs override anything in
# formfield_overrides because **kwargs is more specific, and should
# always win.
if db_field.__class__ in self.formfield_overrides:
kwargs = dict(self.formfield_overrides[db_field.__class__], **kwargs)
# Get the correct formfield.
if isinstance(db_field, models.ForeignKey):
formfield = self.formfield_for_foreignkey(db_field, request, **kwargs)
elif isinstance(db_field, models.ManyToManyField):
formfield = self.formfield_for_manytomany(db_field, request, **kwargs)
# For non-raw_id fields, wrap the widget with a wrapper that adds
# extra HTML -- the "add other" interface -- to the end of the
# rendered output. formfield can be None if it came from a
# OneToOneField with parent_link=True or a M2M intermediary.
if formfield and db_field.name not in self.raw_id_fields:
related_modeladmin = self.admin_site._registry.get(db_field.rel.to)
can_add_related = bool(related_modeladmin and
related_modeladmin.has_add_permission(request))
formfield.widget = widgets.RelatedFieldWidgetWrapper(
formfield.widget, db_field.rel, self.admin_site,
can_add_related=can_add_related)
return formfield
# If we've got overrides for the formfield defined, use 'em. **kwargs
# passed to formfield_for_dbfield override the defaults.
for klass in db_field.__class__.mro():
if klass in self.formfield_overrides:
kwargs = dict(copy.deepcopy(self.formfield_overrides[klass]), **kwargs)
return db_field.formfield(**kwargs)
# For any other type of field, just call its formfield() method.
return db_field.formfield(**kwargs)
def formfield_for_choice_field(self, db_field, request=None, **kwargs):
"""
Get a form Field for a database Field that has declared choices.
"""
# If the field is named as a radio_field, use a RadioSelect
if db_field.name in self.radio_fields:
# Avoid stomping on custom widget/choices arguments.
if 'widget' not in kwargs:
kwargs['widget'] = widgets.AdminRadioSelect(attrs={
'class': get_ul_class(self.radio_fields[db_field.name]),
})
if 'choices' not in kwargs:
kwargs['choices'] = db_field.get_choices(
include_blank=db_field.blank,
blank_choice=[('', _('None'))]
)
return db_field.formfield(**kwargs)
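    # Usage sketch (hypothetical field name): declaring a choice field in
    # ``radio_fields`` swaps the default select widget for AdminRadioSelect,
    # laid out according to the HORIZONTAL/VERTICAL constants defined above.
    #
    #     class PersonAdmin(admin.ModelAdmin):
    #         radio_fields = {'gender': VERTICAL}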
def get_field_queryset(self, db, db_field, request):
"""
If the ModelAdmin specifies ordering, the queryset should respect that
ordering. Otherwise don't specify the queryset, let the field decide
(returns None in that case).
"""
related_admin = self.admin_site._registry.get(db_field.rel.to, None)
if related_admin is not None:
ordering = related_admin.get_ordering(request)
if ordering is not None and ordering != ():
return db_field.rel.to._default_manager.using(db).order_by(*ordering)
return None
def formfield_for_foreignkey(self, db_field, request=None, **kwargs):
"""
Get a form Field for a ForeignKey.
"""
db = kwargs.get('using')
if db_field.name in self.raw_id_fields:
kwargs['widget'] = widgets.ForeignKeyRawIdWidget(db_field.rel,
self.admin_site, using=db)
elif db_field.name in self.radio_fields:
kwargs['widget'] = widgets.AdminRadioSelect(attrs={
'class': get_ul_class(self.radio_fields[db_field.name]),
})
kwargs['empty_label'] = _('None') if db_field.blank else None
if 'queryset' not in kwargs:
queryset = self.get_field_queryset(db, db_field, request)
if queryset is not None:
kwargs['queryset'] = queryset
return db_field.formfield(**kwargs)
def formfield_for_manytomany(self, db_field, request=None, **kwargs):
"""
Get a form Field for a ManyToManyField.
"""
# If it uses an intermediary model that isn't auto created, don't show
# a field in admin.
if not db_field.rel.through._meta.auto_created:
return None
db = kwargs.get('using')
if db_field.name in self.raw_id_fields:
kwargs['widget'] = widgets.ManyToManyRawIdWidget(db_field.rel,
self.admin_site, using=db)
kwargs['help_text'] = ''
elif db_field.name in (list(self.filter_vertical) + list(self.filter_horizontal)):
kwargs['widget'] = widgets.FilteredSelectMultiple(db_field.verbose_name, (db_field.name in self.filter_vertical))
if 'queryset' not in kwargs:
queryset = self.get_field_queryset(db, db_field, request)
if queryset is not None:
kwargs['queryset'] = queryset
return db_field.formfield(**kwargs)
def get_view_on_site_url(self, obj=None):
if obj is None or not self.view_on_site:
return None
if callable(self.view_on_site):
return self.view_on_site(obj)
elif self.view_on_site and hasattr(obj, 'get_absolute_url'):
# use the ContentType lookup if view_on_site is True
return reverse('admin:view_on_site', kwargs={
'content_type_id': get_content_type_for_model(obj).pk,
'object_id': obj.pk
})
@property
def declared_fieldsets(self):
warnings.warn(
"ModelAdmin.declared_fieldsets is deprecated and "
"will be removed in Django 1.9.",
RemovedInDjango19Warning, stacklevel=2
)
if self.fieldsets:
return self.fieldsets
elif self.fields:
return [(None, {'fields': self.fields})]
return None
def get_fields(self, request, obj=None):
"""
Hook for specifying fields.
"""
return self.fields
def get_fieldsets(self, request, obj=None):
"""
Hook for specifying fieldsets.
"""
# We access the property and check if it triggers a warning.
# If it does, then it's ours and we can safely ignore it, but if
# it doesn't then it has been overridden so we must warn about the
# deprecation.
with warnings.catch_warnings(record=True) as w:
warnings.simplefilter("always")
declared_fieldsets = self.declared_fieldsets
if len(w) != 1 or not issubclass(w[0].category, RemovedInDjango19Warning):
warnings.warn(
"ModelAdmin.declared_fieldsets is deprecated and "
"will be removed in Django 1.9.",
RemovedInDjango19Warning
)
if declared_fieldsets:
return declared_fieldsets
if self.fieldsets:
return self.fieldsets
return [(None, {'fields': self.get_fields(request, obj)})]
def get_ordering(self, request):
"""
Hook for specifying field ordering.
"""
return self.ordering or () # otherwise we might try to *None, which is bad ;)
def get_readonly_fields(self, request, obj=None):
"""
Hook for specifying custom readonly fields.
"""
return self.readonly_fields
def get_prepopulated_fields(self, request, obj=None):
"""
Hook for specifying custom prepopulated fields.
"""
return self.prepopulated_fields
def get_queryset(self, request):
"""
Returns a QuerySet of all model instances that can be edited by the
admin site. This is used by changelist_view.
"""
qs = self.model._default_manager.get_queryset()
# TODO: this should be handled by some parameter to the ChangeList.
ordering = self.get_ordering(request)
if ordering:
qs = qs.order_by(*ordering)
return qs
def lookup_allowed(self, lookup, value):
from django.contrib.admin.filters import SimpleListFilter
model = self.model
# Check FKey lookups that are allowed, so that popups produced by
# ForeignKeyRawIdWidget, on the basis of ForeignKey.limit_choices_to,
# are allowed to work.
for l in model._meta.related_fkey_lookups:
# As ``limit_choices_to`` can be a callable, invoke it here.
if callable(l):
l = l()
for k, v in widgets.url_params_from_lookup_dict(l).items():
if k == lookup and v == value:
return True
parts = lookup.split(LOOKUP_SEP)
# Last term in lookup is a query term (__exact, __startswith etc)
# This term can be ignored.
if len(parts) > 1 and parts[-1] in QUERY_TERMS:
parts.pop()
# Special case -- foo__id__exact and foo__id queries are implied
# if foo has been specifically included in the lookup list; so
# drop __id if it is the last part. However, first we need to find
# the pk attribute name.
rel_name = None
for part in parts[:-1]:
try:
field, _, _, _ = model._meta.get_field_by_name(part)
except FieldDoesNotExist:
# Lookups on non-existent fields are ok, since they're ignored
# later.
return True
if hasattr(field, 'rel'):
if field.rel is None:
# This property or relation doesn't exist, but it's allowed
# since it's ignored in ChangeList.get_filters().
return True
model = field.rel.to
if hasattr(field.rel, 'get_related_field'):
rel_name = field.rel.get_related_field().name
else:
rel_name = None
elif isinstance(field, RelatedObject):
model = field.model
rel_name = model._meta.pk.name
else:
rel_name = None
if rel_name and len(parts) > 1 and parts[-1] == rel_name:
parts.pop()
if len(parts) == 1:
return True
clean_lookup = LOOKUP_SEP.join(parts)
valid_lookups = [self.date_hierarchy]
for filter_item in self.list_filter:
if isinstance(filter_item, type) and issubclass(filter_item, SimpleListFilter):
valid_lookups.append(filter_item.parameter_name)
elif isinstance(filter_item, (list, tuple)):
valid_lookups.append(filter_item[0])
else:
valid_lookups.append(filter_item)
return clean_lookup in valid_lookups
def to_field_allowed(self, request, to_field):
"""
Returns True if the model associated with this admin should be
allowed to be referenced by the specified field.
"""
opts = self.model._meta
try:
field = opts.get_field(to_field)
except FieldDoesNotExist:
return False
# Check whether this model is the origin of a M2M relationship
# in which case to_field has to be the pk on this model.
if opts.many_to_many and field.primary_key:
return True
# Make sure at least one of the models registered for this site
# references this field through a FK or a M2M relationship.
registered_models = set()
for model, admin in self.admin_site._registry.items():
registered_models.add(model)
for inline in admin.inlines:
registered_models.add(inline.model)
for related_object in (opts.get_all_related_objects(include_hidden=True) +
opts.get_all_related_many_to_many_objects()):
related_model = related_object.model
if (any(issubclass(model, related_model) for model in registered_models) and
related_object.field.rel.get_related_field() == field):
return True
return False
def has_add_permission(self, request):
"""
Returns True if the given request has permission to add an object.
Can be overridden by the user in subclasses.
"""
opts = self.opts
codename = get_permission_codename('add', opts)
return request.user.has_perm("%s.%s" % (opts.app_label, codename))
def has_change_permission(self, request, obj=None):
"""
Returns True if the given request has permission to change the given
Django model instance, the default implementation doesn't examine the
`obj` parameter.
Can be overridden by the user in subclasses. In such case it should
return True if the given request has permission to change the `obj`
model instance. If `obj` is None, this should return True if the given
request has permission to change *any* object of the given type.
"""
opts = self.opts
codename = get_permission_codename('change', opts)
return request.user.has_perm("%s.%s" % (opts.app_label, codename))
def has_delete_permission(self, request, obj=None):
"""
Returns True if the given request has permission to change the given
Django model instance, the default implementation doesn't examine the
`obj` parameter.
Can be overridden by the user in subclasses. In such case it should
return True if the given request has permission to delete the `obj`
model instance. If `obj` is None, this should return True if the given
request has permission to delete *any* object of the given type.
"""
opts = self.opts
codename = get_permission_codename('delete', opts)
return request.user.has_perm("%s.%s" % (opts.app_label, codename))
@python_2_unicode_compatible
class ModelAdmin(BaseModelAdmin):
"Encapsulates all admin options and functionality for a given model."
list_display = ('__str__',)
list_display_links = ()
list_filter = ()
list_select_related = False
list_per_page = 100
list_max_show_all = 200
list_editable = ()
search_fields = ()
date_hierarchy = None
save_as = False
save_on_top = False
paginator = Paginator
preserve_filters = True
inlines = []
# Custom templates (designed to be over-ridden in subclasses)
add_form_template = None
change_form_template = None
change_list_template = None
delete_confirmation_template = None
delete_selected_confirmation_template = None
object_history_template = None
# Actions
actions = []
action_form = helpers.ActionForm
actions_on_top = True
actions_on_bottom = False
actions_selection_counter = True
# validation
# Old, deprecated style:
default_validator_class = validation.ModelAdminValidator
# New style:
checks_class = ModelAdminChecks
def __init__(self, model, admin_site):
self.model = model
self.opts = model._meta
self.admin_site = admin_site
super(ModelAdmin, self).__init__()
def __str__(self):
return "%s.%s" % (self.model._meta.app_label, self.__class__.__name__)
def get_inline_instances(self, request, obj=None):
inline_instances = []
for inline_class in self.inlines:
inline = inline_class(self.model, self.admin_site)
if request:
if not (inline.has_add_permission(request) or
inline.has_change_permission(request, obj) or
inline.has_delete_permission(request, obj)):
continue
if not inline.has_add_permission(request):
inline.max_num = 0
inline_instances.append(inline)
return inline_instances
def get_urls(self):
from django.conf.urls import patterns, url
def wrap(view):
def wrapper(*args, **kwargs):
return self.admin_site.admin_view(view)(*args, **kwargs)
return update_wrapper(wrapper, view)
info = self.model._meta.app_label, self.model._meta.model_name
urlpatterns = patterns('',
url(r'^$', wrap(self.changelist_view), name='%s_%s_changelist' % info),
url(r'^add/$', wrap(self.add_view), name='%s_%s_add' % info),
url(r'^(.+)/history/$', wrap(self.history_view), name='%s_%s_history' % info),
url(r'^(.+)/delete/$', wrap(self.delete_view), name='%s_%s_delete' % info),
url(r'^(.+)/$', wrap(self.change_view), name='%s_%s_change' % info),
)
return urlpatterns
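    # Extension sketch (hypothetical view name): subclasses usually prepend
    # their own routes so they match before the catch-all patterns above.
    #
    #     def get_urls(self):
    #         from django.conf.urls import patterns, url
    #         my_urls = patterns('',
    #             url(r'^report/$', self.admin_site.admin_view(self.report_view)),
    #         )
    #         return my_urls + super(MyModelAdmin, self).get_urls()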
def urls(self):
return self.get_urls()
urls = property(urls)
@property
def media(self):
extra = '' if settings.DEBUG else '.min'
js = [
'core.js',
'admin/RelatedObjectLookups.js',
'jquery%s.js' % extra,
'jquery.init.js'
]
if self.actions is not None:
js.append('actions%s.js' % extra)
if self.prepopulated_fields:
js.extend(['urlify.js', 'prepopulate%s.js' % extra])
return forms.Media(js=[static('admin/js/%s' % url) for url in js])
def get_model_perms(self, request):
"""
Returns a dict of all perms for this model. This dict has the keys
``add``, ``change``, and ``delete`` mapping to the True/False for each
of those actions.
"""
return {
'add': self.has_add_permission(request),
'change': self.has_change_permission(request),
'delete': self.has_delete_permission(request),
}
def get_fields(self, request, obj=None):
if self.fields:
return self.fields
form = self.get_form(request, obj, fields=None)
return list(form.base_fields) + list(self.get_readonly_fields(request, obj))
def get_form(self, request, obj=None, **kwargs):
"""
Returns a Form class for use in the admin add view. This is used by
add_view and change_view.
"""
if 'fields' in kwargs:
fields = kwargs.pop('fields')
else:
fields = flatten_fieldsets(self.get_fieldsets(request, obj))
if self.exclude is None:
exclude = []
else:
exclude = list(self.exclude)
exclude.extend(self.get_readonly_fields(request, obj))
if self.exclude is None and hasattr(self.form, '_meta') and self.form._meta.exclude:
# Take the custom ModelForm's Meta.exclude into account only if the
# ModelAdmin doesn't define its own.
exclude.extend(self.form._meta.exclude)
# if exclude is an empty list we pass None to be consistent with the
# default on modelform_factory
exclude = exclude or None
defaults = {
"form": self.form,
"fields": fields,
"exclude": exclude,
"formfield_callback": partial(self.formfield_for_dbfield, request=request),
}
defaults.update(kwargs)
if defaults['fields'] is None and not modelform_defines_fields(defaults['form']):
defaults['fields'] = forms.ALL_FIELDS
try:
return modelform_factory(self.model, **defaults)
except FieldError as e:
raise FieldError('%s. Check fields/fieldsets/exclude attributes of class %s.'
% (e, self.__class__.__name__))
def get_changelist(self, request, **kwargs):
"""
Returns the ChangeList class for use on the changelist page.
"""
from django.contrib.admin.views.main import ChangeList
return ChangeList
def get_object(self, request, object_id):
"""
Returns an instance matching the primary key provided. ``None`` is
returned if no match is found (or the object_id failed validation
against the primary key field).
"""
queryset = self.get_queryset(request)
model = queryset.model
try:
object_id = model._meta.pk.to_python(object_id)
return queryset.get(pk=object_id)
except (model.DoesNotExist, ValidationError, ValueError):
return None
def get_changelist_form(self, request, **kwargs):
"""
Returns a Form class for use in the Formset on the changelist page.
"""
defaults = {
"formfield_callback": partial(self.formfield_for_dbfield, request=request),
}
defaults.update(kwargs)
if (defaults.get('fields') is None
and not modelform_defines_fields(defaults.get('form'))):
defaults['fields'] = forms.ALL_FIELDS
return modelform_factory(self.model, **defaults)
def get_changelist_formset(self, request, **kwargs):
"""
Returns a FormSet class for use on the changelist page if list_editable
is used.
"""
defaults = {
"formfield_callback": partial(self.formfield_for_dbfield, request=request),
}
defaults.update(kwargs)
return modelformset_factory(self.model,
self.get_changelist_form(request), extra=0,
fields=self.list_editable, **defaults)
def _get_formsets(self, request, obj):
"""
Helper function that exists to allow the deprecation warning to be
executed while this function continues to return a generator.
"""
for inline in self.get_inline_instances(request, obj):
yield inline.get_formset(request, obj)
def get_formsets(self, request, obj=None):
warnings.warn(
"ModelAdmin.get_formsets() is deprecated and will be removed in "
"Django 1.9. Use ModelAdmin.get_formsets_with_inlines() instead.",
RemovedInDjango19Warning, stacklevel=2
)
return self._get_formsets(request, obj)
def get_formsets_with_inlines(self, request, obj=None):
"""
Yields formsets and the corresponding inlines.
"""
# We call get_formsets() [deprecated] and check if it triggers a
# warning. If it does, then it's ours and we can safely ignore it, but
# if it doesn't then it has been overridden so we must warn about the
# deprecation.
with warnings.catch_warnings(record=True) as w:
warnings.simplefilter("always")
formsets = self.get_formsets(request, obj)
if len(w) != 1 or not issubclass(w[0].category, RemovedInDjango19Warning):
warnings.warn(
"ModelAdmin.get_formsets() is deprecated and will be removed in "
"Django 1.9. Use ModelAdmin.get_formsets_with_inlines() instead.",
RemovedInDjango19Warning
)
if formsets:
zipped = zip(formsets, self.get_inline_instances(request, None))
for formset, inline in zipped:
yield formset, inline
else:
for inline in self.get_inline_instances(request, obj):
yield inline.get_formset(request, obj), inline
def get_paginator(self, request, queryset, per_page, orphans=0, allow_empty_first_page=True):
return self.paginator(queryset, per_page, orphans, allow_empty_first_page)
def log_addition(self, request, object):
"""
Log that an object has been successfully added.
The default implementation creates an admin LogEntry object.
"""
from django.contrib.admin.models import LogEntry, ADDITION
LogEntry.objects.log_action(
user_id=request.user.pk,
content_type_id=get_content_type_for_model(object).pk,
object_id=object.pk,
object_repr=force_text(object),
action_flag=ADDITION
)
def log_change(self, request, object, message):
"""
Log that an object has been successfully changed.
The default implementation creates an admin LogEntry object.
"""
from django.contrib.admin.models import LogEntry, CHANGE
LogEntry.objects.log_action(
user_id=request.user.pk,
content_type_id=get_content_type_for_model(object).pk,
object_id=object.pk,
object_repr=force_text(object),
action_flag=CHANGE,
change_message=message
)
def log_deletion(self, request, object, object_repr):
"""
Log that an object will be deleted. Note that this method must be
called before the deletion.
The default implementation creates an admin LogEntry object.
"""
from django.contrib.admin.models import LogEntry, DELETION
LogEntry.objects.log_action(
user_id=request.user.pk,
content_type_id=get_content_type_for_model(object).pk,
object_id=object.pk,
object_repr=object_repr,
action_flag=DELETION
)
def action_checkbox(self, obj):
"""
A list_display column containing a checkbox widget.
"""
return helpers.checkbox.render(helpers.ACTION_CHECKBOX_NAME, force_text(obj.pk))
action_checkbox.short_description = mark_safe('<input type="checkbox" id="action-toggle" />')
action_checkbox.allow_tags = True
def get_actions(self, request):
"""
Return a dictionary mapping the names of all actions for this
ModelAdmin to a tuple of (callable, name, description) for each action.
"""
# If self.actions is explicitly set to None that means that we don't
# want *any* actions enabled on this page.
from django.contrib.admin.views.main import _is_changelist_popup
if self.actions is None or _is_changelist_popup(request):
return OrderedDict()
actions = []
# Gather actions from the admin site first
for (name, func) in self.admin_site.actions:
description = getattr(func, 'short_description', name.replace('_', ' '))
actions.append((func, name, description))
# Then gather them from the model admin and all parent classes,
# starting with self and working back up.
for klass in self.__class__.mro()[::-1]:
class_actions = getattr(klass, 'actions', [])
# Avoid trying to iterate over None
if not class_actions:
continue
actions.extend(self.get_action(action) for action in class_actions)
# get_action might have returned None, so filter any of those out.
actions = filter(None, actions)
# Convert the actions into an OrderedDict keyed by name.
actions = OrderedDict(
(name, (func, name, desc))
for func, name, desc in actions
)
return actions
def get_action_choices(self, request, default_choices=BLANK_CHOICE_DASH):
"""
Return a list of choices for use in a form object. Each choice is a
tuple (name, description).
"""
choices = [] + default_choices
for func, name, description in six.itervalues(self.get_actions(request)):
choice = (name, description % model_format_dict(self.opts))
choices.append(choice)
return choices
def get_action(self, action):
"""
Return a given action from a parameter, which can either be a callable,
or the name of a method on the ModelAdmin. Return is a tuple of
(callable, name, description).
"""
# If the action is a callable, just use it.
if callable(action):
func = action
action = action.__name__
# Next, look for a method. Grab it off self.__class__ to get an unbound
# method instead of a bound one; this ensures that the calling
# conventions are the same for functions and methods.
elif hasattr(self.__class__, action):
func = getattr(self.__class__, action)
# Finally, look for a named method on the admin site
else:
try:
func = self.admin_site.get_action(action)
except KeyError:
return None
if hasattr(func, 'short_description'):
description = func.short_description
else:
description = capfirst(action.replace('_', ' '))
return func, action, description
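    # Usage sketch (hypothetical action): each of the three lookups above
    # resolves to a callable like this one, whose ``short_description``
    # becomes the label shown in the actions dropdown.
    #
    #     def make_published(modeladmin, request, queryset):
    #         queryset.update(status='published')
    #     make_published.short_description = "Mark selected items as published"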
def get_list_display(self, request):
"""
Return a sequence containing the fields to be displayed on the
changelist.
"""
return self.list_display
def get_list_display_links(self, request, list_display):
"""
Return a sequence containing the fields to be displayed as links
on the changelist. The list_display parameter is the list of fields
returned by get_list_display().
"""
if self.list_display_links or self.list_display_links is None or not list_display:
return self.list_display_links
else:
# Use only the first item in list_display as link
return list(list_display)[:1]
def get_list_filter(self, request):
"""
Returns a sequence containing the fields to be displayed as filters in
the right sidebar of the changelist page.
"""
return self.list_filter
def get_search_fields(self, request):
"""
Returns a sequence containing the fields to be searched whenever
somebody submits a search query.
"""
return self.search_fields
def get_search_results(self, request, queryset, search_term):
"""
Returns a tuple containing a queryset to implement the search,
and a boolean indicating if the results may contain duplicates.
"""
# Apply keyword searches.
def construct_search(field_name):
if field_name.startswith('^'):
return "%s__istartswith" % field_name[1:]
elif field_name.startswith('='):
return "%s__iexact" % field_name[1:]
elif field_name.startswith('@'):
return "%s__search" % field_name[1:]
else:
return "%s__icontains" % field_name
use_distinct = False
search_fields = self.get_search_fields(request)
if search_fields and search_term:
orm_lookups = [construct_search(str(search_field))
for search_field in search_fields]
for bit in search_term.split():
or_queries = [models.Q(**{orm_lookup: bit})
for orm_lookup in orm_lookups]
queryset = queryset.filter(reduce(operator.or_, or_queries))
if not use_distinct:
for search_spec in orm_lookups:
if lookup_needs_distinct(self.opts, search_spec):
use_distinct = True
break
return queryset, use_distinct
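    # Usage sketch (hypothetical field names): the prefixes handled by
    # construct_search() above map declared search_fields onto ORM lookups.
    #
    #     class PersonAdmin(admin.ModelAdmin):
    #         # '^name' -> name__istartswith,  '=email' -> email__iexact,
    #         # '@bio'  -> bio__search,        'city'   -> city__icontains
    #         search_fields = ('^name', '=email', '@bio', 'city')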
def get_preserved_filters(self, request):
"""
Returns the preserved filters querystring.
"""
match = request.resolver_match
if self.preserve_filters and match:
opts = self.model._meta
current_url = '%s:%s' % (match.app_name, match.url_name)
changelist_url = 'admin:%s_%s_changelist' % (opts.app_label, opts.model_name)
if current_url == changelist_url:
preserved_filters = request.GET.urlencode()
else:
preserved_filters = request.GET.get('_changelist_filters')
if preserved_filters:
return urlencode({'_changelist_filters': preserved_filters})
return ''
def construct_change_message(self, request, form, formsets):
"""
Construct a change message from a changed object.
"""
change_message = []
if form.changed_data:
change_message.append(_('Changed %s.') % get_text_list(form.changed_data, _('and')))
if formsets:
for formset in formsets:
for added_object in formset.new_objects:
change_message.append(_('Added %(name)s "%(object)s".')
% {'name': force_text(added_object._meta.verbose_name),
'object': force_text(added_object)})
for changed_object, changed_fields in formset.changed_objects:
change_message.append(_('Changed %(list)s for %(name)s "%(object)s".')
% {'list': get_text_list(changed_fields, _('and')),
'name': force_text(changed_object._meta.verbose_name),
'object': force_text(changed_object)})
for deleted_object in formset.deleted_objects:
change_message.append(_('Deleted %(name)s "%(object)s".')
% {'name': force_text(deleted_object._meta.verbose_name),
'object': force_text(deleted_object)})
change_message = ' '.join(change_message)
return change_message or _('No fields changed.')
def message_user(self, request, message, level=messages.INFO, extra_tags='',
fail_silently=False):
"""
Send a message to the user. The default implementation
posts a message using the django.contrib.messages backend.
Exposes almost the same API as messages.add_message(), but accepts the
positional arguments in a different order to maintain backwards
compatibility. For convenience, it accepts the `level` argument as
a string rather than the usual level number.
"""
if not isinstance(level, int):
# attempt to get the level if passed a string
try:
level = getattr(messages.constants, level.upper())
except AttributeError:
levels = messages.constants.DEFAULT_TAGS.values()
levels_repr = ', '.join('`%s`' % l for l in levels)
raise ValueError('Bad message level string: `%s`. '
'Possible values are: %s' % (level, levels_repr))
messages.add_message(request, level, message, extra_tags=extra_tags,
fail_silently=fail_silently)
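    # Usage sketch: thanks to the getattr() fallback above, the level can be
    # passed as a string as well as a messages constant.
    #
    #     self.message_user(request, "Object archived.", level='WARNING')
    #     self.message_user(request, "Saved.", level=messages.SUCCESS)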
def save_form(self, request, form, change):
"""
Given a ModelForm return an unsaved instance. ``change`` is True if
the object is being changed, and False if it's being added.
"""
return form.save(commit=False)
def save_model(self, request, obj, form, change):
"""
Given a model instance save it to the database.
"""
obj.save()
def delete_model(self, request, obj):
"""
Given a model instance delete it from the database.
"""
obj.delete()
def save_formset(self, request, form, formset, change):
"""
Given an inline formset save it to the database.
"""
formset.save()
def save_related(self, request, form, formsets, change):
"""
Given the ``HttpRequest``, the parent ``ModelForm`` instance, the
list of inline formsets and a boolean value based on whether the
parent is being added or changed, save the related objects to the
database. Note that at this point save_form() and save_model() have
already been called.
"""
form.save_m2m()
for formset in formsets:
self.save_formset(request, form, formset, change=change)
def render_change_form(self, request, context, add=False, change=False, form_url='', obj=None):
opts = self.model._meta
app_label = opts.app_label
preserved_filters = self.get_preserved_filters(request)
form_url = add_preserved_filters({'preserved_filters': preserved_filters, 'opts': opts}, form_url)
view_on_site_url = self.get_view_on_site_url(obj)
context.update({
'add': add,
'change': change,
'has_add_permission': self.has_add_permission(request),
'has_change_permission': self.has_change_permission(request, obj),
'has_delete_permission': self.has_delete_permission(request, obj),
'has_file_field': True, # FIXME - this should check if form or formsets have a FileField,
'has_absolute_url': view_on_site_url is not None,
'absolute_url': view_on_site_url,
'form_url': form_url,
'opts': opts,
'content_type_id': get_content_type_for_model(self.model).pk,
'save_as': self.save_as,
'save_on_top': self.save_on_top,
'to_field_var': TO_FIELD_VAR,
'is_popup_var': IS_POPUP_VAR,
'app_label': app_label,
})
if add and self.add_form_template is not None:
form_template = self.add_form_template
else:
form_template = self.change_form_template
return TemplateResponse(request, form_template or [
"admin/%s/%s/change_form.html" % (app_label, opts.model_name),
"admin/%s/change_form.html" % app_label,
"admin/change_form.html"
], context, current_app=self.admin_site.name)
def response_add(self, request, obj, post_url_continue=None):
"""
Determines the HttpResponse for the add_view stage.
"""
opts = obj._meta
pk_value = obj._get_pk_val()
preserved_filters = self.get_preserved_filters(request)
msg_dict = {'name': force_text(opts.verbose_name), 'obj': force_text(obj)}
# Here, we distinguish between different save types by checking for
# the presence of keys in request.POST.
if IS_POPUP_VAR in request.POST:
to_field = request.POST.get(TO_FIELD_VAR)
if to_field:
attr = str(to_field)
else:
attr = obj._meta.pk.attname
value = obj.serializable_value(attr)
return SimpleTemplateResponse('admin/popup_response.html', {
'pk_value': escape(pk_value), # for possible backwards-compatibility
'value': escape(value),
'obj': escapejs(obj)
})
elif "_continue" in request.POST:
msg = _('The %(name)s "%(obj)s" was added successfully. You may edit it again below.') % msg_dict
self.message_user(request, msg, messages.SUCCESS)
if post_url_continue is None:
post_url_continue = reverse('admin:%s_%s_change' %
(opts.app_label, opts.model_name),
args=(quote(pk_value),),
current_app=self.admin_site.name)
post_url_continue = add_preserved_filters({'preserved_filters': preserved_filters, 'opts': opts}, post_url_continue)
return HttpResponseRedirect(post_url_continue)
elif "_addanother" in request.POST:
msg = _('The %(name)s "%(obj)s" was added successfully. You may add another %(name)s below.') % msg_dict
self.message_user(request, msg, messages.SUCCESS)
redirect_url = request.path
redirect_url = add_preserved_filters({'preserved_filters': preserved_filters, 'opts': opts}, redirect_url)
return HttpResponseRedirect(redirect_url)
else:
msg = _('The %(name)s "%(obj)s" was added successfully.') % msg_dict
self.message_user(request, msg, messages.SUCCESS)
return self.response_post_save_add(request, obj)
def response_change(self, request, obj):
"""
Determines the HttpResponse for the change_view stage.
"""
opts = self.model._meta
pk_value = obj._get_pk_val()
preserved_filters = self.get_preserved_filters(request)
msg_dict = {'name': force_text(opts.verbose_name), 'obj': force_text(obj)}
if "_continue" in request.POST:
msg = _('The %(name)s "%(obj)s" was changed successfully. You may edit it again below.') % msg_dict
self.message_user(request, msg, messages.SUCCESS)
redirect_url = request.path
redirect_url = add_preserved_filters({'preserved_filters': preserved_filters, 'opts': opts}, redirect_url)
return HttpResponseRedirect(redirect_url)
elif "_saveasnew" in request.POST:
msg = _('The %(name)s "%(obj)s" was added successfully. You may edit it again below.') % msg_dict
self.message_user(request, msg, messages.SUCCESS)
redirect_url = reverse('admin:%s_%s_change' %
(opts.app_label, opts.model_name),
args=(pk_value,),
current_app=self.admin_site.name)
redirect_url = add_preserved_filters({'preserved_filters': preserved_filters, 'opts': opts}, redirect_url)
return HttpResponseRedirect(redirect_url)
elif "_addanother" in request.POST:
msg = _('The %(name)s "%(obj)s" was changed successfully. You may add another %(name)s below.') % msg_dict
self.message_user(request, msg, messages.SUCCESS)
redirect_url = reverse('admin:%s_%s_add' %
(opts.app_label, opts.model_name),
current_app=self.admin_site.name)
redirect_url = add_preserved_filters({'preserved_filters': preserved_filters, 'opts': opts}, redirect_url)
return HttpResponseRedirect(redirect_url)
else:
msg = _('The %(name)s "%(obj)s" was changed successfully.') % msg_dict
self.message_user(request, msg, messages.SUCCESS)
return self.response_post_save_change(request, obj)
def response_post_save_add(self, request, obj):
"""
Figure out where to redirect after the 'Save' button has been pressed
when adding a new object.
"""
opts = self.model._meta
        if self.has_change_permission(request, None):
            post_url = reverse('admin:%s_%s_changelist' %
                               (opts.app_label, opts.model_name),
                               current_app=self.admin_site.name)
            preserved_filters = self.get_preserved_filters(request)
            post_url = add_preserved_filters({'preserved_filters': preserved_filters, 'opts': opts}, post_url)
else:
post_url = reverse('admin:index',
current_app=self.admin_site.name)
return HttpResponseRedirect(post_url)
def response_post_save_change(self, request, obj):
"""
Figure out where to redirect after the 'Save' button has been pressed
when editing an existing object.
"""
opts = self.model._meta
if self.has_change_permission(request, None):
post_url = reverse('admin:%s_%s_changelist' %
(opts.app_label, opts.model_name),
current_app=self.admin_site.name)
preserved_filters = self.get_preserved_filters(request)
post_url = add_preserved_filters({'preserved_filters': preserved_filters, 'opts': opts}, post_url)
else:
post_url = reverse('admin:index',
current_app=self.admin_site.name)
return HttpResponseRedirect(post_url)
def response_action(self, request, queryset):
"""
Handle an admin action. This is called if a request is POSTed to the
changelist; it returns an HttpResponse if the action was handled, and
None otherwise.
"""
# There can be multiple action forms on the page (at the top
# and bottom of the change list, for example). Get the action
# whose button was pushed.
try:
action_index = int(request.POST.get('index', 0))
except ValueError:
action_index = 0
# Construct the action form.
data = request.POST.copy()
data.pop(helpers.ACTION_CHECKBOX_NAME, None)
data.pop("index", None)
# Use the action whose button was pushed
try:
data.update({'action': data.getlist('action')[action_index]})
except IndexError:
# If we didn't get an action from the chosen form that's invalid
# POST data, so by deleting action it'll fail the validation check
# below. So no need to do anything here
pass
action_form = self.action_form(data, auto_id=None)
action_form.fields['action'].choices = self.get_action_choices(request)
# If the form's valid we can handle the action.
if action_form.is_valid():
action = action_form.cleaned_data['action']
select_across = action_form.cleaned_data['select_across']
func = self.get_actions(request)[action][0]
# Get the list of selected PKs. If nothing's selected, we can't
# perform an action on it, so bail. Except we want to perform
# the action explicitly on all objects.
selected = request.POST.getlist(helpers.ACTION_CHECKBOX_NAME)
if not selected and not select_across:
# Reminder that something needs to be selected or nothing will happen
msg = _("Items must be selected in order to perform "
"actions on them. No items have been changed.")
self.message_user(request, msg, messages.WARNING)
return None
if not select_across:
# Perform the action only on the selected objects
queryset = queryset.filter(pk__in=selected)
response = func(self, request, queryset)
# Actions may return an HttpResponse-like object, which will be
# used as the response from the POST. If not, we'll be a good
# little HTTP citizen and redirect back to the changelist page.
if isinstance(response, HttpResponseBase):
return response
else:
return HttpResponseRedirect(request.get_full_path())
else:
msg = _("No action selected.")
self.message_user(request, msg, messages.WARNING)
return None
def response_delete(self, request, obj_display):
"""
Determines the HttpResponse for the delete_view stage.
"""
opts = self.model._meta
self.message_user(request,
_('The %(name)s "%(obj)s" was deleted successfully.') % {
'name': force_text(opts.verbose_name),
'obj': force_text(obj_display)
}, messages.SUCCESS)
if self.has_change_permission(request, None):
post_url = reverse('admin:%s_%s_changelist' %
(opts.app_label, opts.model_name),
current_app=self.admin_site.name)
preserved_filters = self.get_preserved_filters(request)
post_url = add_preserved_filters(
{'preserved_filters': preserved_filters, 'opts': opts}, post_url
)
else:
post_url = reverse('admin:index',
current_app=self.admin_site.name)
return HttpResponseRedirect(post_url)
def render_delete_form(self, request, context):
opts = self.model._meta
app_label = opts.app_label
return TemplateResponse(request,
self.delete_confirmation_template or [
"admin/{}/{}/delete_confirmation.html".format(app_label, opts.model_name),
"admin/{}/delete_confirmation.html".format(app_label),
"admin/delete_confirmation.html"
], context, current_app=self.admin_site.name)
def get_inline_formsets(self, request, formsets, inline_instances,
obj=None):
inline_admin_formsets = []
for inline, formset in zip(inline_instances, formsets):
fieldsets = list(inline.get_fieldsets(request, obj))
readonly = list(inline.get_readonly_fields(request, obj))
prepopulated = dict(inline.get_prepopulated_fields(request, obj))
inline_admin_formset = helpers.InlineAdminFormSet(inline, formset,
fieldsets, prepopulated, readonly, model_admin=self)
inline_admin_formsets.append(inline_admin_formset)
return inline_admin_formsets
def get_changeform_initial_data(self, request):
"""
Get the initial form data.
Unless overridden, this populates from the GET params.
"""
initial = dict(request.GET.items())
for k in initial:
try:
f = self.model._meta.get_field(k)
except models.FieldDoesNotExist:
continue
# We have to special-case M2Ms as a list of comma-separated PKs.
if isinstance(f, models.ManyToManyField):
initial[k] = initial[k].split(",")
return initial
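    # Usage sketch (hypothetical querystring): GET parameters prefill the add
    # form, with M2M values split on commas as above.
    #
    #     /admin/app/article/add/?title=Draft&tags=1,2,3
    #     -> initial == {'title': 'Draft', 'tags': ['1', '2', '3']}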
@csrf_protect_m
@transaction.atomic
def changeform_view(self, request, object_id=None, form_url='', extra_context=None):
to_field = request.POST.get(TO_FIELD_VAR, request.GET.get(TO_FIELD_VAR))
if to_field and not self.to_field_allowed(request, to_field):
raise DisallowedModelAdminToField("The field %s cannot be referenced." % to_field)
model = self.model
opts = model._meta
add = object_id is None
if add:
if not self.has_add_permission(request):
raise PermissionDenied
obj = None
else:
obj = self.get_object(request, unquote(object_id))
if not self.has_change_permission(request, obj):
raise PermissionDenied
if obj is None:
raise Http404(_('%(name)s object with primary key %(key)r does not exist.') % {
'name': force_text(opts.verbose_name), 'key': escape(object_id)})
if request.method == 'POST' and "_saveasnew" in request.POST:
return self.add_view(request, form_url=reverse('admin:%s_%s_add' % (
opts.app_label, opts.model_name),
current_app=self.admin_site.name))
ModelForm = self.get_form(request, obj)
if request.method == 'POST':
form = ModelForm(request.POST, request.FILES, instance=obj)
if form.is_valid():
form_validated = True
new_object = self.save_form(request, form, change=not add)
else:
form_validated = False
new_object = form.instance
formsets, inline_instances = self._create_formsets(request, new_object)
if all_valid(formsets) and form_validated:
self.save_model(request, new_object, form, not add)
self.save_related(request, form, formsets, not add)
if add:
self.log_addition(request, new_object)
return self.response_add(request, new_object)
else:
change_message = self.construct_change_message(request, form, formsets)
self.log_change(request, new_object, change_message)
return self.response_change(request, new_object)
else:
if add:
initial = self.get_changeform_initial_data(request)
form = ModelForm(initial=initial)
formsets, inline_instances = self._create_formsets(request, self.model())
else:
form = ModelForm(instance=obj)
formsets, inline_instances = self._create_formsets(request, obj)
adminForm = helpers.AdminForm(
form,
list(self.get_fieldsets(request, obj)),
self.get_prepopulated_fields(request, obj),
self.get_readonly_fields(request, obj),
model_admin=self)
media = self.media + adminForm.media
inline_formsets = self.get_inline_formsets(request, formsets, inline_instances, obj)
for inline_formset in inline_formsets:
media = media + inline_formset.media
context = dict(self.admin_site.each_context(),
title=(_('Add %s') if add else _('Change %s')) % force_text(opts.verbose_name),
adminform=adminForm,
object_id=object_id,
original=obj,
is_popup=(IS_POPUP_VAR in request.POST or
IS_POPUP_VAR in request.GET),
to_field=to_field,
media=media,
inline_admin_formsets=inline_formsets,
errors=helpers.AdminErrorList(form, formsets),
preserved_filters=self.get_preserved_filters(request),
)
context.update(extra_context or {})
return self.render_change_form(request, context, add=add, change=not add, obj=obj, form_url=form_url)
def add_view(self, request, form_url='', extra_context=None):
return self.changeform_view(request, None, form_url, extra_context)
def change_view(self, request, object_id, form_url='', extra_context=None):
return self.changeform_view(request, object_id, form_url, extra_context)
@csrf_protect_m
def changelist_view(self, request, extra_context=None):
"""
The 'change list' admin view for this model.
"""
from django.contrib.admin.views.main import ERROR_FLAG
opts = self.model._meta
app_label = opts.app_label
if not self.has_change_permission(request, None):
raise PermissionDenied
list_display = self.get_list_display(request)
list_display_links = self.get_list_display_links(request, list_display)
list_filter = self.get_list_filter(request)
search_fields = self.get_search_fields(request)
# Check actions to see if any are available on this changelist
actions = self.get_actions(request)
if actions:
# Add the action checkboxes if there are any actions available.
list_display = ['action_checkbox'] + list(list_display)
ChangeList = self.get_changelist(request)
try:
cl = ChangeList(request, self.model, list_display,
list_display_links, list_filter, self.date_hierarchy,
search_fields, self.list_select_related, self.list_per_page,
self.list_max_show_all, self.list_editable, self)
except IncorrectLookupParameters:
# Wacky lookup parameters were given, so redirect to the main
# changelist page, without parameters, and pass an 'invalid=1'
# parameter via the query string. If wacky parameters were given
# and the 'invalid=1' parameter was already in the query string,
# something is screwed up with the database, so display an error
# page.
if ERROR_FLAG in request.GET.keys():
return SimpleTemplateResponse('admin/invalid_setup.html', {
'title': _('Database error'),
})
return HttpResponseRedirect(request.path + '?' + ERROR_FLAG + '=1')
# If the request was POSTed, this might be a bulk action or a bulk
# edit. Try to look up an action or confirmation first, but if this
# isn't an action the POST will fall through to the bulk edit check,
# below.
action_failed = False
selected = request.POST.getlist(helpers.ACTION_CHECKBOX_NAME)
# Actions with no confirmation
if (actions and request.method == 'POST' and
'index' in request.POST and '_save' not in request.POST):
if selected:
response = self.response_action(request, queryset=cl.get_queryset(request))
if response:
return response
else:
action_failed = True
else:
msg = _("Items must be selected in order to perform "
"actions on them. No items have been changed.")
self.message_user(request, msg, messages.WARNING)
action_failed = True
# Actions with confirmation
if (actions and request.method == 'POST' and
helpers.ACTION_CHECKBOX_NAME in request.POST and
'index' not in request.POST and '_save' not in request.POST):
if selected:
response = self.response_action(request, queryset=cl.get_queryset(request))
if response:
return response
else:
action_failed = True
# If we're allowing changelist editing, we need to construct a formset
# for the changelist given all the fields to be edited. Then we'll
# use the formset to validate/process POSTed data.
formset = cl.formset = None
# Handle POSTed bulk-edit data.
if (request.method == "POST" and cl.list_editable and
'_save' in request.POST and not action_failed):
FormSet = self.get_changelist_formset(request)
formset = cl.formset = FormSet(request.POST, request.FILES, queryset=cl.result_list)
if formset.is_valid():
changecount = 0
for form in formset.forms:
if form.has_changed():
obj = self.save_form(request, form, change=True)
self.save_model(request, obj, form, change=True)
self.save_related(request, form, formsets=[], change=True)
change_msg = self.construct_change_message(request, form, None)
self.log_change(request, obj, change_msg)
changecount += 1
if changecount:
if changecount == 1:
name = force_text(opts.verbose_name)
else:
name = force_text(opts.verbose_name_plural)
msg = ungettext("%(count)s %(name)s was changed successfully.",
"%(count)s %(name)s were changed successfully.",
changecount) % {'count': changecount,
'name': name,
'obj': force_text(obj)}
self.message_user(request, msg, messages.SUCCESS)
return HttpResponseRedirect(request.get_full_path())
# Handle GET -- construct a formset for display.
elif cl.list_editable:
FormSet = self.get_changelist_formset(request)
formset = cl.formset = FormSet(queryset=cl.result_list)
# Build the list of media to be used by the formset.
if formset:
media = self.media + formset.media
else:
media = self.media
# Build the action form and populate it with available actions.
if actions:
action_form = self.action_form(auto_id=None)
action_form.fields['action'].choices = self.get_action_choices(request)
else:
action_form = None
selection_note_all = ungettext('%(total_count)s selected',
'All %(total_count)s selected', cl.result_count)
context = dict(
self.admin_site.each_context(),
module_name=force_text(opts.verbose_name_plural),
selection_note=_('0 of %(cnt)s selected') % {'cnt': len(cl.result_list)},
selection_note_all=selection_note_all % {'total_count': cl.result_count},
title=cl.title,
is_popup=cl.is_popup,
to_field=cl.to_field,
cl=cl,
media=media,
has_add_permission=self.has_add_permission(request),
opts=cl.opts,
action_form=action_form,
actions_on_top=self.actions_on_top,
actions_on_bottom=self.actions_on_bottom,
actions_selection_counter=self.actions_selection_counter,
preserved_filters=self.get_preserved_filters(request),
)
context.update(extra_context or {})
return TemplateResponse(request, self.change_list_template or [
'admin/%s/%s/change_list.html' % (app_label, opts.model_name),
'admin/%s/change_list.html' % app_label,
'admin/change_list.html'
], context, current_app=self.admin_site.name)
@csrf_protect_m
@transaction.atomic
def delete_view(self, request, object_id, extra_context=None):
"The 'delete' admin view for this model."
opts = self.model._meta
app_label = opts.app_label
obj = self.get_object(request, unquote(object_id))
if not self.has_delete_permission(request, obj):
raise PermissionDenied
if obj is None:
raise Http404(
_('%(name)s object with primary key %(key)r does not exist.') %
{'name': force_text(opts.verbose_name), 'key': escape(object_id)}
)
using = router.db_for_write(self.model)
# Populate deleted_objects, a data structure of all related objects that
# will also be deleted.
(deleted_objects, perms_needed, protected) = get_deleted_objects(
[obj], opts, request.user, self.admin_site, using)
if request.POST: # The user has already confirmed the deletion.
if perms_needed:
raise PermissionDenied
obj_display = force_text(obj)
self.log_deletion(request, obj, obj_display)
self.delete_model(request, obj)
return self.response_delete(request, obj_display)
object_name = force_text(opts.verbose_name)
if perms_needed or protected:
title = _("Cannot delete %(name)s") % {"name": object_name}
else:
title = _("Are you sure?")
context = dict(
self.admin_site.each_context(),
title=title,
object_name=object_name,
object=obj,
deleted_objects=deleted_objects,
perms_lacking=perms_needed,
protected=protected,
opts=opts,
app_label=app_label,
preserved_filters=self.get_preserved_filters(request),
)
context.update(extra_context or {})
return self.render_delete_form(request, context)
def history_view(self, request, object_id, extra_context=None):
"The 'history' admin view for this model."
from django.contrib.admin.models import LogEntry
# First check if the user can see this history.
model = self.model
obj = get_object_or_404(self.get_queryset(request), pk=unquote(object_id))
if not self.has_change_permission(request, obj):
raise PermissionDenied
# Then get the history for this object.
opts = model._meta
app_label = opts.app_label
action_list = LogEntry.objects.filter(
object_id=unquote(object_id),
content_type=get_content_type_for_model(model)
).select_related().order_by('action_time')
context = dict(self.admin_site.each_context(),
title=_('Change history: %s') % force_text(obj),
action_list=action_list,
module_name=capfirst(force_text(opts.verbose_name_plural)),
object=obj,
opts=opts,
preserved_filters=self.get_preserved_filters(request),
)
context.update(extra_context or {})
return TemplateResponse(request, self.object_history_template or [
"admin/%s/%s/object_history.html" % (app_label, opts.model_name),
"admin/%s/object_history.html" % app_label,
"admin/object_history.html"
], context, current_app=self.admin_site.name)
def _create_formsets(self, request, obj):
"Helper function to generate formsets for add/change_view."
formsets = []
inline_instances = []
prefixes = {}
get_formsets_args = [request]
if obj.pk:
get_formsets_args.append(obj)
for FormSet, inline in self.get_formsets_with_inlines(*get_formsets_args):
prefix = FormSet.get_default_prefix()
prefixes[prefix] = prefixes.get(prefix, 0) + 1
if prefixes[prefix] != 1 or not prefix:
prefix = "%s-%s" % (prefix, prefixes[prefix])
formset_params = {
'instance': obj,
'prefix': prefix,
'queryset': inline.get_queryset(request),
}
if request.method == 'POST':
formset_params.update({
'data': request.POST,
'files': request.FILES,
'save_as_new': '_saveasnew' in request.POST
})
formsets.append(FormSet(**formset_params))
inline_instances.append(inline)
return formsets, inline_instances
class InlineModelAdmin(BaseModelAdmin):
"""
Options for inline editing of ``model`` instances.
Provide ``fk_name`` to specify the attribute name of the ``ForeignKey``
from ``model`` to its parent. This is required if ``model`` has more than
one ``ForeignKey`` to its parent.
"""
model = None
fk_name = None
formset = BaseInlineFormSet
extra = 3
min_num = None
max_num = None
template = None
verbose_name = None
verbose_name_plural = None
can_delete = True
checks_class = InlineModelAdminChecks
def __init__(self, parent_model, admin_site):
self.admin_site = admin_site
self.parent_model = parent_model
self.opts = self.model._meta
super(InlineModelAdmin, self).__init__()
if self.verbose_name is None:
self.verbose_name = self.model._meta.verbose_name
if self.verbose_name_plural is None:
self.verbose_name_plural = self.model._meta.verbose_name_plural
@property
def media(self):
extra = '' if settings.DEBUG else '.min'
js = ['jquery%s.js' % extra, 'jquery.init.js', 'inlines%s.js' % extra]
if self.prepopulated_fields:
js.extend(['urlify.js', 'prepopulate%s.js' % extra])
if self.filter_vertical or self.filter_horizontal:
js.extend(['SelectBox.js', 'SelectFilter2.js'])
return forms.Media(js=[static('admin/js/%s' % url) for url in js])
def get_extra(self, request, obj=None, **kwargs):
"""Hook for customizing the number of extra inline forms."""
return self.extra
def get_min_num(self, request, obj=None, **kwargs):
"""Hook for customizing the min number of inline forms."""
return self.min_num
def get_max_num(self, request, obj=None, **kwargs):
"""Hook for customizing the max number of extra inline forms."""
return self.max_num
def get_formset(self, request, obj=None, **kwargs):
"""Returns a BaseInlineFormSet class for use in admin add/change views."""
if 'fields' in kwargs:
fields = kwargs.pop('fields')
else:
fields = flatten_fieldsets(self.get_fieldsets(request, obj))
if self.exclude is None:
exclude = []
else:
exclude = list(self.exclude)
exclude.extend(self.get_readonly_fields(request, obj))
if self.exclude is None and hasattr(self.form, '_meta') and self.form._meta.exclude:
# Take the custom ModelForm's Meta.exclude into account only if the
# InlineModelAdmin doesn't define its own.
exclude.extend(self.form._meta.exclude)
# If exclude is an empty list we use None, since that's the actual
# default.
exclude = exclude or None
can_delete = self.can_delete and self.has_delete_permission(request, obj)
defaults = {
"form": self.form,
"formset": self.formset,
"fk_name": self.fk_name,
"fields": fields,
"exclude": exclude,
"formfield_callback": partial(self.formfield_for_dbfield, request=request),
"extra": self.get_extra(request, obj, **kwargs),
"min_num": self.get_min_num(request, obj, **kwargs),
"max_num": self.get_max_num(request, obj, **kwargs),
"can_delete": can_delete,
}
defaults.update(kwargs)
base_model_form = defaults['form']
class DeleteProtectedModelForm(base_model_form):
def hand_clean_DELETE(self):
"""
We don't validate the 'DELETE' field itself because on
templates it's not rendered using the field information, but
just using a generic "deletion_field" of the InlineModelAdmin.
"""
if self.cleaned_data.get(DELETION_FIELD_NAME, False):
using = router.db_for_write(self._meta.model)
collector = NestedObjects(using=using)
collector.collect([self.instance])
if collector.protected:
objs = []
for p in collector.protected:
objs.append(
# Translators: Model verbose name and instance representation, suitable to be an item in a list
_('%(class_name)s %(instance)s') % {
'class_name': p._meta.verbose_name,
'instance': p}
)
params = {'class_name': self._meta.model._meta.verbose_name,
'instance': self.instance,
'related_objects': get_text_list(objs, _('and'))}
msg = _("Deleting %(class_name)s %(instance)s would require "
"deleting the following protected related objects: "
"%(related_objects)s")
raise ValidationError(msg, code='deleting_protected', params=params)
def is_valid(self):
result = super(DeleteProtectedModelForm, self).is_valid()
self.hand_clean_DELETE()
return result
defaults['form'] = DeleteProtectedModelForm
if defaults['fields'] is None and not modelform_defines_fields(defaults['form']):
defaults['fields'] = forms.ALL_FIELDS
return inlineformset_factory(self.parent_model, self.model, **defaults)
def get_fields(self, request, obj=None):
if self.fields:
return self.fields
form = self.get_formset(request, obj, fields=None).form
return list(form.base_fields) + list(self.get_readonly_fields(request, obj))
def get_queryset(self, request):
queryset = super(InlineModelAdmin, self).get_queryset(request)
if not self.has_change_permission(request):
queryset = queryset.none()
return queryset
def has_add_permission(self, request):
if self.opts.auto_created:
# We're checking the rights to an auto-created intermediate model,
# which doesn't have its own individual permissions. The user needs
# to have the change permission for the related model in order to
# be able to do anything with the intermediate model.
return self.has_change_permission(request)
return super(InlineModelAdmin, self).has_add_permission(request)
def has_change_permission(self, request, obj=None):
opts = self.opts
if opts.auto_created:
# The model was auto-created as intermediary for a
# ManyToMany-relationship, find the target model
for field in opts.fields:
if field.rel and field.rel.to != self.parent_model:
opts = field.rel.to._meta
break
codename = get_permission_codename('change', opts)
return request.user.has_perm("%s.%s" % (opts.app_label, codename))
def has_delete_permission(self, request, obj=None):
if self.opts.auto_created:
# We're checking the rights to an auto-created intermediate model,
# which doesn't have its own individual permissions. The user needs
# to have the change permission for the related model in order to
# be able to do anything with the intermediate model.
return self.has_change_permission(request, obj)
return super(InlineModelAdmin, self).has_delete_permission(request, obj)
class StackedInline(InlineModelAdmin):
template = 'admin/edit_inline/stacked.html'
class TabularInline(InlineModelAdmin):
    template = 'admin/edit_inline/tabular.html'
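# Hedged usage sketch (not part of this module): a hypothetical admin
# configuration wiring the inline classes above into a ModelAdmin. `Author`
# and `Book` are illustrative models, not defined anywhere in this file.
#
#     class BookInline(TabularInline):
#         model = Book
#         extra = 1
#
#     class AuthorAdmin(ModelAdmin):
#         inlines = [BookInline]
#
#     admin.site.register(Author, AuthorAdmin)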
# --- almanac.py ---
#
# Copyright (c) 2009, 2011, 2012 Tom Keffer <[email protected]>
#
# See the file LICENSE.txt for your full rights.
#
# $Revision: 1046 $
# $Author: tkeffer $
# $Date: 2013-02-21 06:38:26 -0800 (Thu, 21 Feb 2013) $
#
"""Almanac data
This module can optionally use PyEphem, which offers high quality
astronomical calculations. See http://rhodesmill.org/pyephem. """
import time
import sys
import math
import weeutil.Moon
import weeutil.weeutil
import weewx.units
# If the user has installed ephem, use it. Otherwise, fall back to the weeutil algorithms:
try:
import ephem
except ImportError:
import weeutil.Sun
# NB: In order to avoid an 'autocall' bug in Cheetah versions before 2.1,
# this class must not be a "new-style" class.
class Almanac():
"""Almanac data.
ATTRIBUTES.
As a minimum, the following attributes are available:
sunrise: Time (local) upper limb of the sun rises above the horizon, formatted using the format 'timeformat'.
sunset: Time (local) upper limb of the sun sinks below the horizon, formatted using the format 'timeformat'.
    moon_phase: A description of the moon phase (e.g. "new moon", "waxing crescent", etc.)
moon_fullness: Percent fullness of the moon (0=new moon, 100=full moon)
    If the module 'ephem' is used, then many other attributes are available.
Here are a few examples:
sun.rise: Time upper limb of sun will rise above the horizon today in unix epoch time
sun.transit: Time of transit today (sun over meridian) in unix epoch time
sun.previous_sunrise: Time of last sunrise in unix epoch time
sun.az: Azimuth (in degrees) of sun
sun.alt: Altitude (in degrees) of sun
mars.rise: Time when upper limb of mars will rise above horizon today in unix epoch time
mars.ra: Right ascension of mars
etc.
EXAMPLES (note that these will only work in the Pacific Time Zone)
>>> t = 1238180400
>>> print timestamp_to_string(t)
2009-03-27 12:00:00 PDT (1238180400)
>>> almanac = Almanac(t, 46.0, -122.0)
Test backwards compatibility with attribute 'moon_fullness':
>>> print "Fullness of the moon (rounded) is %.2f%% [%s]" % (almanac.moon_fullness, almanac.moon_phase)
Fullness of the moon (rounded) is 2.00% [new (totally dark)]
Now get a more precise result for fullness of the moon:
>>> print "Fullness of the moon (more precise) is %.2f%%" % almanac.moon.moon_phase
Fullness of the moon (more precise) is 1.70%
Test backwards compatibility with attributes 'sunrise' and 'sunset'
>>> print "Sunrise, sunset:", almanac.sunrise, almanac.sunset
Sunrise, sunset: 06:56 19:30
Get sunrise, sun transit, and sunset using the new 'ephem' syntax:
>>> print "Sunrise, sun transit, sunset:", almanac.sun.rise, almanac.sun.transit, almanac.sun.set
Sunrise, sun transit, sunset: 06:56 13:13 19:30
Do the same with the moon:
>>> print "Moon rise, transit, set:", almanac.moon.rise, almanac.moon.transit, almanac.moon.set
Moon rise, transit, set: 06:59 14:01 21:20
    Exercise equinox, solstice routines
    >>> print almanac.next_vernal_equinox
    20-Mar-2010 10:32
    >>> print almanac.next_autumnal_equinox
22-Sep-2009 14:18
>>> print almanac.next_summer_solstice
20-Jun-2009 22:45
>>> print almanac.previous_winter_solstice
21-Dec-2008 04:03
>>> print almanac.next_winter_solstice
21-Dec-2009 09:46
Exercise moon state routines
>>> print almanac.next_full_moon
09-Apr-2009 07:55
>>> print almanac.next_new_moon
24-Apr-2009 20:22
>>> print almanac.next_first_quarter_moon
02-Apr-2009 07:33
Now location of the sun and moon
>>> print "Solar azimuth, altitude = (%.2f, %.2f)" % (almanac.sun.az, almanac.sun.alt)
Solar azimuth, altitude = (154.14, 44.02)
>>> print "Moon azimuth, altitude = (%.2f, %.2f)" % (almanac.moon.az, almanac.moon.alt)
Moon azimuth, altitude = (133.55, 47.89)
Try the pyephem "Naval Observatory" example.
>>> t = 1252252800
>>> print timestamp_to_gmtime(t)
2009-09-06 16:00:00 UTC (1252252800)
>>> atlanta = Almanac(t, 33.8, -84.4, pressure=0, horizon=-34.0/60.0)
>>> # Print it in GMT, so it can easily be compared to the example:
>>> print timestamp_to_gmtime(atlanta.sun.previous_rising.raw)
2009-09-06 11:14:56 UTC (1252235696)
>>> print timestamp_to_gmtime(atlanta.moon.next_setting.raw)
2009-09-07 14:05:29 UTC (1252332329)
Now try the civil twilight examples:
>>> print timestamp_to_gmtime(atlanta(horizon=-6).sun(use_center=1).previous_rising.raw)
2009-09-06 10:49:40 UTC (1252234180)
>>> print timestamp_to_gmtime(atlanta(horizon=-6).sun(use_center=1).next_setting.raw)
2009-09-07 00:21:22 UTC (1252282882)
"""
def __init__(self, time_ts, lat, lon,
altitude=None, # Use 'None' in case a bad value is passed in
temperature=None, # "
pressure=None, # "
horizon=None, # "
moon_phases=weeutil.Moon.moon_phases,
formatter=weewx.units.Formatter()):
"""Initialize an instance of Almanac
time_ts: A unix epoch timestamp with the time of the almanac. If None, the
present time will be used.
lat, lon: Observer's location
altitude: Observer's elevation in **meters**. [Optional. Default is 0 (sea level)]
temperature: Observer's temperature in **degrees Celsius**. [Optional. Default is 15.0]
pressure: Observer's atmospheric pressure in **mBars**. [Optional. Default is 1010]
horizon: Angle of the horizon in degrees [Optional. Default is zero]
moon_phases: An array of 8 strings with descriptions of the moon
phase. [optional. If not given, then weeutil.Moon.moon_phases will be used]
formatter: An instance of weewx.units.Formatter() with the formatting information
to be used.
"""
self.time_ts = time_ts if time_ts else time.time()
self.time_djd = timestamp_to_djd(self.time_ts)
self.lat = lat
self.lon = lon
self.altitude = altitude if altitude is not None else 0.0
self.temperature = temperature if temperature is not None else 15.0
self.pressure = pressure if pressure is not None else 1010.0
self.horizon = horizon if horizon is not None else 0.0
self.moon_phases = moon_phases
self.formatter = formatter
(y,m,d) = time.localtime(self.time_ts)[0:3]
(self.moon_index, self._moon_fullness) = weeutil.Moon.moon_phase(y, m, d)
self.moon_phase = self.moon_phases[self.moon_index]
# Check to see whether the user has module 'ephem'.
if 'ephem' in sys.modules:
self.hasExtras = True
else:
# No ephem package. Use the weeutil algorithms, which supply a minimum of functionality
(sunrise_utc, sunset_utc) = weeutil.Sun.sunRiseSet(y, m, d, self.lon, self.lat)
# The above function returns its results in UTC hours. Convert
# to a local time tuple
sunrise_tt = weeutil.weeutil.utc_to_local_tt(y, m, d, sunrise_utc)
sunset_tt = weeutil.weeutil.utc_to_local_tt(y, m, d, sunset_utc)
self._sunrise = time.strftime("%H:%M", sunrise_tt)
self._sunset = time.strftime("%H:%M", sunset_tt)
self.hasExtras = False
# Shortcuts, used for backwards compatibility
@property
def sunrise(self):
return self.sun.rise if self.hasExtras else self._sunrise
@property
def sunset(self):
return self.sun.set if self.hasExtras else self._sunset
@property
def moon_fullness(self):
return int(self.moon.moon_phase+0.5) if self.hasExtras else self._moon_fullness
# What follows is a bit of Python wizardry to allow syntax such as:
# almanac(horizon=-0.5).sun.rise
def __call__(self, **kwargs):
"""Call an almanac object as a functor. This allows overriding the values
used when the Almanac instance was initialized.
Named arguments:
Any named arguments will be passed on to the initializer of the ObserverBinder,
overriding any default values. These are all optional:
almanac_time: The observer's time in unix epoch time.
lat: The observer's latitude in degrees
lon: The observer's longitude in degrees
altitude: The observer's altitude in meters
horizon: The horizon angle in degrees
temperature: The observer's temperature (used to calculate refraction)
pressure: The observer's pressure (used to calculate refraction)
"""
# Using an encapsulated class allows easy access to the default values
class ObserverBinder(object):
# Use the default values provided by the outer class (Almanac):
def __init__(self, almanac_time=self.time_ts, lat=self.lat, lon=self.lon,
altitude=self.altitude, horizon=self.horizon, temperature=self.temperature,
pressure=self.pressure, formatter=self.formatter):
# Build an ephem Observer object
self.observer = ephem.Observer()
self.observer.date = timestamp_to_djd(almanac_time)
self.observer.lat = math.radians(lat)
self.observer.long = math.radians(lon)
self.observer.elev = altitude
self.observer.horizon = math.radians(horizon)
self.observer.temp = temperature
self.observer.pressure= pressure
self.formatter = formatter
def __getattr__(self, body):
"""Return a BodyWrapper that binds the observer to a heavenly body.
If there is no such body an exception of type AttributeError will
be raised.
body: A heavenly body. Examples, 'sun', 'moon', 'jupiter'
Returns:
An instance of a BodyWrapper. It will bind together the heavenly
body (an instance of something like ephem.Jupiter) and the observer
(an instance of ephem.Observer)
"""
# Find the module used by pyephem. For example, the module used for
# 'mars' is 'ephem.Mars'. If there is no such module, an exception
# of type AttributeError will get thrown.
ephem_module = getattr(ephem, body.capitalize())
# Now, together with the observer object, return an
# appropriate BodyWrapper
return BodyWrapper(ephem_module, self.observer, self.formatter)
# This will override the default values with any explicit parameters in kwargs:
return ObserverBinder(**kwargs)
def __getattr__(self, attr):
if not self.hasExtras:
# If the Almanac does not have extended capabilities, we can't
# do any of the following. Raise an exception.
raise AttributeError, "Unknown attribute %s" % attr
# We do have extended capability. Check to see if the attribute is a calendar event:
elif attr in ['previous_equinox', 'next_equinox',
'previous_solstice', 'next_solstice',
'previous_autumnal_equinox', 'next_autumnal_equinox',
'previous_vernal_equinox', 'next_vernal_equinox',
'previous_winter_solstice', 'next_winter_solstice',
'previous_summer_solstice', 'next_summer_solstice',
'previous_new_moon', 'next_new_moon',
'previous_first_quarter_moon', 'next_first_quarter_moon',
'previous_full_moon', 'next_full_moon',
'previous_last_quarter_moon', 'next_last_quarter_moon']:
# This is how you call a function on an instance when all you have
# is the function's name as a string
djd = getattr(ephem, attr)(self.time_djd)
return weewx.units.ValueHelper((djd, "dublin_jd", "group_time"),
context="ephem_year", formatter=self.formatter)
else:
# It's not a calendar event. The attribute must be a heavenly body
# (such as 'sun', or 'jupiter'). Create an instance of
# ObserverBinder by calling the __call__ function in Almanac, but
# with no parameters
binder = self()
# Now try getting the body as an attribute. If successful, an
# instance of BodyWrapper will be returned. If not, an exception of
# type AttributeError will be raised.
return getattr(binder, attr)
fn_map = {'rise' : 'next_rising',
'set' : 'next_setting',
'transit' : 'next_transit'}
class BodyWrapper(object):
"""This class wraps a celestial body. It returns results in degrees (instead of radians)
and percent (instead of fractions). For times, it returns the results as a ValueHelper.
It also deals with the unfortunate design decision in pyephem to change
the state of the celestial body when using it as an argument in certain functions."""
def __init__(self, body_factory, observer, formatter):
"""Initialize a wrapper
body_factory: A function that returns an instance of the body
to be wrapped. Example would be ephem.Sun
observer: An instance of ephem.Observer, containing the observer's lat, lon, time, etc.
formatter: An instance of weewx.units.Formatter(), containing the formatting
to be used for times.
"""
self.body_factory = body_factory
self.observer = observer
self.formatter = formatter
self.body = body_factory(observer)
self.use_center = False
# Calculate and store the start-of-day in Dublin Julian Days:
(y,m,d) = time.localtime(djd_to_timestamp(observer.date))[0:3]
self.sod_djd = timestamp_to_djd(time.mktime((y,m,d,0,0,0,0,0,-1)))
def __call__(self, use_center=False):
self.use_center = use_center
return self
def __getattr__(self, attr):
if attr in ['az', 'alt', 'a_ra', 'a_dec', 'g_ra', 'ra', 'g_dec', 'dec',
'elong', 'radius', 'hlong', 'hlat', 'sublat', 'sublong']:
# Return the results in degrees rather than radians
return math.degrees(getattr(self.body, attr))
elif attr=='moon_phase':
# Return the result in percent
return 100.0 * self.body.moon_phase
elif attr in ['next_rising', 'next_setting', 'next_transit', 'next_antitransit',
'previous_rising', 'previous_setting', 'previous_transit', 'previous_antitransit']:
# These functions have the unfortunate side effect of changing the state of the body
# being examined. So, create a temporary body and then throw it away
temp_body = self.body_factory()
time_djd = getattr(self.observer, attr)(temp_body, use_center=self.use_center)
return weewx.units.ValueHelper((time_djd, "dublin_jd", "group_time"), context="ephem_day", formatter=self.formatter)
elif attr in fn_map:
# These attribute names have to be mapped to a different function name. Like the
# attributes above, they also have the side effect of changing the state of the body.
# Finally, they return the time of the event anywhere in the day (not just the next
# event), so they take a second argument in the function call.
temp_body = self.body_factory(self.observer)
# Look up the function to be called for this attribute (eg, call 'next_rising' for 'rise')
fn = fn_map[attr]
# Call the function, with a second argument giving the start-of-day
time_djd = getattr(self.observer, fn)(temp_body, self.sod_djd)
return weewx.units.ValueHelper((time_djd, "dublin_jd", "group_time"), context="ephem_day", formatter=self.formatter)
else:
# Just return the result unchanged.
return getattr(self.body, attr)
def timestamp_to_djd(time_ts):
"""Convert from a unix time stamp to the number of days since 12/31/1899 12:00 UTC
(aka "Dublin Julian Days")"""
# The number 25567.5 is the start of the Unix epoch (1/1/1970). Just add on the
# number of days since then
return 25567.5 + time_ts/86400.0
def djd_to_timestamp(djd):
"""Convert from number of days since 12/31/1899 12:00 UTC ("Dublin Julian Days") to unix time stamp"""
return (djd-25567.5) * 86400.0
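# A small hedged sanity check (not in the original module): the 25567.5-day
# offset means the Unix epoch itself round-trips exactly.
#
#   >>> timestamp_to_djd(0)
#   25567.5
#   >>> djd_to_timestamp(25567.5)
#   0.0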
if __name__ == '__main__':
import doctest
from weeutil.weeutil import timestamp_to_string, timestamp_to_gmtime #@UnusedImport
    doctest.testmod()
// --- mod.rs ---
pub use self::index_pool::IndexPool;
pub use self::debug_draw::DebugDraw;
pub use self::timer::Timer;
use cgmath::*;
use std::ops::Mul;
use std::f32;
mod index_pool;
mod timer;
pub mod debug_draw;
//
// Global tuning constants based on meters-kilograms-seconds (MKS) units.
//
// Collision
/// The maximum number of vertices on a convex polygon.
pub const MAX_POLYGON_VERTICES: usize = 8;
/// This is used to fatten Aabbs in the dynamic tree. This allows proxies to move by a small
/// amount without triggering a tree adjustment. This is in meters
pub const AABB_EXTENSION: f32 = 0.1;
/// A small length used as a collision and constraint tolerance. Usually it is chosen to be
/// numerically significant, but visually insignificant.
pub const LINEAR_SLOP: f32 = 0.005;
/// The radius of the polygon/edge shape skin. This should not be modified. Making this smaller
/// means polygons will have an insufficient buffer for continuous collision. Making it larger
/// may create artifacts for vertex collision.
pub const POLYGON_RADIUS: f32 = 2.0 * LINEAR_SLOP;
/// Maximum number of sub-steps per contact in continuous physics simulation.
pub const MAX_SUB_STEPS: u32 = 8;
// Dynamics
/// Maximum number of iterations per TOI impact.
pub const MAX_TOI_ITERATIONS: usize = 20;
/// Maximum number of contacts to be handled to solve a TOI impact.
pub const MAX_TOI_CONTACTS: usize = 32;
/// A velocity threshold for elastic collisions. Any collision with a relative linear velocity
/// below this threshold will be treated as inelasti
pub const VELOCITY_THRESHOLD: f32 = 1.0;
/// The maximum linear position correction used when solving constraints. This helps to
/// prevent overshoot.
pub const MAX_LINEAR_CORRECTION: f32 = 0.2;
/// The maximum linear velocity of a body. This limit is very large and is used
/// to prevent numerical problems. You shouldn't need to adjust this.
pub const MAX_TRANSLATION: f32 = 2.0;
pub const MAX_TRANSLATION_SQUARED: f32 = MAX_TRANSLATION * MAX_TRANSLATION;
/// The maximum angular velocity of a body. This limit is very large and is used
/// to prevent numerical problems. You shouldn't need to adjust this.
pub const MAX_ROTATION: f32 = 0.5 * f32::consts::PI;
pub const MAX_ROTATION_SQUARED: f32 = MAX_ROTATION * MAX_ROTATION;
/// This scale factor controls how fast overlap is resolved. Ideally this would be 1 so
/// that overlap is removed in one time step. However, using values close to 1 often lead
/// to overshoot.
pub const BAUMGARTE: f32 = 0.2;
pub const TOI_BAUMGARTE: f32 = 0.75;
/// Performs the cross product on a vector and a scalar. In 2D this produces a vector.
pub fn cross_v_s(v: &Vector2<f32>, s: f32) -> Vector2<f32> {
Vector2::<f32> {
x: s * v.y,
y: -s * v.x,
}
}
/// Performs the cross product on a scalar and a vector. In 2D this produces a vector.
pub fn cross_s_v(s: f32, v: &Vector2<f32>) -> Vector2<f32> {
Vector2::<f32> {
x: -s * v.y,
y: s * v.x,
}
}
pub fn clamp_f32(s: f32, low: f32, high: f32) -> f32 {
f32::max(low, f32::min(s, high))
}
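// A minimal test sketch (not part of the original module): hypothetical
// hand-computed checks for the helpers above.
#[cfg(test)]
mod helper_tests {
    use super::*;
    use cgmath::Vector2;

    #[test]
    fn cross_products_and_clamp() {
        let v = Vector2::new(1.0_f32, 0.0);
        // cross_v_s((1, 0), 2) = (2*0, -2*1) = (0, -2)
        assert_eq!(cross_v_s(&v, 2.0), Vector2::new(0.0, -2.0));
        // cross_s_v(2, (1, 0)) = (-2*0, 2*1) = (0, 2)
        assert_eq!(cross_s_v(2.0, &v), Vector2::new(0.0, 2.0));
        // clamp_f32 pins an out-of-range value to the upper bound
        assert_eq!(clamp_f32(5.0, 0.0, 1.0), 1.0);
    }
}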
#[derive(Clone, Copy, Debug)]
pub struct Rotation2d {
sin: f32,
cos: f32,
}
impl Default for Rotation2d {
/// Constructs a new identity rotation.
fn default() -> Rotation2d {
Rotation2d {
sin: 0.0,
cos: 1.0,
}
}
}
impl Rotation2d {
/// Constructs a new rotation from an angle.
pub fn new(angle: f32) -> Self {
Rotation2d {
sin: angle.sin(),
cos: angle.cos(),
}
}
/// Sets the rotation from an angle.
pub fn set_angle(&mut self, angle: f32) {
self.sin = angle.sin();
self.cos = angle.cos();
}
/// Returns the angle in radians.
pub fn get_angle(&self) -> f32 {
f32::atan2(self.sin, self.cos)
}
/// Multiplies this rotation with the supplied one.
pub fn mul(&self, rhs: &Rotation2d) -> Rotation2d {
// q = self, r = rhs
// [qc -qs] * [rc -rs] = [qc*rc-qs*rs -qc*rs-qs*rc]
// [qs qc] [rs rc] [qs*rc+qc*rs -qs*rs+qc*rc]
Rotation2d {
sin: self.sin * rhs.cos + self.cos * rhs.sin,
cos: self.cos * rhs.cos - self.sin * rhs.sin,
}
}
/// Multiplies the transpose of this rotation with the supplied one
    pub fn mul_t(&self, rhs: &Rotation2d) -> Rotation2d {
        // q = self, r = rhs
// [-qs qc] [rs rc] [-qs*rc+qc*rs qs*rs+qc*rc]
Rotation2d {
sin: self.cos * rhs.sin - self.sin * rhs.cos,
cos: self.cos * rhs.cos + self.sin * rhs.sin,
}
}
/// Rotates a vector
pub fn apply(&self, v: &Vector2<f32>) -> Vector2<f32> {
// q = self
// [qc -qs] * [x] = [qc*x - qs*y]
// [qs qc] [y] [qs*x + qc*y]
Vector2::<f32> {
x: self.cos * v.x - self.sin * v.y,
y: self.sin * v.x + self.cos * v.y,
}
}
/// Inverse rotates a vector
pub fn apply_t(&self, v: &Vector2<f32>) -> Vector2<f32> {
// q = self
// [ qc qs] * [x] = [qc*x + qs*y]
// [-qs qc] [y] [qs*x + qc*y]
Vector2::<f32> {
x: self.cos * v.x + self.sin * v.y,
y: -self.sin * v.x + self.cos * v.y,
}
}
}
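// A hedged test sketch (not in the original module): hypothetical checks that
// composing a rotation with its own transpose yields the identity, and that a
// quarter turn maps the x axis onto the y axis.
#[cfg(test)]
mod rotation_tests {
    use super::*;
    use cgmath::Vector2;

    #[test]
    fn transpose_composition_is_identity() {
        let r = Rotation2d::new(0.7);
        // [r^T][r] = I, so the resulting angle should be ~0
        assert!(r.mul_t(&r).get_angle().abs() < 1e-6);
    }

    #[test]
    fn quarter_turn_maps_x_onto_y() {
        let r = Rotation2d::new(std::f32::consts::FRAC_PI_2);
        let v = r.apply(&Vector2::new(1.0_f32, 0.0));
        assert!(v.x.abs() < 1e-6 && (v.y - 1.0).abs() < 1e-6);
    }
}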
/// A transform contains translation and rotation. It is used to represent the position
/// and orientation of rigid frames.
#[derive(Clone, Copy, Debug)]
pub struct Transform2d {
pub position: Vector2<f32>,
pub rotation: Rotation2d,
}
impl Default for Transform2d {
/// Constructs a new identity transform.
fn default() -> Transform2d {
Transform2d {
position: Vector2::zero(),
rotation: Default::default(),
}
}
}
impl Transform2d {
/// Constructs a new transform with the given position and rotation.
pub fn new(position: Vector2<f32>, rotation: Rotation2d) -> Self {
Transform2d {
position: position,
rotation: rotation,
}
}
pub fn mul(&self, rhs: &Transform2d) -> Transform2d {
Transform2d {
position: self.rotation.apply(&rhs.position) + self.position,
rotation: self.rotation.mul(&rhs.rotation),
}
}
pub fn mul_t(&self, rhs: &Transform2d) -> Transform2d {
Transform2d {
position: self.rotation.apply_t(&(rhs.position - self.position)),
rotation: self.rotation.mul_t(&rhs.rotation),
}
}
pub fn apply(&self, v: &Vector2<f32>) -> Vector2<f32> {
Vector2::<f32> {
x: self.rotation.cos * v.x - self.rotation.sin * v.y + self.position.x,
y: self.rotation.sin * v.x + self.rotation.cos * v.y + self.position.y,
}
}
pub fn apply_t(&self, v: &Vector2<f32>) -> Vector2<f32> {
let p = v - self.position;
Vector2::<f32> {
x: self.rotation.cos * p.x + self.rotation.sin * p.y,
y: -self.rotation.sin * p.x + self.rotation.cos * p.y,
}
}
}
/// This describes the motion of a body/shape for TOI computation. Shapes are defined
/// with respect to the body origin, which may not coincide with the center of mass.
/// However, to support dynamics we must interpolate the center of mass position.
#[derive(Clone, Copy)]
pub struct Sweep {
/// Local center of mass position
pub local_center: Vector2<f32>,
/// center world position at `alpha0`
pub c0: Vector2<f32>,
/// center world position
pub c: Vector2<f32>,
/// world angle at `alpha0`
pub a0: f32,
/// world angle
pub a: f32,
/// Fraction of the current time step in the range [0, 1]
pub alpha0: f32,
}
impl Default for Sweep {
fn default() -> Sweep {
Sweep {
local_center: Vector2::zero(),
c0: Vector2::zero(),
c: Vector2::zero(),
a0: 0.0,
a: 0.0,
alpha0: 0.0,
}
}
}
impl Sweep {
/// Get the interpolated transform at a specific time. `beta` is a factor in [0, 1],
/// where 0 indicates `alpha0`
pub fn get_transform(&self, beta: f32) -> Transform2d {
let mut result = Transform2d::new(
self.c0 * (1.0 - beta) + self.c * beta,
Rotation2d::new(self.a0 * (1.0 - beta) + self.a * beta),
);
/*let mut result = Transform2d::default();
result.position = self.c0 * (1.0 - beta) + self.c * beta;
result.rotation.set_angle(self.a0 * (1.0 - beta) + self.a * beta);*/
// Shift to origin.
result.position -= result.rotation.apply(&self.local_center);
result
}
/// Advance the sweep forward, yielding a new initial state. `alpha` is the new
/// initial time.
pub fn advance(&mut self, alpha: f32) {
assert!(self.alpha0 < 1.0);
let beta = (alpha - self.alpha0) / (1.0 - self.alpha0);
self.c0 += (self.c - self.c0) * beta;
self.a0 += (self.a - self.a0) * beta;
self.alpha0 = alpha;
}
/// Normalize the angles.
pub fn normalize(&mut self) {
let two_pi = 2.0 * f32::consts::PI;
let d = two_pi * (self.a0 / two_pi).floor();
self.a0 -= d;
self.a -= d;
}
}
// --- DocumentPartByPathController.java ---
package com.indoqa.daisy.cocoon.controller;
import org.apache.cocoon.rest.controller.annotation.SitemapParameter;
import org.apache.cocoon.rest.controller.response.RestResponse;
import org.apache.cocoon.rest.controller.response.URLResponse;
public class DocumentPartByPathController extends AbstractDocumentController {
@SitemapParameter
private String part;
@Override
public RestResponse sendSuccessResponse(String id) throws Exception {
return new URLResponse("/default/doc/id/" + id + "/part/" + this.part + ".html");
}
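    // Illustrative example (not from the original source): with sitemap
    // parameter part="content" and a resolved document id of "42", the
    // redirect target is "/default/doc/id/42/part/content.html".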
}
// --- ExpandingToolBar.cpp ---
/**********************************************************************
Audacity: A Digital Audio Editor
ExpandingToolBar.cpp
Dominic Mazzoni
*******************************************************************//**
\class ExpandingToolBar
\brief A smart ToolBar class that has a "MainPanel" which is always
displayed, and an "ExtraPanel" that can be hidden to save space.
Can be docked into a ToolBarArea or floated in a ToolBarFrame;
If auto-expanding is off, behavior is very simple: clicking the
toggle button expands, clicking it again collapses.
If auto-expanding is on, behavior is a little more complicated.
When the mouse moves over a toolbar and it is collapsed, it gets
auto-expanded, and it gets auto-collapsed as soon as the mouse
leaves. However, if you manually toggle it collapsed
while it was auto-expanded, it will stay collapsed until you move
the mouse completely away and then back again later. If you
manually expand it, it will stay manually expanded until you
manually collapse it.
*//****************************************************************//**
\class ExpandingToolBarEvtHandler
\brief A custom event handler for ExpandingToolBar.
*//****************************************************************//**
\class ToolBarGrabber
\brief Draws the grabber for an ExpandingToolBar.
*//****************************************************************//**
\class ToolBarDialog
\brief A dialog-based container for ExpandingToolBars providing modal
operation.
*//****************************************************************//**
\class ToolBarFrame
\brief A miniframe based container for ExpandingToolBars providing
modeless presentation.
*//****************************************************************//**
\class ToolBarArea
\brief An alternative to ToolBarFrame which can contain an
ExpandingToolBar. ToolBarArea is used for a 'docked' ToolBar,
ToolBarFrame for a floating one.
*//****************************************************************//**
\class ToolBarArrangement
\brief Small class that holds some layout information for an
ExpandingToolBar.
*//*******************************************************************/
#include "../Theme.h"
// For compilers that support precompilation, includes "wx/wx.h".
#include <wx/wxprec.h>
#ifndef WX_PRECOMP
#include <wx/window.h>
#endif
#include <wx/wx.h>
#include <wx/dcmemory.h>
#include <wx/log.h>
#include <wx/dragimag.h>
#include <wx/arrimpl.cpp>
#include <wx/dialog.h>
#include "ExpandingToolBar.h"
#include "AButton.h"
#include "../AllThemeResources.h"
#include "../Experimental.h"
const int kToggleButtonHeight = 8;
const int kTimerInterval = 50; // every 50 ms -> ~20 updates per second
const wxRect kDummyRect = wxRect(-9999, -9999, 0, 0);
enum {
kToggleButtonID = 5000,
kTimerID
};
WX_DEFINE_OBJARRAY(wxArrayRect);
class ToolBarArrangement
{
public:
ExpandingToolBarArray childArray;
wxArrayRect rectArray;
wxArrayInt rowArray;
};
//
// ExpandingToolBar
//
BEGIN_EVENT_TABLE(ExpandingToolBar, wxPanel)
EVT_SIZE(ExpandingToolBar::OnSize)
EVT_TIMER(kTimerID, ExpandingToolBar::OnTimer)
EVT_BUTTON(kToggleButtonID, ExpandingToolBar::OnToggle)
END_EVENT_TABLE()
IMPLEMENT_CLASS(ExpandingToolBar, wxPanel)
//static
int ExpandingToolBar::msNoAutoExpandStack = 0;
ExpandingToolBar::ExpandingToolBar(wxWindow* parent,
wxWindowID id,
const wxPoint& pos,
const wxSize& size):
wxPanel(parent, id, pos, size),
mIsAutoExpanded(false),
mIsManualExpanded(false),
mIsExpanded(false),
mAutoExpand(true),
mFirstTime(true),
mFrameParent(NULL),
mDialogParent(NULL),
mAreaParent(NULL),
mSavedArrangement(NULL),
mDragImage(NULL),
mTopLevelParent(NULL)
{
mMainPanel = new wxPanel(this, -1,
wxDefaultPosition, wxSize(1, 1));
mExtraPanel = new wxPanel(this, -1,
wxDefaultPosition, wxSize(1, 1));
mGrabber = NULL;
ToolBarArea *toolBarParent =
dynamic_cast<ToolBarArea *>(GetParent());
if (toolBarParent)
mGrabber = new ToolBarGrabber(this, -1, this);
/// \todo check whether this is a memory leak (and check similar code)
wxImage hbar = theTheme.Image(bmpToolBarToggle);
wxColour magicColor = wxColour(0, 255, 255);
ImageArray fourStates = ImageRoll::SplitV(hbar, magicColor);
mToggleButton = new AButton(this, kToggleButtonID,
wxDefaultPosition, wxDefaultSize,
ImageRoll(ImageRoll::HorizontalRoll,
fourStates[0], magicColor),
ImageRoll(ImageRoll::HorizontalRoll,
fourStates[1], magicColor),
ImageRoll(ImageRoll::HorizontalRoll,
fourStates[2], magicColor),
ImageRoll(ImageRoll::HorizontalRoll,
fourStates[3], magicColor),
true);
mToggleButton->UseDisabledAsDownHiliteImage(true);
SetAutoLayout(true);
mTimer.SetOwner(this, kTimerID);
}
ExpandingToolBar::~ExpandingToolBar()
{
}
void ExpandingToolBar::OnSize(wxSizeEvent & WXUNUSED(event))
{
if (mFrameParent || mDialogParent || mAreaParent)
return;
// At the time of construction, it wasn't "safe" to tell
// our parent that we've just joined the window, so we check
// for it during our first OnSize event.
if (!mFrameParent) {
ToolBarFrame *toolBarParent =
dynamic_cast<ToolBarFrame *>(GetParent());
if (toolBarParent) {
// We were placed into a floating window
mFrameParent = toolBarParent;
toolBarParent->SetChild(this);
}
}
if (!mDialogParent) {
ToolBarDialog *toolBarParent =
dynamic_cast<ToolBarDialog *>(GetParent());
if (toolBarParent) {
// We were placed into a dialog
mDialogParent = toolBarParent;
toolBarParent->SetChild(this);
}
}
if (!mAreaParent) {
ToolBarArea *toolBarParent =
dynamic_cast<ToolBarArea *>(GetParent());
if (toolBarParent) {
// We were placed into an area full of other toolbars
mAreaParent = toolBarParent;
toolBarParent->AddChild(this);
}
}
}
void ExpandingToolBar::OnToggle(wxCommandEvent & WXUNUSED(event))
{
if (mIsExpanded)
Collapse();
else
Expand();
}
void ExpandingToolBar::Expand()
{
// We set both mIsManualExpanded and mIsAutoExpanded to true;
// that way if the user manually collapses the toolbar we set
// mIsManualExpanded to false but keep mIsAutoExpanded to true
// to prevent it from being auto-expanded again until the user
// actually moves the mouse completely away and back again later.
mToggleButton->PushDown();
mIsManualExpanded = true;
mIsAutoExpanded = true;
Fit();
}
void ExpandingToolBar::Collapse(bool now /* = false */)
{
// After being manually collapsed, we set mIsAutoExpanded back to
// true, which prevents it from being immediately auto-expanded
// again until after the mouse actually moves away and then
// back again later.
mToggleButton->PopUp();
mIsManualExpanded = false;
mIsAutoExpanded = false;
Fit();
mIsAutoExpanded = true;
if (now) {
mCurrentDrawerSize = mTargetDrawerSize;
MoveDrawer(wxSize(0, 0));
}
}
void ExpandingToolBar::TryAutoExpand()
{
if (mAutoExpand && msNoAutoExpandStack==0 &&
mIsManualExpanded == false && mIsAutoExpanded == false) {
mToggleButton->PushDown();
mIsAutoExpanded = true;
Fit();
}
}
void ExpandingToolBar::TryAutoCollapse()
{
#ifdef EXPERIMENTAL_ROLL_UP_DIALOG
if (mIsAutoExpanded == true && mIsManualExpanded == false) {
mToggleButton->PopUp();
mIsAutoExpanded = false;
Fit();
}
#endif
}
class ExpandingToolBarEvtHandler : public wxEvtHandler
{
public:
ExpandingToolBarEvtHandler(ExpandingToolBar *toolbar,
wxEvtHandler *inheritedEvtHandler)
{
mToolBar = toolbar;
mInheritedEvtHandler = inheritedEvtHandler;
}
virtual bool ProcessEvent(wxEvent& evt)
{
      if (mToolBar->IsCursorInWindow())
         mToolBar->TryAutoExpand();
      else
         mToolBar->TryAutoCollapse();
return mInheritedEvtHandler->ProcessEvent(evt);
}
protected:
ExpandingToolBar *mToolBar;
wxEvtHandler *mInheritedEvtHandler;
DECLARE_NO_COPY_CLASS(ExpandingToolBarEvtHandler);
};
void ExpandingToolBar::RecursivelyPushEventHandlers(wxWindow *win)
{
if (!mWindowHash[win]) {
ExpandingToolBarEvtHandler *evtHandler =
new ExpandingToolBarEvtHandler(this, win->GetEventHandler());
win->PushEventHandler(evtHandler);
mWindowHash[win] = 1;
}
wxWindowList children = win->GetChildren();
typedef wxWindowList::compatibility_iterator Node;
for(Node node = children.GetFirst(); node; node = node->GetNext()) {
wxWindow *child = node->GetData();
RecursivelyPushEventHandlers(child);
}
}
bool ExpandingToolBar::Layout()
{
mMainSize = mMainPanel->GetBestSize();
mExtraSize = mExtraPanel->GetBestSize();
mButtonSize = wxSize(wxMax(mMainSize.x, mExtraSize.x),
kToggleButtonHeight);
int left = 0;
if (mGrabber) {
mGrabberSize = mGrabber->GetMinSize();
left += mGrabberSize.x;
}
else
mGrabberSize = wxSize(0, 0);
mMainPanel->SetSize(left, 0, mMainSize.x, mMainSize.y);
mToggleButton->SetSize(left, mMainSize.y, mButtonSize.x, mButtonSize.y);
mExtraPanel->SetSize(left, mMainSize.y + mButtonSize.y,
mExtraSize.x, mExtraSize.y);
if (mGrabber)
mGrabber->SetSize(0, 0, left, mMainSize.y + mButtonSize.y);
// Add event handlers to all children
//RecursivelyPushEventHandlers(this);
return true;
}
void ExpandingToolBar::Fit()
{
#ifdef EXPERIMENTAL_ROLL_UP_DIALOG
mIsExpanded = (mIsAutoExpanded || mIsManualExpanded);
#else
mIsExpanded = true;// JKC - Wedge it open at all times.
#endif
int width = mButtonSize.x + mGrabberSize.x;
wxSize baseWindowSize = wxSize(width,
mMainSize.y + mButtonSize.y);
mTargetDrawerSize = wxSize(mButtonSize.x, 0);
if (mIsExpanded)
mTargetDrawerSize.y += mExtraSize.y;
mCurrentDrawerSize.x = mTargetDrawerSize.x;
// The first time, we always update the size. Otherwise, we set
// a target size, and the actual size changes during a timer
// event.
if (mFirstTime) {
mFirstTime = false;
mCurrentDrawerSize = wxSize(mExtraSize.x, 0);
mCurrentTotalSize = baseWindowSize;
SetSizeHints(mCurrentTotalSize, mCurrentTotalSize);
SetSize(mCurrentTotalSize);
}
// wxTimers seem to be a little unreliable - sometimes they stop for
// no good reason, so this "primes" it every now and then...
mTimer.Stop();
mTimer.Start(kTimerInterval);
}
bool ExpandingToolBar::IsCursorInWindow()
{
wxPoint globalMouse = ::wxGetMousePosition();
wxPoint localMouse = ScreenToClient(globalMouse);
bool result = (localMouse.x >= 0 && localMouse.y >= 0 &&
localMouse.x < mCurrentTotalSize.x &&
                  localMouse.y < mCurrentTotalSize.y);

   return result;
}
void ExpandingToolBar::ReparentExtraPanel()
{
// This is how we make sure the extra panel, which slides out
// like a drawer, appears on top of everything else in the window...
wxPoint pos;
pos.x = mGrabberSize.x;
pos.y = mMainSize.y + mButtonSize.y;
wxWindow *frame = this;
while(!frame->IsTopLevel()) {
pos += frame->GetPosition();
frame = frame->GetParent();
}
mExtraPanel->Reparent(frame);
mExtraPanel->SetPosition(pos);
}
void ExpandingToolBar::MoveDrawer(wxSize prevSize)
{
mCurrentTotalSize = wxSize(mButtonSize.x,
mMainSize.y +
mButtonSize.y +
mCurrentDrawerSize.y);
if (mFrameParent) {
// If we're in a tool window
SetSizeHints(mCurrentTotalSize, mCurrentTotalSize);
SetSize(mCurrentTotalSize);
GetParent()->Fit();
}
if (mDialogParent) {
// If we're in a dialog
SetSizeHints(mCurrentTotalSize, mCurrentTotalSize);
SetSize(mCurrentTotalSize);
GetParent()->Fit();
}
if (mAreaParent) {
// If we're in a tool area
if (mCurrentDrawerSize.y > 0 && prevSize.y == 0) {
ReparentExtraPanel();
mExtraPanel->Show();
}
mExtraPanel->SetSizeHints(mCurrentDrawerSize, mCurrentDrawerSize);
mExtraPanel->SetSize(mCurrentDrawerSize);
if (mCurrentDrawerSize.y == 0)
mExtraPanel->Hide();
}
}
void ExpandingToolBar::OnTimer(wxTimerEvent & WXUNUSED(event))
{
if (mAutoExpand && msNoAutoExpandStack==0 &&
IsCursorInWindow())
TryAutoExpand();
else if (!IsCursorInWindow())
TryAutoCollapse();
if (mCurrentDrawerSize == mTargetDrawerSize)
return;
// This accelerates the current size towards the target size;
// it's a neat way for the window to roll open, but in such a
   // way that it slows down smoothly as it approaches its final size.
wxSize prevSize = mCurrentDrawerSize;
mCurrentDrawerSize = (mCurrentDrawerSize*2 + mTargetDrawerSize) / 3;
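   // Worked example (illustrative): with a target height of 90, the height
   // goes 0 -> 30 -> 50 -> 63 -> 72 -> ..., covering a third of the remaining
   // distance each tick and decelerating as it approaches the target.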
if (abs((mCurrentDrawerSize-mTargetDrawerSize).x)<2 &&
abs((mCurrentDrawerSize-mTargetDrawerSize).y)<2)
mCurrentDrawerSize = mTargetDrawerSize;
MoveDrawer(prevSize);
}
wxBitmap ExpandingToolBar::GetToolbarBitmap()
{
wxSize size = GetClientSize();
wxBitmap bitmap(size.x, size.y);
wxClientDC winDC(this);
wxMemoryDC memDC;
memDC.SelectObject(bitmap);
memDC.Blit(0, 0, size.x, size.y,
&winDC, 0, 0);
return bitmap;
}
void ExpandingToolBar::StartMoving()
{
if (!mAreaParent)
return;
int j;
mAreaParent->CollapseAll(true);
mTimer.Stop();
// This gives time for wx to finish redrawing the window that way.
// HACK: why do we need to do it so many times???
for(j=0; j<500; j++)
::wxSafeYield();
wxBitmap toolbarBitmap = GetToolbarBitmap();
msNoAutoExpandStack++;
mSavedArrangement = mAreaParent->SaveArrangement();
mAreaParent->RemoveChild(this);
mAreaParent->Refresh(true);
mTopLevelParent = this;
while(!mTopLevelParent->IsTopLevel())
mTopLevelParent = mTopLevelParent->GetParent();
wxPoint hotSpot = ScreenToClient(wxGetMousePosition());
hotSpot -= (ClientToScreen(wxPoint(0, 0)) -
mAreaParent->ClientToScreen(wxPoint(0, 0)));
mDropTargets = mAreaParent->GetDropTargets();
mDropTarget = kDummyRect;
wxColour magicColor = wxColour(0, 255, 255);
wxImage tgtImage = theTheme.Image(bmpToolBarTarget);
ImageRoll tgtImageRoll = ImageRoll(ImageRoll::VerticalRoll,
tgtImage,
magicColor);
mTargetPanel = new ImageRollPanel(mAreaParent, -1, tgtImageRoll,
wxDefaultPosition,
wxDefaultSize,
wxTRANSPARENT_WINDOW);
mTargetPanel->SetLogicalFunction(wxXOR);
mTargetPanel->SetSize(mDropTarget);
// This gives time for wx to finish redrawing the window that way.
// HACK: why do we need to do it several times???
for(j=0; j<500; j++)
::wxSafeYield();
mAreaParent->SetCapturedChild(this);
mDragImage = new wxDragImage(toolbarBitmap);
mDragImage->BeginDrag(hotSpot, mAreaParent, mTopLevelParent);
mDragImage->Show();
mDragImage->Move(ScreenToClient(wxGetMousePosition()));
}
void ExpandingToolBar::UpdateMoving()
{
if (!mAreaParent || !mSavedArrangement || !mDragImage)
return;
wxPoint cursorPos = mAreaParent->ScreenToClient(wxGetMousePosition());
wxRect prevTarget = mDropTarget;
int best_dist_sq = 99999;
int i;
for(i=0; i<(int)mDropTargets.GetCount(); i++) {
int x = (mDropTargets[i].x + (mDropTargets[i].width/2))-cursorPos.x;
int y = (mDropTargets[i].y + (mDropTargets[i].height/2))-cursorPos.y;
int dist_sq = (x*x) + (y*y);
if (dist_sq < best_dist_sq) {
best_dist_sq = dist_sq;
mDropTarget = mDropTargets[i];
}
}
if (!mAreaParent->GetRect().Contains(cursorPos))
mDropTarget = kDummyRect;
if (mDropTarget != prevTarget) {
mDragImage->Hide();
wxRect r = mDropTarget;
r.Inflate(4, 4);
mTargetPanel->SetSize(r);
#if 0
wxClientDC dc(mAreaParent);
dc.DestroyClippingRegion();
dc.SetLogicalFunction(wxINVERT);
wxRect r = prevTarget;
r.Inflate(4, 4);
dc.DrawRectangle(r);
r = mDropTarget;
r.Inflate(4, 4);
dc.DrawRectangle(r);
#endif
// This gives time for wx to finish redrawing the window that way.
// HACK: why do we need to do it so many times???
for(i=0; i<500; i++)
::wxSafeYield();
mDragImage->Show();
mDragImage->Move(ScreenToClient(wxGetMousePosition()));
}
else
mDragImage->Move(ScreenToClient(wxGetMousePosition()));
}
void ExpandingToolBar::FinishMoving()
{
if (!mAreaParent || !mSavedArrangement)
return;
delete mTargetPanel;
mAreaParent->SetCapturedChild(NULL);
mDragImage->Hide();
mDragImage->EndDrag();
msNoAutoExpandStack--;
if (mDropTarget == kDummyRect) {
mAreaParent->RestoreArrangement(mSavedArrangement);
mSavedArrangement = NULL;
}
else {
delete mSavedArrangement;
mSavedArrangement = NULL;
mAreaParent->MoveChild(this, mDropTarget);
}
// Keep all drawers closed until the user moves specifically to a
// different window
mAreaParent->CollapseAll();
mTopLevelParent->Refresh(true);
mTimer.Start(kTimerInterval);
}
//
// ToolBarGrabber
//
BEGIN_EVENT_TABLE(ToolBarGrabber, wxPanel)
EVT_PAINT(ToolBarGrabber::OnPaint)
EVT_SIZE(ToolBarGrabber::OnSize)
EVT_MOUSE_EVENTS(ToolBarGrabber::OnMouse)
END_EVENT_TABLE()
IMPLEMENT_CLASS(ToolBarGrabber, wxPanel)
ToolBarGrabber::ToolBarGrabber(wxWindow *parent,
wxWindowID id,
ExpandingToolBar *ownerToolbar,
const wxPoint& pos,
const wxSize& size):
wxPanel(parent, id, pos, size),
mOwnerToolBar(ownerToolbar)
{
wxImage grabberImages = theTheme.Image(bmpToolBarGrabber);
wxColour magicColor = wxColour(0, 255, 255);
ImageArray images = ImageRoll::SplitH(grabberImages, magicColor);
mImageRoll[0] = ImageRoll(ImageRoll::VerticalRoll,
images[0],
magicColor);
mImageRoll[1] = ImageRoll(ImageRoll::VerticalRoll,
images[1],
magicColor);
SetSizeHints(mImageRoll[0].GetMinSize(),
mImageRoll[1].GetMaxSize());
mState = 0;
}
void ToolBarGrabber::OnMouse(wxMouseEvent &event)
{
int prevState = mState;
   // Handle highlighting the image if the mouse is over it
if (event.Entering())
mState = 1;
else if (event.Leaving())
mState = 0;
else {
wxSize clientSize = GetClientSize();
if (event.m_x >= 0 && event.m_y >= 0 &&
event.m_x < clientSize.x && event.m_y < clientSize.y)
mState = 1;
else
mState = 0;
}
if (event.ButtonDown())
mOwnerToolBar->StartMoving();
if (mState != prevState)
Refresh(false);
}
void ToolBarGrabber::OnPaint(wxPaintEvent & WXUNUSED(event))
{
wxPaintDC dc(this);
mImageRoll[mState].Draw(dc, GetClientRect());
}
void ToolBarGrabber::OnSize(wxSizeEvent & WXUNUSED(event))
{
Refresh(false);
}
//
// ToolBarDialog
//
BEGIN_EVENT_TABLE(ToolBarDialog, wxDialog)
END_EVENT_TABLE()
IMPLEMENT_CLASS(ToolBarDialog, wxDialog)
ToolBarDialog::ToolBarDialog(wxWindow* parent,
wxWindowID id,
const wxString& name,
const wxPoint& pos):
wxDialog(parent, id, name, pos, wxSize(1, 1),
// Workaround for bug in __WXMSW__. No close box on a wxDialog unless wxSYSTEM_MENU is used.
#ifdef __WXMSW__
wxSYSTEM_MENU |
#endif
wxCAPTION|wxCLOSE_BOX),
mChild(NULL)
{
}
ToolBarDialog::~ToolBarDialog()
{
}
void ToolBarDialog::SetChild(ExpandingToolBar *child)
{
mChild = child;
if (mChild && mChild->GetParent() != this)
mChild->Reparent(this);
Fit();
}
void ToolBarDialog::Fit()
{
if (mChild) {
wxSize childSize = mChild->GetBestSize();
// Take into account the difference between the content
// size and the frame size
wxSize curContentSize = GetClientSize();
wxSize curFrameSize = GetSize();
wxSize newFrameSize = childSize + (curFrameSize - curContentSize);
SetSizeHints(newFrameSize, newFrameSize);
SetSize(newFrameSize);
}
}
//
// ToolBarFrame
//
BEGIN_EVENT_TABLE(ToolBarFrame, wxMiniFrame)
END_EVENT_TABLE()
IMPLEMENT_CLASS(ToolBarFrame, wxMiniFrame)
ToolBarFrame::ToolBarFrame(wxWindow* parent,
wxWindowID id,
const wxString& name,
const wxPoint& pos):
wxMiniFrame(parent, id, name, pos, wxSize(1, 1),
// Workaround for bug in __WXMSW__. No close box on a miniframe unless wxSYSTEM_MENU is used.
#ifdef __WXMSW__
wxSYSTEM_MENU |
#endif
wxCAPTION|wxCLOSE_BOX),
mChild(NULL)
{
}
ToolBarFrame::~ToolBarFrame()
{
}
void ToolBarFrame::SetChild(ExpandingToolBar *child)
{
mChild = child;
if (mChild && mChild->GetParent() != this)
mChild->Reparent(this);
Fit();
}
void ToolBarFrame::Fit()
{
if (mChild) {
wxSize childSize = mChild->GetBestSize();
// Take into account the difference between the content
// size and the frame size
wxSize curContentSize = GetClientSize();
wxSize curFrameSize = GetSize();
wxSize newFrameSize = childSize + (curFrameSize - curContentSize);
SetSizeHints(newFrameSize, newFrameSize);
SetSize(newFrameSize);
}
}
//
// ToolBarArea
//
BEGIN_EVENT_TABLE(ToolBarArea, wxPanel)
EVT_SIZE(ToolBarArea::OnSize)
EVT_MOUSE_EVENTS(ToolBarArea::OnMouse)
END_EVENT_TABLE()
IMPLEMENT_CLASS(ToolBarArea, wxPanel)
ToolBarArea::ToolBarArea(wxWindow* parent,
wxWindowID id,
const wxPoint& pos,
const wxSize& size):
wxPanel(parent, id, pos, size),
mInOnSize(false),
mCapturedChild(NULL)
{
}
ToolBarArea::~ToolBarArea()
{
}
void ToolBarArea::ContractRow(int rowIndex)
{
// Contract all of the toolbars in a given row to their
// minimum size. This is an intermediate step in layout.
int i;
int x = 0;
for(i=0; i<(int)mChildArray.GetCount(); i++)
if (mRowArray[i] == rowIndex) {
wxPoint childPos = mChildArray[i]->GetPosition();
wxSize childMin = mChildArray[i]->GetMinSize();
mChildArray[i]->SetSize(x, childPos.y,
childMin.x, childMin.y);
x += childMin.x;
}
}
bool ToolBarArea::ExpandRow(int rowIndex)
{
// Expand all of the toolbars in a given row so that the
// whole width is filled, if possible. This is the last
// step after laying out as many toolbars as possible in
// that row. Returns false if it's not possible to fit
// all of these toolbars in one row anymore.
wxSize area = GetClientSize();
int i, j, x;
int minWidth = 0;
int leftoverSpace = 0;
int expandableCount = 0;
int toolbarCount = 0;
for(i=0; i<(int)mChildArray.GetCount(); i++)
if (mRowArray[i] == rowIndex) {
ExpandingToolBar *child = mChildArray[i];
wxSize childMin = child->GetMinSize();
wxSize childMax = child->GetMaxSize();
minWidth += childMin.x;
toolbarCount++;
if (childMax.x > childMin.x)
expandableCount++;
}
leftoverSpace = area.x - minWidth;
if (leftoverSpace <= 0) {
if (toolbarCount > 1)
return false; // not possible to fit all in one row
else
return true; // there's only one, so it doesn't matter
}
j = 0;
x = 0;
for(i=0; i<(int)mChildArray.GetCount(); i++)
if (mRowArray[i] == rowIndex) {
ExpandingToolBar *child = mChildArray[i];
wxPoint childPos = child->GetPosition();
wxSize childMin = child->GetMinSize();
wxSize childMax = child->GetMaxSize();
int width = childMin.x;
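// Hand out the leftover space with a cumulative integer split:
// bar j receives total*(j+1)/n - total*j/n, so rounding errors
// never accumulate and every pixel is assigned exactly once.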
if (childMax.x > childMin.x)
width +=
(leftoverSpace * (j+1) / expandableCount) -
(leftoverSpace * (j) / expandableCount);
mChildArray[i]->SetSize(x, childPos.y,
width, childMin.y);
x += width;
j++;
}
return true; // success
}
void ToolBarArea::LayoutOne(int childIndex)
{
wxSize area = GetClientSize();
ExpandingToolBar *child = mChildArray[childIndex];
wxSize childMin = child->GetMinSize();
if (childIndex == 0) {
mRowArray[childIndex] = 0;
mChildArray[childIndex]->SetSize(0, 0, childMin.x, childMin.y);
ExpandRow(0);
#if 0
wxPoint p = mChildArray[childIndex]->GetPosition();
wxSize s = mChildArray[childIndex]->GetSize();
printf("ToolBar %d moved to row %d at (%d, %d), size (%d x %d)\n",
childIndex, mRowArray[childIndex],
p.x, p.y, s.x, s.y);
#endif
mLastLayoutSize = area;
return;
}
int prevRow = mRowArray[childIndex-1];
ContractRow(prevRow);
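// Shrink the previous row to minimum widths first, so we can test
// whether this toolbar still fits at the end of that row.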
wxPoint prevPos = mChildArray[childIndex-1]->GetPosition();
wxSize prevSize = mChildArray[childIndex-1]->GetSize();
int prevX = prevPos.x + prevSize.x;
int availableWidth = area.x - prevX;
if (childMin.x <= availableWidth) {
// It fits into the same row
mRowArray[childIndex] = prevRow;
mChildArray[childIndex]->SetSize(prevX, prevPos.y,
childMin.x, childMin.y);
ExpandRow(prevRow);
}
else {
// Go to the next row
ExpandRow(prevRow);
mRowArray[childIndex] = prevRow + 1;
int i;
int maxRowHeight = 0;
for(i=0; i<childIndex; i++)
if (mRowArray[i] == prevRow &&
mChildArray[i]->GetSize().y > maxRowHeight)
maxRowHeight = mChildArray[i]->GetSize().y;
mChildArray[childIndex]->SetSize(0, prevPos.y + maxRowHeight,
childMin.x, childMin.y);
ExpandRow(prevRow+1);
}
// Save the size of the window the last time we moved one of the
// toolbars around. If the user does a minor resize, we try to
// preserve the layout. If the user does a major resize, we're
// allowed to redo the layout.
mLastLayoutSize = area;
#if 0
wxPoint p = mChildArray[childIndex]->GetPosition();
wxSize s = mChildArray[childIndex]->GetSize();
printf("ToolBar %d moved to row %d at (%d, %d), size (%d x %d)\n",
childIndex, mRowArray[childIndex],
p.x, p.y, s.x, s.y);
#endif
}
bool ToolBarArea::Layout()
{
// Redo the layout from scratch, preserving only the order of
// the children
int i;
for(i=0; i<(int)mChildArray.GetCount(); i++)
mRowArray[i] = -1;
for(i=0; i<(int)mChildArray.GetCount(); i++)
LayoutOne(i);
Refresh(true);
return true;
}
void ToolBarArea::AdjustLayout()
{
// Try to modify the layout as little as possible - but if that's
// impossible, redo the layout as necessary.
int row = -1;
int i, j;
for(i=0; i<(int)mChildArray.GetCount(); i++) {
if (mRowArray[i] > row) {
row = mRowArray[i];
bool success = ExpandRow(row);
if (!success) {
// Re-layout all toolbars from this row on
for(j=i; j<(int)mChildArray.GetCount(); j++)
LayoutOne(j);
return;
}
}
}
}
void ToolBarArea::Fit()
{
Fit(true, true);
}
void ToolBarArea::Fit(bool horizontal, bool vertical)
{
wxSize clientSize = GetClientSize();
wxSize minSize;
wxSize maxSize;
wxSize actualSize;
int i;
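// Minimum width tracks the widest child; the width hint is
// otherwise unbounded (9999 sentinel), and the height hints follow
// the bottom edge of the lowest toolbar.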
minSize.x = 0;
minSize.y = 0;
maxSize.x = 9999;
maxSize.y = 0;
for(i=0; i<(int)mChildArray.GetCount(); i++) {
wxPoint childPos = mChildArray[i]->GetPosition();
wxSize childSize = mChildArray[i]->GetSize();
if (childPos.x + childSize.x > actualSize.x) {
actualSize.x = childPos.x + childSize.x;
}
if (childSize.x > minSize.x) {
minSize.x = childSize.x;
}
if (childPos.y + childSize.y > maxSize.y) {
maxSize.y = childPos.y + childSize.y;
minSize.y = maxSize.y;
actualSize.y = maxSize.y;
}
}
if (!horizontal && actualSize.x < clientSize.x)
actualSize.x = clientSize.x;
if (!vertical && actualSize.y < clientSize.y)
actualSize.y = clientSize.y;
if (minSize != mMinSize ||
maxSize != mMaxSize) {
mMinSize = minSize;
mMaxSize = maxSize;
SetSizeHints(mMinSize, mMaxSize);
}
if (actualSize != mActualSize) {
mActualSize = actualSize;
SetSize(mActualSize);
}
}
void ToolBarArea::OnSize(wxSizeEvent & WXUNUSED(event))
{
if (mInOnSize)
return;
mInOnSize = true;
wxSize currentSize = GetClientSize();
if (abs(currentSize.x - mLastLayoutSize.x) >= 100) {
// If they resize by more than 100 pixels (horizontally),
// we totally redo the layout, preserving the order of the
// toolbars but not the exact position.
Layout();
}
else {
// If it was a minor resize, we try to preserve the positions of
// the toolbars. If this is impossible, we still redo the layout,
// of course.
AdjustLayout();
}
Fit(false, true);
mInOnSize = false;
}
void ToolBarArea::OnMouse(wxMouseEvent &evt)
{
if (mCapturedChild) {
if (evt.ButtonUp())
mCapturedChild->FinishMoving();
else if (evt.Moving() || evt.Dragging())
mCapturedChild->UpdateMoving();
}
else {
evt.Skip();
}
}
void ToolBarArea::CollapseAll(bool now)
{
int i;
for(i=0; i<(int)mChildArray.GetCount(); i++)
mChildArray[i]->Collapse(now);
}
void ToolBarArea::AddChild(ExpandingToolBar *child)
{
mChildArray.Add(child);
mRowArray.Add(-1); // unknown row
LayoutOne(mChildArray.GetCount()-1);
Fit(false, true);
}
void ToolBarArea::RemoveChild(ExpandingToolBar *child)
{
int i, j;
for(i=0; i<(int)mChildArray.GetCount(); i++) {
if (mChildArray[i] == child) {
child->Hide();
mChildArray.RemoveAt(i);
mRowArray.RemoveAt(i);
for(j=i; j<(int)mChildArray.GetCount(); j++)
mRowArray[j] = -1;
for(j=i; j<(int)mChildArray.GetCount(); j++)
LayoutOne(j);
Fit(false, true);
}
}
}
ToolBarArrangement *ToolBarArea::SaveArrangement()
{
ToolBarArrangement *arrangement = new ToolBarArrangement();
int i;
arrangement->childArray = mChildArray;
arrangement->rowArray = mRowArray;
for(i=0; i<(int)mChildArray.GetCount(); i++)
arrangement->rectArray.Add(mChildArray[i]->GetRect());
return arrangement;
}
void ToolBarArea::RestoreArrangement(ToolBarArrangement *arrangement)
{
int i;
mChildArray = arrangement->childArray;
mRowArray = arrangement->rowArray;
for(i=0; i<(int)mChildArray.GetCount(); i++) {
mChildArray[i]->SetSize(arrangement->rectArray[i]);
mChildArray[i]->Show();
}
Fit(false, true);
delete arrangement;
}
wxArrayRect ToolBarArea::GetDropTargets()
{
mDropTargets.Clear();
mDropTargetIndices.Clear();
mDropTargetRows.Clear();
int numChildren = (int)mChildArray.GetCount();
int i;
int row = -1;
if (numChildren == 0)
return mDropTargets;
for(i=0; i<numChildren; i++) {
int childRow = mRowArray[i];
wxRect childRect = mChildArray[i]->GetRect();
if (childRow != row) {
// Add a target before this child (at beginning of row only)
row = childRow;
mDropTargetIndices.Add(i);
mDropTargetRows.Add(row);
mDropTargets.Add(wxRect(childRect.x, childRect.y,
0, childRect.height));
}
// Add a target after this child (always)
mDropTargetIndices.Add(i+1);
mDropTargetRows.Add(row);
mDropTargets.Add(wxRect(childRect.x+childRect.width, childRect.y,
0, childRect.height));
}
return mDropTargets;
}
void ToolBarArea::MoveChild(ExpandingToolBar *toolBar, wxRect dropTarget)
{
int i, j;
for(i=0; i<(int)mDropTargets.GetCount(); i++) {
if (dropTarget == mDropTargets[i]) {
int newIndex = mDropTargetIndices[i];
int newRow = mDropTargetRows[i];
mChildArray.Insert(toolBar, newIndex);
mRowArray.Insert(newRow, newIndex);
for(j=newIndex+1; j<(int)mChildArray.GetCount(); j++)
mRowArray[j] = -1;
ContractRow(newRow);
mChildArray[newIndex]->Show();
for(j=newIndex; j<(int)mChildArray.GetCount(); j++)
LayoutOne(j);
Fit(false, true);
return;
}
}
}
void ToolBarArea::SetCapturedChild(ExpandingToolBar *child)
{
mCapturedChild = child;
}<|fim▁end|> | // The grabber doesn't count!
if (mGrabber && mGrabber->GetRect().Contains(localMouse))
result = false;
|
<|file_name|>WeaponHandler.java<|end_file_name|><|fim▁begin|>/**
* MegaMek - Copyright (C) 2004,2005 Ben Mazur ([email protected])
*
* This program is free software; you can redistribute it and/or modify it
* under the terms of the GNU General Public License as published by the Free
* Software Foundation; either version 2 of the License, or (at your option)
* any later version.
*
* This program is distributed in the hope that it will be useful, but
* WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
* or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
* for more details.
*/
package megamek.common.weapons;
import java.io.IOException;
import java.io.ObjectInputStream;
import java.io.ObjectOutputStream;
import java.io.Serializable;
import java.util.ArrayList;
import java.util.Vector;
import megamek.common.Aero;
import megamek.common.AmmoType;
import megamek.common.BattleArmor;
import megamek.common.Building;
import megamek.common.Compute;
import megamek.common.Coords;
import megamek.common.Entity;
import megamek.common.EquipmentMode;
import megamek.common.EquipmentType;
import megamek.common.HitData;
import megamek.common.IAimingModes;
import megamek.common.IGame;
import megamek.common.ITerrain;
import megamek.common.Infantry;
import megamek.common.LosEffects;
import megamek.common.Mech;
import megamek.common.Mounted;
import megamek.common.RangeType;
import megamek.common.Report;
import megamek.common.TagInfo;
import megamek.common.TargetRoll;
import megamek.common.Targetable;
import megamek.common.Terrains;
import megamek.common.ToHitData;
import megamek.common.WeaponType;
import megamek.common.actions.WeaponAttackAction;
import megamek.common.options.OptionsConstants;
import megamek.server.Server;
import megamek.server.Server.DamageType;
import megamek.server.SmokeCloud;
/**
* @author Andrew Hunter A basic, simple attack handler. May or may not work for
*         any particular weapon; must be overridden to support special rules.
*/
public class WeaponHandler implements AttackHandler, Serializable {
private static final long serialVersionUID = 7137408139594693559L;
public ToHitData toHit;
protected HitData hit;
public WeaponAttackAction waa;
public int roll;
protected boolean isJammed = false;
protected IGame game;
protected transient Server server; // must not save the server
protected boolean bMissed;
protected boolean bSalvo = false;
protected boolean bGlancing = false;
protected boolean bDirect = false;
protected boolean nukeS2S = false;
protected WeaponType wtype;
protected String typeName;
protected Mounted weapon;
protected Entity ae;
protected Targetable target;
protected int subjectId;
protected int nRange;
protected int nDamPerHit;
protected int attackValue;
protected boolean throughFront;
protected boolean underWater;
protected boolean announcedEntityFiring = false;
protected boolean missed = false;
protected DamageType damageType;
protected int generalDamageType = HitData.DAMAGE_NONE;
protected Vector<Integer> insertedAttacks = new Vector<Integer>();
protected int nweapons; // for capital fighters/fighter squadrons
protected int nweaponsHit; // for capital fighters/fighter squadrons
protected boolean secondShot = false;
protected int numRapidFireHits;
protected String sSalvoType = " shot(s) ";
protected int nSalvoBonus = 0;
/**
* Keeps track of whether we are processing the first hit in a series of
* hits (like for cluster weapons)
*/
protected boolean firstHit = true;
/**
* Boolean flag that determines whether or not this attack is part of a
* strafing run.
*/
protected boolean isStrafing = false;
/**
* Boolean flag that determines if this shot was the first one by a
* particular weapon in a strafing run. Used to ensure that heat is only
* added once.
*/
protected boolean isStrafingFirstShot = false;
/**
* return the <code>int</code> Id of the attacking <code>Entity</code>
*/
public int getAttackerId() {
return ae.getId();
}
/**
* Do we care about the specified phase?
*/
public boolean cares(IGame.Phase phase) {
if (phase == IGame.Phase.PHASE_FIRING) {
return true;
}
return false;
}
/**
* @param vPhaseReport
* - A <code>Vector</code> containing the phasereport.
* @return a <code>boolean</code> value indicating whether or not the attack
* misses because of a failed check.
*/
protected boolean doChecks(Vector<Report> vPhaseReport) {
return false;
}
private void writeObject(ObjectOutputStream out) throws IOException {
out.defaultWriteObject();
}
private void readObject(ObjectInputStream in) throws IOException,
ClassNotFoundException {
in.defaultReadObject();
server = Server.getServerInstance();
}
/**
* @return a <code>boolean</code> value indicating whether or not this attack
* needs further calculating, like a missed shot hitting a building,
* or an AMS only shooting down some missiles.
*/
protected boolean handleSpecialMiss(Entity entityTarget,
boolean targetInBuilding, Building bldg, Vector<Report> vPhaseReport) {
// Shots that miss an entity can set fires.
// Buildings can't be accidentally ignited,
// and some weapons can't ignite fires.
if ((entityTarget != null)
&& ((bldg == null) && (wtype.getFireTN() != TargetRoll.IMPOSSIBLE))) {
server.tryIgniteHex(target.getPosition(), subjectId, false, false,
new TargetRoll(wtype.getFireTN(), wtype.getName()), 3,
vPhaseReport);
}
// shots that miss an entity can also potentially cause explosions in a
// heavy industrial hex
server.checkExplodeIndustrialZone(target.getPosition(), vPhaseReport);
// BMRr, pg. 51: "All shots that were aimed at a target inside
// a building and miss do full damage to the building instead."
if (!targetInBuilding
|| (toHit.getValue() == TargetRoll.AUTOMATIC_FAIL)) {
return false;
}
return true;
}
/**
* Calculate the number of hits
*
* @param vPhaseReport
* - the <code>Vector</code> containing the phase report.
* @return an <code>int</code> containing the number of hits.
*/
protected int calcHits(Vector<Report> vPhaseReport) {
// normal BA attacks (non-swarm, non single-trooper weapons)
// do more than 1 hit
if ((ae instanceof BattleArmor)
&& (weapon.getLocation() == BattleArmor.LOC_SQUAD)
&& !(weapon.isSquadSupportWeapon())
&& !(ae.getSwarmTargetId() == target.getTargetId())) {
bSalvo = true;
int toReturn = allShotsHit() ? ((BattleArmor) ae)
.getShootingStrength() : Compute
.missilesHit(((BattleArmor) ae).getShootingStrength());
Report r = new Report(3325);
r.newlines = 0;
r.subject = subjectId;
r.add(toReturn);
r.add(" troopers ");
r.add(toHit.getTableDesc());
vPhaseReport.add(r);
return toReturn;
}
return 1;
}
/**
* Calculate the clustering of the hits
*
* @return an <code>int</code> value saying how many hits are in each cluster
* of damage.
*/
protected int calcnCluster() {
return 1;
}
protected int calcnClusterAero(Entity entityTarget) {
if (usesClusterTable() && !ae.isCapitalFighter()
&& (entityTarget != null) && !entityTarget.isCapitalScale()) {
return 5;
} else {
return 1;
}
}
protected int[] calcAeroDamage(Entity entityTarget,
Vector<Report> vPhaseReport) {
// Now I need to adjust this for attacks on aeros because they use
// attack values and different rules
// this will work differently for cluster and non-cluster
// weapons, and differently for capital fighter/fighter
// squadrons
if (game.getOptions().booleanOption("aero_sanity")) {
// everything will use the normal hits and clusters for hits weapon
// unless
// we have a squadron or capital scale entity
int reportSize = vPhaseReport.size();
int hits = calcHits(vPhaseReport);
int nCluster = calcnCluster();
if (ae.isCapitalFighter()) {
Vector<Report> throwAwayReport = new Vector<Report>();
// for capital scale fighters, each non-cluster weapon hits a
// different location
bSalvo = true;
hits = 1;
if (nweapons > 1) {
if (allShotsHit()) {
nweaponsHit = nweapons;
} else {
nweaponsHit = Compute.missilesHit(nweapons,
((Aero) ae).getClusterMods());
}
if (usesClusterTable()) {
// remove the last reports because they showed the
// number of shots that hit
while (vPhaseReport.size() > reportSize) {
vPhaseReport.remove(vPhaseReport.size() - 1);
}
// nDamPerHit = 1;
hits = 0;
for (int i = 0; i < nweaponsHit; i++) {
hits += calcHits(throwAwayReport);
}
Report r = new Report(3325);
r.subject = subjectId;
r.add(hits);
r.add(sSalvoType);
r.add(toHit.getTableDesc());
r.newlines = 0;
vPhaseReport.add(r);
} else {
nCluster = 1;
Report r = new Report(3325);
r.subject = subjectId;
r.add(nweaponsHit);
r.add(" weapon(s) ");
r.add(" ");
r.newlines = 0;
hits = nweaponsHit;
vPhaseReport.add(r);
}
}
}
int[] results = new int[2];
results[0] = hits;
results[1] = nCluster;
return results;
} else {
int hits = 1;
int nCluster = calcnClusterAero(entityTarget);
if (ae.isCapitalFighter()) {
bSalvo = false;
if (nweapons > 1) {
nweaponsHit = Compute.missilesHit(nweapons,
((Aero) ae).getClusterMods());
Report r = new Report(3325);
r.subject = subjectId;
r.add(nweaponsHit);
r.add(" weapon(s) ");
r.add(" ");
r.newlines = 0;
vPhaseReport.add(r);
}
nDamPerHit = attackValue * nweaponsHit;
hits = 1;
nCluster = 1;
} else if (nCluster > 1) {
bSalvo = true;
nDamPerHit = 1;
hits = attackValue;
} else {
bSalvo = false;
nDamPerHit = attackValue;
hits = 1;
nCluster = 1;
}
int[] results = new int[2];
results[0] = hits;
results[1] = nCluster;
return results;
}
}
/**
* handle this weapons firing
*
* @return a <code>boolean</code> value indicating whether this should be
* kept or not
*/
public boolean handle(IGame.Phase phase, Vector<Report> vPhaseReport) {
if (!cares(phase)) {
return true;
}
boolean heatAdded = false;
int numAttacks = 1;
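// Under the "uac_tworolls" option, an Ultra autocannon not set to
// "Single" mode resolves as two separate attacks.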
if (game.getOptions().booleanOption("uac_tworolls")
&& ((wtype.getAmmoType() == AmmoType.T_AC_ULTRA) || (wtype
.getAmmoType() == AmmoType.T_AC_ULTRA_THB))
&& !weapon.curMode().equals("Single")) {
numAttacks = 2;
}
insertAttacks(phase, vPhaseReport);
Entity entityTarget = (target.getTargetType() == Targetable.TYPE_ENTITY) ? (Entity) target
: null;
final boolean targetInBuilding = Compute.isInBuilding(game,
entityTarget);
if (entityTarget != null) {
ae.setLastTarget(entityTarget.getId());
ae.setLastTargetDisplayName(entityTarget.getDisplayName());
}
// Which building takes the damage?
Building bldg = game.getBoard().getBuildingAt(target.getPosition());
String number = nweapons > 1 ? " (" + nweapons + ")" : "";
for (int i = numAttacks; i > 0; i--) {
// Report weapon attack and its to-hit value.
Report r = new Report(3115);
r.indent();
r.newlines = 0;
r.subject = subjectId;
r.add(wtype.getName() + number);
if (entityTarget != null) {
if ((wtype.getAmmoType() != AmmoType.T_NA)
&& (weapon.getLinked() != null)
&& (weapon.getLinked().getType() instanceof AmmoType)) {
AmmoType atype = (AmmoType) weapon.getLinked().getType();
if (atype.getMunitionType() != AmmoType.M_STANDARD) {
r.messageId = 3116;
r.add(atype.getSubMunitionName());
}
}
r.addDesc(entityTarget);
} else {
r.messageId = 3120;
r.add(target.getDisplayName(), true);
}
vPhaseReport.addElement(r);
if (toHit.getValue() == TargetRoll.IMPOSSIBLE) {
r = new Report(3135);
r.subject = subjectId;
r.add(toHit.getDesc());
vPhaseReport.addElement(r);
return false;
} else if (toHit.getValue() == TargetRoll.AUTOMATIC_FAIL) {
r = new Report(3140);
r.newlines = 0;
r.subject = subjectId;
r.add(toHit.getDesc());
vPhaseReport.addElement(r);
} else if (toHit.getValue() == TargetRoll.AUTOMATIC_SUCCESS) {
r = new Report(3145);
r.newlines = 0;
r.subject = subjectId;
r.add(toHit.getDesc());
vPhaseReport.addElement(r);
} else {
// roll to hit
r = new Report(3150);
r.newlines = 0;
r.subject = subjectId;
r.add(toHit.getValue());
vPhaseReport.addElement(r);
}
// dice have been rolled, thanks
r = new Report(3155);
r.newlines = 0;
r.subject = subjectId;
r.add(roll);
vPhaseReport.addElement(r);
// do we hit?
bMissed = roll < toHit.getValue();
// are we a glancing hit?
if (game.getOptions().booleanOption("tacops_glancing_blows")) {
if (roll == toHit.getValue()) {
bGlancing = true;
r = new Report(3186);
r.subject = ae.getId();
r.newlines = 0;
vPhaseReport.addElement(r);
} else {
bGlancing = false;
}
} else {
bGlancing = false;
}
// Set Margin of Success/Failure.
toHit.setMoS(roll - Math.max(2, toHit.getValue()));
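// Direct blows require a margin of success of 3+ and an entity
// target (when the "tacops_direct_blow" option is on).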
bDirect = game.getOptions().booleanOption("tacops_direct_blow")
&& ((toHit.getMoS() / 3) >= 1) && (entityTarget != null);
if (bDirect) {
r = new Report(3189);
r.subject = ae.getId();
r.newlines = 0;
vPhaseReport.addElement(r);
}
// Do this stuff first, because some weapons' miss reports reference
// the number of shots fired.
nDamPerHit = calcDamagePerHit();
if (!heatAdded) {
addHeat();
heatAdded = true;
}
attackValue = calcAttackValue();
// Any necessary PSRs, jam checks, etc.
// If this boolean is true, don't report
// the miss later, as we already reported
// it in doChecks
boolean missReported = doChecks(vPhaseReport);
if (missReported) {
bMissed = true;
}
// Do we need some sort of special resolution (minefields,
// artillery, etc.)?
if (specialResolution(vPhaseReport, entityTarget) && (i < 2)) {
return false;
}
if (bMissed && !missReported) {
if (game.getOptions().booleanOption("uac_tworolls")
&& ((wtype.getAmmoType() == AmmoType.T_AC_ULTRA) || (wtype
.getAmmoType() == AmmoType.T_AC_ULTRA_THB))
&& (i == 2)) {
reportMiss(vPhaseReport, true);
} else {
reportMiss(vPhaseReport);
}
// Works out fire setting, AMS shots, and whether continuation is
// necessary.
if (!handleSpecialMiss(entityTarget, targetInBuilding, bldg,
vPhaseReport) && (i < 2)) {
return false;
}
}
// yeech. Handle damage. Different weapons do this in very
// different ways.
int nCluster = calcnCluster();
int id = vPhaseReport.size();
int hits = calcHits(vPhaseReport);
if (target.isAirborne() || game.getBoard().inSpace()) {
// if we added a line to the phase report for calc hits, remove
// it now
while (vPhaseReport.size() > id) {
vPhaseReport.removeElementAt(vPhaseReport.size() - 1);
}
int[] aeroResults = calcAeroDamage(entityTarget, vPhaseReport);
hits = aeroResults[0];
nCluster = aeroResults[1];
}
// We have to adjust the reports on a miss, so they line up
if (bMissed && id != vPhaseReport.size()) {
vPhaseReport.get(id - 1).newlines--;
vPhaseReport.get(id).indent(2);
vPhaseReport.get(vPhaseReport.size() - 1).newlines++;
}
if (!bMissed) {
// The building shields all units from a certain amount of
// damage.
// The amount is based upon the building's CF at the phase's
// start.
int bldgAbsorbs = 0;
if (targetInBuilding && (bldg != null)) {
bldgAbsorbs = bldg.getAbsorbtion(target.getPosition());
}
// Make sure the player knows when his attack causes no damage.
if (hits == 0) {
r = new Report(3365);
r.subject = subjectId;
vPhaseReport.addElement(r);
}
// for each cluster of hits, do a chunk of damage
while (hits > 0) {
int nDamage;
if ((target.getTargetType() == Targetable.TYPE_HEX_TAG)
|| (target.getTargetType() == Targetable.TYPE_BLDG_TAG)) {
int priority = 1;
EquipmentMode mode = (weapon.curMode());
if (mode != null) {
if ("1-shot".equals(mode.getName())) {
priority = 1;
} else if ("2-shot".equals(mode.getName())) {
priority = 2;
} else if ("3-shot".equals(mode.getName())) {
priority = 3;
} else if ("4-shot".equals(mode.getName())) {
priority = 4;
}
}
TagInfo info = new TagInfo(ae.getId(),
target.getTargetType(), target, priority, false);
game.addTagInfo(info);
r = new Report(3390);
r.subject = subjectId;
vPhaseReport.addElement(r);
hits = 0;
}
// targeting a hex for igniting
if ((target.getTargetType() == Targetable.TYPE_HEX_IGNITE)
|| (target.getTargetType() == Targetable.TYPE_BLDG_IGNITE)) {
handleIgnitionDamage(vPhaseReport, bldg, hits);
hits = 0;
}
// targeting a hex for clearing
if (target.getTargetType() == Targetable.TYPE_HEX_CLEAR) {
nDamage = nDamPerHit * hits;
handleClearDamage(vPhaseReport, bldg, nDamage);
hits = 0;
}
// Targeting a building.
if (target.getTargetType() == Targetable.TYPE_BUILDING) {
// The building takes the full brunt of the attack.
nDamage = nDamPerHit * hits;
handleBuildingDamage(vPhaseReport, bldg, nDamage,
target.getPosition());
hits = 0;
}
if (entityTarget != null) {
handleEntityDamage(entityTarget, vPhaseReport, bldg,
hits, nCluster, bldgAbsorbs);
server.creditKill(entityTarget, ae);
hits -= nCluster;
firstHit = false;
}
} // Handle the next cluster.
} else { // We missed, but need to handle special miss cases
// When shooting at a non-infantry unit in a building and the
// shot misses, the building is damaged instead, TW pg 171
int dist = ae.getPosition().distance(target.getPosition());
if (targetInBuilding && !(entityTarget instanceof Infantry)
&& dist == 1) {
r = new Report(6429);
r.indent(2);
r.subject = ae.getId();
r.newlines--;
vPhaseReport.add(r);
int nDamage = nDamPerHit * hits;
// We want to set bSalvo to true to prevent
// handleBuildingDamage from reporting a hit
boolean savedSalvo = bSalvo;
bSalvo = true;
handleBuildingDamage(vPhaseReport, bldg, nDamage,
target.getPosition());
bSalvo = savedSalvo;
hits = 0;
}
}
if (game.getOptions().booleanOption("uac_tworolls")
&& ((wtype.getAmmoType() == AmmoType.T_AC_ULTRA) || (wtype
.getAmmoType() == AmmoType.T_AC_ULTRA_THB))
&& (i == 2)) {
// Jammed weapon doesn't get 2nd shot...
if (isJammed) {
r = new Report(9905);
r.indent();
r.subject = ae.getId();
vPhaseReport.addElement(r);
i--;
} else { // If not jammed, it gets the second shot...
r = new Report(9900);
r.indent();
r.subject = ae.getId();
vPhaseReport.addElement(r);
if (null != ae.getCrew()) {
roll = ae.getCrew().rollGunnerySkill();
} else {
roll = Compute.d6(2);
}
}
}
}
Report.addNewline(vPhaseReport);
return false;
}
/**
* Calculate the damage per hit.
*
* @return an <code>int</code> representing the damage dealt per hit.
*/
protected int calcDamagePerHit() {
double toReturn = wtype.getDamage(nRange);
// Check for BA vs BA weapon effectiveness, if option is on
if (game.getOptions().booleanOption("tacops_ba_vs_ba")
&& (target instanceof BattleArmor)) {
// We don't check to make sure the attacker is BA, as most weapons
// will return their normal damage.
toReturn = Compute.directBlowBADamage(toReturn,
wtype.getBADamageClass(), (BattleArmor) target);
}
// we default to direct fire weapons for anti-infantry damage
if ((target instanceof Infantry) && !(target instanceof BattleArmor)) {
toReturn = Compute.directBlowInfantryDamage(toReturn,
bDirect ? toHit.getMoS() / 3 : 0,
wtype.getInfantryDamageClass(),
((Infantry) target).isMechanized());
} else if (bDirect) {
toReturn = Math.min(toReturn + (toHit.getMoS() / 3), toReturn * 2);
}
if (bGlancing) {
// Round up glancing blows against conventional infantry
if ((target instanceof Infantry)
&& !(target instanceof BattleArmor)) {
toReturn = (int) Math.ceil(toReturn / 2.0);
} else {
toReturn = (int) Math.floor(toReturn / 2.0);
}
}
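// Optional extended-range rules: damage drops to 75% beyond long
// range and to 50% beyond extreme range.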
if (game.getOptions().booleanOption(OptionsConstants.AC_TAC_OPS_RANGE)
&& (nRange > wtype.getRanges(weapon)[RangeType.RANGE_LONG])) {
toReturn = (int) Math.floor(toReturn * .75);
}
if (game.getOptions().booleanOption(
OptionsConstants.AC_TAC_OPS_LOS_RANGE)
&& (nRange > wtype.getRanges(weapon)[RangeType.RANGE_EXTREME])) {
toReturn = (int) Math.floor(toReturn * .5);
}
return (int) toReturn;
}
/**
* Calculate the attack value based on range
*
* @return an <code>int</code> representing the attack value at that range.
*/
protected int calcAttackValue() {
int av = 0;
// if we have a ground firing unit, then AV should not be determined by
// aero range brackets
if (!ae.isAirborne() || game.getOptions().booleanOption("uac_tworolls")) {
if (usesClusterTable()) {
// for cluster weapons just use the short range AV
av = wtype.getRoundShortAV();
} else {
// otherwise just use the full weapon damage by range
av = wtype.getDamage(nRange);
}
} else {
// we have an airborne attacker, so we need to use aero range
// brackets
int range = RangeType.rangeBracket(nRange, wtype.getATRanges(),
true, false);
if (range == WeaponType.RANGE_SHORT) {
av = wtype.getRoundShortAV();
} else if (range == WeaponType.RANGE_MED) {
av = wtype.getRoundMedAV();
} else if (range == WeaponType.RANGE_LONG) {
av = wtype.getRoundLongAV();
} else if (range == WeaponType.RANGE_EXT) {
av = wtype.getRoundExtAV();
}
}
if (bDirect) {
av = Math.min(av + (toHit.getMoS() / 3), av * 2);
}
if (bGlancing) {
av = (int) Math.floor(av / 2.0);
}
av = (int) Math.floor(getBracketingMultiplier() * av);
return av;
}
/**
* Adjustment factor on attack value for fighter squadrons.
*/
protected double getBracketingMultiplier() {
double mult = 1.0;
if (wtype.hasModes() && weapon.curMode().equals("Bracket 80%")) {
mult = 0.8;
}
if (wtype.hasModes() && weapon.curMode().equals("Bracket 60%")) {
mult = 0.6;
}
if (wtype.hasModes() && weapon.curMode().equals("Bracket 40%")) {
mult = 0.4;
}
return mult;
}
/*
* Return the capital missile target for criticals. Zero if not a capital
* missile
*/
protected int getCapMisMod() {
return 0;
}
/**
* Handles potential damage to partial cover that absorbs a shot. The
* <code>ToHitData</code> is checked to see whether there is any damageable cover
* to be hit, and if so which cover gets hit (there are two possibilities in
* some cases, such as 75% partial cover). The method then takes care of
* assigning damage to the cover. Buildings are damaged directly, while
* dropships call the <code>handleEntityDamage</code> method.
*
* @param entityTarget
* The target Entity
* @param vPhaseReport
* @param pcHit
* @param bldg
* @param hits
* @param nCluster
* @param bldgAbsorbs
*/
protected void handlePartialCoverHit(Entity entityTarget,
Vector<Report> vPhaseReport, HitData pcHit, Building bldg,
int hits, int nCluster, int bldgAbsorbs) {
// Report the hit and table description, if this isn't part of a salvo
Report r;
if (!bSalvo) {
r = new Report(3405);
r.subject = subjectId;
r.add(toHit.getTableDesc());
r.add(entityTarget.getLocationAbbr(pcHit));
vPhaseReport.addElement(r);
if (weapon.isRapidfire()) {
r.newlines = 0;
r = new Report(3225);
r.subject = subjectId;
r.add(numRapidFireHits * 3);
vPhaseReport.add(r);
}
} else {
// Keep spacing consistent
Report.addNewline(vPhaseReport);
}
r = new Report(3460);
r.subject = subjectId;
r.add(entityTarget.getShortName());
r.add(entityTarget.getLocationAbbr(pcHit));
r.indent(2);
vPhaseReport.addElement(r);
int damagableCoverType = LosEffects.DAMAGABLE_COVER_NONE;
Building coverBuilding = null;
Entity coverDropship = null;
Coords coverLoc = null;
// Determine if there is primary and secondary cover,
// and then determine which one gets hit
if ((toHit.getCover() == LosEffects.COVER_75RIGHT || toHit.getCover() == LosEffects.COVER_75LEFT)
||
// 75% cover has a primary and secondary
(toHit.getCover() == LosEffects.COVER_HORIZONTAL && toHit
.getDamagableCoverTypeSecondary() != LosEffects.DAMAGABLE_COVER_NONE)) {
// Horizontal cover provided by two 25%s, so primary and secondary
int hitLoc = pcHit.getLocation();
// Primary stores the left side, from the perspective of the
// attacker
if (hitLoc == Mech.LOC_RLEG || hitLoc == Mech.LOC_RT
|| hitLoc == Mech.LOC_RARM) {
// Left side is primary
damagableCoverType = toHit.getDamagableCoverTypePrimary();
coverBuilding = toHit.getCoverBuildingPrimary();
coverDropship = toHit.getCoverDropshipPrimary();
coverLoc = toHit.getCoverLocPrimary();
} else {
// If not left side, then right side, which is secondary
damagableCoverType = toHit.getDamagableCoverTypeSecondary();
coverBuilding = toHit.getCoverBuildingSecondary();
coverDropship = toHit.getCoverDropshipSecondary();
coverLoc = toHit.getCoverLocSecondary();
}
} else { // Only primary cover exists
damagableCoverType = toHit.getDamagableCoverTypePrimary();
coverBuilding = toHit.getCoverBuildingPrimary();
coverDropship = toHit.getCoverDropshipPrimary();
coverLoc = toHit.getCoverLocPrimary();
}
// Check if we need to damage the cover that absorbed the hit.
if (damagableCoverType == LosEffects.DAMAGABLE_COVER_DROPSHIP) {
// We need to adjust some state and then restore it later
// This allows us to make a call to handleEntityDamage
ToHitData savedToHit = toHit;
int savedAimingMode = waa.getAimingMode();
waa.setAimingMode(IAimingModes.AIM_MODE_NONE);
int savedAimedLocation = waa.getAimedLocation();
waa.setAimedLocation(Entity.LOC_NONE);
boolean savedSalvo = bSalvo;
bSalvo = true;
// Create new toHitData
toHit = new ToHitData(0, "", ToHitData.HIT_NORMAL,
Compute.targetSideTable(ae, coverDropship));
// Report cover was damaged
int sizeBefore = vPhaseReport.size();
r = new Report(3465);
r.subject = subjectId;
r.add(coverDropship.getShortName());
vPhaseReport.add(r);
// Damage the dropship
handleEntityDamage(coverDropship, vPhaseReport, bldg, hits,
nCluster, bldgAbsorbs);
// Remove a blank line in the report list
if (vPhaseReport.elementAt(sizeBefore).newlines > 0)
vPhaseReport.elementAt(sizeBefore).newlines--;
// Indent reports related to the damage absorption
while (sizeBefore < vPhaseReport.size()) {
vPhaseReport.elementAt(sizeBefore).indent(3);
sizeBefore++;
}
// Restore state
toHit = savedToHit;
waa.setAimingMode(savedAimingMode);
waa.setAimedLocation(savedAimedLocation);
bSalvo = savedSalvo;
// Damage a building that blocked a shot
} else if (damagableCoverType == LosEffects.DAMAGABLE_COVER_BUILDING) {
// Normal damage
int nDamage = nDamPerHit * Math.min(nCluster, hits);
Vector<Report> buildingReport = server.damageBuilding(
coverBuilding, nDamage, " blocks the shot and takes ",
coverLoc);
for (Report report : buildingReport) {
report.subject = subjectId;
report.indent();
}
vPhaseReport.addAll(buildingReport);
// Damage any infantry in the building.
Vector<Report> infantryReport = server.damageInfantryIn(
coverBuilding, nDamage, coverLoc,
wtype.getInfantryDamageClass());
for (Report report : infantryReport) {
report.indent(2);
}
vPhaseReport.addAll(infantryReport);
}
missed = true;
}
/**
* Handle damage against an entity, called once per hit by default.
*
* @param entityTarget
* @param vPhaseReport
* @param bldg
* @param hits
* @param nCluster
* @param bldgAbsorbs
*/
protected void handleEntityDamage(Entity entityTarget,
Vector<Report> vPhaseReport, Building bldg, int hits, int nCluster,
int bldgAbsorbs) {
int nDamage;
missed = false;
hit = entityTarget.rollHitLocation(toHit.getHitTable(),
toHit.getSideTable(), waa.getAimedLocation(),
waa.getAimingMode(), toHit.getCover());
hit.setGeneralDamageType(generalDamageType);
hit.setCapital(wtype.isCapital());
hit.setBoxCars(roll == 12);
hit.setCapMisCritMod(getCapMisMod());
hit.setFirstHit(firstHit);
hit.setAttackerId(getAttackerId());
if (weapon.isWeaponGroup()) {
hit.setSingleAV(attackValue);
}
boolean isIndirect = wtype.hasModes()
&& weapon.curMode().equals("Indirect");
if (!isIndirect
&& entityTarget.removePartialCoverHits(hit.getLocation(), toHit
.getCover(), Compute.targetSideTable(ae, entityTarget,
weapon.getCalledShot().getCall()))) {
// Weapon strikes Partial Cover.
handlePartialCoverHit(entityTarget, vPhaseReport, hit, bldg, hits,
nCluster, bldgAbsorbs);
return;
}
if (!bSalvo) {
// Each hit in the salvo gets its own hit location.
Report r = new Report(3405);
r.subject = subjectId;
r.add(toHit.getTableDesc());
r.add(entityTarget.getLocationAbbr(hit));
vPhaseReport.addElement(r);
if (weapon.isRapidfire()) {
r.newlines = 0;
r = new Report(3225);
r.subject = subjectId;
r.add(numRapidFireHits * 3);
vPhaseReport.add(r);
}
} else {
Report.addNewline(vPhaseReport);
}
// for non-salvo shots, report that the aimed shot was successful
// before applying damage
if (hit.hitAimedLocation() && !bSalvo) {
Report r = new Report(3410);
r.subject = subjectId;
vPhaseReport.lastElement().newlines = 0;
vPhaseReport.addElement(r);
}
// Resolve damage normally.
nDamage = nDamPerHit * Math.min(nCluster, hits);
if (bDirect) {
hit.makeDirectBlow(toHit.getMoS() / 3);
}
// A building may be damaged, even if the squad is not.
if (bldgAbsorbs > 0) {
int toBldg = Math.min(bldgAbsorbs, nDamage);
nDamage -= toBldg;
Report.addNewline(vPhaseReport);
Vector<Report> buildingReport = server.damageBuilding(bldg, toBldg,
entityTarget.getPosition());
for (Report report : buildingReport) {
report.subject = subjectId;
}
vPhaseReport.addAll(buildingReport);
}
nDamage = checkTerrain(nDamage, entityTarget, vPhaseReport);
nDamage = checkLI(nDamage, entityTarget, vPhaseReport);
// some buildings scale remaining damage that is not absorbed
// TODO: this isn't quite right for Castles Brian
if (null != bldg) {
nDamage = (int) Math.floor(bldg.getDamageToScale() * nDamage);
}
// A building may absorb the entire shot.
if (nDamage == 0) {
Report r = new Report(3415);
r.subject = subjectId;
r.indent(2);
r.addDesc(entityTarget);
vPhaseReport.addElement(r);
missed = true;
} else {
if (bGlancing) {
hit.makeGlancingBlow();
}
vPhaseReport
.addAll(server.damageEntity(entityTarget, hit, nDamage,
false, ae.getSwarmTargetId() == entityTarget
.getId() ? DamageType.IGNORE_PASSENGER
: damageType, false, false, throughFront,
underWater, nukeS2S));
// for salvo shots, report that the aimed location was hit after
// applying damage, because the location is first reported when
// dealing the damage
if (hit.hitAimedLocation() && bSalvo) {
Report r = new Report(3410);
r.subject = subjectId;
vPhaseReport.lastElement().newlines = 0;
vPhaseReport.addElement(r);
}
}
// If a BA squad is shooting at infantry, damage may be random and need
// to be rerolled for the next hit (if any) from the same attack.
if ((ae instanceof BattleArmor) && (target instanceof Infantry)) {
nDamPerHit = calcDamagePerHit();
}
}
protected void handleIgnitionDamage(Vector<Report> vPhaseReport,
Building bldg, int hits) {
if (!bSalvo) {
// hits!
Report r = new Report(2270);
r.subject = subjectId;
r.newlines = 0;
vPhaseReport.addElement(r);
}
TargetRoll tn = new TargetRoll(wtype.getFireTN(), wtype.getName());
if (tn.getValue() != TargetRoll.IMPOSSIBLE) {
Report.addNewline(vPhaseReport);
server.tryIgniteHex(target.getPosition(), subjectId, false, false,
tn, true, -1, vPhaseReport);
}
}
protected void handleClearDamage(Vector<Report> vPhaseReport,
Building bldg, int nDamage) {
handleClearDamage(vPhaseReport, bldg, nDamage, true);
}
protected void handleClearDamage(Vector<Report> vPhaseReport,
Building bldg, int nDamage, boolean hitReport) {
if (!bSalvo && hitReport) {
// hits!
Report r = new Report(2270);
r.subject = subjectId;
vPhaseReport.addElement(r);
}
// report that damage was "applied" to terrain
Report r = new Report(3385);
r.indent(2);
r.subject = subjectId;
r.add(nDamage);
vPhaseReport.addElement(r);
// Any clear attempt can result in accidental ignition, even
// weapons that can't normally start fires. that's weird.
// Buildings can't be accidentally ignited.
// TODO: change this for TacOps - now you roll another 2d6 first and on
// a 5 or less
// you do a normal ignition as though for intentional fires
if ((bldg != null)
&& server.tryIgniteHex(target.getPosition(), subjectId, false,
false,
new TargetRoll(wtype.getFireTN(), wtype.getName()), 5,
vPhaseReport)) {
return;
}
Vector<Report> clearReports = server.tryClearHex(target.getPosition(),
nDamage, subjectId);
if (clearReports.size() > 0) {
vPhaseReport.lastElement().newlines = 0;
}
vPhaseReport.addAll(clearReports);
return;
}
protected void handleBuildingDamage(Vector<Report> vPhaseReport,
Building bldg, int nDamage, Coords coords) {
if (!bSalvo) {
// hits!
Report r = new Report(3390);
r.subject = subjectId;
vPhaseReport.addElement(r);
}
Report.addNewline(vPhaseReport);
Vector<Report> buildingReport = server.damageBuilding(bldg, nDamage,
coords);
for (Report report : buildingReport) {
report.subject = subjectId;
}
vPhaseReport.addAll(buildingReport);
// Damage any infantry in the hex.
vPhaseReport.addAll(server.damageInfantryIn(bldg, nDamage, coords,
wtype.getInfantryDamageClass()));
}
protected boolean allShotsHit() {
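// Shots at adjacent buildings/ignite targets and hex-clearing
// attacks always hit, as do attacks on capital-scale non-fighter
// entities under the "aero_sanity" option.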
if ((((target.getTargetType() == Targetable.TYPE_BLDG_IGNITE) || (target
.getTargetType() == Targetable.TYPE_BUILDING)) && (nRange <= 1))
|| (target.getTargetType() == Targetable.TYPE_HEX_CLEAR)) {
return true;
}
if (game.getOptions().booleanOption("aero_sanity")
&& target.getTargetType() == Targetable.TYPE_ENTITY
&& ((Entity) target).isCapitalScale()
&& !((Entity) target).isCapitalFighter()) {
return true;
}
return false;
}
protected void reportMiss(Vector<Report> vPhaseReport) {
reportMiss(vPhaseReport, false);
}
protected void reportMiss(Vector<Report> vPhaseReport, boolean singleNewline) {
// Report the miss.
Report r = new Report(3220);
r.subject = subjectId;
if (singleNewline) {
r.newlines = 1;
} else {
r.newlines = 2;
}
vPhaseReport.addElement(r);
}
protected WeaponHandler() {
// deserialization only
}
// Among other things, basically a refactored Server#preTreatWeaponAttack
public WeaponHandler(ToHitData t, WeaponAttackAction w, IGame g, Server s) {
damageType = DamageType.NONE;
toHit = t;
waa = w;
game = g;
ae = game.getEntity(waa.getEntityId());
weapon = ae.getEquipment(waa.getWeaponId());
wtype = (WeaponType) weapon.getType();
typeName = wtype.getInternalName();
target = game.getTarget(waa.getTargetType(), waa.getTargetId());
server = s;
subjectId = getAttackerId();
nRange = Compute.effectiveDistance(game, ae, target);
if (target instanceof Mech) {
throughFront = Compute.isThroughFrontHex(game, ae.getPosition(),
(Entity) target);
} else {
throughFront = true;
}
// is this an underwater attack on a surface naval vessel?
underWater = toHit.getHitTable() == ToHitData.HIT_UNDERWATER;
if (null != ae.getCrew()) {
roll = ae.getCrew().rollGunnerySkill();
} else {
roll = Compute.d6(2);
}
nweapons = getNumberWeapons();
nweaponsHit = 1;
// use ammo when creating this, so it works when shooting the last shot
// a unit has and we fire multiple weapons of the same type
// TODO: need to adjust this for cases where not all the ammo is
// available
for (int i = 0; i < nweapons; i++) {
useAmmo();
}
if (target instanceof Entity) {
((Entity) target).addAttackedByThisTurn(w.getEntityId());
}
}
protected void useAmmo() {
if (wtype.hasFlag(WeaponType.F_ONESHOT)) {
weapon.setFired(true);
}
setDone();
}
protected void setDone() {
weapon.setUsedThisRound(true);
}
protected void addHeat() {
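// Weapon-bay units heat up once per firing arc (unless the
// "heat_by_bay" option is set); everything else adds the weapon's
// own heat, charged only once across a strafing run.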
if (!(toHit.getValue() == TargetRoll.IMPOSSIBLE)) {
if (ae.usesWeaponBays() && !game.getOptions().booleanOption("heat_by_bay")) {
int loc = weapon.getLocation();
boolean rearMount = weapon.isRearMounted();
if (!ae.hasArcFired(loc, rearMount)) {
ae.heatBuildup += ae.getHeatInArc(loc, rearMount);
ae.setArcFired(loc, rearMount);
}
} else {
if (!isStrafing() || isStrafingFirstShot()) {
ae.heatBuildup += (weapon.getCurrentHeat());
}
}
}
}
/**
* Does this attack use the cluster hit table? Necessary to determine how
* Aero damage should be applied.
*/
protected boolean usesClusterTable() {
return false;
}
/**
* special resolution, like minefields and arty
*
* @param vPhaseReport - a <code>Vector</code> containing the phase report
* @param entityTarget - the <code>Entity</code> targeted, or <code>null</code>, if
* no Entity targeted
* @return true when done with processing, false when not
*/
protected boolean specialResolution(Vector<Report> vPhaseReport,
Entity entityTarget) {
return false;
}
public boolean announcedEntityFiring() {
return announcedEntityFiring;
}
public void setAnnouncedEntityFiring(boolean announcedEntityFiring) {
this.announcedEntityFiring = announcedEntityFiring;
}
public WeaponAttackAction getWaa() {
return waa;
}
public int checkTerrain(int nDamage, Entity entityTarget,
Vector<Report> vPhaseReport) {
boolean isAboveWoods = ((entityTarget != null) && ((entityTarget
.relHeight() >= 2) || (entityTarget.isAirborne())));
if (game.getOptions().booleanOption("tacops_woods_cover")
&& !isAboveWoods
&& (game.getBoard().getHex(entityTarget.getPosition())
.containsTerrain(Terrains.WOODS) || game.getBoard()
.getHex(entityTarget.getPosition())
.containsTerrain(Terrains.JUNGLE))
&& !(entityTarget.getSwarmAttackerId() == ae.getId())) {
ITerrain woodHex = game.getBoard()
.getHex(entityTarget.getPosition())
.getTerrain(Terrains.WOODS);
ITerrain jungleHex = game.getBoard()
.getHex(entityTarget.getPosition())
.getTerrain(Terrains.JUNGLE);
int treeAbsorbs = 0;
String hexType = "";
if (woodHex != null) {
treeAbsorbs = woodHex.getLevel() * 2;
hexType = "wooded";
} else if (jungleHex != null) {
treeAbsorbs = jungleHex.getLevel() * 2;
hexType = "jungle";
}
// Do not absorb more damage than the weapon can do.
treeAbsorbs = Math.min(nDamage, treeAbsorbs);<|fim▁hole|> Report.addNewline(vPhaseReport);
Report terrainReport = new Report(6427);
terrainReport.subject = entityTarget.getId();
terrainReport.add(hexType);
terrainReport.add(treeAbsorbs);
terrainReport.indent(2);
terrainReport.newlines = 0;
vPhaseReport.add(terrainReport);
}
return nDamage;
}
/**
* Check for Laser Inhibiting smoke clouds
*/
public int checkLI(int nDamage, Entity entityTarget,
Vector<Report> vPhaseReport) {
weapon = ae.getEquipment(waa.getWeaponId());
wtype = (WeaponType) weapon.getType();
ArrayList<Coords> coords = Coords.intervening(ae.getPosition(),
entityTarget.getPosition());
int refrac = 0;
double travel = 0;
double range = ae.getPosition().distance(target.getPosition());
double atkLev = ae.relHeight();
double tarLev = entityTarget.relHeight();
double levDif = Math.abs(atkLev - tarLev);
String hexType = "LASER inhibiting smoke";
// loop through all intervening coords.
// If this is ever moved to Compute.java, the java.util.ArrayList
// import here can be removed.
for (Coords curr : coords) {
// skip hexes not actually on the board
if (!game.getBoard().contains(curr)) {
continue;
}
ITerrain smokeHex = game.getBoard().getHex(curr)
.getTerrain(Terrains.SMOKE);
if (game.getBoard().getHex(curr).containsTerrain(Terrains.SMOKE)
&& wtype.hasFlag(WeaponType.F_ENERGY)
&& ((smokeHex.getLevel() == SmokeCloud.SMOKE_LI_LIGHT) || (smokeHex
.getLevel() == SmokeCloud.SMOKE_LI_HEAVY))) {
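// Estimate the height of the shot over this hex by linear
// interpolation between attacker and target elevations; the +2
// below assumes the smoke cloud rises two levels above the hex.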
int levit = ((game.getBoard().getHex(curr).getLevel()) + 2);
// does the smoke block the shot's flight path at this hex?
if ((tarLev > atkLev)
&& (levit >= ((travel * (levDif / range)) + atkLev))) {
refrac++;
} else if ((atkLev > tarLev)
&& (levit >= (((range - travel) * (levDif / range)) + tarLev))) {
refrac++;
} else if ((atkLev == tarLev) && (levit >= 0)) {
refrac++;
}
travel++;
}
}
if (refrac != 0) {
// Damage is reduced by 2 for each intervening smoke hex.
refrac = (refrac * 2);
// Do not absorb more damage than the weapon can do. (Are both of
// these really necessary?)
refrac = Math.min(nDamage, refrac);
nDamage = Math.max(0, (nDamage - refrac));
Report.addNewline(vPhaseReport);
Report fogReport = new Report(6427);
fogReport.subject = entityTarget.getId();
fogReport.add(hexType);
fogReport.add(refrac);
fogReport.indent(2);
fogReport.newlines = 0;
vPhaseReport.add(fogReport);
}
return nDamage;
}
protected boolean canDoDirectBlowDamage() {
return true;
}
/**
* Insert any additional attacks that should occur before this attack
*/
protected void insertAttacks(IGame.Phase phase, Vector<Report> vPhaseReport) {
return;
}
/**
* @return the number of weapons of this type firing (for squadron weapon
* groups)
*/
protected int getNumberWeapons() {
return weapon.getNWeapons();
}
/**
* Restores the equipment from the name
*/
public void restore() {
if (typeName == null) {
typeName = wtype.getName();
} else {
wtype = (WeaponType) EquipmentType.get(typeName);
}
if (wtype == null) {
System.err
.println("WeaponHandler.restore: could not restore equipment type \""
+ typeName + "\"");
}
}
protected int getClusterModifiers(boolean clusterRangePenalty) {
int nMissilesModifier = nSalvoBonus;
int[] ranges = wtype.getRanges(weapon);
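// Optional cluster-hit range modifier: +1 at point-blank range,
// unchanged out to medium range, -1 beyond that.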
if (clusterRangePenalty && game.getOptions().booleanOption("tacops_clusterhitpen")) {
if (nRange <= 1) {
nMissilesModifier += 1;
} else if (nRange <= ranges[RangeType.RANGE_MEDIUM]) {
nMissilesModifier += 0;
} else {
nMissilesModifier -= 1;
}
}
if (game.getOptions().booleanOption(OptionsConstants.AC_TAC_OPS_RANGE)
&& (nRange > ranges[RangeType.RANGE_LONG])) {
nMissilesModifier -= 2;
}
if (game.getOptions().booleanOption(OptionsConstants.AC_TAC_OPS_LOS_RANGE)
&& (nRange > ranges[RangeType.RANGE_EXTREME])) {
nMissilesModifier -= 3;
}
if (bGlancing) {
nMissilesModifier -= 4;
}
if (bDirect) {
nMissilesModifier += (toHit.getMoS() / 3) * 2;
}
if (game.getPlanetaryConditions().hasEMI()) {
nMissilesModifier -= 2;
}
if (null != ae.getCrew()) {
if (ae.getCrew().getOptions().booleanOption("sandblaster")
&& ae.getCrew().getOptions().stringOption("weapon_specialist")
.equals(wtype.getName())) {
if (nRange > ranges[RangeType.RANGE_MEDIUM]) {
nMissilesModifier += 2;
} else if (nRange > ranges[RangeType.RANGE_SHORT]) {
nMissilesModifier += 3;
} else {
nMissilesModifier += 4;
}
} else if (ae.getCrew().getOptions().booleanOption("cluster_master")) {
nMissilesModifier += 2;
} else if (ae.getCrew().getOptions().booleanOption("cluster_hitter")) {
nMissilesModifier += 1;
}
}
return nMissilesModifier;
}
public boolean isStrafing() {
return isStrafing;
}
public void setStrafing(boolean isStrafing) {
this.isStrafing = isStrafing;
}
public boolean isStrafingFirstShot() {
return isStrafingFirstShot;
}
public void setStrafingFirstShot(boolean isStrafingFirstShot) {
this.isStrafingFirstShot = isStrafingFirstShot;
}
}<|fim▁end|> |
nDamage = Math.max(0, nDamage - treeAbsorbs);
server.tryClearHex(entityTarget.getPosition(), treeAbsorbs,
ae.getId()); |
<|file_name|>0079_auto_20160620_1418.py<|end_file_name|><|fim▁begin|><|fim▁hole|>from django.db import models, migrations
def sector_validation(apps, schema_editor):
""" Remove sector from RSR validation set """
ProjectEditorValidation = apps.get_model('rsr', 'ProjectEditorValidation')
sector_validators = ['rsr_sector', 'rsr_sector.sector_code', 'rsr_sector.vocabulary']
for v in sector_validators:
validation = ProjectEditorValidation.objects.filter(validation_set_id=1, validation__exact=v)
if validation:
validation.delete()
def undo_sector_validation(apps, schema_editor):
""" Remove sector from RSR validation set """
ProjectEditorValidation = apps.get_model('rsr', 'ProjectEditorValidation')
sector_validators = ['rsr_sector', 'rsr_sector.sector_code', 'rsr_sector.vocabulary']
for v in sector_validators:
ProjectEditorValidation.objects.get_or_create(validation=v, action=1, validation_set_id=1)
class Migration(migrations.Migration):
dependencies = [
('rsr', '0078_auto_20160613_1428'),
]
operations = [
migrations.RunPython(sector_validation, undo_sector_validation),
]<|fim▁end|> | # -*- coding: utf-8 -*-
|
<|file_name|>test_module_onboarding.py<|end_file_name|><|fim▁begin|><|fim▁hole|># Copyright (c) 2020, Frappe Technologies and Contributors
# License: MIT. See LICENSE
# import frappe
import unittest
class TestModuleOnboarding(unittest.TestCase):
pass<|fim▁end|> | # -*- coding: utf-8 -*- |
<|file_name|>GlobalIOThread.cpp<|end_file_name|><|fim▁begin|>/*
Copyright_License {
XCSoar Glide Computer - http://www.xcsoar.org/
Copyright (C) 2000-2015 The XCSoar Project
A detailed list of copyright holders can be found in the file "AUTHORS".
This program is free software; you can redistribute it and/or
modify it under the terms of the GNU General Public License
as published by the Free Software Foundation; either version 2
of the License, or (at your option) any later version.
This program is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
You should have received a copy of the GNU General Public License
along with this program; if not, write to the Free Software
Foundation, Inc., 59 Temple Place - Suite 330, Boston, MA 02111-1307, USA.
}
*/
#include "GlobalIOThread.hpp"
#include "IOThread.hpp"
IOThread *io_thread;
void
InitialiseIOThread()
{
assert(io_thread == NULL);
io_thread = new IOThread();
io_thread->Start();
}
void
DeinitialiseIOThread()
{
io_thread->Stop();<|fim▁hole|>}<|fim▁end|> | delete io_thread;
io_thread = nullptr; |
<|file_name|>text.py<|end_file_name|><|fim▁begin|>"""
This module implements the TextResponse class which adds encoding handling and
discovering (through HTTP headers) to base Response class.
See documentation in docs/topics/request-response.rst
"""
from w3lib.encoding import html_to_unicode, resolve_encoding, \
html_body_declared_encoding, http_content_type_encoding
from scrapy.http.response import Response
from scrapy.utils.python import memoizemethod_noargs
class TextResponse(Response):
_DEFAULT_ENCODING = 'ascii'
def __init__(self, *args, **kwargs):
self._encoding = kwargs.pop('encoding', None)
self._cached_benc = None
self._cached_ubody = None
super(TextResponse, self).__init__(*args, **kwargs)
def _set_url(self, url):
if isinstance(url, unicode):
if self.encoding is None:
raise TypeError('Cannot convert unicode url - %s has no encoding' %
type(self).__name__)
self._url = url.encode(self.encoding)
else:
super(TextResponse, self)._set_url(url)
def _set_body(self, body):
self._body = ''
if isinstance(body, unicode):
if self.encoding is None:
raise TypeError('Cannot convert unicode body - %s has no encoding' %
type(self).__name__)
self._body = body.encode(self._encoding)
else:
super(TextResponse, self)._set_body(body)
def replace(self, *args, **kwargs):
kwargs.setdefault('encoding', self.encoding)
return Response.replace(self, *args, **kwargs)
@property
def encoding(self):
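# Resolution order: explicit constructor argument, HTTP Content-Type
# header, encoding declared in the body itself, then auto-detection
# from the body bytes.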
return self._declared_encoding() or self._body_inferred_encoding()
def _declared_encoding(self):
return self._encoding or self._headers_encoding() \
or self._body_declared_encoding()
<|fim▁hole|> """Return body as unicode"""
# check for self.encoding before _cached_ubody just in case
# _body_inferred_encoding is called
benc = self.encoding
if self._cached_ubody is None:
charset = 'charset=%s' % benc
self._cached_ubody = html_to_unicode(charset, self.body)[1]
return self._cached_ubody
@memoizemethod_noargs
def _headers_encoding(self):
content_type = self.headers.get('Content-Type')
return http_content_type_encoding(content_type)
def _body_inferred_encoding(self):
if self._cached_benc is None:
content_type = self.headers.get('Content-Type')
benc, ubody = html_to_unicode(content_type, self.body, \
auto_detect_fun=self._auto_detect_fun, \
default_encoding=self._DEFAULT_ENCODING)
self._cached_benc = benc
self._cached_ubody = ubody
return self._cached_benc
def _auto_detect_fun(self, text):
for enc in (self._DEFAULT_ENCODING, 'utf-8', 'cp1252'):
try:
text.decode(enc)
except UnicodeError:
continue
return resolve_encoding(enc)
@memoizemethod_noargs
def _body_declared_encoding(self):
return html_body_declared_encoding(self.body)<|fim▁end|> | def body_as_unicode(self): |
<|file_name|>test_pass_alter_op_layout.py<|end_file_name|><|fim▁begin|># Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
"""Test alter op layout pass"""
import tvm
from tvm import relay
from tvm.relay.op import register_alter_op_layout
from tvm.relay import transform, analysis
def run_opt_pass(expr, passes):
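    """Run the given pass (or list of passes) on expr; return the main function, or its body if expr was not a Function."""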
passes = passes if isinstance(passes, list) else [passes]
mod = relay.Module.from_expr(expr)
seq = transform.Sequential(passes)
with transform.PassContext(opt_level=3):
mod = seq(mod)
entry = mod["main"]
return entry if isinstance(expr, relay.Function) else entry.body
def test_alter_op():
"""Test directly replacing an operator with a new one"""
def before():
x = relay.var("x", shape=(1, 64, 56, 56))
weight = relay.var('weight', shape=(64, 64, 3, 3))
y = relay.nn.conv2d(x, weight,
channels=64,
kernel_size=(3, 3),
padding=(1, 1))
y = relay.nn.relu(y)
y = relay.Function([x, weight], y)
return y
@register_alter_op_layout("nn.conv2d", level=100)
def alter_conv2d(attrs, inputs, tinfos):
data, weight = inputs
weight = relay.multiply(weight, relay.const(2.0, "float32"))
return relay.nn.conv2d(data, weight, **attrs)
def expected():
x = relay.var("x", shape=(1, 64, 56, 56))
weight = relay.var('weight', shape=(64, 64, 3, 3))
y = relay.nn.conv2d(x, relay.multiply(weight, relay.const(2.0, "float32")),
channels=64,
kernel_size=(3, 3),
padding=(1, 1))
y = relay.nn.relu(y)
y = relay.Function([x, weight], y)
return y
a = before()
a = run_opt_pass(a, transform.AlterOpLayout())
b = run_opt_pass(expected(), transform.InferType())
assert analysis.alpha_equal(a, b), "Actual = \n" + str(a)
def test_alter_return_none():
"""Test doing nothing by returning 'None' """
def before():
x = relay.var("x", shape=(1, 64, 56, 56))
y = relay.nn.global_max_pool2d(x)
y = relay.Function([x], y)
return y
called = [False]
@register_alter_op_layout("nn.global_max_pool2d", level=101)
def alter_conv2d(attrs, inputs, tinfos):
called[0] = True
return None
a = before()
a = run_opt_pass(a, transform.AlterOpLayout())
b = before()
b = run_opt_pass(b, transform.InferType())
assert analysis.alpha_equal(a, b), "Actual = \n" + str(a)
assert(called[0])
def test_alter_layout():
"""Test alternating the layout of a conv2d.
The layout of broadcast operators and the weight should be changed accordingly.
"""
def before():
x = relay.var("x", shape=(1, 64, 56, 56))
bias = relay.var("bias")
weight = relay.var("weight")
y = relay.nn.conv2d(x, weight, channels=64, kernel_size=(3, 3), padding=(1, 1))
y = relay.nn.bias_add(y, bias)
# a useless tuple, which will be eliminated
y = relay.Tuple([y])[0]
y = relay.nn.relu(y)
y = relay.nn.max_pool2d(y, pool_size=(2, 2))
y = relay.cast(y, 'int32')
y = relay.nn.batch_flatten(y)
y = relay.Function(analysis.free_vars(y), y)
return y
@register_alter_op_layout("nn.conv2d", level=102)
def alter_conv2d(attrs, inputs, tinfos):
data, weight = inputs
new_attrs = dict(attrs)
new_attrs['data_layout'] = 'NCHW16c'
new_attrs['kernel_layout'] = 'OIHW16i'
return relay.nn.conv2d(data, weight, **new_attrs)
def expected():
x = relay.var("x", shape=(1, 64, 56, 56))
bias = relay.var("bias", shape=(64,))
weight = relay.var("weight", shape=(64, 64, 3, 3))
y = relay.layout_transform(x, "NCHW", "NCHW16c")
w = relay.layout_transform(weight, "OIHW", "OIHW16i")
y = relay.nn.conv2d(y, w,
channels=64,
kernel_size=(3, 3),
padding=(1, 1),
kernel_layout="OIHW16i",
data_layout="NCHW16c")
b = relay.expand_dims(bias, axis=1, num_newaxis=2)
b = relay.layout_transform(b, "CHW", "CHW16c")
y = relay.add(y, b)
y = relay.nn.relu(y)
y = relay.nn.max_pool2d(y, pool_size=(2, 2), layout="NCHW16c")
y = relay.cast(y, 'int32')
y = relay.layout_transform(y, "NCHW16c", "NCHW")
y = relay.nn.batch_flatten(y)
y = relay.Function(analysis.free_vars(y), y)
return y
a = before()
a = run_opt_pass(a, [transform.CanonicalizeOps(),
transform.AlterOpLayout()])
b = expected()
b = run_opt_pass(b, transform.InferType())
assert analysis.alpha_equal(a, b), "Actual = \n" + str(a)
def test_alter_layout_dual_path():
"""
    Test altering the layout with two outputs.
    One path continues to use the new layout while the other falls back to the old layout.
"""
def before():
x = relay.var("x", shape=(1, 64, 56, 56))
weight1 = relay.var('weight1')
weight2 = relay.var('weight2')
y = relay.nn.conv2d(x, weight1,
channels=32,
kernel_size=(3, 3),
padding=(1, 1))
y = relay.nn.relu(y)
y1 = relay.nn.conv2d(y, weight2,
channels=32,
kernel_size=(3, 3),
padding=(1, 1))
y1 = relay.nn.relu(y1)
y2 = relay.nn.batch_flatten(y)
ret = relay.Tuple([y1, y2])
y = relay.Function(analysis.free_vars(ret), ret)
return y
@register_alter_op_layout("nn.conv2d", level=103)
def alter_conv2d(attrs, inputs, tinfos):
data, weight = inputs
new_attrs = dict(attrs)
new_attrs['data_layout'] = 'NCHW16c'
return relay.nn.conv2d(data, weight, **new_attrs)
def expected():
x = relay.var("x", shape=(1, 64, 56, 56))
weight1 = relay.var('weight1')
weight2 = relay.var('weight2')
y = relay.layout_transform(x, "NCHW", "NCHW16c")
y = relay.nn.conv2d(y, weight1,
channels=32,
kernel_size=(3, 3),
padding=(1, 1),
data_layout="NCHW16c")
y = relay.nn.relu(y)
y1 = relay.nn.conv2d(y, weight2,
channels=32,
kernel_size=(3, 3),
padding=(1, 1),
data_layout='NCHW16c')
y1 = relay.nn.relu(y1)
y1 = relay.layout_transform(y1, "NCHW16c", "NCHW")
y2 = relay.layout_transform(y, "NCHW16c", "NCHW")
y2 = relay.nn.batch_flatten(y2)
ret = relay.Tuple([y1, y2])
y = relay.Function(analysis.free_vars(ret), ret)
return y
a = before()
a = run_opt_pass(a, transform.AlterOpLayout())
b = expected()
b = run_opt_pass(b, transform.InferType())
assert analysis.alpha_equal(a, b), "Actual = \n" + str(a)
def test_alter_layout_resnet():
"""Test alternating the layout of a residual block
This also tests the elimination of duplicated transformation.
If a same transformation applies to a same node twice, only one transformation will be created.
"""
def before():
x = relay.var("x", shape=(1, 64, 56, 56))
weight1 = relay.var('weight1')
weight2 = relay.var('weight2')
y = relay.nn.conv2d(x, weight1,
channels=32,
kernel_size=(3, 3),
padding=(1, 1))
y = relay.nn.relu(y)
y2 = relay.nn.conv2d(x, weight2,
channels=32,
kernel_size=(1, 1))
y2 = relay.nn.relu(y2)
y = y + y2
y = relay.nn.global_max_pool2d(y)
return relay.Function(analysis.free_vars(y), y)
@register_alter_op_layout("nn.conv2d", level=104)
def alter_conv2d(attrs, inputs, tinfos):
data, weight = inputs
new_attrs = dict(attrs)
new_attrs['data_layout'] = 'NCHW16c'
return relay.nn.conv2d(data, weight, **new_attrs)
def expected():
x = relay.var("x", shape=(1, 64, 56, 56))
weight1 = relay.var('weight1')
weight2 = relay.var('weight2')
x = relay.layout_transform(x, "NCHW", "NCHW16c")
y = relay.nn.conv2d(x, weight1,
channels=32,
kernel_size=(3, 3),
padding=(1, 1),
data_layout="NCHW16c")
y = relay.nn.relu(y)
y2 = relay.nn.conv2d(x, weight2,
channels=32,
kernel_size=(1, 1),
data_layout='NCHW16c')
y2 = relay.nn.relu(y2)
y = y + y2
y = relay.nn.global_max_pool2d(y, layout="NCHW16c")
y = relay.layout_transform(y, "NCHW16c", "NCHW")
return relay.Function(analysis.free_vars(y), y)
a = before()
a = run_opt_pass(a, transform.AlterOpLayout())
b = expected()
b = run_opt_pass(b, transform.InferType())
assert analysis.alpha_equal(a, b), "Actual = \n" + str(a)
def test_alter_layout_broadcast_op():
"""Test boradcast operators """
def before():
x = relay.var("x", shape=(1, 64, 56, 56))
bias = relay.var("bias", shape=(64,))
scale = relay.var("scale", shape=(64, 1, 1))
weight = relay.var("weight")
y = relay.nn.conv2d(x, weight, channels=64, kernel_size=(3, 3), padding=(1, 1))
y = relay.nn.bias_add(y, bias) # test broadcasting to lhs
y = relay.multiply(scale, y) # test broadcasting to rhs
y = relay.Function(analysis.free_vars(y), y)
return y
@register_alter_op_layout("nn.conv2d", level=105)
def alter_conv2d(attrs, inputs, tinfos):
data, weight = inputs
new_attrs = dict(attrs)
new_attrs['data_layout'] = 'NCHW16c'
return relay.nn.conv2d(data, weight, **new_attrs)
def expected():
x = relay.var("x", shape=(1, 64, 56, 56))
bias = relay.var("bias", shape=(64,))
scale = relay.var("scale", shape=(64, 1, 1))
weight = relay.var("weight")
x = relay.layout_transform(x, "NCHW", "NCHW16c")
bias = relay.expand_dims(bias, 1, 2)
bias = relay.layout_transform(bias, "CHW", "CHW16c")
scale = relay.layout_transform(scale, "CHW", "CHW16c")
y = relay.nn.conv2d(x, weight, channels=64, kernel_size=(3, 3), padding=(1, 1),
data_layout="NCHW16c")
y = relay.add(y, bias) # test broadcasting to lhs
y = relay.multiply(scale, y) # test broadcasting to rhs
y = relay.layout_transform(y, "NCHW16c", "NCHW")
y = relay.Function(analysis.free_vars(y), y)
return y
a = before()
a = run_opt_pass(a, [transform.CanonicalizeOps(),
transform.AlterOpLayout()])
b = expected()
b = run_opt_pass(b, transform.InferType())
assert analysis.alpha_equal(a, b), "Actual = \n" + str(a)
def test_alter_layout_scalar():
"""Test alternating the layout of a conv2d.
The layout of broadcast operators and the weight should be changed accordingly.
"""
def before():
x = relay.var("x", shape=(1, 64, 56, 56))
weight = relay.var("weight")
y = relay.nn.conv2d(x, weight, channels=64, kernel_size=(3, 3), padding=(1, 1))
y = relay.add(y, relay.const(1, "float32"))
y = relay.Function(analysis.free_vars(y), y)
return y
@register_alter_op_layout("nn.conv2d", level=106)
def alter_conv2d(attrs, inputs, tinfos):
data, weight = inputs
new_attrs = dict(attrs)
new_attrs['data_layout'] = 'NCHW16c'
return relay.nn.conv2d(data, weight, **new_attrs)
def expected():
x = relay.var("x", shape=(1, 64, 56, 56))
w = relay.var("weight")
y = relay.layout_transform(x, "NCHW", "NCHW16c")
y = relay.nn.conv2d(y, w,
channels=64,
kernel_size=(3, 3),
padding=(1, 1),
data_layout="NCHW16c")
y = relay.add(y, relay.const(1.0, "float32"))
y = relay.layout_transform(y, "NCHW16c", "NCHW")
y = relay.Function(analysis.free_vars(y), y)
return y
a = before()
a = run_opt_pass(a, [transform.CanonicalizeOps(),
transform.AlterOpLayout()])
b = expected()
b = run_opt_pass(b, transform.InferType())
assert analysis.alpha_equal(a, b), "Actual = \n" + str(a)
def test_alter_layout_concatenate():
""" """
def before():
x = relay.var("x", shape=(1, 64, 56, 56))
weight1 = relay.var('weight1')
weight2 = relay.var('weight2')
y = relay.nn.conv2d(x, weight1,
channels=32,
kernel_size=(3, 3),
padding=(1, 1))
y1 = relay.nn.conv2d(y, weight2,
channels=32,
kernel_size=(3, 3),
padding=(1, 1))
ret = relay.concatenate([y, y1], axis=1)
y = relay.Function(analysis.free_vars(ret), ret)
return y
@register_alter_op_layout("nn.conv2d", level=107)
def alter_conv2d(attrs, inputs, tinfos):
data, weight = inputs
new_attrs = dict(attrs)
new_attrs['data_layout'] = 'NCHW16c'
return relay.nn.conv2d(data, weight, **new_attrs)
def expected():
x = relay.var("x", shape=(1, 64, 56, 56))
weight1 = relay.var('weight1')
weight2 = relay.var('weight2')
y = relay.layout_transform(x, "NCHW", "NCHW16c")<|fim▁hole|> data_layout="NCHW16c")
y1 = relay.nn.conv2d(y, weight2,
channels=32,
kernel_size=(3, 3),
padding=(1, 1),
data_layout='NCHW16c')
ret = relay.concatenate([y, y1], axis=1)
ret = relay.layout_transform(ret, "NCHW16c", "NCHW")
y = relay.Function(analysis.free_vars(ret), ret)
return y
a = before()
a = run_opt_pass(a, transform.AlterOpLayout())
b = expected()
b = run_opt_pass(b, transform.InferType())
assert analysis.alpha_equal(a, b), "Actual = \n" + str(a)
def test_alter_layout_nchw_upsampling_op():
    """Test upsampling operators"""
def before():
x = relay.var("x", shape=(1, 32, 28, 28))
weight = relay.var('weight', shape=(32, 32, 3, 3))
y = relay.nn.conv2d(x, weight, channels=32, kernel_size=(3, 3), padding=(1, 1))
y = relay.nn.upsampling(y, scale=2)
y = relay.nn.avg_pool2d(y, pool_size=(2, 2), strides=(2, 2))
y = relay.Function(analysis.free_vars(y), y)
return y
@register_alter_op_layout("nn.conv2d", level=108)
def alter_conv2d(attrs, inputs, tinfos):
data, weight = inputs
new_attrs = dict(attrs)
new_attrs['data_layout'] = 'NCHW16c'
return relay.nn.conv2d(data, weight, **new_attrs)
def expected():
x = relay.var("x", shape=(1, 32, 28, 28))
weight = relay.var("weight")
x = relay.layout_transform(x, "NCHW", "NCHW16c")
y = relay.nn.conv2d(x, weight, channels=32, kernel_size=(3, 3), padding=(1, 1),
data_layout="NCHW16c")
y = relay.nn.upsampling(y, scale=2, layout="NCHW16c")
y = relay.nn.avg_pool2d(y, pool_size=(2, 2), strides=(2, 2), layout='NCHW16c')
y = relay.layout_transform(y, "NCHW16c", "NCHW")
y = relay.Function(analysis.free_vars(y), y)
return y
a = before()
a = run_opt_pass(a, [transform.CanonicalizeOps(),
transform.AlterOpLayout()])
b = expected()
b = run_opt_pass(b, transform.InferType())
assert analysis.alpha_equal(a, b), "Actual = \n" + str(a)
def test_alter_layout_strided_slice():
"""Test rewriting strided_slice during alter_iop_layout"""
def before():
x = relay.var("x", shape=(1, 32, 28, 28))
weight = relay.var('weight', shape=(32, 32, 3, 3))
y = relay.nn.conv2d(x, weight, channels=32, kernel_size=(3, 3), padding=(1, 1))
y = relay.strided_slice(y, begin=[0, 16], end=[None, None])
y = relay.Function(analysis.free_vars(y), y)
return y
@register_alter_op_layout("nn.conv2d", level=109)
def alter_conv2d(attrs, inputs, tinfos):
data, weight = inputs
new_attrs = dict(attrs)
new_attrs['data_layout'] = 'NCHW4c'
return relay.nn.conv2d(data, weight, **new_attrs)
def expected():
x = relay.var("x", shape=(1, 32, 28, 28))
weight = relay.var("weight")
x = relay.layout_transform(x, "NCHW", "NCHW4c")
y = relay.nn.conv2d(x, weight, channels=32, kernel_size=(3, 3), padding=(1, 1),
data_layout="NCHW4c")
y = relay.strided_slice(y, begin=[0, 4], end=[None, 8])
y = relay.layout_transform(y, "NCHW4c", "NCHW")
y = relay.Function(analysis.free_vars(y), y)
return y
a = before()
a = run_opt_pass(a, [transform.CanonicalizeOps(),
transform.AlterOpLayout()])
b = expected()
b = run_opt_pass(b, transform.InferType())
assert analysis.alpha_equal(a, b), "Actual = \n" + str(a)
def test_alter_layout_depthwise_conv2d():
"""Test depthwise_conv2d operator"""
def before():
x = relay.var("x", shape=(1, 32, 56, 56))
w = relay.var("w", shape=(32, 1, 3, 3))
y = relay.nn.conv2d(x, w, padding=(1, 1), channels=32, kernel_size=(3, 3), groups=32)
y = relay.Function(analysis.free_vars(y), y)
return y
import topi
@register_alter_op_layout("nn.conv2d", level=110)
def alter_conv2d(attrs, inputs, tinfos):
with tvm.target.create("llvm"):
return topi.nn.conv2d_alter_layout(attrs, inputs, tinfos, relay)
def expected():
x = relay.var("x", shape=(1, 32, 56, 56))
w = relay.var("w", shape=(32, 1, 3, 3))
x = relay.layout_transform(x, "NCHW", "NCHW8c")
w = relay.layout_transform(w, "OIHW", "OIHW1i8o")
y = relay.nn.contrib_depthwise_conv2d_nchwc(x, w, padding=(1, 1), channels=32, kernel_size=(3, 3),
groups=32, data_layout="NCHW8c", kernel_layout="OIHW1i8o",
out_layout="NCHW8c")
y = relay.layout_transform(y, "NCHW8c", "NCHW")
y = relay.Function(analysis.free_vars(y), y)
return y
a = before()
a = run_opt_pass(a, [transform.CanonicalizeOps(),
transform.AlterOpLayout()])
b = expected()
b = run_opt_pass(b, transform.InferType())
assert(analysis.alpha_equal(a, b))
def test_alter_layout_prelu():
"""Test PRelu operator"""
def before():
x = relay.var("x", shape=(1, 64, 56, 56))
weight = relay.var("weight")
alpha = relay.var("alpha", relay.IncompleteType())
y = relay.nn.conv2d(x, weight, channels=64, kernel_size=(3, 3), padding=(1, 1))
y = relay.nn.prelu(y, alpha)
y = relay.Function(analysis.free_vars(y), y)
return y
@register_alter_op_layout("nn.conv2d", level=111)
def alter_conv2d(attrs, inputs, tinfos):
data, weight = inputs
new_attrs = dict(attrs)
new_attrs['data_layout'] = 'NCHW16c'
return relay.nn.conv2d(data, weight, **new_attrs)
def expected():
x = relay.var("x", shape=(1, 64, 56, 56))
w = relay.var("weight")
alpha = relay.var("alpha", relay.IncompleteType())
y = relay.layout_transform(x, "NCHW", "NCHW16c")
y = relay.nn.conv2d(y, w,
channels=64,
kernel_size=(3, 3),
padding=(1, 1),
data_layout="NCHW16c")
y = relay.layout_transform(y, "NCHW16c", "NCHW")
y = relay.nn.prelu(y, alpha)
y = relay.Function(analysis.free_vars(y), y)
return y
a = before()
a = run_opt_pass(a, [transform.CanonicalizeOps(), transform.AlterOpLayout()])
b = expected()
b = run_opt_pass(b, transform.InferType())
assert(analysis.alpha_equal(a, b))
if __name__ == "__main__":
test_alter_op()
test_alter_return_none()
test_alter_layout()
test_alter_layout_dual_path()
test_alter_layout_resnet()
test_alter_layout_broadcast_op()
test_alter_layout_scalar()
test_alter_layout_concatenate()
    test_alter_layout_nchw_upsampling_op()
test_alter_layout_strided_slice()
test_alter_layout_depthwise_conv2d()
test_alter_layout_prelu()<|fim▁end|> | y = relay.nn.conv2d(y, weight1,
channels=32,
kernel_size=(3, 3),
padding=(1, 1), |
<|file_name|>buttonwidget.cpp<|end_file_name|><|fim▁begin|>/****************************************************************************
**
** Copyright (C) 2013 Digia Plc and/or its subsidiary(-ies).
** Contact: http://www.qt-project.org/legal
**
** This file is part of the documentation of the Qt Toolkit.
**
** $QT_BEGIN_LICENSE:BSD$
** You may use this file under the terms of the BSD license as follows:
**
** "Redistribution and use in source and binary forms, with or without
** modification, are permitted provided that the following conditions are
** met:
** * Redistributions of source code must retain the above copyright
** notice, this list of conditions and the following disclaimer.
** * Redistributions in binary form must reproduce the above copyright
** notice, this list of conditions and the following disclaimer in
** the documentation and/or other materials provided with the
** distribution.
** * Neither the name of Digia Plc and its Subsidiary(-ies) nor the names
** of its contributors may be used to endorse or promote products derived
** from this software without specific prior written permission.
**
**
** THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
** "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
** LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
** A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
** OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
** SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
** LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
** DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
** THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
** (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
** OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE."
**
** $QT_END_LICENSE$
**
****************************************************************************/
#include <QtGui>
#include "buttonwidget.h"
//! [0]
ButtonWidget::ButtonWidget(QStringList texts, QWidget *parent)
: QWidget(parent)
{
signalMapper = new QSignalMapper(this);
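    // A single QSignalMapper forwards every button's clicked() signal while
    // remembering which button fired, keyed by the text set via setMapping().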
QGridLayout *gridLayout = new QGridLayout;
for (int i = 0; i < texts.size(); ++i) {
QPushButton *button = new QPushButton(texts[i]);
connect(button, SIGNAL(clicked()), signalMapper, SLOT(map()));
//! [0] //! [1]
signalMapper->setMapping(button, texts[i]);
gridLayout->addWidget(button, i / 3, i % 3);
}
<|fim▁hole|> this, SIGNAL(clicked(QString)));
setLayout(gridLayout);
}
//! [2]<|fim▁end|> | connect(signalMapper, SIGNAL(mapped(QString)),
//! [1] //! [2] |
<|file_name|>daemon_windows.go<|end_file_name|><|fim▁begin|>package daemon
import (
"encoding/json"
"errors"
"fmt"
"os"
"path/filepath"
"runtime"
"strings"
"github.com/Sirupsen/logrus"
"github.com/docker/docker/container"
"github.com/docker/docker/daemon/graphdriver"
"github.com/docker/docker/dockerversion"
"github.com/docker/docker/image"
"github.com/docker/docker/layer"
"github.com/docker/docker/reference"
containertypes "github.com/docker/engine-api/types/container"
// register the windows graph driver
"github.com/docker/docker/daemon/graphdriver/windows"
"github.com/docker/docker/pkg/idtools"
"github.com/docker/docker/pkg/system"
"github.com/docker/libnetwork"
blkiodev "github.com/opencontainers/runc/libcontainer/configs"
)
const (
defaultVirtualSwitch = "Virtual Switch"
platformSupported = true
windowsMinCPUShares = 1
windowsMaxCPUShares = 10000
)
func getBlkioWeightDevices(config *containertypes.HostConfig) ([]*blkiodev.WeightDevice, error) {
return nil, nil
}
func parseSecurityOpt(container *container.Container, config *containertypes.HostConfig) error {
return nil
}
func getBlkioReadIOpsDevices(config *containertypes.HostConfig) ([]*blkiodev.ThrottleDevice, error) {
return nil, nil
}
func getBlkioWriteIOpsDevices(config *containertypes.HostConfig) ([]*blkiodev.ThrottleDevice, error) {
return nil, nil
}
func getBlkioReadBpsDevices(config *containertypes.HostConfig) ([]*blkiodev.ThrottleDevice, error) {
return nil, nil
}
func getBlkioWriteBpsDevices(config *containertypes.HostConfig) ([]*blkiodev.ThrottleDevice, error) {
return nil, nil
}
func setupInitLayer(initLayer string, rootUID, rootGID int) error {
return nil
}
func checkKernel() error {
return nil
}
// adaptContainerSettings is called during container creation to modify any
// settings necessary in the HostConfig structure.
func (daemon *Daemon) adaptContainerSettings(hostConfig *containertypes.HostConfig, adjustCPUShares bool) error {
if hostConfig == nil {
return nil
}
if hostConfig.CPUShares < 0 {
logrus.Warnf("Changing requested CPUShares of %d to minimum allowed of %d", hostConfig.CPUShares, windowsMinCPUShares)
hostConfig.CPUShares = windowsMinCPUShares
} else if hostConfig.CPUShares > windowsMaxCPUShares {
logrus.Warnf("Changing requested CPUShares of %d to maximum allowed of %d", hostConfig.CPUShares, windowsMaxCPUShares)
hostConfig.CPUShares = windowsMaxCPUShares
}
return nil
}
// verifyPlatformContainerSettings performs platform-specific validation of the
// hostconfig and config structures.
func verifyPlatformContainerSettings(daemon *Daemon, hostConfig *containertypes.HostConfig, config *containertypes.Config) ([]string, error) {
return nil, nil
}
// verifyDaemonSettings performs validation of daemon config struct
func verifyDaemonSettings(config *Config) error {
return nil
}
// checkSystem validates platform-specific requirements
func checkSystem() error {
// Validate the OS version. Note that docker.exe must be manifested for this
// call to return the correct version.
osv, err := system.GetOSVersion()
if err != nil {
return err
}
if osv.MajorVersion < 10 {
return fmt.Errorf("This version of Windows does not support the docker daemon")
}
if osv.Build < 10586 {
return fmt.Errorf("The Windows daemon requires Windows Server 2016 Technical Preview 4, build 10586 or later")
}
return nil
}
// configureKernelSecuritySupport configures and validate security support for the kernel
func configureKernelSecuritySupport(config *Config, driverName string) error {
return nil
}
// configureMaxThreads sets the Go runtime max threads threshold
func configureMaxThreads(config *Config) error {
return nil
}
func isBridgeNetworkDisabled(config *Config) bool {
return false
}
func (daemon *Daemon) initNetworkController(config *Config) (libnetwork.NetworkController, error) {
// Set the name of the virtual switch if not specified by -b on daemon start
if config.bridgeConfig.VirtualSwitchName == "" {
config.bridgeConfig.VirtualSwitchName = defaultVirtualSwitch
}
return nil, nil
}
// registerLinks sets up links between containers and writes the
// configuration out for persistence. As of Windows TP4, links are not supported.
func (daemon *Daemon) registerLinks(container *container.Container, hostConfig *containertypes.HostConfig) error {
return nil
}
func (daemon *Daemon) cleanupMounts() error {
return nil
}
func setupRemappedRoot(config *Config) ([]idtools.IDMap, []idtools.IDMap, error) {
return nil, nil, nil
}
func setupDaemonRoot(config *Config, rootDir string, rootUID, rootGID int) error {
config.Root = rootDir
	// Create the root directory if it doesn't exist
if err := system.MkdirAll(config.Root, 0700); err != nil && !os.IsExist(err) {
return err
}
return nil
}
// conditionalMountOnStart is a platform specific helper function during the
// container start to call mount.
func (daemon *Daemon) conditionalMountOnStart(container *container.Container) error {
// We do not mount if a Hyper-V container
if !container.HostConfig.Isolation.IsHyperV() {
if err := daemon.Mount(container); err != nil {
return err
}
}
return nil
}
// conditionalUnmountOnCleanup is a platform specific helper function called
// during the cleanup of a container to unmount.
func (daemon *Daemon) conditionalUnmountOnCleanup(container *container.Container) {
// We do not unmount if a Hyper-V container
if !container.HostConfig.Isolation.IsHyperV() {
daemon.Unmount(container)
}
}
func restoreCustomImage(is image.Store, ls layer.Store, rs reference.Store) error {
type graphDriverStore interface {
GraphDriver() graphdriver.Driver
}
gds, ok := ls.(graphDriverStore)
if !ok {
return nil
}
driver := gds.GraphDriver()
wd, ok := driver.(*windows.Driver)
if !ok {
return nil
}
imageInfos, err := wd.GetCustomImageInfos()
if err != nil {
return err
}
// Convert imageData to valid image configuration
for i := range imageInfos {
name := strings.ToLower(imageInfos[i].Name)
type registrar interface {
RegisterDiffID(graphID string, size int64) (layer.Layer, error)
}
r, ok := ls.(registrar)
if !ok {
return errors.New("Layerstore doesn't support RegisterDiffID")
}
if _, err := r.RegisterDiffID(imageInfos[i].ID, imageInfos[i].Size); err != nil {
return err
}
// layer is intentionally not released
rootFS := image.NewRootFS()
rootFS.BaseLayer = filepath.Base(imageInfos[i].Path)
// Create history for base layer
config, err := json.Marshal(&image.Image{
V1Image: image.V1Image{
DockerVersion: dockerversion.Version,
Architecture: runtime.GOARCH,
OS: runtime.GOOS,
Created: imageInfos[i].CreatedTime,
},
RootFS: rootFS,
History: []image.History{},
})
<|fim▁hole|> }
ref, err := reference.WithTag(named, imageInfos[i].Version)
if err != nil {
return err
}
id, err := is.Create(config)
if err != nil {
return err
}
if err := rs.AddTag(ref, id, true); err != nil {
return err
}
logrus.Debugf("Registered base layer %s as %s", ref, id)
}
return nil
}<|fim▁end|> | named, err := reference.ParseNamed(name)
if err != nil {
return err |
<|file_name|>ClipboardUtil.java<|end_file_name|><|fim▁begin|>package io.mattw.youtube.commentsuite.util;
import java.awt.*;
import java.awt.datatransfer.Clipboard;
import java.awt.datatransfer.DataFlavor;
import java.awt.datatransfer.StringSelection;<|fim▁hole|>
public class ClipboardUtil {
private Clipboard systemClipboard;
public ClipboardUtil() {
}
private void initSystemClipboard() {
if (systemClipboard == null) {
systemClipboard = Toolkit.getDefaultToolkit().getSystemClipboard();
}
}
private void setSystemClipboard(Clipboard clipboard) {
this.systemClipboard = clipboard;
}
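    /**
     * Returns the current system clipboard contents as a string.
     *
     * @throws UnsupportedFlavorException if the clipboard holds no text
     * @throws IOException if the clipboard data cannot be read
     */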
    public String getClipboard() throws UnsupportedFlavorException, IOException {
        initSystemClipboard();
        return (String) systemClipboard.getData(DataFlavor.stringFlavor);
    }
/**
* Sets clipboard to string value.
*
* @param string text to set clipboard as
*/
public void setClipboard(String string) {
initSystemClipboard();
StringSelection selection = new StringSelection(string);
systemClipboard.setContents(selection, selection);
}
/**
* Converts list into a line.separator delimited string and sets to clipboard.
*
* @param list list of objects converted to line separated toString()
*/
public void setClipboard(List<?> list) {
List<String> strList = list.stream().map(Object::toString).collect(Collectors.toList());
setClipboard(strList.stream().collect(Collectors.joining(System.getProperty("line.separator"))));
}
/**
* Coverts object to string value and sets to clipboard.
*
* @param object uses toString() for clipboard
*/
public void setClipboard(Object object) {
setClipboard(object.toString());
}
}<|fim▁end|> | import java.awt.datatransfer.UnsupportedFlavorException;
import java.io.IOException;
import java.util.List;
import java.util.stream.Collectors; |
<|file_name|>json.go<|end_file_name|><|fim▁begin|>/* Copyright 2016-2017 Vector Creations Ltd
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package gomatrixserverlib
import (
"encoding/binary"
"fmt"
"sort"
"unicode/utf8"
"github.com/tidwall/gjson"
)
// CanonicalJSON re-encodes the JSON in a canonical encoding. The encoding is
// the shortest possible encoding using integer values with sorted object keys.
// https://matrix.org/docs/spec/server_server/unstable.html#canonical-json
func CanonicalJSON(input []byte) ([]byte, error) {
if !gjson.Valid(string(input)) {
return nil, fmt.Errorf("invalid json")
}
return CanonicalJSONAssumeValid(input), nil
}
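// A quick illustration of the combined behaviour (hypothetical input):
//
//	canonical, _ := CanonicalJSON([]byte(`{"b": 2, "a": 1}`))
//	// canonical == []byte(`{"a":1,"b":2}`): keys sorted, whitespace removed.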
// CanonicalJSONAssumeValid is the same as CanonicalJSON, but assumes the
// input is valid JSON
func CanonicalJSONAssumeValid(input []byte) []byte {
input = CompactJSON(input, make([]byte, 0, len(input)))
return SortJSON(input, make([]byte, 0, len(input)))
}
// SortJSON reencodes the JSON with the object keys sorted lexicographically
// by codepoint. The input must be valid JSON.
func SortJSON(input, output []byte) []byte {
result := gjson.ParseBytes(input)
RawJSON := RawJSONFromResult(result, input)
return sortJSONValue(result, RawJSON, output)
}
// sortJSONValue takes a gjson.Result and sorts it. inputJSON must be the
// raw JSON bytes that gjson.Result points to.
func sortJSONValue(input gjson.Result, inputJSON, output []byte) []byte {
if input.IsArray() {
return sortJSONArray(input, inputJSON, output)
}
if input.IsObject() {
return sortJSONObject(input, inputJSON, output)
}
	// If it's neither an object nor an array then there is no substructure
	// to sort, so just append the raw bytes.
return append(output, inputJSON...)
}
// sortJSONArray takes a gjson.Result and sorts it, assuming it's an array.
// inputJSON must be the raw JSON bytes that gjson.Result points to.
func sortJSONArray(input gjson.Result, inputJSON, output []byte) []byte {
sep := byte('[')
// Iterate over each value in the array and sort it.
input.ForEach(func(_, value gjson.Result) bool {
output = append(output, sep)
sep = ','
RawJSON := RawJSONFromResult(value, inputJSON)
output = sortJSONValue(value, RawJSON, output)
return true // keep iterating
})
if sep == '[' {
// If sep is still '[' then the array was empty and we never wrote the
// initial '[', so we write it now along with the closing ']'.
output = append(output, '[', ']')
} else {
// Otherwise we end the array by writing a single ']'
output = append(output, ']')
}
return output
}
// sortJSONObject takes a gjson.Result and sorts it, assuming it's an object.
// inputJSON must be the raw JSON bytes that gjson.Result points to.
func sortJSONObject(input gjson.Result, inputJSON, output []byte) []byte {
type entry struct {
key string // The parsed key string
rawKey []byte // The raw, unparsed key JSON string
value gjson.Result
}
var entries []entry
// Iterate over each key/value pair and add it to a slice
// that we can sort
input.ForEach(func(key, value gjson.Result) bool {
entries = append(entries, entry{
key: key.String(),
rawKey: RawJSONFromResult(key, inputJSON),
value: value,
})
return true // keep iterating
})
// Sort the slice based on the *parsed* key
sort.Slice(entries, func(a, b int) bool {
return entries[a].key < entries[b].key
})
sep := byte('{')
for _, entry := range entries {
output = append(output, sep)
sep = ','
// Append the raw unparsed JSON key, *not* the parsed key
output = append(output, entry.rawKey...)
output = append(output, ':')
RawJSON := RawJSONFromResult(entry.value, inputJSON)
output = sortJSONValue(entry.value, RawJSON, output)
}
if sep == '{' {
// If sep is still '{' then the object was empty and we never wrote the
// initial '{', so we write it now along with the closing '}'.
output = append(output, '{', '}')
} else {
// Otherwise we end the object by writing a single '}'
output = append(output, '}')
}
return output
}
// CompactJSON makes the encoded JSON as small as possible by removing
// whitespace and unneeded unicode escapes
func CompactJSON(input, output []byte) []byte {
var i int
for i < len(input) {
c := input[i]
i++
// The valid whitespace characters are all less than or equal to SPACE 0x20.
// The valid non-white characters are all greater than SPACE 0x20.
// So we can check for whitespace by comparing against SPACE 0x20.
if c <= ' ' {
// Skip over whitespace.
continue
}
// Add the non-whitespace character to the output.
output = append(output, c)
if c == '"' {
// We are inside a string.
for i < len(input) {
c = input[i]
i++
// Check if this is an escape sequence.
if c == '\\' {
escape := input[i]
i++
if escape == 'u' {
// If this is a unicode escape then we need to handle it specially
output, i = compactUnicodeEscape(input, output, i)
} else if escape == '/' {
// JSON does not require escaping '/', but allows encoders to escape it as a special case.
// Since the escape isn't required we remove it.
output = append(output, escape)
} else {
				// All other permitted escapes are single character escapes that are already in their shortest form.
output = append(output, '\\', escape)
}
} else {
output = append(output, c)
}
if c == '"' {
break
}
}
}
}
return output
}
// compactUnicodeEscape unpacks a 4 byte unicode escape starting at index.
// If the escape is a surrogate pair then decode the 6 byte \uXXXX escape
// that follows. Returns the output slice and a new input index.
func compactUnicodeEscape(input, output []byte, index int) ([]byte, int) {
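	// ESCAPES maps each control character 0x00-0x1F to its shortest JSON
	// escape letter ('b', 't', 'n', 'f', 'r'); a 'u' entry means the full
	// \u00XX form is still required. HEX maps a nibble to its hex digit.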
const (
ESCAPES = "uuuuuuuubtnufruuuuuuuuuuuuuuuuuu"
HEX = "0123456789ABCDEF"<|fim▁hole|> // If there aren't enough bytes to decode the hex escape then return.
if len(input)-index < 4 {
return output, len(input)
}
// Decode the 4 hex digits.
c := readHexDigits(input[index:])
index += 4
if c < ' ' {
// If the character is less than SPACE 0x20 then it will need escaping.
escape := ESCAPES[c]
output = append(output, '\\', escape)
if escape == 'u' {
output = append(output, '0', '0', byte('0'+(c>>4)), HEX[c&0xF])
}
} else if c == '\\' || c == '"' {
// Otherwise the character only needs escaping if it is a QUOTE '"' or BACKSLASH '\\'.
output = append(output, '\\', byte(c))
} else if c < 0xD800 || c >= 0xE000 {
// If the character isn't a surrogate pair then encoded it directly as UTF-8.
var buffer [4]byte
n := utf8.EncodeRune(buffer[:], rune(c))
output = append(output, buffer[:n]...)
} else {
// Otherwise the escaped character was the first part of a UTF-16 style surrogate pair.
// The next 6 bytes MUST be a '\uXXXX'.
// If there aren't enough bytes to decode the hex escape then return.
if len(input)-index < 6 {
return output, len(input)
}
// Decode the 4 hex digits from the '\uXXXX'.
surrogate := readHexDigits(input[index+2:])
index += 6
// Reconstruct the UCS4 codepoint from the surrogates.
codepoint := 0x10000 + (((c & 0x3FF) << 10) | (surrogate & 0x3FF))
		// Encode the character as UTF-8.
var buffer [4]byte
n := utf8.EncodeRune(buffer[:], rune(codepoint))
output = append(output, buffer[:n]...)
}
return output, index
}
// Read 4 hex digits from the input slice.
// Taken from https://github.com/NegativeMjark/indolentjson-rust/blob/8b959791fe2656a88f189c5d60d153be05fe3deb/src/readhex.rs#L21
func readHexDigits(input []byte) uint32 {
hex := binary.BigEndian.Uint32(input)
// subtract '0'
hex -= 0x30303030
// strip the higher bits, maps 'a' => 'A'
hex &= 0x1F1F1F1F
mask := hex & 0x10101010
// subtract 'A' - 10 - '9' - 9 = 7 from the letters.
hex -= mask >> 1
hex += mask >> 4
// collect the nibbles
hex |= hex >> 4
hex &= 0xFF00FF
hex |= hex >> 8
return hex & 0xFFFF
}
// RawJSONFromResult extracts the raw JSON bytes pointed to by result.
// input must be the json bytes that were used to generate result
func RawJSONFromResult(result gjson.Result, input []byte) (RawJSON []byte) {
// This is lifted from gjson README. Basically, result.Raw is a copy of
// the bytes we want, but its more efficient to take a slice.
// If Index is 0 then for some reason we can't extract it from the original
// JSON bytes.
if result.Index > 0 {
RawJSON = input[result.Index : result.Index+len(result.Raw)]
} else {
RawJSON = []byte(result.Raw)
}
return
}<|fim▁end|> | ) |
<|file_name|>multi.rs<|end_file_name|><|fim▁begin|>// Copyright 2015-2017 Parity Technologies (UK) Ltd.
// This file is part of Parity.
// Parity is free software: you can redistribute it and/or modify
// it under the terms of the GNU General Public License as published by
// the Free Software Foundation, either version 3 of the License, or
// (at your option) any later version.
// Parity is distributed in the hope that it will be useful,
// but WITHOUT ANY WARRANTY; without even the implied warranty of
// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
// GNU General Public License for more details.
// You should have received a copy of the GNU General Public License
// along with Parity. If not, see <http://www.gnu.org/licenses/>.
/// Validator set changing at fork blocks.
use std::collections::BTreeMap;
use std::sync::Weak;
use bigint::hash::H256;
use parking_lot::RwLock;
use util::Address;
use bytes::Bytes;
use ids::BlockId;
use header::{BlockNumber, Header};
use client::EngineClient;
use machine::{AuxiliaryData, Call, EthereumMachine};
use super::{SystemCall, ValidatorSet};
type BlockNumberLookup = Box<Fn(BlockId) -> Result<BlockNumber, String> + Send + Sync + 'static>;
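/// Picks between several inner validator sets by block number: each entry in
/// `sets` takes effect from its key block onwards, selected via the parent
/// block's number.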
pub struct Multi {
sets: BTreeMap<BlockNumber, Box<ValidatorSet>>,
block_number: RwLock<BlockNumberLookup>,
}
impl Multi {
pub fn new(set_map: BTreeMap<BlockNumber, Box<ValidatorSet>>) -> Self {
assert!(set_map.get(&0u64).is_some(), "ValidatorSet has to be specified from block 0.");
Multi {
sets: set_map,
block_number: RwLock::new(Box::new(move |_| Err("No client!".into()))),
}
}
fn correct_set(&self, id: BlockId) -> Option<&ValidatorSet> {
match self.block_number.read()(id).map(|parent_block| self.correct_set_by_number(parent_block)) {
Ok((_, set)) => Some(set),
Err(e) => {
debug!(target: "engine", "ValidatorSet could not be recovered: {}", e);
None
},
}
}
// get correct set by block number, along with block number at which
// this set was activated.
fn correct_set_by_number(&self, parent_block: BlockNumber) -> (BlockNumber, &ValidatorSet) {
let (block, set) = self.sets.iter()
.rev()
.find(|&(block, _)| *block <= parent_block + 1)
.expect("constructor validation ensures that there is at least one validator set for block 0;
block 0 is less than any uint;
qed");
trace!(target: "engine", "Multi ValidatorSet retrieved for block {}.", block);
(*block, &**set)
}
}
impl ValidatorSet for Multi {
fn default_caller(&self, block_id: BlockId) -> Box<Call> {
self.correct_set(block_id).map(|set| set.default_caller(block_id))
.unwrap_or(Box::new(|_, _| Err("No validator set for given ID.".into())))
}
fn on_epoch_begin(&self, _first: bool, header: &Header, call: &mut SystemCall) -> Result<(), ::error::Error> {
let (set_block, set) = self.correct_set_by_number(header.number());
let first = set_block == header.number();
set.on_epoch_begin(first, header, call)
}
fn genesis_epoch_data(&self, header: &Header, call: &Call) -> Result<Vec<u8>, String> {
self.correct_set_by_number(0).1.genesis_epoch_data(header, call)
}
fn is_epoch_end(&self, _first: bool, chain_head: &Header) -> Option<Vec<u8>> {
let (set_block, set) = self.correct_set_by_number(chain_head.number());
let first = set_block == chain_head.number();
set.is_epoch_end(first, chain_head)
}
fn signals_epoch_end(&self, _first: bool, header: &Header, aux: AuxiliaryData)
-> ::engines::EpochChange<EthereumMachine>
{
let (set_block, set) = self.correct_set_by_number(header.number());
let first = set_block == header.number();
set.signals_epoch_end(first, header, aux)
}
fn epoch_set(&self, _first: bool, machine: &EthereumMachine, number: BlockNumber, proof: &[u8]) -> Result<(super::SimpleList, Option<H256>), ::error::Error> {
let (set_block, set) = self.correct_set_by_number(number);
let first = set_block == number;
set.epoch_set(first, machine, number, proof)
}
fn contains_with_caller(&self, bh: &H256, address: &Address, caller: &Call) -> bool {
self.correct_set(BlockId::Hash(*bh))
.map_or(false, |set| set.contains_with_caller(bh, address, caller))
}
fn get_with_caller(&self, bh: &H256, nonce: usize, caller: &Call) -> Address {
self.correct_set(BlockId::Hash(*bh))
.map_or_else(Default::default, |set| set.get_with_caller(bh, nonce, caller))
}
fn count_with_caller(&self, bh: &H256, caller: &Call) -> usize {
self.correct_set(BlockId::Hash(*bh))
.map_or_else(usize::max_value, |set| set.count_with_caller(bh, caller))
}
fn report_malicious(&self, validator: &Address, set_block: BlockNumber, block: BlockNumber, proof: Bytes) {
self.correct_set_by_number(set_block).1.report_malicious(validator, set_block, block, proof);
}
fn report_benign(&self, validator: &Address, set_block: BlockNumber, block: BlockNumber) {
self.correct_set_by_number(set_block).1.report_benign(validator, set_block, block);
}
fn register_client(&self, client: Weak<EngineClient>) {
for set in self.sets.values() {
set.register_client(client.clone());
}
*self.block_number.write() = Box::new(move |id| client
.upgrade()
.ok_or("No client!".into())
.and_then(|c| c.block_number(id).ok_or("Unknown block".into())));
}
}
#[cfg(test)]
mod tests {
use std::sync::Arc;
use std::collections::BTreeMap;
use hash::keccak;
use account_provider::AccountProvider;
use client::BlockChainClient;
use engines::EpochChange;
use engines::validator_set::ValidatorSet;
use ethkey::Secret;
use header::Header;
use miner::MinerService;
use spec::Spec;
use tests::helpers::{generate_dummy_client_with_spec_and_accounts, generate_dummy_client_with_spec_and_data};
use types::ids::BlockId;
use util::*;
use super::Multi;
#[test]
fn uses_current_set() {
let tap = Arc::new(AccountProvider::transient_provider());
let s0: Secret = keccak("0").into();
let v0 = tap.insert_account(s0.clone(), "").unwrap();
let v1 = tap.insert_account(keccak("1").into(), "").unwrap();
let client = generate_dummy_client_with_spec_and_accounts(Spec::new_validator_multi, Some(tap));
client.engine().register_client(Arc::downgrade(&client) as _);
// Make sure txs go through.
client.miner().set_gas_floor_target(1_000_000.into());
// Wrong signer for the first block.
client.miner().set_engine_signer(v1, "".into()).unwrap();
client.transact_contract(Default::default(), Default::default()).unwrap();
::client::EngineClient::update_sealing(&*client);
assert_eq!(client.chain_info().best_block_number, 0);
// Right signer for the first block.
client.miner().set_engine_signer(v0, "".into()).unwrap();
::client::EngineClient::update_sealing(&*client);
assert_eq!(client.chain_info().best_block_number, 1);
// This time v0 is wrong.
client.transact_contract(Default::default(), Default::default()).unwrap();
::client::EngineClient::update_sealing(&*client);
assert_eq!(client.chain_info().best_block_number, 1);
client.miner().set_engine_signer(v1, "".into()).unwrap();
::client::EngineClient::update_sealing(&*client);
assert_eq!(client.chain_info().best_block_number, 2);
// v1 is still good.
client.transact_contract(Default::default(), Default::default()).unwrap();
::client::EngineClient::update_sealing(&*client);
assert_eq!(client.chain_info().best_block_number, 3);<|fim▁hole|> sync_client.engine().register_client(Arc::downgrade(&sync_client) as _);
for i in 1..4 {
sync_client.import_block(client.block(BlockId::Number(i)).unwrap().into_inner()).unwrap();
}
sync_client.flush_queue();
assert_eq!(sync_client.chain_info().best_block_number, 3);
}
#[test]
fn transition_to_fixed_list_instant() {
use super::super::SimpleList;
let mut map: BTreeMap<_, Box<ValidatorSet>> = BTreeMap::new();
let list1: Vec<_> = (0..10).map(|_| Address::random()).collect();
let list2 = {
let mut list = list1.clone();
list.push(Address::random());
list
};
map.insert(0, Box::new(SimpleList::new(list1)));
map.insert(500, Box::new(SimpleList::new(list2)));
let multi = Multi::new(map);
let mut header = Header::new();
header.set_number(499);
match multi.signals_epoch_end(false, &header, Default::default()) {
EpochChange::No => {},
_ => panic!("Expected no epoch signal change."),
}
assert!(multi.is_epoch_end(false, &header).is_none());
header.set_number(500);
match multi.signals_epoch_end(false, &header, Default::default()) {
EpochChange::No => {},
_ => panic!("Expected no epoch signal change."),
}
assert!(multi.is_epoch_end(false, &header).is_some());
}
}<|fim▁end|> |
// Check syncing.
let sync_client = generate_dummy_client_with_spec_and_data(Spec::new_validator_multi, 0, 0, &[]); |
<|file_name|>microworld.js<|end_file_name|><|fim▁begin|>'use strict';
var maxBot = 11;
var mode;
var mw = {};
var mwId;
function getMwId() {
mwId = $.url().segment(4);
}
function isNewMicroworld() {
return ($.url().segment(3) === 'new');
}
function showStatusTableOptions() {
var behaviour_name = $(this).attr('id');
var behaviour_type = $(this).text();
    $(this).closest('.dropdown').find('span#btn_txt').text(behaviour_type + " ");
$("."+behaviour_name).removeClass('hide');
if(behaviour_name == "static_option"){
$(".dynamic_option").addClass('hide');
} else {
$(".static_option").addClass('hide');
}
}
function readyTooltips() {
$('#early-end-tooltip').tooltip();
$('#max-fish-tooltip').tooltip();
$('#available-mystery-tooltip').tooltip();
$('#reported-mystery-tooltip').tooltip();
$('#spawn-factor-tooltip').tooltip();
$('#chance-catch-tooltip').tooltip();
$('#show-fisher-status-tooltip').tooltip();
$('#erratic-tooltip').tooltip();
$('#greed-tooltip').tooltip();
$('#greed-spread-tooltip').tooltip();
$('#trend-tooltip').tooltip();
$('#predictability-tooltip').tooltip();
$('#prob-action-tooltip').tooltip();
$('#attempts-second-tooltip').tooltip();
}
function changeBotRowVisibility() {
var numFishers = parseInt($('#num-fishers').val(), 10);
var numHumans = parseInt($('#num-humans').val(), 10);
if (numFishers < 1) numFishers = 1;
if (numFishers > maxBot + numHumans) {
numFishers = maxBot + numHumans;
}
if (numHumans > numFishers) numHumans = numFishers;
for (var i = 1; i <= numFishers - numHumans; i++) {
$('#bot-' + i + '-row').removeClass('collapse');
}
for (var i = numFishers - numHumans + 1; i <= maxBot; i++) {
$('#bot-' + i + '-row').addClass('collapse');
}
}
function changeGreedUniformity() {
if ($('#uniform-greed').prop('checked') === true) {
var greed = $('#bot-1-greed').val();
for (var i = 2; i <= maxBot; i++) {
$('#bot-' + i + '-greed').val(greed).attr('disabled', true);
}
} else {
for (var i = 2; i <= maxBot; i++) {
$('#bot-' + i + '-greed').attr('disabled', false);
}
}
}
function changeGreedSpreadUniformity() {
if ($('#uniform-greed-spread').prop('checked') === true) {
var greedSpread = $('#bot-1-greed-spread').val();
for (var i = 2; i <= maxBot; i++) {
$('#bot-' + i + '-greed-spread').val(greedSpread).attr('disabled', true);
}
} else {
for (var i = 2; i <= maxBot; i++) {
            $('#bot-' + i + '-greed-spread').attr('disabled', false);
}
}
}
function changeTrendUniformity() {
if ($('#uniform-trend').prop('checked') === true) {
var trend = $('#bot-1-trend').val();
for (var i = 2; i <= maxBot; i++) {
$('#bot-' + i + '-trend').val(trend).attr('disabled', true);
}
} else {
for (var i = 2; i <= maxBot; i++) {
$('#bot-' + i + '-trend').attr('disabled', false);
}
}
}
function changePredictabilityUniformity() {
if ($('#uniform-predictability').prop('checked') === true) {
var predictability = $('#bot-1-predictability').val();
for (var i = 2; i <= maxBot; i++) {
$('#bot-' + i + '-predictability').val(predictability).attr('disabled', true);
}
} else {
for (var i = 2; i <= maxBot; i++) {
$('#bot-' + i + '-predictability').attr('disabled', false);
}
}
}
function changeProbActionUniformity() {
if ($('#uniform-prob-action').prop('checked') === true) {
var probAction = $('#bot-1-prob-action').val();
for (var i = 2; i <= maxBot; i++) {
$('#bot-' + i + '-prob-action').val(probAction).attr('disabled', true);
}
} else {
for (var i = 2; i <= maxBot; i++) {
$('#bot-' + i + '-prob-action').attr('disabled', false);
}
}
}
function changeAttemptsSecondUniformity() {
if ($('#uniform-attempts-second').prop('checked') === true) {
var attemptsSecond = $('#bot-1-attempts-second').val();
for (var i = 2; i <= maxBot; i++) {
$('#bot-' + i + '-attempts-second').val(attemptsSecond).attr('disabled', true);
}
} else {
for (var i = 2; i <= maxBot; i++) {
$('#bot-' + i + '-attempts-second').attr('disabled', false);
}
}
}
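// Validate the form fields; returns an array of error messages, or null when
// everything checks out.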
function validate() {
var errors = [];
if ($('#name').val().length < 1) {
errors.push('The microworld name is missing.');
}
var numFishers = parseInt($('#num-fishers').val(), 10);
if (numFishers < 1) {
errors.push('There must be at least one fisher per simulation');
}
if (numFishers > 12) {
errors.push('The maximum number of fishers per simulation is twelve.');
}
var numHumans = parseInt($('#num-humans').val(), 10);
if (numHumans < 0) {
errors.push('There must be zero or more humans per simulation.');
}
if (numHumans > numFishers) {
errors.push('There cannot be more human fishers than total fishers.');
}
if (parseInt($('#num-seasons').val(), 10) < 1) {
errors.push('There must be at least one season per simulation.');
}
if (parseInt($('#season-duration').val(), 10) < 1) {
errors.push('Seasons must have a duration of at least one second.');
}
if (parseInt($('#initial-delay').val(), 10) < 1) {
errors.push('The initial delay must be at least one second long.');
}
if (parseInt($('#season-delay').val(), 10) < 1) {
errors.push('The delay between seasons must be at least one second.');
}
if (parseFloat($('#fish-value').val()) < 0) {
errors.push('The value per fish cannot be negative');
}
if (parseFloat($('#cost-cast').val()) < 0) {
errors.push('The cost to attempt to fish cannot be negative.');
}
if (parseFloat($('#cost-departure').val()) < 0) {
errors.push('The cost to set sail cannot be negative.');
}
if (parseFloat($('#cost-second').val()) < 0) {
errors.push('The cost per second at sea cannot be negative.');
}
var certainFish = parseInt($('#certain-fish').val(), 10);
if (certainFish < 1) {
errors.push('There must be at least one initial fish.');
}
var availMysteryFish = parseInt($('#available-mystery-fish').val(), 10);
if (availMysteryFish < 0) {
errors.push('The number of available mystery fish cannot be negative');
}
var repMysteryFish = parseInt($('#reported-mystery-fish').val(), 10);
if (repMysteryFish < availMysteryFish) {
        errors.push('The number of reported mystery fish must be equal to or ' +
'greater than the number of actually available mystery fish.');
}
var maxFish = parseInt($('#max-fish').val(), 10);
if (maxFish < certainFish + availMysteryFish) {
        errors.push('The maximum fish capacity must be equal to or greater ' +
'than the sum of certain and available mystery fish.');
}
if (parseFloat($('#spawn-factor').val()) < 0) {
errors.push('The spawn factor cannot be negative.');
}
var chanceCatch = parseFloat($('#chance-catch').val());
if (chanceCatch < 0 || chanceCatch > 1) {
errors.push('The chance of catch must be a number between 0 and 1.');
}
if ($('#preparation-text').val().length < 1) {
errors.push('The preparation text is missing.');
}
if ($('#end-time-text').val().length < 1) {
errors.push('The text for ending on time is missing.');
}
if ($('#end-depletion-text').val().length < 1) {
errors.push('The text for ending on depletion is missing.');
}
for (var i = 1; i <= (numFishers - numHumans); i++) {
if ($('#bot-' + i + '-name').val().length < 1) {
errors.push('Bot ' + i + ' needs a name.');
}
var botGreed = parseFloat($('#bot-' + i + '-greed').val());
if (botGreed < 0 || botGreed > 1) {
errors.push('The greed of bot ' + i + ' must be between 0 and 1.');
}
var botGreedSpread = parseFloat($('#bot-' + i + '-greed-spread').val());
if (botGreedSpread < 0) {
errors.push('The greed spread of bot ' + i + ' must be greater than 0.');
}
if (botGreedSpread > 2 * botGreed) {
errors.push('The greed spread of bot ' + i + ' must be less than twice its greed.');
}
var botProbAction = parseFloat($('#bot-' + i + '-prob-action').val());
if (botProbAction < 0 || botProbAction > 1) {
errors.push('The probability of action of bot ' + i +
' must be between 0 and 1.');
}
var botAttempts = parseFloat($('#bot-' + i + '-attempts-second').val());
if (botAttempts < 1) {
errors.push('The attempts per second of bot ' + i +
' must be between at least 1.');
}
}
if (errors.length === 0) return null;
return errors;
}
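// Collect every form field into a plain object shaped like the server's
// expected microworld parameters.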
function prepareMicroworldObject() {
var mw = {};
mw.name = $('#name').val();
mw.desc = $('#desc').val();
mw.numFishers = $('#num-fishers').val();
mw.numHumans = $('#num-humans').val();
mw.numSeasons = $('#num-seasons').val();
mw.seasonDuration = $('#season-duration').val();
mw.initialDelay = $('#initial-delay').val();
mw.seasonDelay = $('#season-delay').val();
mw.enablePause = $('#enable-pause').prop('checked');
mw.enableEarlyEnd = $('#enable-early-end').prop('checked');
mw.enableTutorial = $('#enable-tutorial').prop('checked');
mw.enableRespawnWarning = $('#change-ocean-colour').prop('checked');
mw.fishValue = $('#fish-value').val();
mw.costCast = $('#cost-cast').val();
mw.costDeparture = $('#cost-departure').val();
mw.costSecond = $('#cost-second').val();
mw.currencySymbol = $('#currency-symbol').val();
mw.certainFish = $('#certain-fish').val();
mw.availableMysteryFish = $('#available-mystery-fish').val();
mw.reportedMysteryFish = $('#reported-mystery-fish').val();
mw.maxFish = $('#max-fish').val();
mw.spawnFactor = $('#spawn-factor').val();
mw.chanceCatch = $('#chance-catch').val();
mw.showFishers = $('#show-fishers').prop('checked');
mw.showFisherNames = $('#show-fisher-names').prop('checked');
mw.showFisherStatus = $('#show-fisher-status').prop('checked');
mw.showNumCaught = $('#show-num-caught').prop('checked');
mw.showFisherBalance = $('#show-fisher-balance').prop('checked');
mw.preparationText = $('#preparation-text').val();
mw.endTimeText = $('#end-time-text').val();
mw.endDepletionText = $('#end-depletion-text').val();
mw.bots = [];
for (var i = 1; i <= mw.numFishers - mw.numHumans; i++) {
var botPrefix = '#bot-' + i + '-';
mw.bots.push({
name: $(botPrefix + 'name').val(),
greed: $(botPrefix + 'greed').val(),
greedSpread: $(botPrefix + 'greed-spread').val(),
trend: $(botPrefix + 'trend').val(),
predictability: $(botPrefix + 'predictability').val(),
probAction: $(botPrefix + 'prob-action').val(),
attemptsSecond: $(botPrefix + 'attempts-second').val()
});
}
mw.oceanOrder = $("input[name=ocean_order]:checked").val();
return mw;
}
function reportErrors(err) {
var errMessage = 'The form has the following errors:\n\n';
for (var i in err) {
errMessage += err[i] + '\n';
}
alert(errMessage);
return;
}
function badMicroworld(jqXHR) {
reportErrors(JSON.parse(jqXHR.responseText).errors);
return;
}
function goodMicroworld() {
location.href = '../dashboard';
}
function createMicroworld() {
var err = validate();
if (err) {
reportErrors(err);
return;
}
var mw = prepareMicroworldObject();
$.ajax({
type: 'POST',
url: '/microworlds',
data: mw,
error: badMicroworld,
success: goodMicroworld<|fim▁hole|> var err = validate();
if (err) {
reportErrors(err);
return;
}
var mw = prepareMicroworldObject();
mw.clone = true;
$.ajax({
type: 'POST',
url: '/microworlds',
data: mw,
error: badMicroworld,
success: goodMicroworld
});
}
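// Persist edits to the server; the optional changeTo argument also switches
// the microworld's status (e.g. to 'active' or 'archived').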
function updateMicroworld(changeTo) {
var err = validate();
if (err) {
reportErrors(err);
return;
}
var mw = prepareMicroworldObject();
if (changeTo) mw.changeTo = changeTo;
$.ajax({
type: 'PUT',
url: '/microworlds/' + mwId,
data: mw,
error: badMicroworld,
success: goodMicroworld
});
}
function saveMicroworld() {
updateMicroworld();
}
function activateMicroworld() {
updateMicroworld('active');
}
function archiveMicroworld() {
updateMicroworld('archived');
}
function deleteMicroworld() {
$.ajax({
type: 'DELETE',
url: '/microworlds/' + mwId,
error: badMicroworld,
success: goodMicroworld
});
}
function populatePage() {
$('#name').val(mw.name);
$('#desc').val(mw.desc);
$('#num-fishers').val(mw.params.numFishers);
$('#num-humans').val(mw.params.numHumans);
$('#num-seasons').val(mw.params.numSeasons);
$('#season-duration').val(mw.params.seasonDuration);
$('#initial-delay').val(mw.params.initialDelay);
$('#season-delay').val(mw.params.seasonDelay);
$('#enable-pause').prop('checked', mw.params.enablePause);
$('#enable-early-end').prop('checked', mw.params.enableEarlyEnd);
$('#enable-tutorial').prop('checked', mw.params.enableTutorial);
$('#change-ocean-colour').prop('checked', mw.params.enableRespawnWarning);
$('#fish-value').val(mw.params.fishValue);
$('#cost-cast').val(mw.params.costCast);
$('#cost-departure').val(mw.params.costDeparture);
$('#cost-second').val(mw.params.costSecond);
$('#currency-symbol').val(mw.params.currencySymbol);
$('#certain-fish').val(mw.params.certainFish);
$('#available-mystery-fish').val(mw.params.availableMysteryFish);
$('#reported-mystery-fish').val(mw.params.reportedMysteryFish);
$('#max-fish').val(mw.params.maxFish);
$('#spawn-factor').val(mw.params.spawnFactor);
$('#chance-catch').val(mw.params.chanceCatch);
$('#preparation-text').val(mw.params.preparationText);
$('#end-time-text').val(mw.params.endTimeText);
$('#end-depletion-text').val(mw.params.endDepletionText);
$('#show-fishers').prop('checked', mw.params.showFishers);
$('#show-fisher-names').prop('checked', mw.params.showFisherNames);
$('#show-fisher-status').prop('checked', mw.params.showFisherStatus);
$('#show-num-caught').prop('checked', mw.params.showNumCaught);
$('#show-fisher-balance').prop('checked', mw.params.showFisherBalance);
$('#uniform-greed').prop('checked', false);
$('#uniform-greed-spread').prop('checked', false);
$('#uniform-trend').prop('checked', false);
$('#uniform-predictability').prop('checked', false);
$('#uniform-prob-action').prop('checked', false);
$('#uniform-attempts-second').prop('checked', false);
for (var i = 1; i <= mw.params.numFishers - mw.params.numHumans; i++) {
var botPrefix = '#bot-' + i + '-';
$(botPrefix + 'name').val(mw.params.bots[i - 1].name);
$(botPrefix + 'greed').val(mw.params.bots[i - 1].greed);
$(botPrefix + 'greed-spread').val(mw.params.bots[i - 1].greedSpread);
$(botPrefix + 'trend').val(mw.params.bots[i - 1].trend);
$(botPrefix + 'predictability').val(mw.params.bots[i - 1].predictability);
$(botPrefix + 'prob-action').val(mw.params.bots[i - 1].probAction);
$(botPrefix + 'attempts-second').val(mw.params.bots[i - 1].attemptsSecond);
}
$("#"+mw.params.oceanOrder).prop('checked', true);
changeBotRowVisibility();
}
function noMicroworld(jqXHR) {
alert(jqXHR.responseText);
}
function gotMicroworld(m) {
mw = m;
mode = mw.status;
populatePage();
prepareControls();
}
function getMicroworld() {
$.ajax({
type: 'GET',
url: '/microworlds/' + mwId,
error: noMicroworld,
success: gotMicroworld
});
}
function noRuns(jqXHR) {
alert(jqXHR.responseText);
}
function gotRuns(r) {
var table = '';
for (var i in r) {
var button = '<button class="btn btn-sm btn-info" type="submit" onclick=location.href=\'/runs/' + r[i]._id +
'?csv=true\'>Download <span class="glyphicon glyphicon-download-alt"></span></button>';
table += '<tr><td><a href="../runs/' + r[i]._id + '">' + moment(r[i].time).format('llll') + '</a></td>' +
            '<td>' + r[i].participants + '</td>' + '<td>' + button + '</td></tr>';
}
$('#microworld-runs-table-rows').html(table);
    // enable or disable the download-all button depending on whether there are any completed runs
    if (r.length === 0) {
$('#download-all-button').attr("disabled", "disabled");
} else {
$('#download-all-button').removeAttr("disabled");
}
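    // Poll the server every 60 seconds so the run list stays fresh without a manual reload.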
setTimeout(getRuns, 60000);
}
function getRuns() {
$.ajax({
type: 'GET',
url: '/runs/?mw=' + mwId,
error: noRuns,
success: gotRuns
});
}
function backToList() {
location.href = '../dashboard';
}
// Makes downloading all runs possible
function initDownloadAll() {
$('#download-all-button').attr("onclick", "location.href='/runs?csv=true&mw="+mwId+"'");
}
function setButtons() {
$('#create').click(createMicroworld);
$('#create-2').click(createMicroworld);
$('#save').click(saveMicroworld);
$('#save-2').click(saveMicroworld);
$('#cancel').click(backToList);
$('#cancel-2').click(backToList);
    $('#clone-confirmed').click(cloneMicroworld);
$('#activate-confirmed').click(activateMicroworld);
$('#archive-confirmed').click(archiveMicroworld);
$('#delete-confirmed').click(deleteMicroworld);
$(".behaviour_group_select").click(showStatusTableOptions);
initDownloadAll();
}
function setOnPageChanges() {
$('#num-fishers').on('change', changeBotRowVisibility);
$('#num-humans').on('change', changeBotRowVisibility);
$('#uniform-greed').on('change', changeGreedUniformity);
$('#bot-1-greed').on('input', changeGreedUniformity);
$('#uniform-greed-spread').on('change', changeGreedSpreadUniformity);
$('#bot-1-greed-spread').on('input', changeGreedSpreadUniformity);
$('#uniform-trend').on('change', changeTrendUniformity);
$('#bot-1-trend').on('change', changeTrendUniformity);
$('#uniform-predictability').on('change', changePredictabilityUniformity);
$('#bot-1-predictability').on('change', changePredictabilityUniformity);
$('#uniform-prob-action').on('change', changeProbActionUniformity);
$('#bot-1-prob-action').on('input', changeProbActionUniformity);
$('#uniform-attempts-second').on('change', changeAttemptsSecondUniformity);
$('#bot-1-attempts-second').on('input', changeAttemptsSecondUniformity);
}
function loadTexts() {
$('#preparation-text').val(prepText);
$('#end-time-text').val(endTimeText);
$('#end-depletion-text').val(endDepletedText);
}
function prepareControls() {
$('#microworld-panel-body-text').text(panelBody[mode]);
$('#microworld-panel-2-body-text').text(panelBody[mode]);
if (mode === 'new') {
$('#microworld-header').text(pageHeader[mode]);
$('#microworld-panel-title').text(panelTitle[mode]);
$('#microworld-panel-2-title').text(panelTitle[mode]);
loadTexts();
$('#create').removeClass('collapse');
$('#create-2').removeClass('collapse');
$("#ocean_order_user_top").prop("checked", true);
uniformityChanges();
} else if (mode === 'test') {
$('title').text('Fish - Microworld in Test');
$('#microworld-header').text(pageHeader[mode] + mw.code);
$('#microworld-panel-title').text(panelTitle[mode] + mw.code);
$('#microworld-panel-2-title').text(panelTitle[mode] + mw.code);
$('#save').removeClass('collapse');
$('#save-2').removeClass('collapse');
$('#clone').removeClass('collapse');
$('#clone-2').removeClass('collapse');
$('#activate').removeClass('collapse');
$('#activate-2').removeClass('collapse');
$('#delete').removeClass('collapse');
$('#delete-2').removeClass('collapse');
if($('input[type="radio"]:checked').parent().parent().hasClass('dynamic_option')) {
$(".static_option").addClass('hide');
$(".dynamic_option").removeClass("hide");
$('span#btn_txt').text("Dynamic Behaviour\xa0\xa0"); //\xa0 is the char makes
}
uniformityChanges();
} else if (mode === 'active') {
$('title').text('Fish - Active Microworld');
$('#microworld-header').text(pageHeader[mode] + mw.code);
$('#microworld-panel-title').text(panelTitle[mode] + mw.code);
$('#microworld-panel-2-title').text(panelTitle[mode] + mw.code);
$('#clone').removeClass('collapse');
$('#clone-2').removeClass('collapse');
$('#archive').removeClass('collapse');
$('#archive-2').removeClass('collapse');
$('#delete').removeClass('collapse');
$('#delete-2').removeClass('collapse');
$('.to-disable').each( function() {
$(this).prop('disabled', true);
});
$('#results').removeClass('collapse');
$(".dynamic_option").removeClass("hide");
} else if (mode === 'archived') {
$('title').text('Fish - Archived Microworld');
$('#microworld-header').text(pageHeader[mode]);
$('#microworld-panel-title').text(panelTitle[mode]);
$('#microworld-panel-2-title').text(panelTitle[mode]);
$('#clone').removeClass('collapse');
$('#clone-2').removeClass('collapse');
$('#activate').removeClass('collapse');
$('#activate-2').removeClass('collapse');
$('#delete').removeClass('collapse');
$('#delete-2').removeClass('collapse');
$('.to-disable').each( function() {
$(this).prop('disabled', true);
});
$('#results').removeClass('collapse');
$(".dynamic_option").removeClass("hide");
}
}
function loadData() {
if (isNewMicroworld()) {
mode = 'new';
prepareControls();
} else {
getMicroworld(); // will eventually call prepareControls()
getRuns();
}
}
function uniformityChanges() {
changeGreedUniformity();
changeGreedSpreadUniformity();
changeTrendUniformity();
changePredictabilityUniformity();
changeProbActionUniformity();
changeAttemptsSecondUniformity();
}
function main() {
getMwId();
    isNewMicroworld();
readyTooltips();
setButtons();
setOnPageChanges();
loadData();
}
$(document).ready(main);<|fim▁end|> | });
}
function cloneMicroworld() { |
<|file_name|>lib.rs<|end_file_name|><|fim▁begin|>/*
* Copyright (c) Meta Platforms, Inc. and affiliates.
*
* This software may be used and distributed according to the terms of the
* GNU General Public License version 2.
*/
//! Python bindings for native blackbox logging.
use std::ops::Deref;
use blackbox::event::Event;
use blackbox::init;
use blackbox::log;
use blackbox::serde_json;
use blackbox::BlackboxOptions;
use blackbox::SessionId;
use blackbox::ToValue;
use blackbox::{self};
use cpython::*;
use cpython_ext::PyNone;
use cpython_ext::PyPath;
use cpython_ext::ResultPyErrExt;
pub fn init_module(py: Python, package: &str) -> PyResult<PyModule> {
let name = [package, "blackbox"].join(".");
let m = PyModule::new(py, &name)?;
m.add(
py,
"init",
py_fn!(
py,
init_blackbox(path: &PyPath, count: u8 = 3, size: u64 = 100000000)
),
)?;
m.add(py, "_logjson", py_fn!(py, log_json(json: String)))?;
m.add(py, "sync", py_fn!(py, sync()))?;
m.add(
py,
"sessions",
py_fn!(py, session_ids_by_pattern(json: &str)),
)?;
m.add(
py,
"events",
py_fn!(
py,
events_by_session_ids(session_ids: Vec<u64>, pattern: &str)
),
)?;
// _logjson takes a JSON string. Make it easier to use by
// exposing a 'log' function that takes a Python object.
// This is easier in Python than rust-cpython.
let d = m.dict(py);
d.set_item(py, "_json", py.import("json")?)?;
py.run(
r#"
def log(value, _dumps=_json.dumps, _logjson=_logjson):
return _logjson(_dumps(value, ensure_ascii=0, check_circular=0))"#,
Some(&d),
None,
)?;
Ok(m)
}
/// Initialize the blackbox at the given path.
fn init_blackbox(py: Python, path: &PyPath, count: u8, size: u64) -> PyResult<PyNone> {
let blackbox = BlackboxOptions::new()
.max_bytes_per_log(size)
.max_log_count(count)
.open(path)
.map_pyerr(py)?;
init(blackbox);
Ok(PyNone)
}
/// Log a JSON-serialized event. The JSON string must be deserializable
/// to the Rust Event type, defined in blackbox/src/event.rs.
fn log_json(py: Python, json: String) -> PyResult<PyNone> {
let event = Event::from_json(&json).map_pyerr(py)?;
log(&event);
Ok(PyNone)
}
/// Write buffered changes to disk.
fn sync(_py: Python) -> PyResult<PyNone> {
blackbox::sync();
Ok(PyNone)
}
/// Return the session ids whose entries match the given JSON pattern.
fn session_ids_by_pattern(py: Python, pattern: &str) -> PyResult<Vec<u64>> {
let pattern: serde_json::Value = serde_json::from_str(pattern).map_pyerr(py)?;
let blackbox = blackbox::SINGLETON.lock();
let blackbox = blackbox.deref();
Ok(blackbox
.session_ids_by_pattern(&pattern)<|fim▁hole|> .collect())
}
/// Read events with the given session ids.
/// Return `[(session_id, timestamp, message, json)]`.
fn events_by_session_ids(
py: Python,
session_ids: Vec<u64>,
pattern: &str,
) -> PyResult<Vec<(u64, f64, String, String)>> {
let pattern: serde_json::Value = serde_json::from_str(pattern).map_pyerr(py)?;
let blackbox = blackbox::SINGLETON.lock();
let blackbox = blackbox.deref();
let mut result = Vec::new();
for session_id in session_ids {
for entry in blackbox.entries_by_session_id(SessionId(session_id)) {
if !entry.match_pattern(&pattern) {
continue;
}
let json = match &entry.data {
// Skip converting TracingData to JSON.
&Event::TracingData { serialized: _ } => "{}".to_string(),
_ => serde_json::to_string(&entry.data.to_value()).unwrap(),
};
result.push((
entry.session_id,
// Translate back to float seconds.
(entry.timestamp as f64) / 1000.0,
format!("{}", entry.data),
json,
));
}
}
Ok(result)
}<|fim▁end|> | .into_iter()
.map(|id| id.0) |
<|file_name|>network.rs<|end_file_name|><|fim▁begin|>use std::io;
use std::io::Cursor;
use std::convert::From;
use std::str::FromStr;
use std::net::{IpAddr, Ipv4Addr, Ipv6Addr, SocketAddr};
use regex::Regex;
use byteorder::{NetworkEndian, ReadBytesExt, WriteBytesExt};
use error::{SocketError, Result};
use util::slice2str;
macro_rules! slice2sized {
($bytes:expr, $l: expr) => (
{
let mut arr = [0u8; $l];
            let n = if $bytes.len() < $l { $bytes.len() } else { $l };
            for i in 0..n {
arr[i] = $bytes[i];
}
arr
}
)
}
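// Illustrative use: slice2sized!(&[127u8, 0, 0, 1][..], 4) yields [127, 0, 0, 1];
// shorter slices are zero-padded on the right, longer ones are truncated to $l.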
#[derive(PartialEq, Eq, Hash, Clone, Debug)]
pub struct Address(pub String, pub u16);
#[allow(non_camel_case_types)]
pub enum AddressFamily {
AF_INET,
AF_INET6,
}
pub fn is_ipv4(ip: &str) -> bool {
Ipv4Addr::from_str(ip).is_ok()
}
pub fn is_ipv6(ip: &str) -> bool {
Ipv6Addr::from_str(ip).is_ok()
}
pub fn is_ip(ip: &str) -> bool {
is_ipv4(ip) || is_ipv6(ip)
}
// For detail, see page 7 of RFC 1035
pub fn is_hostname(hostname: &str) -> bool {
if hostname.len() > 255 {
return false;
}
lazy_static! {
static ref RE: Regex = Regex::new(r"[A-Za-z\d-]{1,63}$").unwrap();
}
let hostname = hostname.trim_right_matches('.');
hostname.as_bytes()
.split(|c| *c == b'.')
.all(|s| {
let s = slice2str(s).unwrap_or("");
!s.is_empty() && !s.starts_with('-') && !s.ends_with('-') && RE.is_match(s)
})
}
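// Quick sanity check of the RFC 1035 rules above (illustrative, not from the
// original source): is_hostname("example.com") is true, while a name with a
// label such as "-bad.com" or any name over 255 bytes is rejected.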
pub fn slice2ip4(data: &[u8]) -> Option<String> {
if data.len() >= 4 {
Some(format!("{}", Ipv4Addr::from(slice2sized!(data, 4))))
} else {
None
}
}
pub fn slice2ip6(data: &[u8]) -> Option<String> {
if data.len() >= 16 {
Some(format!("{}", Ipv6Addr::from(slice2sized!(data, 16))))
} else {
None
}
}
pub fn pair2addr4(ip: &str, port: u16) -> Option<SocketAddr> {
Ipv4Addr::from_str(ip).map(|ip| SocketAddr::new(IpAddr::V4(ip), port)).ok()
}
pub fn pair2addr6(ip: &str, port: u16) -> Option<SocketAddr> {
Ipv6Addr::from_str(ip).map(|ip| SocketAddr::new(IpAddr::V6(ip), port)).ok()
}
pub fn pair2addr(ip: &str, port: u16) -> Result<SocketAddr> {
let res = match pair2addr4(ip, port) {
None => pair2addr6(ip, port),
addr => addr,
};<|fim▁hole|>pub trait NetworkWriteBytes: WriteBytesExt {
fn put_u8(&mut self, num: u8) -> io::Result<()> {
self.write_u8(num)
}
fn put_u16(&mut self, num: u16) -> io::Result<()> {
self.write_u16::<NetworkEndian>(num)
}
fn put_i32(&mut self, num: i32) -> io::Result<()> {
self.write_i32::<NetworkEndian>(num)
}
}
impl NetworkWriteBytes for Vec<u8> {}
pub trait NetworkReadBytes: ReadBytesExt {
fn get_u8(&mut self) -> io::Result<u8> {
self.read_u8()
}
fn get_u16(&mut self) -> io::Result<u16> {
self.read_u16::<NetworkEndian>()
}
fn get_u32(&mut self) -> io::Result<u32> {
self.read_u32::<NetworkEndian>()
}
}
impl<'a> NetworkReadBytes for Cursor<&'a [u8]> {}
impl<'a> NetworkReadBytes for Cursor<&'a Vec<u8>> {}
impl<'a> NetworkReadBytes for &'a [u8] {
fn get_u8(&mut self) -> io::Result<u8> {
Cursor::new(self).read_u8()
}
fn get_u16(&mut self) -> io::Result<u16> {
Cursor::new(self).read_u16::<NetworkEndian>()
}
fn get_u32(&mut self) -> io::Result<u32> {
Cursor::new(self).read_u32::<NetworkEndian>()
}
}
#[macro_export]
macro_rules! pack {
(i32, $r:expr, $v:expr) => ( try_opt!($r.put_i32($v).ok()) );
(u16, $r:expr, $v:expr) => ( try_opt!($r.put_u16($v).ok()) );
(u8, $r:expr, $v:expr) => ( try_opt!($r.put_u8($v).ok()) );
}
#[macro_export]
macro_rules! unpack {
(u32, $r:expr) => ( try_opt!($r.get_u32().ok()) );
(u16, $r:expr) => ( try_opt!($r.get_u16().ok()) );
(u8, $r:expr) => ( try_opt!($r.get_u8().ok()) );
}
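// Illustrative round trip for the Option-flavored macros above (assumes a
// try_opt! macro in scope, which their expansions require):
//     let mut buf = Vec::new();
//     pack!(u16, buf, 0xBEEF);              // appends [0xBE, 0xEF] (network order)
//     let mut cur = Cursor::new(&buf);
//     assert_eq!(unpack!(u16, cur), 0xBEEF);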
#[macro_export]
macro_rules! try_pack {
(i32, $r:expr, $v:expr) => ( $r.put_i32($v)? );
(u16, $r:expr, $v:expr) => ( $r.put_u16($v)? );
(u8, $r:expr, $v:expr) => ( $r.put_u8($v)? );
}
#[macro_export]
macro_rules! try_unpack {
(u32, $r:expr) => ( $r.get_u32()? );
(u16, $r:expr) => ( $r.get_u16()? );
(u8, $r:expr) => ( $r.get_u8()? );
}<|fim▁end|> | res.ok_or(From::from(SocketError::ParseAddrFailed(format!("{}:{}", ip, port))))
}
|
<|file_name|>bi_level_img_threshold.py<|end_file_name|><|fim▁begin|>#author: Nadezhda Shivarova
#date created: 30/11/15
#Description: Perform Bi-Level Image Threshold on a histogram to determine the optimal threshold level
#Using the algorithm in the paper Bi Level Img Thresholding, Antonio dos Anjos
import numpy as np
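# Example usage (hypothetical variable names): for an 8-bit grayscale image
# `img` held in a numpy array,
#     hist, _ = np.histogram(img, bins=256, range=(0, 256))
#     t = bi_level_img_threshold(hist)
#     binary = img > t
# binarises the image at the depression point found by the algorithm.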
def bi_level_img_threshold( hist ):
hist = hist.flatten()
print('len hist ', len(hist))
#start and end index of the histogram
I_s = 0
I_e = len(hist)-1
    print('I_e ', I_e, 'I_m ', (I_s + I_e) // 2)
print('hist [Ie]', hist[I_e])
# starting point: get right and left weights of histogram and
# determine the midpoint base triangle
    I_m = (I_s + I_e) // 2  # floor division keeps I_m an integer index under Python 3
W_l = np.sum(hist[I_s : I_m])
W_r = np.sum(hist[I_m+1 : I_e])
print('W_l ', W_l, 'W_r ', W_r)<|fim▁hole|> #print('Wr ', W_r)
I_e = I_e - 1
#print('Ie new', I_e)
            if ((I_s + I_e) // 2 < I_m):
W_l = W_l - hist[I_m]
W_r = W_r + hist[I_m]
I_m = I_m - 1
#apply the algorithm mirrored, I_m tends towards depression
elif (W_l >= W_r):
W_l = W_l + hist[I_s]
I_s = I_s + 1
            if ((I_s + I_e) // 2 > I_m):
W_l = W_l + hist[I_m+1]
W_r = W_r - hist[I_m+1]
I_m = I_m + 1
#I_s and I_e get closer until they are equal to I_m
#I_m is the optimal threshold i.e. depression between left and right side
return I_m<|fim▁end|> |
while (I_s != I_e):
if (W_r > W_l):
W_r = W_r - hist[I_e] |
<|file_name|>test_xl_cell_to_rowcol_abs.py<|end_file_name|><|fim▁begin|>###############################################################################
#
# Tests for XlsxWriter.
#
# SPDX-License-Identifier: BSD-2-Clause
# Copyright (c), 2013-2022, John McNamara, [email protected]
#
import unittest
from ...utility import xl_cell_to_rowcol_abs
class TestUtility(unittest.TestCase):
"""
Test xl_cell_to_rowcol_abs() utility function.
"""
def test_xl_cell_to_rowcol_abs(self):
"""Test xl_cell_to_rowcol_abs()"""
tests = [
# row, col, A1 string
(0, 0, 'A1'),
(0, 1, 'B1'),
(0, 2, 'C1'),
(0, 9, 'J1'),<|fim▁hole|> (1, 0, 'A2'),
(2, 0, 'A3'),
(9, 0, 'A10'),
(1, 24, 'Y2'),
(7, 25, 'Z8'),
(9, 26, 'AA10'),
(1, 254, 'IU2'),
(1, 255, 'IV2'),
(1, 256, 'IW2'),
(0, 16383, 'XFD1'),
(1048576, 16384, 'XFE1048577'),
]
for row, col, string in tests:
exp = (row, col, 0, 0)
got = xl_cell_to_rowcol_abs(string)
self.assertEqual(got, exp)
def test_xl_cell_to_rowcol_abs_abs(self):
"""Test xl_cell_to_rowcol_abs() with absolute references"""
tests = [
# row, col, row_abs, col_abs, A1 string
(0, 0, 0, 0, 'A1'),
(0, 0, 1, 0, 'A$1'),
(0, 0, 0, 1, '$A1'),
(0, 0, 1, 1, '$A$1'),
]
for row, col, row_abs, col_abs, string in tests:
exp = (row, col, row_abs, col_abs)
got = xl_cell_to_rowcol_abs(string)
self.assertEqual(got, exp)<|fim▁end|> | |
<|file_name|>base_test.py<|end_file_name|><|fim▁begin|># Copyright (c) 2017 OpenStack Foundation
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at<|fim▁hole|>#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
# implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from unittest import mock
from oslo_utils import uuidutils
from sahara.service.edp.data_sources.base import DataSourceType
import testtools
class DataSourceBaseTestCase(testtools.TestCase):
def setUp(self):
super(DataSourceBaseTestCase, self).setUp()
self.ds_base = DataSourceType()
def test_construct_url_no_placeholders(self):
base_url = "swift://container/input"
job_exec_id = uuidutils.generate_uuid()
url = self.ds_base.construct_url(base_url, job_exec_id)
self.assertEqual(base_url, url)
def test_construct_url_job_exec_id_placeholder(self):
base_url = "swift://container/input.%JOB_EXEC_ID%.out"
job_exec_id = uuidutils.generate_uuid()
url = self.ds_base.construct_url(base_url, job_exec_id)
self.assertEqual(
"swift://container/input." + job_exec_id + ".out", url)
def test_construct_url_randstr_placeholder(self):
base_url = "swift://container/input.%RANDSTR(4)%.%RANDSTR(7)%.out"
job_exec_id = uuidutils.generate_uuid()
url = self.ds_base.construct_url(base_url, job_exec_id)
self.assertRegex(
url, "swift://container/input\.[a-z]{4}\.[a-z]{7}\.out")
def test_construct_url_randstr_and_job_exec_id_placeholder(self):
base_url = "swift://container/input.%JOB_EXEC_ID%.%RANDSTR(7)%.out"
job_exec_id = uuidutils.generate_uuid()
url = self.ds_base.construct_url(base_url, job_exec_id)
self.assertRegex(
url, "swift://container/input." + job_exec_id + "\.[a-z]{7}\.out")
def test_get_urls(self):
url = 'test://url'
cluster = mock.Mock()
job_exec_id = 'test_id'
self.assertEqual((url, url), self.ds_base.get_urls(url,
cluster, job_exec_id))<|fim▁end|> | #
# http://www.apache.org/licenses/LICENSE-2.0 |
<|file_name|>ohai.py<|end_file_name|><|fim▁begin|>"""."""
<|fim▁hole|><|fim▁end|> | def get_systeminfo(resource, config, interactive=False):
"""."""
return {'ohai': 'there!'} |
<|file_name|>UserService.java<|end_file_name|><|fim▁begin|>package com.etop.service;
import com.etop.dao.UserDAO;
import com.etop.pojo.User;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.stereotype.Service;
import java.io.Serializable;
import java.util.HashMap;
import java.util.List;
import java.util.Map;<|fim▁hole|> * <p/>
* Created by Jeremie on 2014/9/30.
*/
@Service("UserService")
public class UserService implements Serializable {
@Autowired
private UserDAO userDAO;
/**
     * Finds user information by the given username.
     *
     * @param username the username to look up
     * @return the matching User entity
*/
public User findByName(String username) {
Map<String, Object> params = new HashMap<>();
params.put("name", username);
return userDAO.findUniqueResult("from User u where u.username = :name", params);
}
public List<User> getAllUser() {
return userDAO.find("from User u");
}
}<|fim▁end|> |
/**
 * User service; interfaces with the DAO layer
<|file_name|>avpclc.cpp<|end_file_name|><|fim▁begin|>/*
Copyright 2007 nVidia, Inc.
Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License.
You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and limitations under the License.
*/
// NOTE: the compressor will compress RGB tiles where the input alpha is constant at 255
// using modes where the alpha is variable if that mode gives a smaller mean squared error.
#include <iostream>
#include <sstream>
#include <string>
#include <stdexcept>
#include <assert.h>
#include "ImfArray.h"
#include "targa.h"
#include "avpcl.h"
using namespace std;
static void analyze(string in1, string in2)
{
Array2D<RGBA> pin1, pin2;
int w1, h1, w2, h2;
Targa::read(in1, pin1, w1, h1);
Targa::read(in2, pin2, w2, h2);
// choose the smaller of the two dimensions (since the old compressor would truncate to multiple-of-4 sizes)
int w = MIN(w1, w2);
int h = MIN(h1, h2);
double nsamples = 0;
double mabse_rgb = 0, mabse_a = 0, mabse_rgba = 0, mse_rgb = 0, mse_a = 0, mse_rgba = 0;
int errdist_rgb[9], errdist_a[9], errdist_rgba[9];
int errs[4*16];
for (int i=0; i<9; ++i)
errdist_rgb[i] = errdist_a[i] = errdist_rgba[i] = 0;
int psnrhist[100];
for (int i=0; i<100; ++i)
psnrhist[i] = 0;
bool first = true;
int worstx, worsty;
double worstpsnr = 999.0;
bool constant_alpha = true;
for (int y = 0; y < h; y+=4)
for (int x = 0; x < w; x+=4)
{
int xw = MIN(w-x, 4);
int yw = MIN(h-y, 4);
int np = 0;
float a[4], b[4];
for (int y0=0; y0<yw; ++y0)
for (int x0=0; x0<xw; ++x0)
{
a[0] = (pin1[y+y0][x+x0]).r;
a[1] = (pin1[y+y0][x+x0]).g;
a[2] = (pin1[y+y0][x+x0]).b;
a[3] = (pin1[y+y0][x+x0]).a;
b[0] = (pin2[y+y0][x+x0]).r;
b[1] = (pin2[y+y0][x+x0]).g;
b[2] = (pin2[y+y0][x+x0]).b;
b[3] = (pin2[y+y0][x+x0]).a;
if (AVPCL::flag_premult)
{
// premultiply
for (int i=0; i<3; ++i)
{
a[i] = Utils::premult(a[i], a[3]);
b[i] = Utils::premult(b[i], b[3]);
}
}
if (a[3] != RGBA_MAX || b[3] != RGBA_MAX)
constant_alpha = false;
for (int i=0; i<4; ++i)
errs[np+i] = a[i] - b[i];
np += 4;
}
double msetile = 0.0;
for (int i = 0; i < np; ++i)
{
int err = errs[i];
int abse = err > 0 ? err : -err;
int j = i & 3;
int lsb;
for (lsb=0; (abse>>lsb)>0; ++lsb)
;
assert (lsb <= 8);
if (j == 3)
{
mabse_a += (double)abse;
mse_a += (double)abse * abse;
errdist_a[lsb]++;
}
else
{
mabse_rgb += (double)abse;
mse_rgb += (double)abse * abse;
errdist_rgb[lsb]++;
}
mabse_rgba += (double)abse;
mse_rgba += (double)abse * abse;
errdist_rgba[lsb]++;
msetile += (double)abse * abse;
}
double psnrtile, rmsetile;
rmsetile = sqrt(msetile / double(np));
psnrtile = (rmsetile == 0) ? 99.0 : 20.0 * log10(255.0/rmsetile);
if (psnrtile < worstpsnr)
{
worstx = x; worsty = y; worstpsnr = psnrtile;
}
#ifdef EXTERNAL_RELEASE
int psnrquant = (int) floor (psnrtile); // 10 means [10,11) psnrs, e.g.
// clamp just in case
psnrquant = (psnrquant < 0) ? 0 : (psnrquant > 99) ? 99 : psnrquant;
psnrhist[psnrquant]++;
if (first && psnrquant < 16)
{
first = false;
printf("Tiles with RGBA PSNR's worse than 16dB\n");
}
if (psnrquant < 16)
printf("X %4d Y %4d RGBA PSNR %7.2f\n", x, y, psnrtile);
#endif
}
nsamples = w * h;
mabse_a /= nsamples;
mse_a /= nsamples;
mabse_rgb /= (nsamples*3);
mse_rgb /= (nsamples*3);
mabse_rgba /= (nsamples*4);
mse_rgba /= (nsamples*4);
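	// PSNR in dB is 20*log10(MAX/RMSE) with MAX = 255 for 8-bit channels;
	// identical images (RMSE 0) are reported as 999 dB rather than infinity.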
double rmse_a, psnr_a, rmse_rgb, psnr_rgb, rmse_rgba, psnr_rgba;
rmse_a = sqrt(mse_a);
psnr_a = (rmse_a == 0) ? 999.0 : 20.0 * log10(255.0/rmse_a);
rmse_rgb = sqrt(mse_rgb);
psnr_rgb = (rmse_rgb == 0) ? 999.0 : 20.0 * log10(255.0/rmse_rgb);
rmse_rgba = sqrt(mse_rgba);
psnr_rgba = (rmse_rgba == 0) ? 999.0 : 20.0 * log10(255.0/rmse_rgba);
printf("Image size compared: %dw x %dh\n", w, h);
printf("Image alpha is %s.\n", constant_alpha ? "CONSTANT" : "VARIABLE");
if (w != w1 || w != w2 || h != h1 || h != h2)
printf("--- NOTE: only the overlap between the 2 images (%d,%d) and (%d,%d) was compared\n", w1, h1, w2, h2);
printf("Total pixels: %12d\n", w * h);
	const char *which = !AVPCL::flag_premult ? "RGB" : "aRaGaB";
printf("\n%s Mean absolute error: %f\n", which, mabse_rgb);
printf("%s Root mean squared error: %f (MSE %f)\n", which, rmse_rgb, rmse_rgb*rmse_rgb);
printf("%s Peak signal to noise ratio in dB: %f\n", which, psnr_rgb);
printf("%s Histogram of number of channels with indicated LSB error\n", which);
for (int i = 0; i < 9; ++i)
if (errdist_rgb[i]) printf("%2d LSB error: %10d\n", i, errdist_rgb[i]);
printf("\nAlpha Mean absolute error: %f\n", mabse_a);
printf("Alpha Root mean squared error: %f (MSE %f)\n", rmse_a, rmse_a*rmse_a);
printf("Alpha Peak signal to noise ratio in dB: %f\n", psnr_a);
printf("Alpha Histogram of number of channels with indicated LSB error\n");
for (int i = 0; i < 9; ++i)
if (errdist_a[i]) printf("%2d LSB error: %10d\n", i, errdist_a[i]);
printf("\nRGBA Mean absolute error: %f\n", mabse_rgba);
printf("RGBA Root mean squared error: %f (MSE %f)\n", rmse_rgba, rmse_rgba*rmse_rgba);
printf("RGBA Peak signal to noise ratio in dB: %f\n", psnr_rgba);
printf("RGBA Histogram of number of channels with indicated LSB error\n");
for (int i = 0; i < 9; ++i)
if (errdist_rgba[i]) printf("%2d LSB error: %10d\n", i, errdist_rgba[i]);
printf("\nWorst tile RGBA PSNR %f at x %d y %d\n", worstpsnr, worstx, worsty);
#if 0
printf("Histogram of per-tile PSNR\n");
for (int i = 0; i < 100; ++i)
if (psnrhist[i])
printf("[%2d,%2d) %6d\n", i, i+1, psnrhist[i]);
#endif
}
static bool ext(string inf, char *extension)
{
size_t n = inf.rfind('.', inf.length()-1);
if (n != string::npos)
return inf.substr(n, inf.length()) == extension;
else if (*extension != '\0')
return false;
else
return true; // extension is null and we didn't find a .
}
template <typename T>
std::string toString(const T &thing)
{
std::stringstream os;
os << thing;
return os.str();
}
static int str2int(std::string s)
{
int thing;
std::stringstream str (stringstream::in | stringstream::out);
str << s;
str >> thing;
return thing;
}
static void usage()
{
cout << endl <<
"Usage:" << endl <<
"avpclc infile.tga outroot generates outroot-w-h.avpcl and outroot-avpcl.tga" << endl <<
"avpclc foo-w-h.avpcl outroot generates outroot-avpcl.tga" << endl <<
"avpclc infile.tga outfile.tga compares the two images" << endl << endl <<
"Flags:" << endl <<
"-p use a metric based on AR AG AB A (note: if the image has alpha constant 255 this option is overridden)" << endl <<
"-n use a non-uniformly-weighed metric (weights .299 .587 .114)" << endl <<
"-na use a non-uniformly-weighed metric (ATI weights .3086 .6094 .0820)" << endl <<
"-e dump squared errors for each tile to outroot-errors.bin" << endl;
}
bool AVPCL::flag_premult = false;
bool AVPCL::flag_nonuniform = false;
bool AVPCL::flag_nonuniform_ati = false;
bool AVPCL::mode_rgb = false;
int main(int argc, char* argv[])
{
bool noerrfile = true;
#ifdef EXTERNAL_RELEASE
cout << "avpcl/BC7L Targa RGBA Compressor/Decompressor version 1.41 (May 27, 2010)." << endl <<
"Bug reports, questions, and suggestions to wdonovan a t nvidia d o t com." << endl;
#endif
try
{
char * args[2];
int nargs = 0;
// process flags, copy any non flag arg to args[]
for (int i = 1; i < argc; ++i)
if ((argv[i])[0] == '-')
switch ((argv[i])[1]) {
case 'p': AVPCL::flag_premult = true; break;
case 'n': if ((argv[i])[2] == 'a') { AVPCL::flag_nonuniform_ati = true; AVPCL::flag_nonuniform = false; }
else { AVPCL::flag_nonuniform = true; AVPCL::flag_nonuniform_ati = false; }<|fim▁hole|> default: throw "bad flag arg";
}
else
{
if (nargs > 1) throw "Incorrect number of args";
args[nargs++] = argv[i];
}
if (nargs != 2) throw "Incorrect number of args";
string inf(args[0]), outroot(args[1]);
if (ext(outroot, ""))
{
if (ext(inf, ".tga"))
{
int width, height;
Targa::fileinfo(inf, width, height, AVPCL::mode_rgb);
string outf, avpclf, errf;
outf = outroot + "-avpcl.tga";
avpclf = outroot + "-" + toString(width) + "-" + toString(height) + "-" + (AVPCL::mode_rgb ? "RGB" : "RGBA") + ".avpcl";
cout << "Compressing " << (AVPCL::mode_rgb ? "RGB file " : "RGBA file ") << inf << " to " << avpclf << endl;
if (!noerrfile)
{
errf = outroot + "-errors" + ".bin";
cout << "Errors output file is " << errf << endl;
}
else
errf = "";
AVPCL::compress(inf, avpclf, errf);
cout << "Decompressing " << avpclf << " to " << outf << endl;
AVPCL::decompress(avpclf, outf);
analyze(inf, outf);
}
else if (ext(inf, ".avpcl"))
{
string outf;
outf = outroot + "-avpcl.tga";
cout << "Decompressing " << inf << " to " << outf << endl;
AVPCL::decompress(inf, outf);
}
else throw "Invalid file args";
}
else if (ext(inf, ".tga") && ext(outroot, ".tga"))
{
analyze(inf, outroot);
}
else throw "Invalid file args";
}
catch(const exception& e)
{
// Print error message and usage instructions
cerr << e.what() << endl;
usage();
return 1;
}
catch(char * msg)
{
cerr << msg << endl;
usage();
return 1;
}
return 0;
}<|fim▁end|> | break;
case 'e': noerrfile = false; break; |
<|file_name|>config.example.js<|end_file_name|><|fim▁begin|>// # Ghost Configuration
// Setup your Ghost install for various environments
// Documentation can be found at http://support.ghost.org/config/
var path = require('path'),
config;
config = {
// ### Production
// When running Ghost in the wild, use the production environment
// Configure your URL and mail settings here
production: {
url: 'http://my-ghost-blog.com',
mail: {},
database: {
client: 'sqlite3',
connection: {
filename: path.join(__dirname, '/content/data/ghost.db')
},
debug: false
},
        // MySQL database configuration
/*database: {
client: 'mysql',
connection: {
host : 'host',
user : 'user',
password : 'password',
database : 'database',
charset : 'utf8'
},
debug: false
},*/
server: {
// Host to be passed to node's `net.Server#listen()`
host: '127.0.0.1',
// Port to be passed to node's `net.Server#listen()`, for iisnode set this to `process.env.PORT`
port: '2368'
},
        // Storage. We currently support `qiniu`, `upyun`, `aliyun oss`, `aliyun ace-storage` and `local-file-store`
storage: {
provider: 'local-file-store'
}
// or
        // Reference: http://www.ghostchina.com/qiniu-cdn-for-ghost/
/*storage: {
provider: 'qiniu',
bucketname: 'your-bucket-name',
ACCESS_KEY: 'xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx',
SECRET_KEY: 'xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx',
root: '/image/',
prefix: 'http://your-bucket-name.qiniudn.com'
}*/
// or
        // Reference: http://www.ghostchina.com/upyun-cdn-for-ghost/
/*storage: {
provider: 'upyun',
bucketname: 'your-bucket-name',
username: 'your user name',
password: 'your password',
root: '/image/',
prefix: 'http://your-bucket-name.b0.upaiyun.com'
}*/
// or
        // Reference: http://www.ghostchina.com/aliyun-oss-for-ghost/
/*storage: {
provider: 'oss',
bucketname: 'your-bucket-name',
ACCESS_KEY: 'xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx',
SECRET_KEY: 'xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx',
root: '/image/',
prefix: 'http://your-bucket-name.oss-cn-hangzhou.aliyuncs.com'
}*/
// or
        // Reference: http://www.ghostchina.com/install-ghost-on-aliyun-ace/
/*storage: {
provider: 'ace-storage',
bucketname: 'your-bucket-name'
}*/
},
// ### Development **(default)**
development: {
// The url to use when providing links to the site, E.g. in RSS and email.
// Change this to your Ghost blogs published URL.
url: 'http://localhost:2368',
// Example mail config
// Visit http://support.ghost.org/mail for instructions
// ```
// mail: {
// transport: 'SMTP',
// options: {
<|fim▁hole|> // }
// }
// },
// ```
database: {
client: 'sqlite3',
connection: {
filename: path.join(__dirname, '/content/data/ghost-dev.db')
},
debug: false
},
server: {
// Host to be passed to node's `net.Server#listen()`
host: '127.0.0.1',
// Port to be passed to node's `net.Server#listen()`, for iisnode set this to `process.env.PORT`
port: '2368'
},
paths: {
contentPath: path.join(__dirname, '/content/')
}
},
// **Developers only need to edit below here**
// ### Testing
// Used when developing Ghost to run tests and check the health of Ghost
// Uses a different port number
testing: {
url: 'http://127.0.0.1:2369',
database: {
client: 'sqlite3',
connection: {
filename: path.join(__dirname, '/content/data/ghost-test.db')
}
},
server: {
host: '127.0.0.1',
port: '2369'
},
logging: false
},
// ### Testing MySQL
// Used by Travis - Automated testing run through GitHub
'testing-mysql': {
url: 'http://127.0.0.1:2369',
database: {
client: 'mysql',
connection: {
host : '127.0.0.1',
user : 'root',
password : '',
database : 'ghost_testing',
charset : 'utf8'
}
},
server: {
host: '127.0.0.1',
port: '2369'
},
logging: false
},
// ### Testing pg
// Used by Travis - Automated testing run through GitHub
'testing-pg': {
url: 'http://127.0.0.1:2369',
database: {
client: 'pg',
connection: {
host : '127.0.0.1',
user : 'postgres',
password : '',
database : 'ghost_testing',
charset : 'utf8'
}
},
server: {
host: '127.0.0.1',
port: '2369'
},
logging: false
}
};
// Export config
module.exports = config;<|fim▁end|> | // service: 'Mailgun',
// auth: {
// user: '', // mailgun username
// pass: '' // mailgun password
|