<|file_name|>with-mark-object.js<|end_file_name|><|fim▁begin|>/** @jsx h */
import h from '../../../helpers/h'
import { Mark } from '../../../..'
export default function(change) {
change.addMark(
Mark.create({
type: 'bold',
data: { thing: 'value' },
})<|fim▁hole|>}
export const input = (
<value>
<document>
<paragraph>
<anchor />w<focus />ord
</paragraph>
</document>
</value>
)
export const output = (
<value>
<document>
<paragraph>
<anchor />
<b thing="value">w</b>
<focus />ord
</paragraph>
</document>
</value>
)<|fim▁end|> | ) |
<|file_name|>nsRuleNetwork.cpp<|end_file_name|><|fim▁begin|>/* -*- Mode: C++; tab-width: 4; indent-tabs-mode: nil; c-basic-offset: 4 -*- */
/* This Source Code Form is subject to the terms of the Mozilla Public
* License, v. 2.0. If a copy of the MPL was not distributed with this
* file, You can obtain one at http://mozilla.org/MPL/2.0/. */
/*
Implementations for the rule network classes.
To Do.
- Constrain() & Propagate() still feel like they are poorly named.
- As do Instantiation and InstantiationSet.
- Make InstantiationSet share and do copy-on-write.
- Make things iterative, instead of recursive.
*/
#include "nscore.h"
#include "nsCOMPtr.h"
#include "plhash.h"
#include "mozilla/Logging.h"
#include "nsString.h"
#include "nsUnicharUtils.h"
#include "nsXULContentUtils.h"
#include "nsRuleNetwork.h"
#include "nsXULTemplateResultSetRDF.h"
#include "nsRDFConMemberTestNode.h"
#include "nsRDFPropertyTestNode.h"
using namespace mozilla;
extern LazyLogModule gXULTemplateLog;
//----------------------------------------------------------------------
//
// nsRuleNetwork
//
nsresult
MemoryElementSet::Add(MemoryElement* aElement)
{
for (ConstIterator element = First(); element != Last(); ++element) {
if (*element == *aElement) {
// We've already got this element covered. Since Add()
// assumes ownership, and we aren't going to need this,
// just nuke it.
delete aElement;
return NS_OK;
}
}
List* list = new List;
list->mElement = aElement;
list->mRefCnt = 1;
list->mNext = mElements;
mElements = list;
return NS_OK;
}
//----------------------------------------------------------------------
nsresult
nsAssignmentSet::Add(const nsAssignment& aAssignment)<|fim▁hole|>{
NS_PRECONDITION(! HasAssignmentFor(aAssignment.mVariable), "variable already bound");
// XXXndeakin should this just silently fail?
if (HasAssignmentFor(aAssignment.mVariable))
return NS_ERROR_UNEXPECTED;
List* list = new List(aAssignment);
list->mRefCnt = 1;
list->mNext = mAssignments;
mAssignments = list;
return NS_OK;
}
int32_t
nsAssignmentSet::Count() const
{
int32_t count = 0;
for (ConstIterator assignment = First(); assignment != Last(); ++assignment)
++count;
return count;
}
bool
nsAssignmentSet::HasAssignment(nsIAtom* aVariable, nsIRDFNode* aValue) const
{
for (ConstIterator assignment = First(); assignment != Last(); ++assignment) {
if (assignment->mVariable == aVariable && assignment->mValue == aValue)
return true;
}
return false;
}
bool
nsAssignmentSet::HasAssignmentFor(nsIAtom* aVariable) const
{
for (ConstIterator assignment = First(); assignment != Last(); ++assignment) {
if (assignment->mVariable == aVariable)
return true;
}
return false;
}
bool
nsAssignmentSet::GetAssignmentFor(nsIAtom* aVariable, nsIRDFNode** aValue) const
{
for (ConstIterator assignment = First(); assignment != Last(); ++assignment) {
if (assignment->mVariable == aVariable) {
*aValue = assignment->mValue;
NS_IF_ADDREF(*aValue);
return true;
}
}
*aValue = nullptr;
return false;
}
bool
nsAssignmentSet::Equals(const nsAssignmentSet& aSet) const
{
if (aSet.mAssignments == mAssignments)
return true;
// If they have a different number of assignments, then they're different.
if (Count() != aSet.Count())
return false;
// XXX O(n^2)! Ugh!
nsCOMPtr<nsIRDFNode> value;
for (ConstIterator assignment = First(); assignment != Last(); ++assignment) {
if (! aSet.GetAssignmentFor(assignment->mVariable, getter_AddRefs(value)))
return false;
if (assignment->mValue != value)
return false;
}
return true;
}
//----------------------------------------------------------------------
PLHashNumber
Instantiation::Hash(const void* aKey)
{
const Instantiation* inst = static_cast<const Instantiation*>(aKey);
PLHashNumber result = 0;
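// XOR-combining the per-assignment hashes keeps the result independent of
// iteration order, which matches the set semantics of an instantiation.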
nsAssignmentSet::ConstIterator last = inst->mAssignments.Last();
for (nsAssignmentSet::ConstIterator assignment = inst->mAssignments.First();
assignment != last; ++assignment)
result ^= assignment->Hash();
return result;
}
int
Instantiation::Compare(const void* aLeft, const void* aRight)
{
const Instantiation* left = static_cast<const Instantiation*>(aLeft);
const Instantiation* right = static_cast<const Instantiation*>(aRight);
return *left == *right;
}
//----------------------------------------------------------------------
//
// InstantiationSet
//
InstantiationSet::InstantiationSet()
{
mHead.mPrev = mHead.mNext = &mHead;
MOZ_COUNT_CTOR(InstantiationSet);
}
InstantiationSet::InstantiationSet(const InstantiationSet& aInstantiationSet)
{
mHead.mPrev = mHead.mNext = &mHead;
// XXX replace with copy-on-write foo
ConstIterator last = aInstantiationSet.Last();
for (ConstIterator inst = aInstantiationSet.First(); inst != last; ++inst)
Append(*inst);
MOZ_COUNT_CTOR(InstantiationSet);
}
InstantiationSet&
InstantiationSet::operator=(const InstantiationSet& aInstantiationSet)
{
// XXX replace with copy-on-write foo
Clear();
ConstIterator last = aInstantiationSet.Last();
for (ConstIterator inst = aInstantiationSet.First(); inst != last; ++inst)
Append(*inst);
return *this;
}
void
InstantiationSet::Clear()
{
Iterator inst = First();
while (inst != Last())
Erase(inst++);
}
InstantiationSet::Iterator
InstantiationSet::Insert(Iterator aIterator, const Instantiation& aInstantiation)
{
List* newelement = new List();
if (newelement) {
newelement->mInstantiation = aInstantiation;
aIterator.mCurrent->mPrev->mNext = newelement;
newelement->mNext = aIterator.mCurrent;
newelement->mPrev = aIterator.mCurrent->mPrev;
aIterator.mCurrent->mPrev = newelement;
}
return aIterator;
}
InstantiationSet::Iterator
InstantiationSet::Erase(Iterator aIterator)
{
Iterator result = aIterator;
++result;
aIterator.mCurrent->mNext->mPrev = aIterator.mCurrent->mPrev;
aIterator.mCurrent->mPrev->mNext = aIterator.mCurrent->mNext;
delete aIterator.mCurrent;
return result;
}
bool
InstantiationSet::HasAssignmentFor(nsIAtom* aVariable) const
{
return !Empty() ? First()->mAssignments.HasAssignmentFor(aVariable) : false;
}
//----------------------------------------------------------------------
//
// ReteNode
//
// The basic node in the network.
//
//----------------------------------------------------------------------
//
// TestNode
//
// to do:
// - FilterInstantiations() is poorly named
//
TestNode::TestNode(TestNode* aParent)
: mParent(aParent)
{
}
nsresult
TestNode::Propagate(InstantiationSet& aInstantiations,
bool aIsUpdate, bool& aTakenInstantiations)
{
MOZ_LOG(gXULTemplateLog, LogLevel::Debug,
("TestNode[%p]: Propagate() begin", this));
aTakenInstantiations = false;
nsresult rv = FilterInstantiations(aInstantiations, nullptr);
if (NS_FAILED(rv))
return rv;
// if there is more than one child, each will need to be supplied with the
// original set of instantiations from this node, so create a copy in this
// case. If there is only one child, optimize and just pass the
// instantiations along to the child without copying
bool shouldCopy = (mKids.Count() > 1);
// See the header file for details about how instantiation ownership works.
if (! aInstantiations.Empty()) {
ReteNodeSet::Iterator last = mKids.Last();
for (ReteNodeSet::Iterator kid = mKids.First(); kid != last; ++kid) {
MOZ_LOG(gXULTemplateLog, LogLevel::Debug,
("TestNode[%p]: Propagate() passing to child %p", this, kid.operator->()));
// create a copy of the instantiations
if (shouldCopy) {
bool owned = false;
InstantiationSet* instantiations =
new InstantiationSet(aInstantiations);
rv = kid->Propagate(*instantiations, aIsUpdate, owned);
if (!owned)
delete instantiations;
if (NS_FAILED(rv))
return rv;
}
else {
rv = kid->Propagate(aInstantiations, aIsUpdate, aTakenInstantiations);
if (NS_FAILED(rv))
return rv;
}
}
}
MOZ_LOG(gXULTemplateLog, LogLevel::Debug,
("TestNode[%p]: Propagate() end", this));
return NS_OK;
}
nsresult
TestNode::Constrain(InstantiationSet& aInstantiations)
{
nsresult rv;
MOZ_LOG(gXULTemplateLog, LogLevel::Debug,
("TestNode[%p]: Constrain() begin", this));
// if the cantHandleYet flag is set by FilterInstantiations,
// there isn't enough information yet available to fill in.
// For this, continue constraining all the way to the top
// and then call FilterInstantiations again afterwards. This
// should fill in any missing information.
bool cantHandleYet = false;
rv = FilterInstantiations(aInstantiations, &cantHandleYet);
if (NS_FAILED(rv)) return rv;
if (mParent && (!aInstantiations.Empty() || cantHandleYet)) {
// if we still have instantiations, or if the instantiations
// could not be filled in yet, then ride 'em on up to the
// parent to narrow them.
MOZ_LOG(gXULTemplateLog, LogLevel::Debug,
("TestNode[%p]: Constrain() passing to parent %p", this, mParent));
rv = mParent->Constrain(aInstantiations);
if (NS_SUCCEEDED(rv) && cantHandleYet)
rv = FilterInstantiations(aInstantiations, nullptr);
}
else {
MOZ_LOG(gXULTemplateLog, LogLevel::Debug,
("TestNode[%p]: Constrain() failed", this));
rv = NS_OK;
}
MOZ_LOG(gXULTemplateLog, LogLevel::Debug,
("TestNode[%p]: Constrain() end", this));
return rv;
}
//----------------------------------------------------------------------
ReteNodeSet::ReteNodeSet()
: mNodes(nullptr), mCount(0), mCapacity(0)
{
}
ReteNodeSet::~ReteNodeSet()
{
Clear();
}
nsresult
ReteNodeSet::Add(ReteNode* aNode)
{
NS_PRECONDITION(aNode != nullptr, "null ptr");
if (! aNode)
return NS_ERROR_NULL_POINTER;
if (mCount >= mCapacity) {
int32_t capacity = mCapacity + 4;
ReteNode** nodes = new ReteNode*[capacity];
if (! nodes)
return NS_ERROR_OUT_OF_MEMORY;
for (int32_t i = mCount - 1; i >= 0; --i)
nodes[i] = mNodes[i];
delete[] mNodes;
mNodes = nodes;
mCapacity = capacity;
}
mNodes[mCount++] = aNode;
return NS_OK;
}
nsresult
ReteNodeSet::Clear()
{
delete[] mNodes;
mNodes = nullptr;
mCount = mCapacity = 0;
return NS_OK;
}<|fim▁end|> | |
<|file_name|>wire.py<|end_file_name|><|fim▁begin|>""" A component that designates a wire. """
from graph import Node, Edge
from constraint import Constraint
class Wire(object):
""" Wire component """
def __init__(self, graph, node_a=None, node_b=None, edge_i=None):
""" Initializes a wire with two nodes. Current goes from
A to B. If nodes / edges aren't supplied, new ones are created.
Supplied nodes / edges should be part of the supplied graph.
Args:
graph : Graph object
node_a : Node object
node_b : Node object
edge_i : Edge object
Returns:
Wire object
"""
if not node_a:
node_a = Node(graph)
if not node_b:
node_b = Node(graph)
if not edge_i:
edge_i = Edge(graph, node_a, node_b)
self._node_a = node_a
self._node_b = node_b
self._edge_i = edge_i
def node_a(self):
""" Returns node A.
Returns:
Node object
"""
return self._node_a
def node_b(self):<|fim▁hole|> Returns:
Node object
"""
return self._node_b
def edge_i(self):
""" Returns the edge that stores current from A to B.
Returns:
Edge object
"""
return self._edge_i
def substitutions(self):
""" Return a dictionary mapping each symbol to a value. Return
an empty dictionary if no substitutions exist
Returns:
dictionary from sympy variable to value
"""
return {}
def variables(self):
""" Returns a set of variables under constraints.
Returns:
set of Nodes, Edges, tuples, or strings
"""
return set([self._node_a, self._node_b, self._edge_i])
def constraints(self):
""" Returns a list of constraints that must be solved.
A constraint is a tuple (coefficients, variables), where
coefficients is a list of numbers corresponding to the linear
equation:
A_0 * x_0 + A_1 * x_1 + ... + A_{n-1} * x_{n-1} = 0,
and variables is a list of the Node and Edge objects.
Returns:
List of Constraint objects
"""
cs = [1, -1]
xs = [self._node_a, self._node_b]
constraint = Constraint(cs, xs)
return [constraint]<|fim▁end|> | """ Returns node B.
|
<|file_name|>test_sequence_compare.py<|end_file_name|><|fim▁begin|>import dijon
def test_compare_sequence_no_difference():
source_data = ['a', 'b', 'c']
target_data = ['a', 'b', 'c']
diff_graph = dijon.compare(source_data, target_data)
diff_nodes = [n for n in diff_graph.iter_nodes(differences=True)]
assert len(diff_nodes) == 0
def test_compare_sequence_append():
source_data = ['a', 'b', 'c']
target_data = ['a', 'b', 'c', 'd']
diff_graph = dijon.compare(source_data, target_data)
diff_nodes = [n for n in diff_graph.iter_nodes(differences=True)]
assert len(diff_nodes) == 1
difference = diff_nodes[0]
assert isinstance(difference, dijon.SequenceItemAddition)
assert difference.full_path == ('root', (None, 3))
assert difference.source is None
assert difference.target.full_path == ('root', (None, 3), 3)
assert difference.target.value == 'd'
def test_compare_sequence_insert():
source_data = ['a', 'b', 'c']
target_data = ['z', 'a', 'b', 'c']
diff_graph = dijon.compare(source_data, target_data)
diff_nodes = [n for n in diff_graph.iter_nodes(differences=True)]
assert len(diff_nodes) == 4
difference = diff_nodes[0]
assert isinstance(difference, dijon.SequenceItemModification)
assert difference.full_path == ('root', (0, 1))
assert difference.source.full_path == ('root', (0, 1), 0)
assert difference.source.data == 'a'
assert difference.target.full_path == ('root', (0, 1), 1)
assert difference.target.data == 'a'
difference = diff_nodes[1]
assert isinstance(difference, dijon.SequenceItemModification)
assert difference.full_path == ('root', (1, 2))
assert difference.source.full_path == ('root', (1, 2), 1)
assert difference.source.data == 'b'
assert difference.target.full_path == ('root', (1, 2), 2)
assert difference.target.data == 'b'
difference = diff_nodes[2]
assert isinstance(difference, dijon.SequenceItemModification)
assert difference.full_path == ('root', (2, 3))
assert difference.source.full_path == ('root', (2, 3), 2)
assert difference.source.data == 'c'
assert difference.target.full_path == ('root', (2, 3), 3)
assert difference.target.data == 'c'
difference = diff_nodes[3]
assert isinstance(difference, dijon.SequenceItemAddition)
assert difference.full_path == ('root', (None, 0))
assert difference.source is None
assert difference.target.full_path == ('root', (None, 0), 0)
assert difference.target.data == 'z'
target_data = ['a', 'b', 'z', 'c']
diff_graph = dijon.compare(source_data, target_data)
diff_nodes = [n for n in diff_graph.iter_nodes(differences=True)]
assert len(diff_nodes) == 2
difference = diff_nodes[0]
assert isinstance(difference, dijon.SequenceItemModification)
assert difference.full_path == ('root', (2, 3))
assert difference.source.full_path == ('root', (2, 3), 2)
assert difference.source.data == 'c'
assert difference.target.full_path == ('root', (2, 3), 3)
assert difference.target.data == 'c'
difference = diff_nodes[1]
assert isinstance(difference, dijon.SequenceItemAddition)
assert difference.full_path == ('root', (None, 2))
assert difference.source is None
assert difference.target.full_path == ('root', (None, 2), 2)
assert difference.target.data == 'z'
def test_compare_sequence_deletion():
source_data = ['a', 'b', 'c']
target_data = ['a', 'b']
diff_graph = dijon.compare(source_data, target_data)
diff_nodes = [n for n in diff_graph.iter_nodes(differences=True)]
assert len(diff_nodes) == 1
difference = diff_nodes[0]
assert isinstance(difference, dijon.SequenceItemDeletion)
assert difference.full_path == ('root', (2, None))
assert difference.source.full_path == ('root', (2, None), 2)
assert difference.source.data == 'c'
assert difference.target is None
target_data = ['b', 'c']
diff_graph = dijon.compare(source_data, target_data)
diff_nodes = [n for n in diff_graph.iter_nodes(differences=True)]
assert len(diff_nodes) == 3
difference = diff_nodes[0]
assert isinstance(difference, dijon.SequenceItemModification)
assert difference.full_path == ('root', (1, 0))
assert difference.source.full_path == ('root', (1, 0), 1)
assert difference.source.data == 'b'
assert difference.target.full_path == ('root', (1, 0), 0)
assert difference.target.data == 'b'
difference = diff_nodes[1]
assert isinstance(difference, dijon.SequenceItemModification)
assert difference.full_path == ('root', (2, 1))
assert difference.source.full_path == ('root', (2, 1), 2)
assert difference.source.data == 'c'
assert difference.target.full_path == ('root', (2, 1), 1)
assert difference.target.data == 'c'
difference = diff_nodes[2]
assert isinstance(difference, dijon.SequenceItemDeletion)
assert difference.full_path == ('root', (0, None))
assert difference.source.full_path == ('root', (0, None), 0)
assert difference.source.data == 'a'
assert difference.target is None
target_data = ['a', 'c']
diff_graph = dijon.compare(source_data, target_data)
diff_nodes = [n for n in diff_graph.iter_nodes(differences=True)]
assert len(diff_nodes) == 2
difference = diff_nodes[0]
assert isinstance(difference, dijon.SequenceItemModification)
assert difference.full_path == ('root', (2, 1))
assert difference.source.full_path == ('root', (2, 1), 2)<|fim▁hole|> assert difference.source.data == 'c'
assert difference.target.full_path == ('root', (2, 1), 1)
assert difference.target.data == 'c'
difference = diff_nodes[1]
assert isinstance(difference, dijon.SequenceItemDeletion)
assert difference.full_path == ('root', (1, None))
assert difference.source.full_path == ('root', (1, None), 1)
assert difference.source.data == 'b'
assert difference.target is None<|fim▁end|> | |
<|file_name|>phpdbg.js<|end_file_name|><|fim▁begin|>'use strict';
var element = require('../element');
module.exports = function(node) {
var el = element('phpdbg');
el.innerHTML = node.nodeValue;<|fim▁hole|><|fim▁end|> | return el;
}; |
<|file_name|>AgentdService.java<|end_file_name|><|fim▁begin|>package com.wincom.dcim.agentd;
import java.util.Properties;
import java.util.Set;
public interface AgentdService {
void registerCodecFactory(String key, CodecFactory factory);
void unregisterCodecFactory(String key);<|fim▁hole|> Set<String> getCodecFactoryKeys();
/**
* Create or get a <code>Codec</code>. FIXME: this interface design is
* problematic.
*
* @param factoryId
* @param codecId
* @param props
* @return
*/
Codec createCodec(String factoryId, String codecId, Properties props);
Codec getCodec(String codecId);
void setCodec(String codecId, Codec codec);
}<|fim▁end|> | |
<|file_name|>loadBalancingPolicySelector.component.ts<|end_file_name|><|fim▁begin|>import type { IComponentOptions, IController } from 'angular';
import { module } from 'angular';
import { chain, get, has, intersection, set, without } from 'lodash';
import type { IGceBackendService, INamedPort } from '../../../../domain';
import './loadBalancingPolicySelector.component.less';
class GceLoadBalancingPolicySelectorController implements IController {
public maxPort = 65535;
public command: any;
[key: string]: any;
public globalBackendServices: IGceBackendService[];
public static $inject = ['gceBackendServiceReader'];
constructor(private gceBackendServiceReader: any) {}
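// The template works in whole percents while the command model stores decimals;
// setModel()/setView() below convert between the two representations.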
public setModel(propertyName: string, viewValue: number): void {
set(this, propertyName, viewValue / 100);
}
public setView(propertyName: string, modelValue: number): void {
this[propertyName] = this.decimalToPercent(modelValue);
}
public onBalancingModeChange(mode: string): void {
const keys: string[] = ['maxUtilization', 'maxRatePerInstance', 'maxConnectionsPerInstance'];
let toDelete: string[] = [];
switch (mode) {
case 'RATE':
toDelete = without(keys, 'maxRatePerInstance');
break;
case 'UTILIZATION':
toDelete = without(keys, 'maxUtilization');
break;
case 'CONNECTION':
toDelete = without(keys, 'maxConnectionsPerInstance');
break;
default:
break;
}
toDelete.forEach((key) => delete this.command.loadBalancingPolicy[key]);
}
public getBalancingModes(): string[] {
let balancingModes: string[] = [];
/*
* Three cases:
* - If we have only HTTP(S) load balancers, our balancing mode can be RATE or UTILIZATION.
* - If we have only SSL/TCP load balancers, our balancing mode can be CONNECTION or UTILIZATION.
* - If we have both, only UTILIZATION.
* */
if (has(this, 'command.backingData.filtered.loadBalancerIndex')) {
const index = this.command.backingData.filtered.loadBalancerIndex;
const selected = this.command.loadBalancers;
const hasSsl = selected.find((loadBalancer: any) => get(index[loadBalancer], 'loadBalancerType') === 'SSL');
const hasTcp = selected.find((loadBalancer: any) => get(index[loadBalancer], 'loadBalancerType') === 'TCP');
const hasHttp = selected.find((loadBalancer: any) => get(index[loadBalancer], 'loadBalancerType') === 'HTTP');
if ((hasSsl || hasTcp) && hasHttp) {
balancingModes = ['UTILIZATION'];
} else if (hasSsl || hasTcp) {
balancingModes = ['CONNECTION', 'UTILIZATION'];
} else {
balancingModes = ['RATE', 'UTILIZATION'];
}
}
if (!balancingModes.includes(get(this.command, 'loadBalancingPolicy.balancingMode') as string)) {
set(this.command, 'loadBalancingPolicy.balancingMode', balancingModes[0]);
}
return balancingModes;
}
public $onInit(): void {
this.gceBackendServiceReader.listBackendServices('globalBackendService').then((services: IGceBackendService[]) => {
this.globalBackendServices = services;
});
}
public $onDestroy(): void {
delete this.command.loadBalancingPolicy;
}
public addNamedPort() {
if (!get(this.command, 'loadBalancingPolicy.namedPorts')) {
set(this.command, 'loadBalancingPolicy.namedPorts', []);
}
this.command.loadBalancingPolicy.namedPorts.push({ name: '', port: 80 });
}
public removeNamedPort(index: number) {
this.command.loadBalancingPolicy.namedPorts.splice(index, 1);
}
public getPortNames(): string[] {
const index = this.command.backingData.filtered.loadBalancerIndex;
const selected = this.command.loadBalancers;
const inUsePortNames = this.command.loadBalancingPolicy.namedPorts.map((namedPort: INamedPort) => namedPort.name);
const getThem = (globalBackendServices: IGceBackendService[], loadBalancer: string): string[] => {
switch (get(index[loadBalancer], 'loadBalancerType')) {
case 'SSL':
case 'TCP':
case 'HTTP': {
const lbBackendServices: string[] = get(index[loadBalancer], 'backendServices');
const filteredBackendServices = globalBackendServices.filter((service: IGceBackendService) =>
lbBackendServices.includes(service.name),<|fim▁hole|> return portNames.filter((portName) => !portNameIntersection.includes(portName));
}
default:
return [];
}
};
return chain(selected)
.flatMap((lbName: string) => getThem(this.globalBackendServices, lbName))
.uniq()
.value();
}
private decimalToPercent(value: number): number {
if (value === 0) {
return 0;
}
return value ? Math.round(value * 100) : undefined;
}
}
const gceLoadBalancingPolicySelectorComponent: IComponentOptions = {
bindings: {
command: '=',
},
controller: GceLoadBalancingPolicySelectorController,
templateUrl: require('./loadBalancingPolicySelector.component.html'),
};
export const GCE_LOAD_BALANCING_POLICY_SELECTOR = 'spinnaker.gce.loadBalancingPolicy.selector.component';
module(GCE_LOAD_BALANCING_POLICY_SELECTOR, []).component(
'gceLoadBalancingPolicySelector',
gceLoadBalancingPolicySelectorComponent,
);<|fim▁end|> | );
const portNames = filteredBackendServices.map((service: IGceBackendService) => service.portName);
const portNameIntersection = intersection(portNames, inUsePortNames); |
<|file_name|>PhysicsFrame.cpp<|end_file_name|><|fim▁begin|><|fim▁hole|> @maintainer Morgan McGuire, http://graphics.cs.williams.edu
@created 2002-07-09
@edited 2006-01-25
*/
#include "G3D/platform.h"
#include "G3D/PhysicsFrame.h"
#include "G3D/BinaryInput.h"
#include "G3D/BinaryOutput.h"
namespace G3D {
PhysicsFrame::PhysicsFrame() {
translation = Vector3::zero();
rotation = Quat();
}
PhysicsFrame::PhysicsFrame(
const CoordinateFrame& coordinateFrame) {
translation = coordinateFrame.translation;
rotation = Quat(coordinateFrame.rotation);
}
PhysicsFrame PhysicsFrame::operator*(const PhysicsFrame& other) const {
PhysicsFrame result;
result.rotation = rotation * other.rotation;
result.translation = translation + rotation.toRotationMatrix() * other.translation;
return result;
}
CoordinateFrame PhysicsFrame::toCoordinateFrame() const {
CoordinateFrame f;
f.translation = translation;
f.rotation = rotation.toRotationMatrix();
return f;
}
PhysicsFrame PhysicsFrame::lerp(
const PhysicsFrame& other,
float alpha) const {
PhysicsFrame result;
result.translation = translation.lerp(other.translation, alpha);
result.rotation = rotation.slerp(other.rotation, alpha);
return result;
}
void PhysicsFrame::deserialize(class BinaryInput& b) {
translation.deserialize(b);
rotation.deserialize(b);
}
void PhysicsFrame::serialize(class BinaryOutput& b) const {
translation.serialize(b);
rotation.serialize(b);
}
}; // namespace<|fim▁end|> | /**
@file PhysicsFrame.cpp
|
<|file_name|>tests.py<|end_file_name|><|fim▁begin|># -*- coding:utf-8 -*-
from __future__ import unicode_literals
import unittest
from io import BytesIO, StringIO
from decimal import Decimal
import threading
from importlib import import_module
from ijson import common
from ijson.backends.python import basic_parse, Lexer
from ijson.compat import IS_PY2
JSON = b'''
{
"docs": [
{
"null": null,
"boolean": false,
"true": true,
"integer": 0,
"double": 0.5,
"exponent": 1.0e+2,
"long": 10000000000,
"string": "\\u0441\\u0442\\u0440\\u043e\\u043a\\u0430 - \xd1\x82\xd0\xb5\xd1\x81\xd1\x82"
},
{
"meta": [[1], {}]
},
{
"meta": {"key": "value"}
},
{
"meta": null
}
]
}
'''
JSON_EVENTS = [
('start_map', None),
('map_key', 'docs'),
('start_array', None),
('start_map', None),
('map_key', 'null'),
('null', None),
('map_key', 'boolean'),
('boolean', False),
('map_key', 'true'),
('boolean', True),
('map_key', 'integer'),
('number', 0),
('map_key', 'double'),
('number', Decimal('0.5')),
('map_key', 'exponent'),
('number', 100),
('map_key', 'long'),
('number', 10000000000),
('map_key', 'string'),
('string', 'строка - тест'),
('end_map', None),
('start_map', None),
('map_key', 'meta'),
('start_array', None),
('start_array', None),
('number', 1),
('end_array', None),
('start_map', None),
('end_map', None),
('end_array', None),
('end_map', None),
('start_map', None),
('map_key', 'meta'),
('start_map', None),
('map_key', 'key'),
('string', 'value'),
('end_map', None),
('end_map', None),
('start_map', None),
('map_key', 'meta'),
('null', None),
('end_map', None),
('end_array', None),
('end_map', None),
]
SCALAR_JSON = b'0'
INVALID_JSONS = [
b'["key", "value",]', # trailing comma
b'["key" "value"]', # no comma
b'{"key": "value",}', # trailing comma
b'{"key": "value" "key"}', # no comma
b'{"key" "value"}', # no colon
b'invalid', # unknown lexeme
b'[1, 2] dangling junk' # dangling junk
]
YAJL1_PASSING_INVALID = INVALID_JSONS[6]
INCOMPLETE_JSONS = [
b'',
b'"test',
b'[',
b'[1',
b'[1,',
b'{',
b'{"key"',
b'{"key":',
b'{"key": "value"',
b'{"key": "value",',
]
STRINGS_JSON = br'''
{
"str1": "",
"str2": "\"",
"str3": "\\",
"str4": "\\\\",
"special\t": "\b\f\n\r\t"
}
'''
NUMBERS_JSON = b'[1, 1.0, 1E2]'
SURROGATE_PAIRS_JSON = b'"\uD83D\uDCA9"'
class Parse(object):
'''
Base class for parsing tests that is used to create test cases for each
available backend.
'''
def test_basic_parse(self):
events = list(self.backend.basic_parse(BytesIO(JSON)))
self.assertEqual(events, JSON_EVENTS)
def test_basic_parse_threaded(self):
thread = threading.Thread(target=self.test_basic_parse)
thread.start()
thread.join()
def test_scalar(self):
events = list(self.backend.basic_parse(BytesIO(SCALAR_JSON)))
self.assertEqual(events, [('number', 0)])
def test_strings(self):
events = list(self.backend.basic_parse(BytesIO(STRINGS_JSON)))
strings = [value for event, value in events if event == 'string']
self.assertEqual(strings, ['', '"', '\\', '\\\\', '\b\f\n\r\t'])
self.assertTrue(('map_key', 'special\t') in events)
def test_surrogate_pairs(self):
event = next(self.backend.basic_parse(BytesIO(SURROGATE_PAIRS_JSON)))
parsed_string = event[1]
self.assertEqual(parsed_string, '💩')
def test_numbers(self):
events = list(self.backend.basic_parse(BytesIO(NUMBERS_JSON)))
types = [type(value) for event, value in events if event == 'number']
self.assertEqual(types, [int, Decimal, Decimal])
def test_invalid(self):
for json in INVALID_JSONS:
# Yajl1 doesn't complain about additional data after the end
# of a parsed object. Skipping this test.
if self.__class__.__name__ == 'YajlParse' and json == YAJL1_PASSING_INVALID:
continue
with self.assertRaises(common.JSONError) as cm:
list(self.backend.basic_parse(BytesIO(json)))
def test_incomplete(self):
for json in INCOMPLETE_JSONS:
with self.assertRaises(common.IncompleteJSONError):
list(self.backend.basic_parse(BytesIO(json)))
def test_utf8_split(self):
buf_size = JSON.index(b'\xd1') + 1
try:
events = list(self.backend.basic_parse(BytesIO(JSON), buf_size=buf_size))
except UnicodeDecodeError:
self.fail('UnicodeDecodeError raised')
def test_lazy(self):
# shouldn't fail since iterator is not exhausted
self.backend.basic_parse(BytesIO(INVALID_JSONS[0]))
self.assertTrue(True)
def test_boundary_lexeme(self):
buf_size = JSON.index(b'false') + 1
events = list(self.backend.basic_parse(BytesIO(JSON), buf_size=buf_size))
self.assertEqual(events, JSON_EVENTS)
def test_boundary_whitespace(self):
buf_size = JSON.index(b' ') + 1
events = list(self.backend.basic_parse(BytesIO(JSON), buf_size=buf_size))
self.assertEqual(events, JSON_EVENTS)
def test_api(self):
self.assertTrue(list(self.backend.items(BytesIO(JSON), '')))
self.assertTrue(list(self.backend.parse(BytesIO(JSON))))
# Generating real TestCase classes for each importable backend
for name in ['python', 'yajl', 'yajl2', 'yajl2_cffi']:
try:
classname = '%sParse' % ''.join(p.capitalize() for p in name.split('_'))
if IS_PY2:
classname = classname.encode('ascii')
locals()[classname] = type(
classname,
(unittest.TestCase, Parse),
{'backend': import_module('ijson.backends.%s' % name)},
)
except ImportError:
pass
class Common(unittest.TestCase):
'''
Backend independent tests. They all use basic_parse imported explicitly from
the python backend to generate parsing events.
'''
def test_object_builder(self):
builder = common.ObjectBuilder()
for event, value in basic_parse(BytesIO(JSON)):
builder.event(event, value)
self.assertEqual(builder.value, {
'docs': [
{
'string': 'строка - тест',
'null': None,
'boolean': False,
'true': True,
'integer': 0,
'double': Decimal('0.5'),
'exponent': 100,
'long': 10000000000,
},
{
'meta': [[1], {}],
},
{
'meta': {'key': 'value'},
},
{
'meta': None,<|fim▁hole|> ],
})
def test_scalar_builder(self):
builder = common.ObjectBuilder()
for event, value in basic_parse(BytesIO(SCALAR_JSON)):
builder.event(event, value)
self.assertEqual(builder.value, 0)
def test_parse(self):
events = common.parse(basic_parse(BytesIO(JSON)))
events = [value
for prefix, event, value in events
if prefix == 'docs.item.meta.item.item'
]
self.assertEqual(events, [1])
def test_items(self):
events = basic_parse(BytesIO(JSON))
meta = list(common.items(common.parse(events), 'docs.item.meta'))
self.assertEqual(meta, [
[[1], {}],
{'key': 'value'},
None,
])
class Stream(unittest.TestCase):
def test_bytes(self):
l = Lexer(BytesIO(JSON))
self.assertEqual(next(l)[1], '{')
def test_string(self):
l = Lexer(StringIO(JSON.decode('utf-8')))
self.assertEqual(next(l)[1], '{')
if __name__ == '__main__':
unittest.main()<|fim▁end|> | }, |
<|file_name|>zone_shattrath_city.cpp<|end_file_name|><|fim▁begin|>/*
* Copyright (C) 2011-2021 Project SkyFire <https://www.projectskyfire.org/>
* Copyright (C) 2008-2021 TrinityCore <http://www.trinitycore.org/>
* Copyright (C) 2005-2021 MaNGOS <https://www.getmangos.eu/>
* Copyright (C) 2006-2014 ScriptDev2 <https://github.com/scriptdev2/scriptdev2/>
*
* This program is free software; you can redistribute it and/or modify it
* under the terms of the GNU General Public License as published by the
* Free Software Foundation; either version 3 of the License, or (at your
* option) any later version.
*
* This program is distributed in the hope that it will be useful, but WITHOUT
* ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
* FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License for
* more details.
*
* You should have received a copy of the GNU General Public License along
* with this program. If not, see <http://www.gnu.org/licenses/>.
*/
/* ScriptData
SDName: Shattrath_City
SD%Complete: 100
SDComment: Quest support: 10004, 10009, 10211. Flask vendors, Teleport to Caverns of Time
SDCategory: Shattrath City
EndScriptData */
/* ContentData
npc_raliq_the_drunk
npc_salsalabim
npc_shattrathflaskvendors
npc_zephyr
npc_kservant
npc_ishanah
npc_khadgar
EndContentData */
#include "ScriptMgr.h"
#include "ScriptedCreature.h"
#include "ScriptedGossip.h"
#include "ScriptedEscortAI.h"
#include "Player.h"
#include "WorldSession.h"
/*######
## npc_raliq_the_drunk
######*/
#define GOSSIP_RALIQ "You owe Sim'salabim money. Hand them over or die!"
enum Raliq
{
SPELL_UPPERCUT = 10966,
QUEST_CRACK_SKULLS = 10009,
FACTION_HOSTILE_RD = 45
};
class npc_raliq_the_drunk : public CreatureScript
{
public:
npc_raliq_the_drunk() : CreatureScript("npc_raliq_the_drunk") { }
bool OnGossipSelect(Player* player, Creature* creature, uint32 /*sender*/, uint32 action) OVERRIDE
{
player->PlayerTalkClass->ClearMenus();
if (action == GOSSIP_ACTION_INFO_DEF+1)
{
player->CLOSE_GOSSIP_MENU();
creature->setFaction(FACTION_HOSTILE_RD);
creature->AI()->AttackStart(player);
}
return true;
}
bool OnGossipHello(Player* player, Creature* creature) OVERRIDE
{
if (player->GetQuestStatus(QUEST_CRACK_SKULLS) == QUEST_STATUS_INCOMPLETE)
player->ADD_GOSSIP_ITEM(GOSSIP_ICON_VENDOR, GOSSIP_RALIQ, GOSSIP_SENDER_MAIN, GOSSIP_ACTION_INFO_DEF+1);
player->SEND_GOSSIP_MENU(9440, creature->GetGUID());
return true;
}
CreatureAI* GetAI(Creature* creature) const OVERRIDE
{
return new npc_raliq_the_drunkAI(creature);
}
struct npc_raliq_the_drunkAI : public ScriptedAI
{
npc_raliq_the_drunkAI(Creature* creature) : ScriptedAI(creature)
{
m_uiNormFaction = creature->getFaction();
}
uint32 m_uiNormFaction;
uint32 Uppercut_Timer;
void Reset() OVERRIDE
{
Uppercut_Timer = 5000;
me->RestoreFaction();
}
void UpdateAI(uint32 diff) OVERRIDE
{
if (!UpdateVictim())
return;
if (Uppercut_Timer <= diff)
{
DoCastVictim(SPELL_UPPERCUT);
Uppercut_Timer = 15000;
} else Uppercut_Timer -= diff;
DoMeleeAttackIfReady();
}
};
};
/*######
# npc_salsalabim
######*/
enum Salsalabim
{
// Factions
FACTION_HOSTILE_SA = 90,
FACTION_FRIENDLY_SA = 35,
// Quests
QUEST_10004 = 10004,
// Spells
SPELL_MAGNETIC_PULL = 31705
};
class npc_salsalabim : public CreatureScript
{
public:
npc_salsalabim() : CreatureScript("npc_salsalabim") { }
bool OnGossipHello(Player* player, Creature* creature) OVERRIDE
{
if (player->GetQuestStatus(QUEST_10004) == QUEST_STATUS_INCOMPLETE)
{
creature->setFaction(FACTION_HOSTILE_SA);
creature->AI()->AttackStart(player);
}
else
{
if (creature->IsQuestGiver())
player->PrepareQuestMenu(creature->GetGUID());
player->SEND_GOSSIP_MENU(player->GetGossipTextId(creature), creature->GetGUID());
}
return true;
}
CreatureAI* GetAI(Creature* creature) const OVERRIDE
{
return new npc_salsalabimAI(creature);
}
struct npc_salsalabimAI : public ScriptedAI
{
npc_salsalabimAI(Creature* creature) : ScriptedAI(creature) { }
uint32 MagneticPull_Timer;
void Reset() OVERRIDE
{
MagneticPull_Timer = 15000;
me->RestoreFaction();
}
void DamageTaken(Unit* done_by, uint32 &damage) OVERRIDE
{
if (done_by->GetTypeId() == TypeID::TYPEID_PLAYER && me->HealthBelowPctDamaged(20, damage))
{<|fim▁hole|> done_by->ToPlayer()->GroupEventHappens(QUEST_10004, me);
damage = 0;
EnterEvadeMode();
}
}
void UpdateAI(uint32 diff) OVERRIDE
{
if (!UpdateVictim())
return;
if (MagneticPull_Timer <= diff)
{
DoCastVictim(SPELL_MAGNETIC_PULL);
MagneticPull_Timer = 15000;
} else MagneticPull_Timer -= diff;
DoMeleeAttackIfReady();
}
};
};
/*
##################################################
Shattrath City flask vendors provide flasks to people exalted with 3 factions:
Haldor the Compulsive
Arcanist Xorith
Both sell special flasks for use in Outland 25-man raids only,
purchasable for one Mark of Illidari each
Purchase requires exalted reputation with Scryers/Aldor, Cenarion Expedition and The Sha'tar
##################################################
*/
class npc_shattrathflaskvendors : public CreatureScript
{
public:
npc_shattrathflaskvendors() : CreatureScript("npc_shattrathflaskvendors") { }
bool OnGossipSelect(Player* player, Creature* creature, uint32 /*sender*/, uint32 action) OVERRIDE
{
player->PlayerTalkClass->ClearMenus();
if (action == GOSSIP_ACTION_TRADE)
player->GetSession()->SendListInventory(creature->GetGUID());
return true;
}
bool OnGossipHello(Player* player, Creature* creature) OVERRIDE
{
if (creature->GetEntry() == 23484)
{
// Aldor vendor
if (creature->IsVendor() && (player->GetReputationRank(932) == REP_EXALTED) && (player->GetReputationRank(935) == REP_EXALTED) && (player->GetReputationRank(942) == REP_EXALTED))
{
player->ADD_GOSSIP_ITEM(GOSSIP_ICON_VENDOR, GOSSIP_TEXT_BROWSE_GOODS, GOSSIP_SENDER_MAIN, GOSSIP_ACTION_TRADE);
player->SEND_GOSSIP_MENU(11085, creature->GetGUID());
}
else
{
player->SEND_GOSSIP_MENU(11083, creature->GetGUID());
}
}
if (creature->GetEntry() == 23483)
{
// Scryers vendor
if (creature->IsVendor() && (player->GetReputationRank(934) == REP_EXALTED) && (player->GetReputationRank(935) == REP_EXALTED) && (player->GetReputationRank(942) == REP_EXALTED))
{
player->ADD_GOSSIP_ITEM(GOSSIP_ICON_VENDOR, GOSSIP_TEXT_BROWSE_GOODS, GOSSIP_SENDER_MAIN, GOSSIP_ACTION_TRADE);
player->SEND_GOSSIP_MENU(11085, creature->GetGUID());
}
else
{
player->SEND_GOSSIP_MENU(11084, creature->GetGUID());
}
}
return true;
}
};
/*######
# npc_zephyr
######*/
#define GOSSIP_HZ "Take me to the Caverns of Time."
class npc_zephyr : public CreatureScript
{
public:
npc_zephyr() : CreatureScript("npc_zephyr") { }
bool OnGossipSelect(Player* player, Creature* /*creature*/, uint32 /*sender*/, uint32 action) OVERRIDE
{
player->PlayerTalkClass->ClearMenus();
if (action == GOSSIP_ACTION_INFO_DEF+1)
player->CastSpell(player, 37778, false);
return true;
}
bool OnGossipHello(Player* player, Creature* creature) OVERRIDE
{
if (player->GetReputationRank(989) >= REP_REVERED)
player->ADD_GOSSIP_ITEM(GOSSIP_ICON_CHAT, GOSSIP_HZ, GOSSIP_SENDER_MAIN, GOSSIP_ACTION_INFO_DEF+1);
player->SEND_GOSSIP_MENU(player->GetGossipTextId(creature), creature->GetGUID());
return true;
}
};
/*######
# npc_kservant
######*/
enum KServant
{
SAY1 = 0,
WHISP1 = 1,
WHISP2 = 2,
WHISP3 = 3,
WHISP4 = 4,
WHISP5 = 5,
WHISP6 = 6,
WHISP7 = 7,
WHISP8 = 8,
WHISP9 = 9,
WHISP10 = 10,
WHISP11 = 11,
WHISP12 = 12,
WHISP13 = 13,
WHISP14 = 14,
WHISP15 = 15,
WHISP16 = 16,
WHISP17 = 17,
WHISP18 = 18,
WHISP19 = 19,
WHISP20 = 20,
WHISP21 = 21
};
class npc_kservant : public CreatureScript
{
public:
npc_kservant() : CreatureScript("npc_kservant") { }
CreatureAI* GetAI(Creature* creature) const OVERRIDE
{
return new npc_kservantAI(creature);
}
struct npc_kservantAI : public npc_escortAI
{
public:
npc_kservantAI(Creature* creature) : npc_escortAI(creature) { }
void WaypointReached(uint32 waypointId) OVERRIDE
{
Player* player = GetPlayerForEscort();
if (!player)
return;
switch (waypointId)
{
case 0:
Talk(SAY1, player);
break;
case 4:
Talk(WHISP1, player);
break;
case 6:
Talk(WHISP2, player);
break;
case 7:
Talk(WHISP3, player);
break;
case 8:
Talk(WHISP4, player);
break;
case 17:
Talk(WHISP5, player);
break;
case 18:
Talk(WHISP6, player);
break;
case 19:
Talk(WHISP7, player);
break;
case 33:
Talk(WHISP8, player);
break;
case 34:
Talk(WHISP9, player);
break;
case 35:
Talk(WHISP10, player);
break;
case 36:
Talk(WHISP11, player);
break;
case 43:
Talk(WHISP12, player);
break;
case 44:
Talk(WHISP13, player);
break;
case 49:
Talk(WHISP14, player);
break;
case 50:
Talk(WHISP15, player);
break;
case 51:
Talk(WHISP16, player);
break;
case 52:
Talk(WHISP17, player);
break;
case 53:
Talk(WHISP18, player);
break;
case 54:
Talk(WHISP19, player);
break;
case 55:
Talk(WHISP20, player);
break;
case 56:
Talk(WHISP21, player);
player->GroupEventHappens(10211, me);
break;
}
}
void MoveInLineOfSight(Unit* who) OVERRIDE
{
if (HasEscortState(STATE_ESCORT_ESCORTING))
return;
Player* player = who->ToPlayer();
if (player && player->GetQuestStatus(10211) == QUEST_STATUS_INCOMPLETE)
{
float Radius = 10.0f;
if (me->IsWithinDistInMap(who, Radius))
{
Start(false, false, who->GetGUID());
}
}
}
void Reset() OVERRIDE { }
};
};
/*######
# npc_ishanah
######*/
#define ISANAH_GOSSIP_1 "Who are the Sha'tar?"
#define ISANAH_GOSSIP_2 "Isn't Shattrath a draenei city? Why do you allow others here?"
class npc_ishanah : public CreatureScript
{
public:
npc_ishanah() : CreatureScript("npc_ishanah") { }
bool OnGossipSelect(Player* player, Creature* creature, uint32 /*sender*/, uint32 action) OVERRIDE
{
player->PlayerTalkClass->ClearMenus();
if (action == GOSSIP_ACTION_INFO_DEF+1)
player->SEND_GOSSIP_MENU(9458, creature->GetGUID());
else if (action == GOSSIP_ACTION_INFO_DEF+2)
player->SEND_GOSSIP_MENU(9459, creature->GetGUID());
return true;
}
bool OnGossipHello(Player* player, Creature* creature) OVERRIDE
{
if (creature->IsQuestGiver())
player->PrepareQuestMenu(creature->GetGUID());
player->ADD_GOSSIP_ITEM(GOSSIP_ICON_CHAT, ISANAH_GOSSIP_1, GOSSIP_SENDER_MAIN, GOSSIP_ACTION_INFO_DEF+1);
player->ADD_GOSSIP_ITEM(GOSSIP_ICON_CHAT, ISANAH_GOSSIP_2, GOSSIP_SENDER_MAIN, GOSSIP_ACTION_INFO_DEF+2);
player->SEND_GOSSIP_MENU(player->GetGossipTextId(creature), creature->GetGUID());
return true;
}
};
/*######
# npc_khadgar
######*/
#define KHADGAR_GOSSIP_1 "I've heard your name spoken only in whispers, mage. Who are you?"
#define KHADGAR_GOSSIP_2 "Go on, please."
#define KHADGAR_GOSSIP_3 "I see." // the 6th gossip step reuses this text
#define KHADGAR_GOSSIP_4 "What did you do then?"
#define KHADGAR_GOSSIP_5 "What happened next?"
#define KHADGAR_GOSSIP_7 "There was something else I wanted to ask you."
class npc_khadgar : public CreatureScript
{
public:
npc_khadgar() : CreatureScript("npc_khadgar") { }
bool OnGossipSelect(Player* player, Creature* creature, uint32 /*sender*/, uint32 action) OVERRIDE
{
player->PlayerTalkClass->ClearMenus();
switch (action)
{
case GOSSIP_ACTION_INFO_DEF+1:
player->ADD_GOSSIP_ITEM(GOSSIP_ICON_CHAT, KHADGAR_GOSSIP_2, GOSSIP_SENDER_MAIN, GOSSIP_ACTION_INFO_DEF+2);
player->SEND_GOSSIP_MENU(9876, creature->GetGUID());
break;
case GOSSIP_ACTION_INFO_DEF+2:
player->ADD_GOSSIP_ITEM(GOSSIP_ICON_CHAT, KHADGAR_GOSSIP_3, GOSSIP_SENDER_MAIN, GOSSIP_ACTION_INFO_DEF+3);
player->SEND_GOSSIP_MENU(9877, creature->GetGUID());
break;
case GOSSIP_ACTION_INFO_DEF+3:
player->ADD_GOSSIP_ITEM(GOSSIP_ICON_CHAT, KHADGAR_GOSSIP_4, GOSSIP_SENDER_MAIN, GOSSIP_ACTION_INFO_DEF+4);
player->SEND_GOSSIP_MENU(9878, creature->GetGUID());
break;
case GOSSIP_ACTION_INFO_DEF+4:
player->ADD_GOSSIP_ITEM(GOSSIP_ICON_CHAT, KHADGAR_GOSSIP_5, GOSSIP_SENDER_MAIN, GOSSIP_ACTION_INFO_DEF+5);
player->SEND_GOSSIP_MENU(9879, creature->GetGUID());
break;
case GOSSIP_ACTION_INFO_DEF+5:
player->ADD_GOSSIP_ITEM(GOSSIP_ICON_CHAT, KHADGAR_GOSSIP_3, GOSSIP_SENDER_MAIN, GOSSIP_ACTION_INFO_DEF+6);
player->SEND_GOSSIP_MENU(9880, creature->GetGUID());
break;
case GOSSIP_ACTION_INFO_DEF+6:
player->ADD_GOSSIP_ITEM(GOSSIP_ICON_CHAT, KHADGAR_GOSSIP_7, GOSSIP_SENDER_MAIN, GOSSIP_ACTION_INFO_DEF+7);
player->SEND_GOSSIP_MENU(9881, creature->GetGUID());
break;
case GOSSIP_ACTION_INFO_DEF+7:
player->ADD_GOSSIP_ITEM(GOSSIP_ICON_CHAT, KHADGAR_GOSSIP_1, GOSSIP_SENDER_MAIN, GOSSIP_ACTION_INFO_DEF+1);
player->SEND_GOSSIP_MENU(9243, creature->GetGUID());
break;
}
return true;
}
bool OnGossipHello(Player* player, Creature* creature) OVERRIDE
{
if (creature->IsQuestGiver())
player->PrepareQuestMenu(creature->GetGUID());
if (player->GetQuestStatus(10211) != QUEST_STATUS_INCOMPLETE)
player->ADD_GOSSIP_ITEM(GOSSIP_ICON_CHAT, KHADGAR_GOSSIP_1, GOSSIP_SENDER_MAIN, GOSSIP_ACTION_INFO_DEF + 1);
player->SEND_GOSSIP_MENU(9243, creature->GetGUID());
return true;
}
};
void AddSC_shattrath_city()
{
new npc_raliq_the_drunk();
new npc_salsalabim();
new npc_shattrathflaskvendors();
new npc_zephyr();
new npc_kservant();
new npc_ishanah();
new npc_khadgar();
}<|fim▁end|> | |
<|file_name|>wsgi.py<|end_file_name|><|fim▁begin|>"""
WSGI config for geology project.
This module contains the WSGI application used by Django's development server
and any production WSGI deployments. It should expose a module-level variable
named ``application``. Django's ``runserver`` and ``runfcgi`` commands discover
this application via the ``WSGI_APPLICATION`` setting.
Usually you will have the standard Django WSGI application here, but it also
might make sense to replace the whole Django WSGI application with a custom one
that later delegates to the Django one. For example, you could introduce WSGI
middleware here, or combine a Django application with an application of another
framework.
"""
import os
from os.path import abspath, dirname<|fim▁hole|>
SITE_ROOT = dirname(dirname(abspath(__file__)))
path.append(SITE_ROOT)
# We defer to a DJANGO_SETTINGS_MODULE already in the environment. This breaks
# if running multiple sites in the same mod_wsgi process. To fix this, use
# mod_wsgi daemon mode with each site in its own daemon process, or use
# os.environ["DJANGO_SETTINGS_MODULE"] = "jajaja.settings"
os.environ.setdefault("DJANGO_SETTINGS_MODULE", "geology.settings.production")
# This application object is used by any WSGI server configured to use this
# file. This includes Django's development server, if the WSGI_APPLICATION
# setting points here.
from django.core.wsgi import get_wsgi_application
application = get_wsgi_application()
# Apply WSGI middleware here.
# from helloworld.wsgi import HelloWorldApplication
# application = HelloWorldApplication(application)<|fim▁end|> | from sys import path |
<|file_name|>intern.js<|end_file_name|><|fim▁begin|>// Learn more about configuring this file at <https://theintern.github.io/intern/#configuration>.
// These default settings work OK for most people. The options that *must* be changed below are the
// packages, suites, excludeInstrumentation, and (if you want functional tests) functionalSuites
define({
// Default desired capabilities for all environments. Individual capabilities can be overridden by any of the
// specified browser environments in the `environments` array below as well. See<|fim▁hole|> maxConcurrency: 2,
// Non-functional test suite(s) to run in each browser
suites: [ 'tests/plugin' ],
// A regular expression matching URLs to files that should not be included in code coverage analysis
excludeInstrumentation: /^(?:tests|node_modules)\//
});<|fim▁end|> | // <https://theintern.github.io/intern/#option-capabilities> for links to the different capabilities options for
// different services.
// Maximum number of simultaneous integration tests that should be executed on the remote WebDriver service |
<|file_name|>list_notifier.py<|end_file_name|><|fim▁begin|># Copyright 2011 OpenStack LLC.
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from nova import flags
from nova import log as logging
from nova import utils
from nova.exception import ClassNotFound
flags.DEFINE_multistring('list_notifier_drivers',
['nova.notifier.no_op_notifier'],
'List of drivers to send notifications')
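# As a multistring flag this can be passed once per driver, e.g. (hypothetical
# module names):
#   --list_notifier_drivers=nova.notifier.log_notifier \
#   --list_notifier_drivers=nova.notifier.no_op_notifier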
FLAGS = flags.FLAGS
LOG = logging.getLogger('nova.notifier.list_notifier')
drivers = None
class ImportFailureNotifier(object):
"""Noisily re-raises some exception over-and-over when notify is called."""
def __init__(self, exception):
self.exception = exception
def notify(self, message):
raise self.exception
def _get_drivers():
"""Instantiates and returns drivers based on the flag values."""
global drivers
if not drivers:
drivers = []
for notification_driver in FLAGS.list_notifier_drivers:
try:
drivers.append(utils.import_object(notification_driver))
except ClassNotFound as e:
drivers.append(ImportFailureNotifier(e))
return drivers
def notify(message):<|fim▁hole|> try:
driver.notify(message)
except Exception as e:
LOG.exception(_("Problem '%(e)s' attempting to send to "
"notification driver %(driver)s." % locals()))
def _reset_drivers():
"""Used by unit tests to reset the drivers."""
global drivers
drivers = None<|fim▁end|> | """Passes notification to multiple notifiers in a list."""
for driver in _get_drivers(): |
<|file_name|>timeline_test.py<|end_file_name|><|fim▁begin|># Copyright 2016 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Tests for tensorflow.python.client.Timeline."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import json
import tensorflow as tf
from tensorflow.python.client import timeline
class TimelineTest(tf.test.TestCase):
def _validateTrace(self, chrome_trace_format):
# Check that the supplied string is valid JSON.
trace = json.loads(chrome_trace_format)
# It should have a top-level key containing events.
self.assertTrue('traceEvents' in trace)
# Every event in the list should have a 'ph' field.
for event in trace['traceEvents']:
self.assertTrue('ph' in event)
def testSimpleTimeline(self):
run_options = tf.RunOptions(trace_level=tf.RunOptions.FULL_TRACE)
run_metadata = tf.RunMetadata()
with tf.device('/cpu:0'):
with tf.Session() as sess:
sess.run(
tf.constant(1.0),
options=run_options,
run_metadata=run_metadata)
self.assertTrue(run_metadata.HasField('step_stats'))
tl = timeline.Timeline(run_metadata.step_stats)
ctf = tl.generate_chrome_trace_format()
self._validateTrace(ctf)
def testTimelineCpu(self):
run_options = tf.RunOptions(trace_level=tf.RunOptions.FULL_TRACE)
run_metadata = tf.RunMetadata()
with self.test_session(use_gpu=False) as sess:
const1 = tf.constant(1.0, name='const1')
const2 = tf.constant(2.0, name='const2')
result = tf.add(const1, const2) + const1 * const2
sess.run(result, options=run_options, run_metadata=run_metadata)
self.assertTrue(run_metadata.HasField('step_stats'))
step_stats = run_metadata.step_stats
devices = [d.device for d in step_stats.dev_stats]
self.assertTrue('/job:localhost/replica:0/task:0/cpu:0' in devices)
tl = timeline.Timeline(step_stats)
ctf = tl.generate_chrome_trace_format()
self._validateTrace(ctf)
tl = timeline.Timeline(step_stats)
ctf = tl.generate_chrome_trace_format(show_dataflow=False)
self._validateTrace(ctf)
tl = timeline.Timeline(step_stats)
ctf = tl.generate_chrome_trace_format(show_memory=False)
self._validateTrace(ctf)
tl = timeline.Timeline(step_stats)
ctf = tl.generate_chrome_trace_format(show_memory=False,
show_dataflow=False)
self._validateTrace(ctf)
def testTimelineGpu(self):
if not tf.test.is_gpu_available(cuda_only=True):
return
run_options = tf.RunOptions(trace_level=tf.RunOptions.FULL_TRACE)
run_metadata = tf.RunMetadata()
with self.test_session(force_gpu=True) as sess:
const1 = tf.constant(1.0, name='const1')
const2 = tf.constant(2.0, name='const2')
result = tf.add(const1, const2) + const1 * const2
sess.run(result, options=run_options, run_metadata=run_metadata)
self.assertTrue(run_metadata.HasField('step_stats'))
step_stats = run_metadata.step_stats
devices = [d.device for d in step_stats.dev_stats]
self.assertTrue('/job:localhost/replica:0/task:0/gpu:0' in devices)
self.assertTrue('/gpu:0/stream:all' in devices)
tl = timeline.Timeline(step_stats)
ctf = tl.generate_chrome_trace_format()
self._validateTrace(ctf)
tl = timeline.Timeline(step_stats)
ctf = tl.generate_chrome_trace_format(show_dataflow=False)<|fim▁hole|> ctf = tl.generate_chrome_trace_format(show_memory=False)
self._validateTrace(ctf)
tl = timeline.Timeline(step_stats)
ctf = tl.generate_chrome_trace_format(show_memory=False,
show_dataflow=False)
self._validateTrace(ctf)
def testAnalysisAndAllocations(self):
run_options = tf.RunOptions(trace_level=tf.RunOptions.FULL_TRACE)
run_metadata = tf.RunMetadata()
config = tf.ConfigProto(device_count={'CPU': 3})
with tf.Session(config=config) as sess:
with tf.device('/cpu:0'):
const1 = tf.constant(1.0, name='const1')
with tf.device('/cpu:1'):
const2 = tf.constant(2.0, name='const2')
with tf.device('/cpu:2'):
result = const1 + const2 + const1 * const2
sess.run(result, options=run_options, run_metadata=run_metadata)
self.assertTrue(run_metadata.HasField('step_stats'))
tl = timeline.Timeline(run_metadata.step_stats)
step_analysis = tl.analyze_step_stats()
ctf = step_analysis.chrome_trace.format_to_string()
self._validateTrace(ctf)
maximums = step_analysis.allocator_maximums
self.assertTrue('cpu' in maximums)
cpu_max = maximums['cpu']
# At least const1 + const2, both float32s (4 bytes each)
self.assertGreater(cpu_max.num_bytes, 8)
self.assertGreater(cpu_max.timestamp, 0)
self.assertTrue('const1' in cpu_max.tensors)
self.assertTrue('const2' in cpu_max.tensors)
def testManyCPUs(self):
run_options = tf.RunOptions(trace_level=tf.RunOptions.FULL_TRACE)
run_metadata = tf.RunMetadata()
config = tf.ConfigProto(device_count={'CPU': 3})
with tf.Session(config=config) as sess:
with tf.device('/cpu:0'):
const1 = tf.constant(1.0, name='const1')
with tf.device('/cpu:1'):
const2 = tf.constant(2.0, name='const2')
with tf.device('/cpu:2'):
result = const1 + const2 + const1 * const2
sess.run(result, options=run_options, run_metadata=run_metadata)
self.assertTrue(run_metadata.HasField('step_stats'))
step_stats = run_metadata.step_stats
devices = [d.device for d in step_stats.dev_stats]
self.assertTrue('/job:localhost/replica:0/task:0/cpu:0' in devices)
self.assertTrue('/job:localhost/replica:0/task:0/cpu:1' in devices)
self.assertTrue('/job:localhost/replica:0/task:0/cpu:2' in devices)
tl = timeline.Timeline(step_stats)
ctf = tl.generate_chrome_trace_format()
self._validateTrace(ctf)
tl = timeline.Timeline(step_stats)
ctf = tl.generate_chrome_trace_format(show_dataflow=False)
self._validateTrace(ctf)
tl = timeline.Timeline(step_stats)
ctf = tl.generate_chrome_trace_format(show_memory=False)
self._validateTrace(ctf)
tl = timeline.Timeline(step_stats)
ctf = tl.generate_chrome_trace_format(show_memory=False,
show_dataflow=False)
self._validateTrace(ctf)
if __name__ == '__main__':
tf.test.main()<|fim▁end|> | self._validateTrace(ctf)
tl = timeline.Timeline(step_stats) |
<|file_name|>AvahiBroadcaster.py<|end_file_name|><|fim▁begin|>#!/usr/bin/env python2
# -*- coding: utf-8 -*-
import dbus
import avahi
import socket
from folavirt.networking.NetworkInterface import NetworkInterface
"""
A class for broadcasting a service via Avahi.
"""
class AvahiBroadcaster():
def __init__(self, name, avahiname, avahiport):
self._service_name = name
self.avahiname = avahiname
self.avahiport = avahiport
self.bus = dbus.SystemBus()
raw_server = self.bus.get_object(avahi.DBUS_NAME, avahi.DBUS_PATH_SERVER)
self.server = dbus.Interface(raw_server, avahi.DBUS_INTERFACE_SERVER)
self.group = dbus.Interface(self.bus.get_object(avahi.DBUS_NAME, self.server.EntryGroupNew()), avahi.DBUS_INTERFACE_ENTRY_GROUP)
"""
Publish the service.
"""
def publish(self):
self.group.AddService(
NetworkInterface().getIndex(),
avahi.PROTO_INET,
0,
self._service_name,
self.avahiname,
'',<|fim▁hole|> )
self.group.Commit()
def unpublish(self):
self.group.Reset()<|fim▁end|> | '',
self.avahiport,
'' |
<|file_name|>repo_info.py<|end_file_name|><|fim▁begin|>"""
Methods to get certain kinds of data from the Github API.
Writes each repo's data to a separate file in a given directory
"""
import json
from mimetools import Message
from StringIO import StringIO
from github_util import *
import time
import sys, os
#####
# CONSTANTS
#####
#constants to specify parameters when calling methods
CONTRIBUTORS = 0
FORKERS = 1
#DOES NOT WORK UNLESS YOU HAVE PUSH ACCESS
COLLABORATORS = 10<|fim▁hole|>#the url path to the information in the api
URL_PATH = {CONTRIBUTORS:'contributors', COLLABORATORS:'collaborators', FORKERS:'forks'}
#the path to the following information in the json
JSON_PATH = {CONTRIBUTORS:('login',), COLLABORATORS:('login',), FORKERS:('owner', 'login')}
#####
# METHODS
#####
"""
Gets all the repos of the users
Returns dictionary {user:reponamelist}
or if forks is True, returns {user:(nonforknamelist, forknamelist)}
"""
def get_repos(users, forks=False):
#a dictionary to store all the {users:repos}
all_repos = {}
for u in users:
repos = []
forked = []
page = 1
while page>0: #api has multiple pages
URLstr = 'https://api.github.com/users/{}/repos'.format(u)
response = api_get(URLstr, parameters={'page':page, 'per_page':100})
if not is_successful_response(response):
print "{}\n{}\n{}\n".format(URLstr, response.status_code, response.text)
break
if has_next_page(response):
page += 1
else:
page = -1
#read the data of the current page
try:
repo_data = json.loads(response.text)
except Exception as e:
error_dump("{}\n{}\n{}".format(response, URLstr, response.text))
raise e
for repo in repo_data:
#get name of the repo
repoName = JSON_access(repo, ('name',))
#split forks and non-forks, if necessary
isFork = JSON_access(repo, ('fork',))
if forks and isFork:
forked.append(repoName)
else:
repos.append(repoName)
#add to the allRepos dictionary
if forks:
all_repos[u] = (repos,forked)
else:
all_repos[u] = repos
return all_repos
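#Hypothetical usage sketch (any existing GitHub username works here):
# get_repos(['octocat']) -> {'octocat': ['repo-a', 'repo-b', ...]}
# get_repos(['octocat'], forks=True) -> {'octocat': (['repo-a', ...], ['forked-repo', ...])}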
"""
grabs a list of all (contributors|collaborators|forkers) of all the repos
returns a dict of {repo_name : list of people}
"""
def repoPeople(repo_list,group=CONTRIBUTORS):
all_repos = {}
for repo in repo_list:
people = []
page = 1
while page>0:
URLstr = 'https://api.github.com/repos/{}/{}/{}'.format(repo["user"], repo["name"], URL_PATH[group])
response = api_get(baseURL=URLstr, parameters={'page':page,'per_page':100})
if not is_successful_response(response):
print "{}\n{}\n{}\n".format(URLstr, response.status_code, response.text)
break
if has_next_page(response):
page += 1
else:
page = -1
try:
responsePage = json.loads(response.text)
except Exception as e:
error_dump("{}\n{}\n{}".format(response, URLstr, response.text))
raise e
for contributor in responsePage:
username = JSON_access(contributor, JSON_PATH[group])
if username != None:
people.append(username)
all_repos[repo["name"]] = people
return all_repos
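#Hypothetical usage sketch:
# repoPeople([{'name': 'repo-a', 'user': 'octocat'}], group=FORKERS)
# -> {'repo-a': ['user1', 'user2', ...]}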
"""
Gets all the pull requests for a repo
"""
def get_pulls(repo):
URLstr = "https://api.github.com/repos/{}/{}/pulls".format(repo["user"], repo["name"])
page = 1
pulls = []
while page>0:
response = api_get(baseURL=URLstr, parameters={'page':page, 'per_page':100, "state":"all"})
if not is_successful_response(response):
print "{}\n{}\n{}\n".format(URLstr, response.status_code, response.text)
break
if has_next_page(response):
page += 1
else:
page = -1
try:
responsePage = json.loads(response.text)
except Exception as e:
error_dump("{}\n{}\n{}".format(response, URLstr, response.text))
raise e
pulls.extend(responsePage)
return pulls
"""
check if a repo is forked, and if so, return the parent repo in a tuple (reponame, ownername)
else, return None
"""
def parent_repo(repo,user):
URLstr = "https://api.github.com/repos/{}/{}".format(user, repo)
response = api_get(baseURL=URLstr)
if not is_successful_response(response):
print "{}\n{}\n{}\n".format(URLstr, response.status_code, response.text)
return None
page = json.loads(response.text)
parentRepo = None
if JSON_access(page, ('fork',)):
name = JSON_access(page,('parent','name'))
owner = JSON_access(page, ('parent','owner','login'))
if name != None and owner != None:
parentRepo = (name, owner)
return parentRepo
"""
Returns all the commits of a given repo
"""
def get_all_commits(repo):
URLstr = "https://api.github.com/repos/{}/{}/commits".format(repo["user"], repo["name"])
commits = []
page = 1
while page>0:
response = api_get(baseURL=URLstr, parameters={'page':page,'per_page':100})
if not is_successful_response(response):
print "{}\n{}\n{}\n".format(URLstr, response.status_code, response.text)
break
if has_next_page(response):
page += 1
else:
page = -1
try:
responsePage = json.loads(response.text)
except Exception as e:
error_dump("{}\n{}\n{}".format(response, URLstr, response.text))
raise e
commits.extend(responsePage)
return commits
#####
# TESTS
# make sure I didn't break anything
#####
if __name__=='__main__':
print parent_repo('ReadingJournal', 'poosomooso')
print parent_repo('QingTingCheat', "felixonmars") #should return None and print error because DMCA takedown
print repoPeople([{"name":'EmptyTest', "user":'poosomooso'}], group=CONTRIBUTORS) #print error code and also empty list in dict
print repoPeople([{"name":'Codestellation2015', "user":'IanOlin'}], group=CONTRIBUTORS)
print get_repos(("poosomooso", ))
repos = get_repos(("sindresorhus", )) #guy's got a lot of repos
print len(repos["sindresorhus"]) #over 700
print len(get_all_commits({"name":'CNTK', "user":'Microsoft'})) #check on github for actual number; as of 9/22/16 7899
ai_pulls = get_pulls({"name":"ToolBox-AI", "user":"sd16fall"})
print len(ai_pulls) #4
# print ai_pulls[0]
pass<|fim▁end|> | |
<|file_name|>test_spin_condition.cpp<|end_file_name|><|fim▁begin|>// Copyright (C) 2001-2003
// William E. Kempf
// Copyright (C) 2007 Anthony Williams
//
// Distributed under the Boost Software License, Version 1.0. (See accompanying
// file LICENSE_1_0.txt or copy at http://www.boost.org/LICENSE_1_0.txt)
#include <boost/date_time/posix_time/posix_time.hpp>
#include <boost/thread.hpp>
#include <boost/task.hpp>
#include <boost/test/unit_test.hpp>
#include <libs/task/test/util.ipp>
namespace tsk = boost::tasks;
namespace pt = boost::posix_time;
struct condition_test_data
{
condition_test_data() : notified(0), awoken(0) { }
tsk::spin::mutex mutex;
tsk::spin::condition condition;
int notified;
int awoken;
};
void condition_test_thread(condition_test_data* data)
{
tsk::spin::mutex::scoped_lock lock(data->mutex);
BOOST_CHECK(lock ? true : false);
while (!(data->notified > 0))
data->condition.wait(lock);
BOOST_CHECK(lock ? true : false);
data->awoken++;
}
struct cond_predicate
{
cond_predicate(int& var, int val) : _var(var), _val(val) { }
bool operator()() { return _var == _val; }
int& _var;<|fim▁hole|>private:
void operator=(cond_predicate&);
};
void condition_test_waits(condition_test_data* data)
{
tsk::spin::mutex::scoped_lock lock(data->mutex);
BOOST_CHECK(lock ? true : false);
// Test wait.
while (data->notified != 1)
data->condition.wait(lock);
BOOST_CHECK(lock ? true : false);
BOOST_CHECK_EQUAL(data->notified, 1);
data->awoken++;
data->condition.notify_one();
// Test predicate wait.
data->condition.wait(lock, cond_predicate(data->notified, 2));
BOOST_CHECK(lock ? true : false);
BOOST_CHECK_EQUAL(data->notified, 2);
data->awoken++;
data->condition.notify_one();
// Test timed_wait.
pt::time_duration xt = pt::seconds(10);
while (data->notified != 3)
data->condition.timed_wait(lock, xt);
BOOST_CHECK(lock ? true : false);
BOOST_CHECK_EQUAL(data->notified, 3);
data->awoken++;
data->condition.notify_one();
// Test predicate timed_wait.
xt = pt::seconds(10);
cond_predicate pred(data->notified, 4);
BOOST_CHECK(data->condition.timed_wait(lock, xt, pred));
BOOST_CHECK(lock ? true : false);
BOOST_CHECK(pred());
BOOST_CHECK_EQUAL(data->notified, 4);
data->awoken++;
data->condition.notify_one();
// Test predicate timed_wait with relative timeout
cond_predicate pred_rel(data->notified, 5);
BOOST_CHECK(data->condition.timed_wait(lock, boost::posix_time::seconds(10), pred_rel));
BOOST_CHECK(lock ? true : false);
BOOST_CHECK(pred_rel());
BOOST_CHECK_EQUAL(data->notified, 5);
data->awoken++;
data->condition.notify_one();
}
void do_test_condition_waits()
{
condition_test_data data;
boost::thread thread(bind(&condition_test_waits, &data));
{
tsk::spin::mutex::scoped_lock lock(data.mutex);
BOOST_CHECK(lock ? true : false);
boost::this_thread::sleep(pt::seconds(1));
data.notified++;
data.condition.notify_one();
while (data.awoken != 1)
data.condition.wait(lock);
BOOST_CHECK(lock ? true : false);
BOOST_CHECK_EQUAL(data.awoken, 1);
boost::this_thread::sleep(pt::seconds(1));
data.notified++;
data.condition.notify_one();
while (data.awoken != 2)
data.condition.wait(lock);
BOOST_CHECK(lock ? true : false);
BOOST_CHECK_EQUAL(data.awoken, 2);
boost::this_thread::sleep(pt::seconds(1));
data.notified++;
data.condition.notify_one();
while (data.awoken != 3)
data.condition.wait(lock);
BOOST_CHECK(lock ? true : false);
BOOST_CHECK_EQUAL(data.awoken, 3);
boost::this_thread::sleep(pt::seconds(1));
data.notified++;
data.condition.notify_one();
while (data.awoken != 4)
data.condition.wait(lock);
BOOST_CHECK(lock ? true : false);
BOOST_CHECK_EQUAL(data.awoken, 4);
boost::this_thread::sleep(pt::seconds(1));
data.notified++;
data.condition.notify_one();
while (data.awoken != 5)
data.condition.wait(lock);
BOOST_CHECK(lock ? true : false);
BOOST_CHECK_EQUAL(data.awoken, 5);
}
thread.join();
BOOST_CHECK_EQUAL(data.awoken, 5);
}
void test_condition_waits()
{
// We should have already tested notify_one here, so
// a timed test with the default execution_monitor::use_condition
// should be OK, and gives the fastest performance
timed_test(&do_test_condition_waits, 12);
}
void do_test_condition_wait_is_a_interruption_point()
{
condition_test_data data;
boost::thread thread(bind(&condition_test_thread, &data));
thread.interrupt();
thread.join();
BOOST_CHECK_EQUAL(data.awoken,0);
}
void test_condition_wait_is_a_interruption_point()
{
timed_test(&do_test_condition_wait_is_a_interruption_point, 1);
}
boost::unit_test_framework::test_suite* init_unit_test_suite(int, char*[])
{
boost::unit_test_framework::test_suite* test =
BOOST_TEST_SUITE("Boost.Task: condition test suite");
test->add(BOOST_TEST_CASE(&test_condition_waits));
test->add(BOOST_TEST_CASE(&test_condition_wait_is_a_interruption_point));
return test;
}<|fim▁end|> | int _val; |
<|file_name|>fields.js<|end_file_name|><|fim▁begin|>var test = require('tap').test;
var CronExpression = require('../lib/expression');
test('Fields are exposed', function(t){
try {
var interval = CronExpression.parse('0 1 2 3 * 1-3,5');
t.ok(interval, 'Interval parsed');
CronExpression.map.forEach(function(field) {
interval.fields[field] = [];
t.throws(function() {
interval.fields[field].push(-1);
}, /Cannot add property .*?, object is not extensible/, field + ' is frozen');
delete interval.fields[field];
});
interval.fields.dummy = [];
t.same(interval.fields.dummy, undefined, 'Fields is frozen');
t.same(interval.fields.second, [0], 'Second matches');
t.same(interval.fields.minute, [1], 'Minute matches');
t.same(interval.fields.hour, [2], 'Hour matches');
t.same(interval.fields.dayOfMonth, [3], 'Day of month matches');
t.same(interval.fields.month, [1,2,3,4,5,6,7,8,9,10,11,12], 'Month matches');
t.same(interval.fields.dayOfWeek, [1,2,3,5], 'Day of week matches');
} catch (err) {
t.error(err, 'Interval parse error');
}
t.end();<|fim▁hole|><|fim▁end|> | }); |
<|file_name|>competency_outcomes.js<|end_file_name|><|fim▁begin|>// This file is part of Moodle - http://moodle.org/
//
// Moodle is free software: you can redistribute it and/or modify
// it under the terms of the GNU General Public License as published by
// the Free Software Foundation, either version 3 of the License, or
// (at your option) any later version.
//
// Moodle is distributed in the hope that it will be useful,
// but WITHOUT ANY WARRANTY; without even the implied warranty of
// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
// GNU General Public License for more details.
//
// You should have received a copy of the GNU General Public License
// along with Moodle. If not, see <http://www.gnu.org/licenses/>.
/**
* Competency rule config.
*
* @package tool_lp
* @copyright 2015 Frédéric Massart - FMCorz.net
* @license http://www.gnu.org/copyleft/gpl.html GNU GPL v3 or later
*/
define(['jquery',
'core/str'],
function($, Str) {
var OUTCOME_NONE = 0,
OUTCOME_EVIDENCE = 1,
OUTCOME_COMPLETE = 2,
OUTCOME_RECOMMEND = 3;
return /** @alias module:tool_lp/competency_outcomes */ {
NONE: OUTCOME_NONE,
EVIDENCE: OUTCOME_EVIDENCE,
COMPLETE: OUTCOME_COMPLETE,
RECOMMEND: OUTCOME_RECOMMEND,
/**
* Get all the outcomes.
*
* @return {Object} Indexed by outcome code, contains code and name.
* @method getAll
*/
getAll: function() {
var self = this;
return Str.get_strings([
{ key: 'competencyoutcome_none', component: 'tool_lp' },
{ key: 'competencyoutcome_evidence', component: 'tool_lp' },
{ key: 'competencyoutcome_recommend', component: 'tool_lp' },
{ key: 'competencyoutcome_complete', component: 'tool_lp' },
]).then(function(strings) {
var outcomes = {};
outcomes[self.NONE] = { code: self.NONE, name: strings[0] };
outcomes[self.EVIDENCE] = { code: self.EVIDENCE, name: strings[1] };
outcomes[self.RECOMMEND] = { code: self.RECOMMEND, name: strings[2] };
outcomes[self.COMPLETE] = { code: self.COMPLETE, name: strings[3] };
return outcomes;
});
},
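// Hypothetical usage: getAll().then(function(outcomes) { ... }) resolves with
// an object indexed by outcome code, e.g. outcomes[OUTCOME_COMPLETE].name.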
/**
* Get the string for an outcome.
*
* @param {Number} id The outcome code.
* @return {Promise} Resolved with the string.
* @method getString
*/
getString: function(id) {
var self = this,
all = self.getAll();
return all.then(function(outcomes) {
if (typeof outcomes[id] === 'undefined') {
return $.Deferred().reject().promise();
}
return outcomes[id].name;
});
}
};<|fim▁hole|><|fim▁end|> |
}); |
<|file_name|>metric_space_search.hpp<|end_file_name|><|fim▁begin|>/**
* \file metric_space_search.hpp
*
* This library provides a class that implements a Dynamic Vantage-Point Tree (DVP-Tree) that
* allows for O(logN) time nearest-neighbor queries in a metric-space. A DVP-tree is essentially
* a generalization of a search tree which only requires the space to have a metric which
* respects the triangular inequality.
*
* \author Sven Mikael Persson <[email protected]>
* \date April 2011
*/
/*
* Copyright 2011 Sven Mikael Persson
*
* THIS SOFTWARE IS DISTRIBUTED UNDER THE TERMS OF THE GNU GENERAL PUBLIC LICENSE v3 (GPLv3).
*
* This file is part of ReaK.
*
* ReaK is free software: you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
* the Free Software Foundation, either version 3 of the License, or
* (at your option) any later version.
*
* ReaK is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
* You should have received a copy of the GNU General Public License
* along with ReaK (as LICENSE in the root folder).
* If not, see <http://www.gnu.org/licenses/>.
*/
#ifndef REAK_METRIC_SPACE_SEARCH_HPP
#define REAK_METRIC_SPACE_SEARCH_HPP
#include <boost/bind.hpp>
#include <boost/lambda/lambda.hpp>
#include <boost/graph/graph_concepts.hpp>
#include <boost/property_map/property_map.hpp>
#include <boost/graph/adjacency_list.hpp>
#include <boost/graph/topology.hpp>
#include <boost/graph/properties.hpp>
#include <map>
#include <vector>
#include "metric_space_concept.hpp"
namespace boost {
enum vertex_mu_distance_t { vertex_mu_distance };
BOOST_INSTALL_PROPERTY(vertex, mu_distance);
};
/**
* This class template is a callable class that can be used to choose the best
* vantage-point to use out of a set of points. In theory, the best vantage-point
* is the one which deviates the most from the other points in the set, however,
* this functor will approximately select that point by searching for it only
* in a random subset of the given range of points.
* \tparam RandomNumberGenerator The random number generator type to be used to obtain the randomness needed in the search.
*/
template <typename RandomNumberGenerator = boost::minstd_rand>
class random_best_vp_chooser {
private:
unsigned int m_divider;
RandomNumberGenerator m_rand;
public:
/**
* Default construction.
* \param aDivider The divider of the set (determines the fraction of the points to search), default is 10.
*/
random_best_vp_chooser(unsigned int aDivider = 10) : m_divider(aDivider), m_rand(std::time(0)) { };
/**
* Parametrized construction.
* \param aRand The random number generator to use.
* \param aDivider The divider of the set (determines the fraction of the points to search), default is 10.
*/
random_best_vp_chooser(const RandomNumberGenerator& aRand, unsigned int aDivider = 10) : m_divider(aDivider), m_rand(aRand) { };
/**
* This call-operator will choose a vantage-point from within the given range.
* \tparam RandomAccessIter A random-access iterator type that can describe the point-range.
* \tparam Topology The topology type on which the points can reside, should model the MetricSpaceConcept.
* \tparam PositionMap The property-map type that can map the vertex descriptors (which should be the value-type of the iterators) to a point (position).
* \param aBegin The start of the range of vertices.
* \param aEnd The end of the range of vertices (one element past the end).
* \param aSpace The topology on which the points reside.
* \param aPosition The property-map used to obtain the positions from the vertices.
* \return A random-access iterator to the chosen vantage-point.
*/
template <typename RandomAccessIter, typename Topology, typename PositionMap>
RandomAccessIter operator() (RandomAccessIter aBegin, RandomAccessIter aEnd, const Topology& aSpace, PositionMap aPosition)
{
// BOOST_CONCEPT_ASSERT((MetricSpaceConcept<Topology>)); // not necessary - just need distance() function
typedef typename metric_topology_traits<Topology>::point_type Point;
RandomAccessIter best_pt = aEnd;
double best_dev = -1;
//std::cout << "Number of loops: " << (aEnd - aBegin) / m_divider + 1 << std::endl;
int numLoops = (aEnd - aBegin) / m_divider + 1;
for(int i = 0; i < numLoops; ++i) {
std::cout << i << " out of " << numLoops << std::endl;
RandomAccessIter current_pt = aBegin + (m_rand() % (aEnd - aBegin));
double current_mean = 0.0;
double current_dev = 0.0;
Point current_vp = get(aPosition, *current_pt);
for(unsigned int j=0; aBegin + j != aEnd; ++j) {
double dist = aSpace.distance(current_vp, get(aPosition, *(aBegin + j)));
current_mean = (current_mean * j + dist) / (j + 1);
current_dev = (current_dev * j + dist * dist) / (j + 1);
};
double current_var = current_dev - current_mean * current_mean;
if(current_var < 0) current_var = 0.0;
current_dev = std::sqrt(current_var);
if(current_dev > best_dev) {
best_pt = current_pt;
best_dev = current_dev;
};
};
return best_pt;
};
};
/**
* This class implements a Dynamic Vantage-Point Tree (DVP-Tree) that
* allows for O(logN) time nearest-neighbor queries in a metric-space. A DVP-tree is essentially
* a generalization of a search tree which only requires the space to have a metric which
* respects the triangular inequality.
* \tparam Key The key type for the tree, essentially the key value is the vertex descriptor type.
* \tparam Topology The topology type on which the points can reside, should model the MetricSpaceConcept.
* \tparam PositionMap The property-map type that can map the vertex descriptors (which should be the value-type of the iterators) to a point (position).
* \tparam Arity The arity of the tree, e.g., 2 means a binary-tree.
* \tparam VPChooser The functor type to use to choose the vantage-point out of a set of vertices.
*/
template <typename Key,
typename Topology,
typename PositionMap,
unsigned int Arity = 2,
typename VPChooser = random_best_vp_chooser<> >
class dvp_tree
{
public:
//BOOST_CONCEPT_ASSERT((MetricSpaceConcept<Topology>)); // This actually is not necessary, all that is required is a distance() function.
typedef typename metric_topology_traits<Topology>::point_type point_type;
typedef typename metric_topology_traits<Topology>::point_difference_type point_difference_type;
typedef double distance_type;
private:
typedef boost::property< boost::vertex_index_t, Key,
boost::property< boost::vertex_mu_distance_t, distance_type, boost::no_property > > vertex_properties;
typedef boost::no_property edge_properties;
typedef boost::adjacency_list< boost::listS, boost::listS, boost::bidirectionalS,
vertex_properties,
edge_properties,
boost::listS> tree_indexer;
typedef boost::adjacency_list_traits<boost::listS,boost::listS,boost::bidirectionalS,boost::listS>::vertex_descriptor vertex_type;
typedef boost::adjacency_list_traits<boost::listS,boost::listS,boost::bidirectionalS,boost::listS>::edge_descriptor edge_type;
typedef typename boost::graph_traits<tree_indexer>::out_edge_iterator out_edge_iter;
typedef typename boost::graph_traits<tree_indexer>::in_edge_iterator in_edge_iter;
tree_indexer m_tree;
vertex_type m_root;
typename boost::property_map< tree_indexer, boost::vertex_index_t >::type m_key;
typename boost::property_map< tree_indexer, boost::vertex_mu_distance_t >::type m_mu;
const Topology& m_space;
PositionMap m_position;
VPChooser m_vp_chooser;
//non-copyable.
dvp_tree(const dvp_tree<Key,Topology,PositionMap,Arity,VPChooser>&);
dvp_tree<Key,Topology,PositionMap,Arity,VPChooser>& operator=(const dvp_tree<Key,Topology,PositionMap,Arity,VPChooser>&);
static bool closer(std::map<Key,distance_type>& m, const Key& k1, const Key& k2) {
return m[k1] < m[k2];
};
void construct_node(vertex_type aNode, typename std::vector<Key>::iterator aBegin, typename std::vector<Key>::iterator aEnd, std::map<Key,distance_type>& aDistMap) {
typedef typename std::vector<Key>::iterator KeyIter;
KeyIter vp_ind = m_vp_chooser(aBegin, aEnd, m_space, m_position);
point_type vp_pt = get(m_position, *vp_ind);
for(KeyIter it = aBegin; it != aEnd; ++it) {
aDistMap[*it] = m_space.distance(vp_pt, get(m_position, *it));
};
std::sort(aBegin,aEnd,boost::bind(closer,aDistMap,_1,_2));
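//Note: the keys are now ordered by distance to the chosen vantage-point, so
//the first key (the vantage-point itself, at distance 0) becomes this node's
//key and the rest are split among the Arity children, each tagged with its
//partition boundary distance (mu).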
put(m_key, aNode, *aBegin);
aBegin++;
if((aEnd - aBegin) < static_cast<int>(Arity)) {
for(KeyIter it = aBegin; it != aEnd; ++it) {
vertex_type k = boost::add_vertex(m_tree);
put(m_key, k, *it);
put(m_mu, k, aDistMap[*it]);
boost::add_edge(aNode,k,m_tree);
};
} else {
for(unsigned int i=Arity;i>=1;--i) {
vertex_type k = boost::add_vertex(m_tree);
boost::add_edge(aNode,k,m_tree);
unsigned int num_children = (aEnd - aBegin) / i;
put(m_mu, k, aDistMap[*(aBegin + (num_children-1))]);
construct_node(k,aBegin,aBegin + num_children,aDistMap);
aBegin = aBegin + num_children;
};
};
};
void find_nearest_impl(const point_type& aPoint, distance_type& aSigma, vertex_type aNode, std::multimap<distance_type, Key>& aList, std::size_t K) const {
using namespace boost;
typedef typename std::multimap<distance_type, Key>::value_type ListType;
Key current_key = get(m_key, aNode);
distance_type current_dist = m_space.distance(aPoint, get(m_position, current_key));
if(current_dist < aSigma) { //is the vantage point within current search bound? Yes...
aList.insert(ListType(current_dist, current_key)); //then add the vantage point to the NN list.
if(aList.size() > K) { //are there too many nearest neighbors? Yes...
aList.erase((++aList.rbegin()).base()); //delete last element to keep aList with K elements
aSigma = aList.rbegin()->first; //distance of the last element is now the search bound aSigma.
};
};
out_edge_iter ei,ei_end;
//first, locate the partition in which aPoint is:
if(out_degree(aNode,m_tree) == 0)
return;
for(tie(ei,ei_end) = out_edges(aNode,m_tree); ei != ei_end; ++ei) {
if(current_dist < get(m_mu, target(*ei,m_tree)))
break;
};
if(ei == ei_end) --ei; //back-track if the end was reached.
find_nearest_impl(aPoint,aSigma,target(*ei,m_tree),aList,K); //search in the most likely node.
out_edge_iter ei_left = ei;
out_edge_iter ei_right = ei; ++ei_right;
tie(ei,ei_end) = out_edges(aNode,m_tree); //find the bounds again (start and end).
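//Expand outward from the most likely child: keep visiting left and right
//siblings while their mu boundaries still overlap the search ball of radius
//aSigma around aPoint, and stop on each side once they no longer do.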
while(true) {
if(ei_left == ei) {
out_edge_iter ei_rightleft = ei_right; --ei_rightleft;
while((ei_right != ei_end) && (get(m_mu,target(*ei_rightleft,m_tree)) < current_dist + aSigma)) {
find_nearest_impl(aPoint,aSigma,target(*ei_right,m_tree),aList,K);
++ei_rightleft; ++ei_right;
};
break;
} else if(ei_right == ei_end) {
out_edge_iter ei_leftleft = ei_left;
while((ei_left != ei) && (get(m_mu,target(*(--ei_leftleft),m_tree)) > current_dist - aSigma)) {
find_nearest_impl(aPoint,aSigma,target(*ei_leftleft,m_tree),aList,K);
--ei_left;
};
break;
} else {
out_edge_iter ei_leftleft = ei_left; --ei_leftleft;
distance_type d1 = get(m_mu,target(*ei_leftleft,m_tree)) + aSigma - current_dist; //greater than 0 if ei_leftleft should be searched.
out_edge_iter ei_rightleft = ei_right; --ei_rightleft;
distance_type d2 = get(m_mu,target(*ei_rightleft,m_tree)) - aSigma - current_dist; //less than 0 if ei_right should be searched.
if(d1 + d2 > 0) { //this means that ei_leftleft's boundary is closer to aPoint.
find_nearest_impl(aPoint,aSigma,target(*ei_leftleft,m_tree),aList,K);
ei_left = ei_leftleft;
} else {
find_nearest_impl(aPoint,aSigma,target(*ei_right,m_tree),aList,K);
++ei_right;
};
};
};
};
vertex_type get_leaf(const point_type& aPoint, vertex_type aNode) {
using namespace boost;
Key current_key = get(m_key, aNode);
distance_type current_dist = m_space.distance(aPoint, get(m_position, current_key));
out_edge_iter ei,ei_end;
//first, locate the partition in which aPoint is:
if(out_degree(aNode,m_tree) == 0)
return aNode;
vertex_type result = aNode;
for(tie(ei,ei_end) = out_edges(aNode,m_tree); ei != ei_end; ++ei) {
result = target(*ei,m_tree);
if(current_dist < get(m_mu, result))
break;
};
return get_leaf(aPoint,result);
};
vertex_type get_key(Key aVertex, const point_type& aPoint, vertex_type aNode) {
using namespace boost;
Key current_key = get(m_key, aNode);
if(current_key == aVertex) return aNode;
distance_type current_dist = m_space.distance(aPoint, get(m_position, current_key));
out_edge_iter ei,ei_end;
//first, locate the partition in which aPoint is:
if(out_degree(aNode,m_tree) == 0)
return vertex_type();<|fim▁hole|> for(tie(ei,ei_end) = out_edges(aNode,m_tree); ei != ei_end; ++ei) {
result = target(*ei,m_tree);
if(current_dist < get(m_mu, result))
break;
};
return get_key(aVertex,aPoint,result);
};
void update_mu_upwards(const point_type& aPoint, vertex_type aNode) {
using namespace boost;
if(aNode == m_root) return;
vertex_type parent = source(*(in_edges(aNode,m_tree).first),m_tree);
distance_type dist = m_space.distance(aPoint,get(m_position,get(m_key,parent)));
if(dist > get(m_mu,aNode))
put(m_mu,aNode,dist);
update_mu_upwards(aPoint,parent);
};
bool is_leaf_node(vertex_type aNode) {
using namespace boost;
if(out_degree(aNode,m_tree) == 0) return true;
out_edge_iter ei,ei_end;
for(tie(ei,ei_end) = out_edges(aNode,m_tree); ei != ei_end; ++ei) {
if(out_degree(target(*ei,m_tree),m_tree) != 0)
return false;
};
return true;
};
bool is_node_full(vertex_type aNode, int& depth_limit) {
using namespace boost;
if(depth_limit < 0)
return false;
if((out_degree(aNode,m_tree) == 0) && (depth_limit == 0))
return true;
--depth_limit;
if((out_degree(aNode,m_tree) == 0) || (out_degree(aNode,m_tree) < Arity))
return false;
out_edge_iter ei,ei_end;
if(is_leaf_node(aNode)) {
if(depth_limit == 0)
return true;
else
return false;
};
for(tie(ei,ei_end) = out_edges(aNode,m_tree); ei != ei_end; ++ei) {
int new_depth_limit = depth_limit;
if(!is_node_full(target(*ei,m_tree),new_depth_limit)) {
depth_limit = new_depth_limit;
return false;
};
};
return true;
};
void collect_keys(std::vector<Key>& aList, vertex_type aNode) {
using namespace boost;
aList.push_back(get(m_key, aNode));
out_edge_iter ei,ei_end;
for(tie(ei,ei_end) = out_edges(aNode,m_tree); ei != ei_end; ++ei)
collect_keys(aList,target(*ei,m_tree));
};
template <typename Predicate>
void collect_keys(std::vector<Key>& aList, vertex_type aNode, Predicate aShouldAdd) {
using namespace boost;
Key k = get(m_key, aNode);
if(aShouldAdd(k))
aList.push_back(k);
out_edge_iter ei,ei_end;
for(tie(ei,ei_end) = out_edges(aNode,m_tree); ei != ei_end; ++ei)
collect_keys(aList,target(*ei,m_tree));
};
void clear_node(vertex_type aNode) {
using namespace boost;
if(out_degree(aNode,m_tree) == 0) return;
out_edge_iter ei,ei_end;
std::vector<vertex_type> children;
children.reserve(out_degree(aNode,m_tree));
for(tie(ei,ei_end) = out_edges(aNode,m_tree); ei != ei_end; ++ei) {
vertex_type v = target(*ei,m_tree);
clear_node(v);
children.push_back(v);
};
for(std::vector<vertex_type>::iterator it = children.begin(); it != children.end(); ++it) {
remove_edge(aNode, *it,m_tree);
remove_vertex(*it,m_tree);
};
};
public:
/**
* Construct the DVP-tree from a graph, topology and property-map.
* \tparam Graph The graph type on which the vertices are taken from, should model the boost::VertexListGraphConcept.
* \param g The graph from which to take the vertices.
* \param aSpace The topology on which the positions of the vertices reside.
* \param aPosition The property-map that can be used to obtain the positions of the vertices.
* \param aVPChooser The vantage-point chooser functor (policy class).
*/
template <typename Graph>
dvp_tree(const Graph& g,
const Topology& aSpace,
PositionMap aPosition,
VPChooser aVPChooser = VPChooser()) :
m_tree(), m_root(),
m_key(boost::get(boost::vertex_index,m_tree)),
m_mu(boost::get(boost::vertex_mu_distance,m_tree)),
m_space(aSpace), m_position(aPosition), m_vp_chooser(aVPChooser) {
using namespace boost;
if(num_vertices(g) == 0) return;
m_root = add_vertex(m_tree);
typename graph_traits<Graph>::vertex_iterator vi,vi_end;
tie(vi,vi_end) = vertices(g);
std::vector<Key> v(vi,vi_end); //Copy the list of vertices to random access memory.
std::map<Key,distance_type> dist_map;
construct_node(m_root, v.begin(), v.end(), dist_map);
};
/**
* Construct the DVP-tree from a range, topology and property-map.
* \tparam ForwardIterator The forward-iterator type from which the vertices can be obtained.
* \param aBegin The start of the range from which to take the vertices.
* \param aEnd The end of the range from which to take the vertices (one-past-last).
* \param aSpace The topology on which the positions of the vertices reside.
* \param aPosition The property-map that can be used to obtain the positions of the vertices.
* \param aVPChooser The vantage-point chooser functor (policy class).
*/
template <typename ForwardIterator>
dvp_tree(ForwardIterator aBegin,
ForwardIterator aEnd,
const Topology& aSpace,
PositionMap aPosition,
VPChooser aVPChooser = VPChooser()) :
m_tree(), m_root(),
m_key(boost::get(boost::vertex_index,m_tree)),
m_mu(boost::get(boost::vertex_mu_distance,m_tree)),
m_space(aSpace), m_position(aPosition), m_vp_chooser(aVPChooser) {
using namespace boost;
if(aBegin == aEnd) return;
m_root = add_vertex(m_tree);
std::vector<Key> v(aBegin,aEnd); //Copy the list of vertices to random access memory.
std::map<Key,distance_type> dist_map;
construct_node(m_root, v.begin(), v.end(), dist_map);
};
/**
* Checks if the DVP-tree is empty.
* \return True if the DVP-tree is empty.
*/
bool empty() const { return (boost::num_vertices(m_tree) == 0); };
/**
* Returns the size of the DVP-tree (the number of vertices it contains).
* \return The size of the DVP-tree (the number of vertices it contains).
*/
std::size_t size() const { return boost::num_vertices(m_tree); };
/**
* Inserts a key-value (vertex).
* \param u The vertex to be added to the DVP-tree.
*/
void insert(Key u) {
using namespace boost;
if(num_vertices(m_tree) == 0) {
m_root = add_vertex(m_tree);
put(m_mu,m_root,0.0);
put(m_key,m_root,u);
return;
};
point_type u_pt = get(m_position, u);
vertex_type u_realleaf = get_leaf(u_pt,m_root);
if(u_realleaf == m_root) { //if the root is the leaf, it requires special attention since no parent exists.
vertex_type u_node = add_vertex(m_tree);
add_edge(u_realleaf,u_node,m_tree);
put(m_key, u_node, u);
put(m_mu, u_node, m_space.distance(u_pt,get(m_position,get(m_key,u_realleaf))));
update_mu_upwards(u_pt,u_realleaf);
return;
};
vertex_type u_leaf = source(*(in_edges(u_realleaf,m_tree).first),m_tree);
if((out_degree(u_leaf,m_tree) < Arity) || (!is_leaf_node(u_leaf))) {
// leaf node is not full of children, an additional child can be added
// (must be reconstructed to keep ordering, but this is a trivial operation O(Arity)).
//OR
// if leaf is not really a leaf, then it means that this sub-tree is definitely not balanced and not full either,
// then all the Keys ought to be collected and u_leaf ought to be reconstructed.
std::vector<Key> key_list;
collect_keys(key_list,u_leaf);
key_list.push_back(u);
clear_node(u_leaf);
std::map<Key,distance_type> dist_map;
construct_node(u_leaf, key_list.begin(), key_list.end(), dist_map);
update_mu_upwards(u_pt,u_leaf);
} else {
//if it is a full-leaf, then this is a leaf node, and it is balanced but full,
// we should then find a non-full parent.
vertex_type p = u_leaf;
int actual_depth_limit = 1;
int last_depth_limit = actual_depth_limit;
while((p != m_root) && (is_node_full(p,last_depth_limit))) {
p = source(*(in_edges(p,m_tree).first),m_tree);
last_depth_limit = ++actual_depth_limit;
};
bool is_p_full = false;
if(p == m_root)
is_p_full = is_node_full(p,last_depth_limit);
if((!is_p_full) && (last_depth_limit >= 0)) {
//this means that we can add our key to the sub-tree of p and reconstruct from there.
std::vector<Key> key_list;
collect_keys(key_list,p);
key_list.push_back(u);
clear_node(p);
std::map<Key,distance_type> dist_map;
construct_node(p, key_list.begin(), key_list.end(), dist_map);
update_mu_upwards(u_pt,p);
} else {
//this means that either the root node is full or there are branches of the tree that are deeper than u_realleaf,
// and thus, in either case, u_realleaf should be expanded.
p = add_vertex(m_tree);
add_edge(u_realleaf,p,m_tree);
put(m_key, p, u);
put(m_mu, p, m_space.distance(u_pt,get(m_position,get(m_key,u_realleaf))));
update_mu_upwards(u_pt,u_realleaf);
};
};
};
/**
* Inserts a range of key-values (vertices).
* \tparam ForwardIterator A forward-iterator type that can be used to obtain the vertices.
* \param aBegin The start of the range from which to take the vertices.
* \param aEnd The end of the range from which to take the vertices (one-past-last).
*/
template <typename ForwardIterator>
void insert(ForwardIterator aBegin, ForwardIterator aEnd) {
if(boost::num_vertices(m_tree) == 0) return;
std::for_each(aBegin,aEnd,boost::bind(&dvp_tree<Key,Topology,PositionMap,Arity,VPChooser>::insert_and_find_nearest,this,_1));
//TODO: There's got to be a better way to insert many elements (most likely a similar strategy to the erase multiple function).
};
/**
* Erases the given vertex from the DVP-tree.
* \param u The vertex to be removed from the DVP-tree.
*/
void erase(Key u) {
using namespace boost;
if(num_vertices(m_tree) == 0) return;
point_type u_pt = get(m_position, u);
vertex_type u_node = get_key(u, u_pt, m_root);
if(u_node == vertex_type()) return;
out_edge_iter ei,ei_end;
std::vector<Key> key_list;
for(tie(ei,ei_end) = out_edges(u_node,m_tree); ei != ei_end; ++ei)
collect_keys(key_list,target(*ei,m_tree));
clear_node(u_node);
std::map<Key,distance_type> dist_map;
construct_node(u_node, key_list.begin(), key_list.end(), dist_map);
};
/**
* Erases the given vertex-range from the DVP-tree.
* \tparam ForwardIterator A forward-iterator type that can be used to obtain the vertices.
* \param aBegin The start of the range from which to take the vertices to be erased.
* \param aEnd The end of the range from which to take the vertices to be erased (one-past-last).
*/
template <typename ForwardIterator>
void erase(ForwardIterator aBegin, ForwardIterator aEnd) {
using namespace boost;
if(num_vertices(m_tree) == 0) return;
typedef std::list< std::pair< vertex_type,std::vector<Key> > > key_listing;
key_listing key_lists; //will hold a list of unique nodes and all their non-erased keys.
for(;aBegin != aEnd;++aBegin) {
point_type u_pt = get(m_position, *aBegin);
vertex_type u_node = get_key(*aBegin, u_pt, m_root);
if(u_node == vertex_type()) continue;
key_lists.push_back( std::make_pair(u_node, std::vector<Key>()) );
for(typename key_listing::iterator it = key_lists.begin(); ((it != key_lists.end()) && (it->first != u_node)); ++it) {
Key v = get(m_key,it->first);
if(get_key(v,get(m_position,v),u_node) == it->first) {
//this means that v is a child of u_node, thus, it's key_list should be merged with u_nodes list.
key_lists.back().second.insert(key_lists.back().second.end(),it->second.begin(),it->second.end());
remove_edge(source(*(in_edges(v,m_tree).first),m_tree),v,m_tree);
remove_vertex(v,m_tree);
key_lists.erase(it--);
};
};
out_edge_iter ei,ei_end;
for(tie(ei,ei_end) = out_edges(u_node,m_tree); ei != ei_end; ++ei)
collect_keys(key_lists.back().second,target(*ei,m_tree),bind<bool>(lambda::unlambda(_1 == std::find(aBegin,aEnd,_2)),aEnd,_1));
clear_node(u_node);
};
//at this point, there should be a list of nodes to be reconstructed along with a list of keys that they should contain.
for(typename key_listing::iterator it = key_lists.begin(); it != key_lists.end(); ++it) {
std::map<Key,distance_type> dist_map;
construct_node(it->first,it->second.begin(),it->second.end(),dist_map);
};
};
/**
* Clears the DVP-tree.
*/
void clear() {
m_tree.clear();
m_root = vertex_type();
};
/**
* Finds the nearest neighbor to a given position.
* \param aPoint The position from which to find the nearest-neighbor of.
* \return The vertex in the DVP-tree that is closest to the given point.
*/
Key find_nearest(const point_type& aPoint) const {
using namespace boost;
if(num_vertices(m_tree) == 0) return Key();
std::multimap<distance_type,Key> m;
distance_type sig = std::numeric_limits<distance_type>::infinity();
find_nearest_impl(aPoint,sig,m_root,m,1);
return m.begin()->second;
};
/**
* Finds the K nearest-neighbors to a given position.
* \param aPoint The position from which to find the nearest-neighbors.
* \param aList Stores, as output, a map of all the K nearest-neighbors to aPoint, the map gives the distance and vertex pairs.
* \param K The number of nearest-neighbors.
*/
void find_nearest(const point_type& aPoint, std::multimap<distance_type, Key>& aList, std::size_t K) const {
using namespace boost;
if(num_vertices(m_tree) == 0) return;
aList.clear();
distance_type sig = std::numeric_limits<distance_type>::infinity();
find_nearest_impl(aPoint,sig,m_root,aList,K);
};
/**
* Finds the nearest-neighbors to a given position within a given range (radius).
* \param aPoint The position from which to find the nearest-neighbors.
* \param aList Stores, as output, a map of all the nearest-neighbors to aPoint, the map gives the distance and vertex pairs.
* \param R The maximum distance value for the nearest-neighbors.
*/
void find_in_range(const point_type& aPoint, std::multimap<distance_type, Key>& aList, distance_type R) const {
using namespace boost;
if(num_vertices(m_tree) == 0) return;
find_nearest_impl(aPoint,R,m_root,aList,num_vertices(m_tree));
};
/**
* Takes a vertex, finds its nearest-neighbor and then it adds it to the DVP-tree.
* \param aVertex The vertex to be added to the DVP-tree.
* \return The nearest-neighbor of the given vertex.
*/
Key insert_and_find_nearest(Key aVertex) {
Key result = find_nearest(get(m_position,aVertex));
insert(aVertex);
return result;
};
/**
* Takes a vertex, finds its K nearest-neighbors and then it adds it to the DVP-tree.
* \param aVertex The vertex to be added to the DVP-tree.
* \param aList Stores, as output, a map of all the K nearest-neighbors to aVertex, the map gives the distance and vertex pairs.
* \param K The number of nearest-neighbors.
*/
void insert_and_find_nearest(Key aVertex, std::multimap<distance_type, Key>& aList, std::size_t K) {
find_nearest(get(m_position,aVertex),aList,K);
insert(aVertex);
};
/**
* Takes a vertex, finds its nearest-neighbors within a range and then it adds it to the DVP-tree.
* \param aVertex The vertex to be added to the DVP-tree.
* \param aList Stores, as output, a map of all the nearest-neighbors to aVertex, the map gives the distance and vertex pairs.
* \param R The maximum distance value for the nearest-neighbors.
*/
void insert_and_find_in_range(Key aVertex, std::multimap<distance_type, Key>& aList, distance_type R) {
find_in_range(get(m_position,aVertex),aList,R);
insert(aVertex);
};
};
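/*
 * Minimal usage sketch (hypothetical types; assumes 'g' models
 * boost::VertexListGraphConcept, 'space' models MetricSpaceConcept and
 * 'position' maps vertex descriptors to points):
 *
 *   typedef boost::graph_traits<Graph>::vertex_descriptor Vertex;
 *   dvp_tree<Vertex, Topology, PositionMap> tree(g, space, position);
 *   Vertex nearest = tree.find_nearest(query_point);
 *   std::multimap<double, Vertex> knn;
 *   tree.find_nearest(query_point, knn, 5); //the 5 nearest neighbors
 */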
template <typename Graph, typename DVPTree>
struct multi_dvp_tree_search {
typename std::map<Graph*, DVPTree*> graph_tree_map;
multi_dvp_tree_search() : graph_tree_map() { };
template <typename Topology, typename PositionMap>
typename boost::graph_traits<Graph>::vertex_descriptor operator()(const typename boost::property_traits<PositionMap>::value_type& p,
Graph& g, const Topology& space, PositionMap position) {
typename std::map<Graph*,DVPTree*>::iterator it = graph_tree_map.find(&g);
if((it != graph_tree_map.end()) && (it->second))
return it->second->find_nearest(p);
else
return typename boost::graph_traits<Graph>::vertex_descriptor();
};
};
#endif<|fim▁end|> | vertex_type result = aNode; |
<|file_name|>SimpleCardProtocol.cc<|end_file_name|><|fim▁begin|>#include "main/SimpleCardProtocol.hh"
#include "bridge/BridgeConstants.hh"
#include "bridge/CardShuffle.hh"
#include "bridge/CardType.hh"
#include "bridge/Position.hh"
#include "engine/SimpleCardManager.hh"
#include "main/Commands.hh"
#include "main/PeerCommandSender.hh"
#include "messaging/CardTypeJsonSerializer.hh"
#include "messaging/FunctionMessageHandler.hh"
#include "messaging/JsonSerializer.hh"
#include "messaging/JsonSerializerUtility.hh"
#include "messaging/UuidJsonSerializer.hh"
#include "Logging.hh"
#include <algorithm>
#include <optional>
#include <string>
#include <tuple>
#include <utility>
namespace Bridge {
namespace Main {
using CardVector = std::vector<CardType>;
using Engine::CardManager;
using Messaging::failure;
using Messaging::Identity;
using Messaging::JsonSerializer;
using Messaging::Reply;
using Messaging::success;
class SimpleCardProtocol::Impl :
public Bridge::Observer<CardManager::ShufflingState> {
public:
Impl(
const Uuid& gameUuid,
std::shared_ptr<PeerCommandSender> peerCommandSender);
bool acceptPeer(
const Identity& identity, const PositionVector& positions);
Reply<> deal(const Identity& identity, const CardVector& cards);
const Uuid gameUuid;
const std::shared_ptr<Engine::SimpleCardManager> cardManager {
std::make_shared<Engine::SimpleCardManager>()};
private:
void handleNotify(const CardManager::ShufflingState& state) override;
bool expectingCards {false};
std::optional<Identity> leaderIdentity;
const std::shared_ptr<PeerCommandSender> peerCommandSender;
};
SimpleCardProtocol::Impl::Impl(
const Uuid& gameUuid,
std::shared_ptr<PeerCommandSender> peerCommandSender) :
gameUuid {gameUuid},
peerCommandSender {std::move(peerCommandSender)}
{
}
void SimpleCardProtocol::Impl::handleNotify(
const CardManager::ShufflingState& state)
{
if (state == CardManager::ShufflingState::REQUESTED) {
if (!leaderIdentity) {
log(LogLevel::DEBUG,
"Simple card protocol: Generating deck");
auto cards = generateShuffledDeck();
assert(cardManager);
cardManager->shuffle(cards.begin(), cards.end());
dereference(peerCommandSender).sendCommand(
JsonSerializer {},
DEAL_COMMAND,
std::pair {GAME_COMMAND, gameUuid},
std::pair {CARDS_COMMAND, std::move(cards)});
} else {
log(LogLevel::DEBUG,
"Simple card protocol: Expecting deck");
expectingCards = true;
}
}
}
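// Note on the protocol: acceptPeer() below records a peer as the leader if it
// controls the north position; handleNotify() above then either generates and
// deals the deck itself (no leader recorded) or waits for the leader's deal.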
bool SimpleCardProtocol::Impl::acceptPeer(
const Identity& identity, const PositionVector& positions)
{
if (std::find(positions.begin(), positions.end(), Positions::NORTH) !=
positions.end()) {
leaderIdentity = identity;
}
return true;
}
Reply<> SimpleCardProtocol::Impl::deal(
const Identity& identity, const CardVector& cards)
{
log(LogLevel::DEBUG, "Deal command from %s", identity);
if (expectingCards && leaderIdentity == identity) {
cardManager->shuffle(cards.begin(), cards.end());
expectingCards = false;
return success();
}
return failure();
}
SimpleCardProtocol::SimpleCardProtocol(
const Uuid& gameUuid,
std::shared_ptr<PeerCommandSender> peerCommandSender) :
impl {
std::make_shared<Impl>(gameUuid, std::move(peerCommandSender))}
{
assert(impl->cardManager);
impl->cardManager->subscribe(impl);
}
bool SimpleCardProtocol::handleAcceptPeer(
const Identity& identity, const PositionVector& positions,
const OptionalArgs&)
{
assert(impl);
return impl->acceptPeer(identity, positions);
}
void SimpleCardProtocol::handleInitialize()
{
}<|fim▁hole|>std::shared_ptr<Messaging::MessageHandler>
SimpleCardProtocol::handleGetDealMessageHandler()
{
return Messaging::makeMessageHandler(
*impl, &Impl::deal, JsonSerializer {},
std::tuple {CARDS_COMMAND});
}
SimpleCardProtocol::SocketVector SimpleCardProtocol::handleGetSockets()
{
return {};
}
std::shared_ptr<CardManager> SimpleCardProtocol::handleGetCardManager()
{
assert(impl);
return impl->cardManager;
}
}
}<|fim▁end|> | |
<|file_name|>cookiesetter.rs<|end_file_name|><|fim▁begin|>/*
* This Source Code Form is subject to the
* terms of the Mozilla Public License, v. 2.0
*
* © Gregor Reitzenstein
*/
use iron::prelude::*;
use iron::AfterMiddleware;
use iron::headers::SetCookie;
use iron::typemap::Key;
use cookie::Cookie;
use api::API;
/// This Struct sets Cookies on outgoing Responses as necessary.
/// (i.e. For auth-tokens)
pub struct CookieSetter;
impl CookieSetter
{
pub fn new(_: &API) -> CookieSetter
{
CookieSetter
}
}
<|fim▁hole|> // If the Request contains a CookieReq struct, set the specified Cookie
if req.extensions.contains::<CookieReq>()
{
let cookievalvec: Vec<[String; 2]> = req.extensions.remove::<CookieReq>().unwrap();
// A Cookie is a slice of two Strings: The key and the associated value
let cookies: Vec<Cookie> = cookievalvec.into_iter().map(|x| Cookie::new(x[0].clone(), x[1].clone())).collect();
res.headers.set(SetCookie(cookies));
}
Ok(res)
}
}
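// Hypothetical usage from a handler (names assumed, not part of this crate):
// req.extensions.insert::<CookieReq>(vec![["session".to_string(), token]]);
// after() will then attach the corresponding cookie to the outgoing response.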
// This Struct notifies CookieSetter to set a cookie.
pub struct CookieReq;
// Key needs to be implemented so this Struct can be inserted into req.extensions
impl Key for CookieReq { type Value = Vec<[String; 2]>; }<|fim▁end|> | impl AfterMiddleware for CookieSetter
{
fn after(&self, req: &mut Request, mut res: Response) -> IronResult<Response>
{ |
<|file_name|>sendcoinsdialog.cpp<|end_file_name|><|fim▁begin|>#include "sendcoinsdialog.h"
#include "ui_sendcoinsdialog.h"
#include "walletmodel.h"
#include "bitcoinunits.h"
#include "addressbookpage.h"
#include "optionsmodel.h"
#include "sendcoinsentry.h"
#include "guiutil.h"
#include "askpassphrasedialog.h"
#include "base58.h"
#include <QMessageBox>
#include <QLocale>
#include <QTextDocument>
#include <QScrollBar>
SendCoinsDialog::SendCoinsDialog(QWidget *parent) :
QDialog(parent),
ui(new Ui::SendCoinsDialog),
model(0)
{
ui->setupUi(this);
#ifdef Q_OS_MAC // Icons on push buttons are very uncommon on Mac
ui->addButton->setIcon(QIcon());
ui->clearButton->setIcon(QIcon());
ui->sendButton->setIcon(QIcon());
#endif
addEntry();
connect(ui->addButton, SIGNAL(clicked()), this, SLOT(addEntry()));
connect(ui->clearButton, SIGNAL(clicked()), this, SLOT(clear()));
fNewRecipientAllowed = true;
}
void SendCoinsDialog::setModel(WalletModel *model)
{
this->model = model;
for(int i = 0; i < ui->entries->count(); ++i)
{
SendCoinsEntry *entry = qobject_cast<SendCoinsEntry*>(ui->entries->itemAt(i)->widget());
if(entry)
{
entry->setModel(model);
}
}
if(model && model->getOptionsModel())
{
setBalance(model->getBalance(), model->getStake(), model->getUnconfirmedBalance(), model->getImmatureBalance());
connect(model, SIGNAL(balanceChanged(qint64, qint64, qint64, qint64)), this, SLOT(setBalance(qint64, qint64, qint64, qint64)));
connect(model->getOptionsModel(), SIGNAL(displayUnitChanged(int)), this, SLOT(updateDisplayUnit()));
}
}
SendCoinsDialog::~SendCoinsDialog()
{
delete ui;
}
void SendCoinsDialog::on_sendButton_clicked()
{
QList<SendCoinsRecipient> recipients;
bool valid = true;
if(!model)
return;
for(int i = 0; i < ui->entries->count(); ++i)
{
SendCoinsEntry *entry = qobject_cast<SendCoinsEntry*>(ui->entries->itemAt(i)->widget());
if(entry)
{
if(entry->validate())
{
recipients.append(entry->getValue());
}
else
{
valid = false;
}
}
}
if(!valid || recipients.isEmpty())
{
return;
}
// Format confirmation message
QStringList formatted;
foreach(const SendCoinsRecipient &rcp, recipients)
{
#if QT_VERSION < 0x050000
formatted.append(tr("<b>%1</b> to %2 (%3)").arg(BitcoinUnits::formatWithUnit(BitcoinUnits::BTC, rcp.amount), Qt::escape(rcp.label), rcp.address));
#else
formatted.append(tr("<b>%1</b> to %2 (%3)").arg(BitcoinUnits::formatWithUnit(BitcoinUnits::BTC, rcp.amount), rcp.label.toHtmlEscaped(), rcp.address));
#endif
}
fNewRecipientAllowed = false;
QMessageBox::StandardButton retval = QMessageBox::question(this, tr("Confirm send coins"),
tr("Are you sure you want to send %1?").arg(formatted.join(tr(" and "))),
QMessageBox::Yes|QMessageBox::Cancel,
QMessageBox::Cancel);
if(retval != QMessageBox::Yes)
{
fNewRecipientAllowed = true;
return;
}
WalletModel::UnlockContext ctx(model->requestUnlock());
if(!ctx.isValid())
{
// Unlock wallet was cancelled
fNewRecipientAllowed = true;
return;
}
WalletModel::SendCoinsReturn sendstatus = model->sendCoins(recipients);
switch(sendstatus.status)
{
case WalletModel::InvalidAddress:
QMessageBox::warning(this, tr("Send Coins"),
tr("The recipient address is not valid, please recheck."),
QMessageBox::Ok, QMessageBox::Ok);
break;
case WalletModel::InvalidAmount:
QMessageBox::warning(this, tr("Send Coins"),
tr("The amount to pay must be larger than 0."),
QMessageBox::Ok, QMessageBox::Ok);
break;
case WalletModel::AmountExceedsBalance:
QMessageBox::warning(this, tr("Send Coins"),
tr("The amount exceeds your balance."),
QMessageBox::Ok, QMessageBox::Ok);
break;
case WalletModel::AmountWithFeeExceedsBalance:
QMessageBox::warning(this, tr("Send Coins"),
tr("The total exceeds your balance when the %1 transaction fee is included.").
arg(BitcoinUnits::formatWithUnit(BitcoinUnits::BTC, sendstatus.fee)),
QMessageBox::Ok, QMessageBox::Ok);
break;
case WalletModel::DuplicateAddress:
QMessageBox::warning(this, tr("Send Coins"),
tr("Duplicate address found, can only send to each address once per send operation."),
QMessageBox::Ok, QMessageBox::Ok);
break;
case WalletModel::TransactionCreationFailed:
QMessageBox::warning(this, tr("Send Coins"),
tr("Error: Transaction creation failed."),
QMessageBox::Ok, QMessageBox::Ok);
break;
case WalletModel::TransactionCommitFailed:
QMessageBox::warning(this, tr("Send Coins"),
tr("Error: The transaction was rejected. This might happen if some of the coins in your wallet were already spent, such as if you used a copy of wallet.dat and coins were spent in the copy but not marked as spent here."),
QMessageBox::Ok, QMessageBox::Ok);
break;
case WalletModel::Aborted: // User aborted, nothing to do
break;
case WalletModel::OK:
accept();
break;
}
fNewRecipientAllowed = true;
}
void SendCoinsDialog::clear()
{
// Remove entries until only one left
while(ui->entries->count())
{
delete ui->entries->takeAt(0)->widget();
}
addEntry();
updateRemoveEnabled();
ui->sendButton->setDefault(true);
}
void SendCoinsDialog::reject()
{
clear();
}
void SendCoinsDialog::accept()
{
clear();
}
SendCoinsEntry *SendCoinsDialog::addEntry()
{
SendCoinsEntry *entry = new SendCoinsEntry(this);
entry->setModel(model);
ui->entries->addWidget(entry);
connect(entry, SIGNAL(removeEntry(SendCoinsEntry*)), this, SLOT(removeEntry(SendCoinsEntry*)));
updateRemoveEnabled();
// Focus the field, so that entry can start immediately
entry->clear();
entry->setFocus();
ui->scrollAreaWidgetContents->resize(ui->scrollAreaWidgetContents->sizeHint());
QCoreApplication::instance()->processEvents();
QScrollBar* bar = ui->scrollArea->verticalScrollBar();
if(bar)
bar->setSliderPosition(bar->maximum());
return entry;
}
void SendCoinsDialog::updateRemoveEnabled()
{
// Remove buttons are enabled as soon as there is more than one send-entry
bool enabled = (ui->entries->count() > 1);
for(int i = 0; i < ui->entries->count(); ++i)
{
SendCoinsEntry *entry = qobject_cast<SendCoinsEntry*>(ui->entries->itemAt(i)->widget());
if(entry)
{
entry->setRemoveEnabled(enabled);
}
}
setupTabChain(0);
}
void SendCoinsDialog::removeEntry(SendCoinsEntry* entry)
{
delete entry;
updateRemoveEnabled();
}
QWidget *SendCoinsDialog::setupTabChain(QWidget *prev)
{
for(int i = 0; i < ui->entries->count(); ++i)
{
SendCoinsEntry *entry = qobject_cast<SendCoinsEntry*>(ui->entries->itemAt(i)->widget());
if(entry)
{
prev = entry->setupTabChain(prev);
}
}
QWidget::setTabOrder(prev, ui->addButton);
QWidget::setTabOrder(ui->addButton, ui->sendButton);
return ui->sendButton;
}
void SendCoinsDialog::pasteEntry(const SendCoinsRecipient &rv)
{
if(!fNewRecipientAllowed)
return;
SendCoinsEntry *entry = 0;
// Replace the first entry if it is still unused
if(ui->entries->count() == 1)
{
SendCoinsEntry *first = qobject_cast<SendCoinsEntry*>(ui->entries->itemAt(0)->widget());
if(first->isClear())
{
entry = first;
}
}
if(!entry)
{
entry = addEntry();
}
entry->setValue(rv);
}
bool SendCoinsDialog::handleURI(const QString &uri)
{<|fim▁hole|> SendCoinsRecipient rv;
// URI has to be valid
if (GUIUtil::parseBitcoinURI(uri, &rv))
{
CBitcoinAddress address(rv.address.toStdString());
if (!address.IsValid())
return false;
pasteEntry(rv);
return true;
}
return false;
}
void SendCoinsDialog::setBalance(qint64 balance, qint64 stake, qint64 unconfirmedBalance, qint64 immatureBalance)
{
Q_UNUSED(stake);
Q_UNUSED(unconfirmedBalance);
Q_UNUSED(immatureBalance);
if(!model || !model->getOptionsModel())
return;
int unit = model->getOptionsModel()->getDisplayUnit();
ui->labelBalance->setText(BitcoinUnits::formatWithUnit(unit, balance));
}
void SendCoinsDialog::updateDisplayUnit()
{
if(model && model->getOptionsModel())
{
// Update labelBalance with the current balance and the current unit
ui->labelBalance->setText(BitcoinUnits::formatWithUnit(model->getOptionsModel()->getDisplayUnit(), model->getBalance()));
}
}<|fim▁end|> | |
<|file_name|>level0.py<|end_file_name|><|fim▁begin|>import sys,os
#sys.path.append(os.path.join(os.path.dirname(__file__), '../../..'))
#from ethosgame.ethos.level import Level
from ..level import Level
#from ethosgame.ethos.gameobject import GameObject
from ..gameobject import GameObject
#from ethosgame.ethos.drawnobject import DrawnObject
from ..drawnobject import DrawnObject
import pygame
from pygame.locals import *
from pygame import Color, image, font, sprite
class Level0(Level):
def __init__(self):
super(Level0, self).__init__()
self.activeSprites = sprite.RenderClear()
self.drawnSprites = []
self.npc = GameObject(image.load('User.png'), 100,50)
self.activeSprites.add(self.npc)
self.block1 = GameObject(image.load('platform.png'), 100, 400)
self.activeSprites.add(self.block1);
self.mousex = 0
self.mousey = 0
#The highest height our npc
#can climb. If the dY with a
#point is higher than this, the
#npc will just fall to his death
self.MAX_HILL_HEIGHT = 3
self.toDrawRectTopLeft = (0,0)
self.toDrawRectBottomRight = (0,0)
self.drawing = False
self.pts = []
print "Level 0 initialized."
def update(self, dT):
#print "Running level0"
#Character info
for gobject in self.activeSprites:
if gobject is not self.npc:
if not gobject.rect.colliderect(self.npc.rect):
#if self.npc.vy < 0.3 and (gobject.rect.y >= self.npc.rect.y + self.npc.rect.height):
if self.npc.vy < 0.3:
self.npc.vy += 0.1
else:
self.npc.vy = 0
gobject.update(dT)
collidingPoints = []
for drawnstuff in self.drawnSprites:
for point in drawnstuff.pts:
x = self.npc.rect.collidepoint(point)
if x:
collidingPoints.append(point)
if(len(collidingPoints) > 0):
self.npc.processPointCollision(collidingPoints)
def processKeyDown(self,key):
print "You hit the key " + str(key) + "!"
if key == pygame.K_RIGHT:
self.npc.vx = 0.1<|fim▁hole|> self.mousex = pos[0]
self.mousey = pos[1]
if self.drawing and len(self.pts) < 100:
self.pts.append( pos )
def processMouseButtonDown(self, pos):
print "Ya clicked at " + str(pos[0]) + " " + str(pos[1]) + " ya goof!"
self.drawing = True
self.toDrawRectTopLeft = (pos[0],pos[1])
if len(self.pts) > 0:
self.pts = []
def processMouseButtonUp(self, pos):
print "Ya let go"
if self.drawing is True:
self.drawing = False
self.drawnSprites.append ( DrawnObject(self.pts) )
self.toDrawRectBottomRight = (pos[0], pos[1])<|fim▁end|> |
def processMouseMotion(self,pos):
#print "Your mouse is at " + str(pos[0]) + " " + str(pos[1]) |
<|file_name|>test_classes.py<|end_file_name|><|fim▁begin|>"""
Test the Multinet Class.
"""
import multinet as mn
import networkx as nx
class TestMultinet(object):
def test_build_multinet(self):
"""
Test building Multinet objects.
"""
mg = mn.Multinet()
assert mg.is_directed() == False
mg.add_edge(0, 1, 'L1')
mg.add_edge(0, 1, 'L2')
mg.add_edge(1, 0, 'L2')
mg.add_edge(1, 2, 'L2')
assert 'L1' in mg.layers()
assert 'L2' in mg.layers()
assert len(mg.edgelets) == 3
assert mg.number_of_nodes() == 3
assert mg.number_of_edges() == 2
assert mg.number_of_layers() == 2
assert mg.number_of_edgelets() == 3
# Remove non-existed edge.
mg.remove_edgelet(2, 3, 'L3')
mg.remove_edgelet(0, 1, 'L2')
assert mg.number_of_nodes() == 3
assert mg.number_of_edges() == 2
assert mg.number_of_layers() == 2
assert mg.number_of_edgelets() == 2
mg.remove_edgelet(0, 1, 'L1')
assert mg.number_of_nodes() == 3
assert mg.number_of_edges() == 1
assert mg.number_of_layers() == 2
assert mg.number_of_edgelets() == 1
assert len(mg.empty_layers()) == 1
mg.remove_empty_layers()
assert mg.number_of_layers() == 1
def test_aggregate_edge(self):
mg = mn.Multinet()
mg.add_edge(0, 1, 'L1', weight=5)
mg.add_edge(1, 2, 'L2', weight=6)
assert mg[0][1][mg.cid]['L1'] == 5
assert mg[1][2][mg.cid]['L2'] == 6
mg.add_edge(0, 1, 'L1', weight=10)<|fim▁hole|> assert mg[0][1][mg.cid]['L1'] == 15
mg.aggregate_edge(2, 3, 'L2', weight=7)
assert mg[2][3][mg.cid]['L2'] == 7
def test_sub_layer(self):
mg = mn.Multinet()
mg.add_edge(0, 1, 'L1', weight=5)
mg.add_edge(1, 2, 'L2', weight=6)
sg = mg.sub_layer('L1')
assert type(sg) == nx.Graph
assert sg.number_of_nodes() == 3
assert sg.number_of_edges() == 1
sg = mg.sub_layer('L2', remove_isolates=True)
assert type(sg) == nx.Graph
assert sg.number_of_nodes() == 2
assert sg.number_of_edges() == 1
def test_sub_layers(self):
mg = mn.Multinet()
mg.add_edge(0, 1, 'L1', weight=5)
mg.add_edge(1, 2, 'L2', weight=6)
mg.add_edge(1, 2, 'L3', weight=2)
sg = mg.sub_layers(['L1', 'L2'])
assert type(sg) == mn.Multinet
assert sg.number_of_nodes() == 3
assert sg.number_of_edges() == 2
assert sg.number_of_layers() == 2
sg = mg.sub_layers(['L2', 'L3'], remove_isolates=True)
assert type(sg) == mn.Multinet
assert sg.number_of_nodes() == 2
assert sg.number_of_edges() == 1
assert sg.number_of_layers() == 2
def test_aggregated(self):
mg = mn.Multinet()
mg.add_edge(0, 1, 'L1', weight=5)
mg.add_edge(1, 2, 'L2', weight=6)
mg.add_edge(1, 2, 'L3', weight=2)
ag = mg.aggregated()
assert type(ag) == nx.Graph
assert ag.number_of_nodes() == 3
assert ag.number_of_edges() == 2
assert ag[1][2]['weight'] == 8
assert ag[1][2]['nlayer'] == 2
def test_merge_layers(self):
mg = mn.Multinet()
mg.add_edge(0, 1, 'L1', weight=5)
mg.add_edge(1, 2, 'L2', weight=6)
mg.add_edge(1, 2, 'L3', weight=2)
mg.merge_layers(['L1', 'L2'])
assert 'L1' not in mg.layers()
assert 'L2' not in mg.layers()
assert 'L1_L2' in mg.layers()
assert mg.number_of_layers() == 2
assert mg.number_of_nodes() == 3
assert mg.number_of_edges() == 2
assert mg[0][1][mg.cid]['L1_L2'] == 5
assert mg[1][2][mg.cid]['L1_L2'] == 6
mg = mn.Multinet()
mg.add_edge(0, 1, 'L1', weight=5)
mg.add_edge(1, 2, 'L2', weight=6)
mg.add_edge(1, 2, 'L3', weight=2)
mg.merge_layers(['L2', 'L3'], new_name='LN')
assert 'L2' not in mg.layers()
assert 'L3' not in mg.layers()
assert 'LN' in mg.layers()
assert mg.number_of_layers() == 2
assert mg.number_of_nodes() == 3
assert mg.number_of_edges() == 2
assert mg[0][1][mg.cid]['L1'] == 5
assert mg[1][2][mg.cid]['LN'] == 8
def test_add_layer(self):
mg = mn.Multinet()
mg.add_edge(0, 1, 'L1', weight=5)
mg.add_edge(1, 2, 'L2', weight=6)
sg = nx.Graph()
sg.add_edge(1, 2, weight=7)
sg.add_edge(2, 3)
mg.add_layer(sg, 'L3')
assert mg.number_of_nodes() == 4
assert mg.number_of_edges() == 3
assert mg.number_of_layers() == 3
assert mg[1][2][mg.cid]['L2'] == 6
assert mg[1][2][mg.cid]['L3'] == 7
assert mg[2][3][mg.cid]['L3'] == 1
def test_remove_layer(self):
mg = mn.Multinet()
mg.add_edge(0, 1, 'L1', weight=5)
mg.add_edge(1, 2, 'L2', weight=6)
mg.add_edge(1, 2, 'L3', weight=2)
mg.remove_layer('L3')
assert mg.number_of_nodes() == 3
assert mg.number_of_edges() == 2
assert mg.number_of_layers() == 2
mg = mn.Multinet()
mg.add_edge(0, 1, 'L1', weight=5)
mg.add_edge(1, 2, 'L2', weight=6)
mg.add_edge(1, 2, 'L3', weight=2)
mg.remove_layer('L1')
assert mg.number_of_nodes() == 3
assert mg.number_of_edges() == 1
assert mg.number_of_layers() == 2
class TestDiMultinet(object):
def test_build_dimultinet(self):
"""
Test building Multinet objects.
"""
mg = mn.DiMultinet()
assert mg.is_directed() == True
mg.add_edge(0, 1, 'L1')
mg.add_edge(0, 1, 'L2')
mg.add_edge(1, 0, 'L2')
mg.add_edge(1, 2, 'L2')
assert 'L1' in mg.layers()
assert 'L2' in mg.layers()
assert len(mg.edgelets) == 4
assert mg.number_of_nodes() == 3
assert mg.number_of_edges() == 3
assert mg.number_of_layers() == 2
assert mg.number_of_edgelets() == 4
        # Removing a nonexistent edge is a no-op.
mg.remove_edgelet(2, 3, 'L3')
mg.remove_edgelet(0, 1, 'L2')
assert mg.number_of_nodes() == 3
assert mg.number_of_edges() == 3
assert mg.number_of_layers() == 2
assert mg.number_of_edgelets() == 3
mg.remove_edgelet(0, 1, 'L1')
assert mg.number_of_nodes() == 3
assert mg.number_of_edges() == 2
assert mg.number_of_layers() == 2
assert mg.number_of_edgelets() == 2
assert len(mg.empty_layers()) == 1
mg.remove_empty_layers()
assert mg.number_of_layers() == 1
def test_aggregate_edge(self):
mg = mn.DiMultinet()
mg.add_edge(0, 1, 'L1', weight=5)
mg.add_edge(1, 2, 'L2', weight=6)
assert mg[0][1][mg.cid]['L1'] == 5
assert mg[1][2][mg.cid]['L2'] == 6
mg.add_edge(0, 1, 'L1', weight=10)
assert mg[0][1][mg.cid]['L1'] == 10
mg.aggregate_edge(0, 1, 'L1', weight=5)
assert mg[0][1][mg.cid]['L1'] == 15
mg.aggregate_edge(2, 3, 'L2', weight=7)
assert mg[2][3][mg.cid]['L2'] == 7
def test_sub_layer(self):
mg = mn.DiMultinet()
mg.add_edge(0, 1, 'L1', weight=5)
mg.add_edge(1, 2, 'L2', weight=6)
sg = mg.sub_layer('L1')
assert type(sg) == nx.DiGraph
assert sg.number_of_nodes() == 3
assert sg.number_of_edges() == 1
sg = mg.sub_layer('L2', remove_isolates=True)
assert type(sg) == nx.DiGraph
assert sg.number_of_nodes() == 2
assert sg.number_of_edges() == 1
def test_sub_layers(self):
mg = mn.DiMultinet()
mg.add_edge(0, 1, 'L1', weight=5)
mg.add_edge(1, 2, 'L2', weight=6)
mg.add_edge(1, 2, 'L3', weight=2)
sg = mg.sub_layers(['L1', 'L2'])
assert type(sg) == mn.DiMultinet
assert sg.number_of_nodes() == 3
assert sg.number_of_edges() == 2
assert sg.number_of_layers() == 2
sg = mg.sub_layers(['L2', 'L3'], remove_isolates=True)
assert type(sg) == mn.DiMultinet
assert sg.number_of_nodes() == 2
assert sg.number_of_edges() == 1
assert sg.number_of_layers() == 2
def test_aggregated(self):
mg = mn.DiMultinet()
mg.add_edge(0, 1, 'L1', weight=5)
mg.add_edge(1, 2, 'L2', weight=6)
mg.add_edge(1, 2, 'L3', weight=2)
ag = mg.aggregated()
assert type(ag) == nx.DiGraph
assert ag.number_of_nodes() == 3
assert ag.number_of_edges() == 2
assert ag[1][2]['weight'] == 8
assert ag[1][2]['nlayer'] == 2
def test_merge_layers(self):
mg = mn.DiMultinet()
mg.add_edge(0, 1, 'L1', weight=5)
mg.add_edge(1, 2, 'L2', weight=6)
mg.add_edge(1, 2, 'L3', weight=2)
mg.merge_layers(['L1', 'L2'])
assert 'L1' not in mg.layers()
assert 'L2' not in mg.layers()
assert 'L1_L2' in mg.layers()
assert mg.number_of_layers() == 2
assert mg.number_of_nodes() == 3
assert mg.number_of_edges() == 2
assert mg[0][1][mg.cid]['L1_L2'] == 5
assert mg[1][2][mg.cid]['L1_L2'] == 6
mg = mn.DiMultinet()
mg.add_edge(0, 1, 'L1', weight=5)
mg.add_edge(1, 2, 'L2', weight=6)
mg.add_edge(1, 2, 'L3', weight=2)
mg.merge_layers(['L2', 'L3'], new_name='LN')
assert 'L2' not in mg.layers()
assert 'L3' not in mg.layers()
assert 'LN' in mg.layers()
assert mg.number_of_layers() == 2
assert mg.number_of_nodes() == 3
assert mg.number_of_edges() == 2
assert mg[0][1][mg.cid]['L1'] == 5
assert mg[1][2][mg.cid]['LN'] == 8
def test_add_layer(self):
mg = mn.DiMultinet()
mg.add_edge(0, 1, 'L1', weight=5)
mg.add_edge(1, 2, 'L2', weight=6)
sg = nx.Graph()
sg.add_edge(1, 2, weight=7)
sg.add_edge(2, 3)
mg.add_layer(sg, 'L3')
assert mg.number_of_nodes() == 4
assert mg.number_of_edges() == 3
assert mg.number_of_layers() == 3
assert mg[1][2][mg.cid]['L2'] == 6
assert mg[1][2][mg.cid]['L3'] == 7
assert mg[2][3][mg.cid]['L3'] == 1
def test_remove_layer(self):
mg = mn.DiMultinet()
mg.add_edge(0, 1, 'L1', weight=5)
mg.add_edge(1, 2, 'L2', weight=6)
mg.add_edge(1, 2, 'L3', weight=2)
mg.remove_layer('L3')
assert mg.number_of_nodes() == 3
assert mg.number_of_edges() == 2
assert mg.number_of_layers() == 2
mg = mn.DiMultinet()
mg.add_edge(0, 1, 'L1', weight=5)
mg.add_edge(1, 2, 'L2', weight=6)
mg.add_edge(1, 2, 'L3', weight=2)
mg.remove_layer('L1')
assert mg.number_of_nodes() == 3
assert mg.number_of_edges() == 1
assert mg.number_of_layers() == 2
def test_to_undirected(self):
mg = mn.DiMultinet()
mg.add_edge(0, 1, 'L1', weight=5)
mg.add_edge(1, 2, 'L2', weight=6)
mg.add_edge(2, 1, 'L3', weight=2)
assert mg.number_of_nodes() == 3
assert mg.number_of_edges() == 3
assert mg.number_of_layers() == 3
nmg = mg.to_undirected()
assert nmg.number_of_nodes() == 3
assert nmg.number_of_edges() == 2
assert nmg.number_of_layers() == 3<|fim▁end|> | assert mg[0][1][mg.cid]['L1'] == 10
mg.aggregate_edge(0, 1, 'L1', weight=5) |
<|file_name|>apkhm.py<|end_file_name|><|fim▁begin|>from skimage import measure
import numpy as np
import struct
import math as m
from PIL import Image
from simplify import simplify
import argparse
parser = argparse.ArgumentParser(description='convert apk heightmaps to floating point tiff')
parser.add_argument('file', type=str, help='the apk heightmap file')
args = parser.parse_args()
hdr=b'\x33\x13\x26\xc3\x33\x13\x26\x43\x02\x00\x20\xc1\x33\x13\xa1\x43'
with open(args.file, mode='rb') as file:
raw = file.read()
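    # File layout, as decoded below: 4 bytes are skipped at offset 0x1020,
    # followed by three little-endian uint32s (type tag, width, height); four
    # float32 elevation extents sit at 0x1030; the w*h little-endian float16
    # height samples start at 0x1040.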
print(struct.unpack_from("<4xIII",raw,0x1020))
print(struct.unpack_from("<ffff",raw,0x1030))
t,w,h = struct.unpack_from("<4xIII",raw,0x1020)
e1,e2,e3,e4 = struct.unpack_from("<ffff",raw,0x1030)
dt = np.dtype("half")
dt = dt.newbyteorder('<')
img = np.frombuffer(raw,dtype=dt,offset=0x1040,count=w*h)
print (img.shape)
img = img.reshape((w,h))
<|fim▁hole|> imin = np.amin(img)
imax = np.amax(img)
extents = np.array((e1,e2,e3,e4))
np.savez_compressed(args.file, extents = extents, heightmap=img)
fimg = img.astype(np.float32)
fimg.reshape((w*h,1))
pimg = Image.frombytes('F',(w,h), fimg.tostring(),'raw','F;32NF')
pimg.save(args.file + ".tif")
hmin = e1 * (1-imin) + e2 * imin
hmax = e1 * (1-imax) + e2 * imax
contours = []
hstep = 2.5
nc = m.ceil((hmax-hmin)/hstep)
for i in range(nc):
hgt = imin + i*hstep/(hmax-hmin)
npc = measure.find_contours(img, hgt)
cs = []
for c in npc:
c = simplify(c,5,True)
cs.append(c)
cs = np.array(cs)
contours.append(cs)
np.savez_compressed(args.file+"-contours", *contours)
# mi,ma = float(np.amin(img)),float(np.amax(img))
# print("contour",mi,ma)
# for i in range(50):
# d = float(mi*(1-i/50)+ma*i/50)
# print("contour",d)
# npc = measure.find_contours(img, d)
# for n,c in enumerate(npc):
# contours = [((x[1]-512)/1024*3499.99975586*2,(x[0]-512)/1024*3499.99975586*2) for x in c]
# if norm(c[-1] - c[0]) < 0.01:
# self.canvas.create_polygon(contours,fill="",outline='red',tag="contour")
# else:
# self.canvas.create_line(contours,fill='green',tag="contour")
# except FileNotFoundError:
# print("file not found!")
# return
# try:
# self.img = Image.open(path)
# except:
# try:
# with open(path, mode='rb') as file:
# raw = file.read()
# self.img = Image.frombytes("F",(1024,1024),raw,"raw","F;16")
# print(self.img.getpixel((4,4)))
# f = 1.0 / 2**8
# self.img = self.img.point(lambda x: x * f)
# print(self.img.getpixel((4,4)))
# self.img = self.img.resize((8192,8192))
# self.img = self.img.filter(ImageFilter.CONTOUR)
# except FileNotFoundError:
# print("file not found!")
# return
# self.ix =2*3499.99975586
# f = self.ix/2049.0
# print (f)
# #self.img = self.img.transform((int(self.ix),int(self.ix)),Image.AFFINE,data=(f,0,0,0,f,0))
# self.img = self.img.resize((int(self.ix),int(self.ix)))
# self.simg = self.img
# self.pimg = ImageTk.PhotoImage(self.img)
# self.imgcid = self.canvas.create_image(-2048, -2048, image=self.pimg, anchor=tk.NW)<|fim▁end|> | |
<|file_name|>make_supportedsites.py<|end_file_name|><|fim▁begin|>#!/usr/bin/env python
from __future__ import unicode_literals
import io
import optparse
import os
import sys
# Import youtube_dl
ROOT_DIR = os.path.join(os.path.dirname(__file__), '..')<|fim▁hole|>def main():
parser = optparse.OptionParser(usage='%prog OUTFILE.md')
options, args = parser.parse_args()
if len(args) != 1:
parser.error('Expected an output filename')
outfile, = args
def gen_ies_md(ies):
for ie in ies:
ie_md = '**{0}**'.format(ie.IE_NAME)
ie_desc = getattr(ie, 'IE_DESC', None)
if ie_desc is False:
continue
if ie_desc is not None:
ie_md += ': {0}'.format(ie.IE_DESC)
if not ie.working():
ie_md += ' (Currently broken)'
yield ie_md
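            # Example of a yielded line (sketch): "**youtube**: YouTube.com";
            # extractors that report not working get " (Currently broken)" appended.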
ies = sorted(youtube_dl.gen_extractors(), key=lambda i: i.IE_NAME.lower())
out = '# Supported sites\n' + ''.join(
' - ' + md + '\n'
for md in gen_ies_md(ies))
with io.open(outfile, 'w', encoding='utf-8') as outf:
outf.write(out)
if __name__ == '__main__':
main()<|fim▁end|> | sys.path.insert(0, ROOT_DIR)
import youtube_dl
|
<|file_name|>messages-middleware.js<|end_file_name|><|fim▁begin|>import { LOAD_MORE_MESSAGES, requestMessages, getNextOffset } from '../modules/message';
const loadMoreHandler = ({ store, action }) => {
if (action.type !== LOAD_MORE_MESSAGES) {<|fim▁hole|> return;
}
const { type, key } = action.payload;
const collectionState = store.getState().message.messagesCollections[type][key];
const offset = getNextOffset(collectionState);
const { params = {} } = collectionState.request;
store.dispatch(requestMessages(type, key, { ...params, offset }));
};
export default store => next => (action) => {
const result = next(action);
loadMoreHandler({ store, action });
return result;
};<|fim▁end|> | |
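// Hedged usage sketch (module paths and the root reducer are assumptions):
// this middleware is installed when the Redux store is created, e.g.
//
//   import { createStore, applyMiddleware } from 'redux';
//   import rootReducer from '../modules';
//   import messagesMiddleware from './messages-middleware';
//
//   const store = createStore(rootReducer, applyMiddleware(messagesMiddleware));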
<|file_name|>immutable-types.js<|end_file_name|><|fim▁begin|>/*
* decaffeinate suggestions:
* DS207: Consider shorter variations of null checks
* Full docs: https://github.com/decaffeinate/decaffeinate/blob/master/docs/suggestions.md
*/
//##############################################################################<|fim▁hole|>// Copyright (C) 2017, Sagemath Inc.
// AGPLv3
//
//##############################################################################
/*
Custom Prop Validation for immutable.js types, so they work just like other
React prop-types.
FUTURE: Put prop validation code in a debug area so that it doesn't get loaded for production
In addition to React Prop checks, we implement the following type checkers:
immutable,
immutable.List,
immutable.Map,
immutable.Set,
immutable.Stack,
which may be chained with .isRequired just like normal React prop checks
Additional validations may be added with the following signature
rtypes.custom_checker_name<function (
props,
propName,
componentName,
location,
propFullName,
secret
) => <Error-Like-Object or null>
>
Check React lib to see if this has changed.
*/
const check_is_immutable = function(
props,
propName,
componentName,
location,
propFullName
) {
if (componentName == null) {
componentName = "ANONYMOUS";
}
if (props[propName] == null || props[propName].toJS != null) {
return null;
} else {
const type = typeof props[propName];
return new Error(
`Invalid prop \`${propName}\` of` +
` type ${type} supplied to` +
` \`${componentName}\`, expected an immutable collection or frozen object.`
);
}
};
const allow_isRequired = function(validate) {
const check_type = function(
isRequired,
props,
propName,
componentName,
location
) {
if (componentName == null) {
componentName = "ANONYMOUS";
}
if (props[propName] == null && isRequired) {
return new Error(
`Required prop \`${propName}\` was not specified in \`${componentName}\``
);
}
return validate(props, propName, componentName, location);
};
const chainedCheckType = check_type.bind(null, false);
chainedCheckType.isRequired = check_type.bind(null, true);
chainedCheckType.isRequired.category = "IMMUTABLE";
chainedCheckType.category = "IMMUTABLE";
return chainedCheckType;
};
const create_immutable_type_required_chain = function(validate) {
const check_type = function(
immutable_type_name,
props,
propName,
componentName
) {
if (componentName == null) {
componentName = "ANONYMOUS";
}
if (immutable_type_name && props[propName] != null) {
const T = immutable_type_name;
if (props[propName].toJS == null) {
return new Error(
`NOT EVEN IMMUTABLE, wanted immutable.${T} ${props}, ${propName}`
);
}
if (require("immutable")[`${T}`][`is${T}`](props[propName])) {
return null;
} else {
return new Error(
`Component \`${componentName}\`` +
` expected ${propName} to be an immutable.${T}` +
` but was supplied ${props[propName]}`
);
}
} else {
return validate(props, propName, componentName, location);
}
};
// To add more immutable.js types, mimic code below.
const check_immutable_chain = allow_isRequired(
check_type.bind(null, undefined)
);
check_immutable_chain.Map = allow_isRequired(check_type.bind(null, "Map"));
check_immutable_chain.List = allow_isRequired(check_type.bind(null, "List"));
check_immutable_chain.Set = allow_isRequired(check_type.bind(null, "Set"));
check_immutable_chain.Stack = allow_isRequired(
check_type.bind(null, "Stack")
);
check_immutable_chain.category = "IMMUTABLE";
return check_immutable_chain;
};
exports.immutable = create_immutable_type_required_chain(check_is_immutable);<|fim▁end|> | //
// CoCalc: Collaborative web-based calculation |
<|file_name|>HybridVAControl_PROFILED.py<|end_file_name|><|fim▁begin|>#!/usr/bin/env python<|fim▁hole|>"""
@file HybridVAControl.py
@author Craig Rafter
@date 19/08/2016
class for hybrid vehicle-actuated (VA) signal control
"""
import signalControl, readJunctionData, traci
from math import atan2, degrees
import numpy as np
from collections import defaultdict
class HybridVAControl(signalControl.signalControl):
def __init__(self, junctionData, minGreenTime=10, maxGreenTime=60, scanRange=250, packetRate=0.2):
super(HybridVAControl, self).__init__()
self.junctionData = junctionData
self.firstCalled = self.getCurrentSUMOtime()
self.lastCalled = self.getCurrentSUMOtime()
self.lastStageIndex = 0
traci.trafficlights.setRedYellowGreenState(self.junctionData.id,
self.junctionData.stages[self.lastStageIndex].controlString)
self.packetRate = int(1000*packetRate)
self.transition = False
self.CAMactive = False
# dict[vehID] = [position, heading, velocity, Tdetect]
self.newVehicleInfo = {}
self.oldVehicleInfo = {}
self.scanRange = scanRange
self.jcnCtrlRegion = self._getJncCtrlRegion()
# print(self.junctionData.id)
# print(self.jcnCtrlRegion)
self.controlledLanes = traci.trafficlights.getControlledLanes(self.junctionData.id)
# dict[laneID] = [heading, shape]
self.laneDetectionInfo = self._getIncomingLaneInfo()
self.stageTime = 0.0
self.minGreenTime = minGreenTime
self.maxGreenTime = maxGreenTime
self.secondsPerMeterTraffic = 0.45
self.nearVehicleCatchDistance = 25
self.extendTime = 1.0 # 5 m in 10 m/s (acceptable journey 1.333)
self.laneInductors = self._getLaneInductors()
def process(self):
        # Determine whether CAM packets arrive on this simulation step:
        # packet delay is modelled by only collecting packets towards the
        # end of each second
if (not self.getCurrentSUMOtime() % self.packetRate) and (self.getCurrentSUMOtime() % 1000 > 500):
self.CAMactive = True
self._getCAMinfo()
else:
self.CAMactive = False
# Update stage decisions
        # If there are no ITS-enabled vehicles present, fall back to VA control
if len(self.oldVehicleInfo) < 1 and not self.getCurrentSUMOtime() % 1000:
detectTimePerLane = self._getLaneDetectTime()
#print(detectTimePerLane)
# Set adaptive time limit
#print(detectTimePerLane < 3)
if np.any(detectTimePerLane < 2):
extend = self.extendTime
else:
extend = 0.0
self.stageTime = max(self.stageTime + extend, self.minGreenTime)
self.stageTime = min(self.stageTime, self.maxGreenTime)
        # If CAM is active and we are on a whole second, or in a transition,
        # make the stage decision
elif (self.CAMactive and not self.getCurrentSUMOtime() % 1000) or self.transition:
oncomingVeh = self._getOncomingVehicles()
            # On a new stage, find the vehicle furthest from the stop line
            # whose velocity is below 5% of the speed limit, and use it to
            # estimate the queue length
if self.transition:
furthestVeh = self._getFurthestStationaryVehicle(oncomingVeh)
if furthestVeh[0] != '':
meteredTime = self.secondsPerMeterTraffic*furthestVeh[1]
self.stageTime = max(self.minGreenTime, meteredTime)
self.stageTime = min(self.stageTime, self.maxGreenTime)
# If we're in this state this should never happen but just in case
else:
self.stageTime = self.minGreenTime
# If currently staging then extend time if there are vehicles close
# to the stop line
else:
nearestVeh = self._getNearestVehicle(oncomingVeh)
# If a vehicle detected
if nearestVeh != '' and nearestVeh[1] <= self.nearVehicleCatchDistance:
if (self.oldVehicleInfo[nearestVeh[0]][2] != 1e6
and self.oldVehicleInfo[nearestVeh[0]][2] > 1.0/self.secondsPerMeterTraffic):
meteredTime = nearestVeh[1]/self.oldVehicleInfo[nearestVeh[0]][2]
else:
meteredTime = self.secondsPerMeterTraffic*nearestVeh[1]
elapsedTime = 0.001*(self.getCurrentSUMOtime() - self.lastCalled)
Tremaining = self.stageTime - elapsedTime
self.stageTime = elapsedTime + max(meteredTime, Tremaining)
self.stageTime = min(self.stageTime, self.maxGreenTime)
                # No detectable vehicle near the stop line, so fall back to
                # the inductive loop information
                elif nearestVeh == '' or nearestVeh[1] > self.nearVehicleCatchDistance:
detectTimePerLane = self._getLaneDetectTime()
print('Loops2')
# Set adaptive time limit
if np.any(detectTimePerLane < 2):
extend = self.extendTime
else:
extend = 0.0
self.stageTime = max(self.stageTime + extend, self.minGreenTime)
self.stageTime = min(self.stageTime, self.maxGreenTime)
else:
pass
# process stage as normal
else:
pass
# print(self.stageTime)
self.transition = False
if self.transitionObject.active:
# If the transition object is active i.e. processing a transition
pass
elif (self.getCurrentSUMOtime() - self.firstCalled) < (self.junctionData.offset*1000):
# Process offset first
pass
elif (self.getCurrentSUMOtime() - self.lastCalled) < self.stageTime*1000:
# Before the period of the next stage
pass
else:
            # Not in a transition, past the offset, and the stage time has elapsed
if len(self.junctionData.stages) != (self.lastStageIndex)+1:
                # Proceed to the next stage
self.transitionObject.newTransition(
self.junctionData.id,
self.junctionData.stages[self.lastStageIndex].controlString,
self.junctionData.stages[self.lastStageIndex+1].controlString)
self.lastStageIndex += 1
else:
                # Wrap from the final stage back to the first stage
#print(0.001*(self.getCurrentSUMOtime() - self.lastCalled))
self.transitionObject.newTransition(
self.junctionData.id,
self.junctionData.stages[self.lastStageIndex].controlString,
self.junctionData.stages[0].controlString)
self.lastStageIndex = 0
#print(0.001*(self.getCurrentSUMOtime() - self.lastCalled))
self.lastCalled = self.getCurrentSUMOtime()
self.transition = True
self.stageTime = 0.0
super(HybridVAControl, self).process()
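    # Worked example of the queue-metered timing above: a stationary vehicle
    # 40 m from the stop line gives 0.45 s/m * 40 m = 18 s of green, which is
    # then clamped to the [minGreenTime, maxGreenTime] window.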
def _getHeading(self, currentLoc, prevLoc):
dy = currentLoc[1] - prevLoc[1]
dx = currentLoc[0] - prevLoc[0]
if currentLoc[1] == prevLoc[1] and currentLoc[0] == prevLoc[0]:
heading = -1
else:
if dy >= 0:
heading = degrees(atan2(dy, dx))
else:
heading = 360 + degrees(atan2(dy, dx))
# Map angle to make compatible with SUMO heading
if 0 <= heading <= 90:
heading = 90 - heading
elif 90 < heading < 360:
heading = 450 - heading
return heading
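    # Worked example: atan2 measures counter-clockwise from +x while SUMO
    # headings run clockwise from north, so east (dx>0, dy=0) maps
    # 0 deg -> 90, north maps 90 deg -> 0, west maps 180 deg -> 270, and
    # south maps -90 deg (wrapped to 270) -> 180.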
def _getJncCtrlRegion(self):
jncPosition = traci.junction.getPosition(self.junctionData.id)
otherJuncPos = [traci.junction.getPosition(x) for x in traci.trafficlights.getIDList() if x != self.junctionData.id]
ctrlRegion = {'N':jncPosition[1]+self.scanRange, 'S':jncPosition[1]-self.scanRange,
'E':jncPosition[0]+self.scanRange, 'W':jncPosition[0]-self.scanRange}
TOL = 10 # Exclusion region around junction boundary
if otherJuncPos != []:
for pos in otherJuncPos:
dx = jncPosition[0] - pos[0]
dy = jncPosition[1] - pos[1]
# North/South Boundary
if abs(dy) < self.scanRange:
if dy < -TOL:
ctrlRegion['N'] = min(pos[1] - TOL, ctrlRegion['N'])
elif dy > TOL:
ctrlRegion['S'] = max(pos[1] + TOL, ctrlRegion['S'])
else:
pass
else:
pass
# East/West Boundary
if abs(dx) < self.scanRange:
if dx < -TOL:
ctrlRegion['E'] = min(pos[0] - TOL, ctrlRegion['E'])
elif dx > TOL:
ctrlRegion['W'] = max(pos[0] + TOL, ctrlRegion['W'])
else:
pass
else:
pass
return ctrlRegion
def _isInRange(self, vehID):
vehPosition = np.array(traci.vehicle.getPosition(vehID))
jcnPosition = np.array(traci.junction.getPosition(self.junctionData.id))
distance = np.linalg.norm(vehPosition - jcnPosition)
if (distance < self.scanRange
and self.jcnCtrlRegion['W'] <= vehPosition[0] <= self.jcnCtrlRegion['E']
and self.jcnCtrlRegion['S'] <= vehPosition[1] <= self.jcnCtrlRegion['N']):
return True
else:
return False
def _getVelocity(self, vehID, vehPosition, Tdetect):
if vehID in self.oldVehicleInfo.keys():
oldX = np.array(self.oldVehicleInfo[vehID][0])
newX = np.array(vehPosition)
dx = np.linalg.norm(newX - oldX)
dt = Tdetect - self.oldVehicleInfo[vehID][3]
velocity = dx/dt
return velocity
else:
return 1e6
def _getCAMinfo(self):
self.oldVehicleInfo = self.newVehicleInfo.copy()
self.newVehicleInfo = {}
Tdetect = 0.001*self.getCurrentSUMOtime()
for vehID in traci.vehicle.getIDList():
if traci.vehicle.getTypeID(vehID) == 'typeITSCV' and self._isInRange(vehID):
vehPosition = traci.vehicle.getPosition(vehID)
vehHeading = traci.vehicle.getAngle(vehID)
vehVelocity = self._getVelocity(vehID, vehPosition, Tdetect)
self.newVehicleInfo[vehID] = [vehPosition, vehHeading, vehVelocity, Tdetect]
def _getIncomingLaneInfo(self):
laneInfo = defaultdict(list)
for lane in list(np.unique(np.array(self.controlledLanes))):
shape = traci.lane.getShape(lane)
width = traci.lane.getWidth(lane)
heading = self._getHeading(shape[1], shape[0])
dx = shape[0][0] - shape[1][0]
dy = shape[0][1] - shape[1][1]
if abs(dx) > abs(dy):
roadBounds = ((shape[0][0], shape[0][1] + width), (shape[1][0], shape[1][1] - width))
else:
roadBounds = ((shape[0][0] + width, shape[0][1]), (shape[1][0] - width, shape[1][1]))
laneInfo[lane] = [heading, roadBounds]
return laneInfo
def _getOncomingVehicles(self):
# Oncoming if (in active lane & heading matches oncoming heading &
# is in lane bounds)
activeLanes = self._getActiveLanes()
vehicles = []
for lane in activeLanes:
for vehID in self.oldVehicleInfo.keys():
                # If the heading matches the lane heading within +/- 10 degrees
if (np.isclose(self.oldVehicleInfo[vehID][1], self.laneDetectionInfo[lane][0], atol=10)
# If in lane x bounds
and min(self.laneDetectionInfo[lane][1][0][0], self.laneDetectionInfo[lane][1][1][0]) <
self.oldVehicleInfo[vehID][0][0] <
max(self.laneDetectionInfo[lane][1][0][0], self.laneDetectionInfo[lane][1][1][0])
# If in lane y bounds
and min(self.laneDetectionInfo[lane][1][0][1], self.laneDetectionInfo[lane][1][1][1]) <
self.oldVehicleInfo[vehID][0][1] <
max(self.laneDetectionInfo[lane][1][0][1], self.laneDetectionInfo[lane][1][1][1])):
# Then append vehicle
vehicles.append(vehID)
vehicles = list(np.unique(np.array(vehicles)))
return vehicles
def _getActiveLanes(self):
# Get the current control string to find the green lights
stageCtrlString = self.junctionData.stages[self.lastStageIndex].controlString
activeLanes = []
for i, letter in enumerate(stageCtrlString):
if letter == 'G':
activeLanes.append(self.controlledLanes[i])
# Get a list of the unique active lanes
activeLanes = list(np.unique(np.array(activeLanes)))
return activeLanes
def _getLaneInductors(self):
laneInductors = defaultdict(list)
for loop in traci.inductionloop.getIDList():
loopLane = traci.inductionloop.getLaneID(loop)
if loopLane in self.controlledLanes and 'upstream' not in loop:
laneInductors[loopLane].append(loop)
return laneInductors
def _getFurthestStationaryVehicle(self, vehIDs):
furthestID = ''
maxDistance = -1
jcnPosition = np.array(traci.junction.getPosition(self.junctionData.id))
speedLimit = traci.lane.getMaxSpeed(self._getActiveLanes()[0])
for ID in vehIDs:
vehPosition = np.array(self.oldVehicleInfo[ID][0])
distance = np.linalg.norm(vehPosition - jcnPosition)
if distance > maxDistance and self.oldVehicleInfo[ID][2] < 0.05*speedLimit:
furthestID = ID
maxDistance = distance
return [furthestID, maxDistance]
def _getNearestVehicle(self, vehIDs):
nearestID = ''
minDistance = self.nearVehicleCatchDistance + 1
jcnPosition = np.array(traci.junction.getPosition(self.junctionData.id))
for ID in vehIDs:
vehPosition = np.array(self.oldVehicleInfo[ID][0])
distance = np.linalg.norm(vehPosition - jcnPosition)
if distance < minDistance:
nearestID = ID
minDistance = distance
return [nearestID, minDistance]
def _getLaneDetectTime(self):
activeLanes = self._getActiveLanes()
meanDetectTimePerLane = np.zeros(len(activeLanes))
for i, lane in enumerate(activeLanes):
detectTimes = []
for loop in self.laneInductors[lane]:
detectTimes.append(traci.inductionloop.getTimeSinceDetection(loop))
meanDetectTimePerLane[i] = np.mean(detectTimes)
return meanDetectTimePerLane<|fim▁end|> | |
<|file_name|>test_cisco_switch_protocol.py<|end_file_name|><|fim▁begin|># Copyright 2015-2016 Internap.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import mock
from tests.cisco import enable, create_interface_vlan, configuring, configuring_interface_vlan, \
assert_interface_configuration, remove_vlan, create_vlan, set_interface_on_vlan, configuring_interface, \
revert_switchport_mode_access, create_port_channel_interface, configuring_port_channel
from tests.util.protocol_util import SshTester, TelnetTester, with_protocol, ProtocolTest
class TestCiscoSwitchProtocol(ProtocolTest):
__test__ = False
test_switch = "cisco"
@with_protocol
def test_enable_command_requires_a_password(self, t):
t.write("enable")
t.read("Password: ")
t.write_invisible(t.conf["extra"]["password"])
t.read("my_switch#")
@with_protocol
def test_wrong_password(self, t):
t.write("enable")
t.read("Password: ")
t.write_invisible("hello_world")
t.readln("% Access denied")
t.readln("")
t.read("my_switch>")
@with_protocol
def test_no_password_works_for_legacy_reasons(self, t):
t.write("enable")
t.read("Password: ")
t.write_invisible("")
t.read("my_switch#")
@with_protocol
def test_exiting_loses_the_connection(self, t):
t.write("enable")
t.read("Password: ")
t.write_invisible(t.conf["extra"]["password"])
t.read("my_switch#")
t.write("exit")
t.read_eof()
@with_protocol
def test_no_such_command_return_to_prompt(self, t):
enable(t)
t.write("shizzle")
t.readln("No such command : shizzle")
t.read("my_switch#")
@with_protocol
@mock.patch("fake_switches.adapters.tftp_reader.read_tftp")
def test_command_copy_failing(self, t, read_tftp):
read_tftp.side_effect = Exception("Stuff")
enable(t)
t.write("copy tftp://1.2.3.4/my-file system:/running-config")
t.read("Destination filename [running-config]? ")
t.write("gneh")
t.readln("Accessing tftp://1.2.3.4/my-file...")
t.readln("Error opening tftp://1.2.3.4/my-file (Timed out)")
t.read("my_switch#")
read_tftp.assert_called_with("1.2.3.4", "my-file")
@with_protocol
@mock.patch("fake_switches.adapters.tftp_reader.read_tftp")
def test_command_copy_success(self, t, read_tftp):
enable(t)
t.write("copy tftp://1.2.3.4/my-file system:/running-config")
t.read("Destination filename [running-config]? ")
t.write_raw("\r")
t.wait_for("\r\n")
t.readln("Accessing tftp://1.2.3.4/my-file...")
t.readln("Done (or some official message...)")
t.read("my_switch#")
read_tftp.assert_called_with("1.2.3.4", "my-file")
@with_protocol
def test_command_show_run_int_vlan_empty(self, t):
enable(t)
t.write("terminal length 0")
t.read("my_switch#")
t.write("show run vlan 120")
t.readln("Building configuration...")
t.readln("")
t.readln("Current configuration:")
t.readln("end")
t.readln("")
t.read("my_switch#")
@with_protocol
def test_command_add_vlan(self, t):
enable(t)
t.write("conf t")
t.readln("Enter configuration commands, one per line. End with CNTL/Z.")
t.read("my_switch(config)#")
t.write("vlan 123")
t.read("my_switch(config-vlan)#")
t.write("name shizzle")
t.read("my_switch(config-vlan)#")
t.write("exit")
t.read("my_switch(config)#")
t.write("exit")
t.read("my_switch#")
t.write("show run vlan 123")
t.readln("Building configuration...")
t.readln("")
t.readln("Current configuration:")
t.readln("!")
t.readln("vlan 123")
t.readln(" name shizzle")
t.readln("end")
t.readln("")
t.read("my_switch#")
remove_vlan(t, "123")
t.write("show running-config vlan 123")
t.readln("Building configuration...")
t.readln("")
t.readln("Current configuration:")
t.readln("end")
t.read("")
@with_protocol
def test_command_assign_access_vlan_to_port(self, t):
enable(t)
create_vlan(t, "123")
set_interface_on_vlan(t, "FastEthernet0/1", "123")
assert_interface_configuration(t, "Fa0/1", [
"interface FastEthernet0/1",
" switchport access vlan 123",
" switchport mode access",
"end"])
configuring_interface(t, "FastEthernet0/1", do="no switchport access vlan")
assert_interface_configuration(t, "Fa0/1", [
"interface FastEthernet0/1",
" switchport mode access",
"end"])
configuring_interface(t, "FastEthernet0/1", do="no switchport mode access")
assert_interface_configuration(t, "Fa0/1", [
"interface FastEthernet0/1",
"end"])
remove_vlan(t, "123")
@with_protocol
def test_show_vlan_brief(self, t):
enable(t)
create_vlan(t, "123")
create_vlan(t, "3333", "some-name")
create_vlan(t, "2222", "your-name-is-way-too-long-for-this-pretty-printed-interface-man")
set_interface_on_vlan(t, "FastEthernet0/1", "123")
t.write("show vlan brief")
t.readln("")
t.readln("VLAN Name Status Ports")
t.readln("---- -------------------------------- --------- -------------------------------")
t.readln("1 default active Fa0/2, Fa0/3, Fa0/4, Fa0/5")
t.readln(" Fa0/6, Fa0/7, Fa0/8, Fa0/9")
t.readln(" Fa0/10, Fa0/11, Fa0/12")
t.readln("123 VLAN123 active Fa0/1")
t.readln("2222 your-name-is-way-too-long-for-th active")
t.readln("3333 some-name active")
t.read("my_switch#")
revert_switchport_mode_access(t, "FastEthernet0/1")
remove_vlan(t, "123")
remove_vlan(t, "2222")
remove_vlan(t, "3333")
@with_protocol
def test_show_vlan(self, t):
enable(t)
create_vlan(t, "123")
create_vlan(t, "3333", "some-name")
create_vlan(t, "2222", "your-name-is-way-too-long-for-this-pretty-printed-interface-man")
set_interface_on_vlan(t, "FastEthernet0/1", "123")
t.write("show vlan")
t.readln("")
t.readln("VLAN Name Status Ports")
t.readln("---- -------------------------------- --------- -------------------------------")
t.readln("1 default active Fa0/2, Fa0/3, Fa0/4, Fa0/5")
t.readln(" Fa0/6, Fa0/7, Fa0/8, Fa0/9")
t.readln(" Fa0/10, Fa0/11, Fa0/12")
t.readln("123 VLAN123 active Fa0/1")
t.readln("2222 your-name-is-way-too-long-for-th active")
t.readln("3333 some-name active")
t.readln("")
t.readln("VLAN Type SAID MTU Parent RingNo BridgeNo Stp BrdgMode Trans1 Trans2")
t.readln("---- ----- ---------- ----- ------ ------ -------- ---- -------- ------ ------")
t.readln("1 enet 100001 1500 - - - - - 0 0")
t.readln("123 enet 100123 1500 - - - - - 0 0")
t.readln("2222 enet 102222 1500 - - - - - 0 0")
t.readln("3333 enet 103333 1500 - - - - - 0 0")
t.readln("")
t.readln("Remote SPAN VLANs")
t.readln("------------------------------------------------------------------------------")
t.readln("")
t.readln("")
t.readln("Primary Secondary Type Ports")
t.readln("------- --------- ----------------- ------------------------------------------")
t.readln("")
t.read("my_switch#")
revert_switchport_mode_access(t, "FastEthernet0/1")
remove_vlan(t, "123")
remove_vlan(t, "2222")
remove_vlan(t, "3333")
@with_protocol
def test_shutting_down(self, t):
enable(t)
configuring_interface(t, "FastEthernet 0/3", do="shutdown")
assert_interface_configuration(t, "FastEthernet0/3", [
"interface FastEthernet0/3",
" shutdown",
"end"])
configuring_interface(t, "FastEthernet 0/3", do="no shutdown")
assert_interface_configuration(t, "FastEthernet0/3", [
"interface FastEthernet0/3",
"end"])
@with_protocol
def test_configure_trunk_port(self, t):
enable(t)
configuring_interface(t, "Fa0/3", do="switchport mode trunk")
assert_interface_configuration(t, "FastEthernet0/3", [
"interface FastEthernet0/3",
" switchport mode trunk",
"end"])
        # not actually added to the config: all vlans are allowed on a trunk by default on cisco
configuring_interface(t, "Fa0/3", do="switchport trunk allowed vlan add 123")
assert_interface_configuration(t, "FastEthernet0/3", [
"interface FastEthernet0/3",
" switchport mode trunk",
"end"])
configuring_interface(t, "Fa0/3", do="switchport trunk allowed vlan none")
assert_interface_configuration(t, "FastEthernet0/3", [
"interface FastEthernet0/3",
" switchport trunk allowed vlan none",
" switchport mode trunk",
"end"])
configuring_interface(t, "Fa0/3", do="switchport trunk allowed vlan add 123")
assert_interface_configuration(t, "FastEthernet0/3", [
"interface FastEthernet0/3",
" switchport trunk allowed vlan 123",
" switchport mode trunk",
"end"])
configuring_interface(t, "Fa0/3", do="switchport trunk allowed vlan add 124,126-128")
assert_interface_configuration(t, "FastEthernet0/3", [
"interface FastEthernet0/3",
" switchport trunk allowed vlan 123,124,126-128",
" switchport mode trunk",
"end"])
configuring_interface(t, "Fa0/3", do="switchport trunk allowed vlan remove 123-124,127")
assert_interface_configuration(t, "FastEthernet0/3", [
"interface FastEthernet0/3",
" switchport trunk allowed vlan 126,128",
" switchport mode trunk",
"end"])
configuring_interface(t, "Fa0/3", do="switchport trunk allowed vlan all")
assert_interface_configuration(t, "FastEthernet0/3", [
"interface FastEthernet0/3",
" switchport mode trunk",
"end"])
configuring_interface(t, "Fa0/3", do="switchport trunk allowed vlan 123-124,127")
assert_interface_configuration(t, "FastEthernet0/3", [
"interface FastEthernet0/3",
" switchport trunk allowed vlan 123,124,127",
" switchport mode trunk",
"end"])
configuring_interface(t, "Fa0/3", do="no switchport trunk allowed vlan")
assert_interface_configuration(t, "FastEthernet0/3", [
"interface FastEthernet0/3",
" switchport mode trunk",
"end"])
configuring_interface(t, "Fa0/3", do="no switchport mode")
assert_interface_configuration(t, "FastEthernet0/3", [
"interface FastEthernet0/3",
"end"])
@with_protocol
def test_configure_native_vlan(self, t):
enable(t)
configuring_interface(t, "FastEthernet0/2", do="switchport trunk native vlan 555")
assert_interface_configuration(t, "Fa0/2", [
"interface FastEthernet0/2",
" switchport trunk native vlan 555",
"end"])
configuring_interface(t, "FastEthernet0/2", do="no switchport trunk native vlan")
assert_interface_configuration(t, "Fa0/2", [
"interface FastEthernet0/2",
"end"])
@with_protocol
def test_setup_an_interface(self, t):
enable(t)
create_vlan(t, "2999")
create_interface_vlan(t, "2999")
assert_interface_configuration(t, "Vlan2999", [
"interface Vlan2999",
" no ip address",
"end"])
configuring_interface_vlan(t, "2999", do="description hey ho")
configuring_interface_vlan(t, "2999", do="ip address 1.1.1.2 255.255.255.0")
configuring_interface_vlan(t, "2999", do="standby 1 ip 1.1.1.1")
configuring_interface_vlan(t, "2999", do='standby 1 timers 5 15')
configuring_interface_vlan(t, "2999", do='standby 1 priority 110')
configuring_interface_vlan(t, "2999", do='standby 1 preempt delay minimum 60')
configuring_interface_vlan(t, "2999", do='standby 1 authentication VLAN2999')
configuring_interface_vlan(t, "2999", do='standby 1 track 10 decrement 50')
configuring_interface_vlan(t, "2999", do='standby 1 track 20 decrement 50')
configuring_interface_vlan(t, "2999", do='no ip proxy-arp')
assert_interface_configuration(t, "Vlan2999", [
"interface Vlan2999",
" description hey ho",
" ip address 1.1.1.2 255.255.255.0",
" no ip proxy-arp",
" standby 1 ip 1.1.1.1",
" standby 1 timers 5 15",
" standby 1 priority 110",
" standby 1 preempt delay minimum 60",
" standby 1 authentication VLAN2999",
" standby 1 track 10 decrement 50",
" standby 1 track 20 decrement 50",
"end"])
configuring_interface_vlan(t, "2999", do="ip address 2.2.2.2 255.255.255.0")
configuring_interface_vlan(t, "2999", do="standby 1 ip 2.2.2.1")
configuring_interface_vlan(t, "2999", do="standby 1 ip 2.2.2.3 secondary")
configuring_interface_vlan(t, "2999", do="no standby 1 authentication")
configuring_interface_vlan(t, "2999", do="standby 1 preempt delay minimum 42")
configuring_interface_vlan(t, "2999", do="no standby 1 priority")
configuring_interface_vlan(t, "2999", do="no standby 1 timers")
configuring_interface_vlan(t, "2999", do="no standby 1 track 10")
configuring_interface_vlan(t, "2999", do="ip proxy-arp")
assert_interface_configuration(t, "Vlan2999", [
"interface Vlan2999",
" description hey ho",
" ip address 2.2.2.2 255.255.255.0",
" standby 1 ip 2.2.2.1",
" standby 1 ip 2.2.2.3 secondary",
" standby 1 preempt delay minimum 42",
" standby 1 track 20 decrement 50",
"end"])
configuring_interface_vlan(t, "2999", do="no standby 1 ip 2.2.2.3")
configuring_interface_vlan(t, "2999", do="no standby 1 preempt delay")
configuring_interface_vlan(t, "2999", do="no standby 1 track 20")
configuring_interface_vlan(t, "2999", do="")
assert_interface_configuration(t, "Vlan2999", [
"interface Vlan2999",
" description hey ho",
" ip address 2.2.2.2 255.255.255.0",
" standby 1 ip 2.2.2.1",
" standby 1 preempt",
"end"])
configuring_interface_vlan(t, "2999", do="no standby 1 ip 2.2.2.1")
assert_interface_configuration(t, "Vlan2999", [
"interface Vlan2999",
" description hey ho",
" ip address 2.2.2.2 255.255.255.0",
" standby 1 preempt",
"end"])
configuring_interface_vlan(t, "2999", do="no standby 1")
configuring_interface_vlan(t, "2999", do="no description")
configuring_interface_vlan(t, "2999", do="")
assert_interface_configuration(t, "Vlan2999", [
"interface Vlan2999",
" ip address 2.2.2.2 255.255.255.0",
"end"])
configuring(t, do="no interface vlan 2999")
t.write("show run int vlan 2999")
t.readln("\s*\^", regex=True)
t.readln("% Invalid input detected at '^' marker.")
t.readln("")
t.read("my_switch#")
remove_vlan(t, "2999")
@with_protocol
def test_partial_standby_properties(self, t):
enable(t)
create_vlan(t, "2999")
create_interface_vlan(t, "2999")
assert_interface_configuration(t, "Vlan2999", [
"interface Vlan2999",
" no ip address",
"end"])
configuring_interface_vlan(t, "2999", do='standby 1 timers 5 15')
assert_interface_configuration(t, "Vlan2999", [
"interface Vlan2999",
" no ip address",
" standby 1 timers 5 15",
"end"])
configuring_interface_vlan(t, "2999", do="no standby 1 timers")
configuring_interface_vlan(t, "2999", do='standby 1 priority 110')
assert_interface_configuration(t, "Vlan2999", [
"interface Vlan2999",
" no ip address",
" standby 1 priority 110",
"end"])
configuring_interface_vlan(t, "2999", do="no standby 1 priority")
configuring_interface_vlan(t, "2999", do='standby 1 preempt delay minimum 60')
assert_interface_configuration(t, "Vlan2999", [
"interface Vlan2999",
" no ip address",
" standby 1 preempt delay minimum 60",
"end"])
configuring_interface_vlan(t, "2999", do="no standby 1 preempt")
configuring_interface_vlan(t, "2999", do='standby 1 authentication VLAN2999')
assert_interface_configuration(t, "Vlan2999", [
"interface Vlan2999",
" no ip address",
" standby 1 authentication VLAN2999",
"end"])
configuring_interface_vlan(t, "2999", do="no standby 1 authentication")
configuring_interface_vlan(t, "2999", do='standby 1 track 10 decrement 50')
assert_interface_configuration(t, "Vlan2999", [
"interface Vlan2999",
" no ip address",
" standby 1 track 10 decrement 50",
"end"])
configuring_interface_vlan(t, "2999", do="no standby 1 track 10")
configuring(t, do="no interface vlan 2999")
remove_vlan(t, "2999")
@with_protocol
def test_partial_standby_ip_definition(self, t):
enable(t)
create_vlan(t, "2999")
create_interface_vlan(t, "2999")
configuring_interface_vlan(t, "2999", do='standby 1 ip')
assert_interface_configuration(t, "Vlan2999", [
"interface Vlan2999",
" no ip address",
" standby 1 ip",
"end"])
configuring_interface_vlan(t, "2999", do='no standby 1 ip')
t.write("configure terminal")
t.readln("Enter configuration commands, one per line. End with CNTL/Z.")
t.read("my_switch(config)#")
t.write("interface vlan 2999")
t.read("my_switch(config-if)#")
t.write("standby 1 ip 1..1.1")
t.readln(" ^")
t.readln("% Invalid input detected at '^' marker.")
t.readln("")
t.read("my_switch(config-if)#")
t.write("standby 1 ip 1.1.1.1")
t.readln("% Warning: address is not within a subnet on this interface")
t.read("my_switch(config-if)#")
t.write("exit")
t.read("my_switch(config)#")
t.write("exit")
t.read("my_switch#")
assert_interface_configuration(t, "Vlan2999", [
"interface Vlan2999",
" no ip address",
"end"])
configuring_interface_vlan(t, "2999", do="ip address 1.1.1.2 255.255.255.0")
t.write("configure terminal")
t.readln("Enter configuration commands, one per line. End with CNTL/Z.")
t.read("my_switch(config)#")
t.write("interface vlan 2999")
t.read("my_switch(config-if)#")
t.write("standby 1 ip 2.1.1.1")
t.readln("% Warning: address is not within a subnet on this interface")
t.read("my_switch(config-if)#")
t.write("exit")
t.read("my_switch(config)#")
t.write("exit")
t.read("my_switch#")
configuring_interface_vlan(t, "2999", do='standby 1 ip 1.1.1.1')
assert_interface_configuration(t, "Vlan2999", [
"interface Vlan2999",
" ip address 1.1.1.2 255.255.255.0",
" standby 1 ip 1.1.1.1",
"end"])
configuring_interface_vlan(t, "2999", do='standby 1 ip')
assert_interface_configuration(t, "Vlan2999", [
"interface Vlan2999",
" ip address 1.1.1.2 255.255.255.0",
" standby 1 ip 1.1.1.1",
"end"])
configuring_interface_vlan(t, "2999", do="no ip address 1.1.1.2 255.255.255.0")
assert_interface_configuration(t, "Vlan2999", [
"interface Vlan2999",
" no ip address",
" standby 1 ip 1.1.1.1",
"end"])
configuring_interface_vlan(t, "2999", do='no standby 1 ip 1.1.1.1')
assert_interface_configuration(t, "Vlan2999", [
"interface Vlan2999",
" no ip address",
"end"])
configuring(t, do="no interface vlan 2999")
remove_vlan(t, "2999")
@with_protocol
def test_creating_a_port_channel(self, t):
enable(t)
create_port_channel_interface(t, '1')
configuring_port_channel(t, '1', 'description HELLO')
configuring_port_channel(t, '1', 'switchport trunk encapsulation dot1q')
configuring_port_channel(t, '1', 'switchport trunk native vlan 998')
configuring_port_channel(t, '1', 'switchport trunk allowed vlan 6,4087-4089,4091,4093')
configuring_port_channel(t, '1', 'switchport mode trunk')
assert_interface_configuration(t, 'Port-channel1', [
"interface Port-channel1",
" description HELLO",
" switchport trunk encapsulation dot1q",
" switchport trunk native vlan 998",
" switchport trunk allowed vlan 6,4087-4089,4091,4093",
" switchport mode trunk",
"end"
])
t.write("show etherchannel summary")
t.readln("Flags: D - down P - bundled in port-channel")
t.readln(" I - stand-alone s - suspended")
t.readln(" H - Hot-standby (LACP only)")
t.readln(" R - Layer3 S - Layer2")
t.readln(" U - in use f - failed to allocate aggregator")
t.readln("")
t.readln(" M - not in use, minimum links not met")
t.readln(" u - unsuitable for bundling")
t.readln(" w - waiting to be aggregated")
t.readln(" d - default port")
t.readln("")
t.readln("")
t.readln("Number of channel-groups in use: 1")
t.readln("Number of aggregators: 1")
t.readln("")
t.readln("Group Port-channel Protocol Ports")
t.readln("------+-------------+-----------+-----------------------------------------------")
t.readln("1 Po1(S) LACP ")
t.readln("")
t.read("my_switch#")
configuring(t, do="no interface port-channel 1")
t.write("show run int po1")
t.readln("\s*\^", regex=True)
t.readln("% Invalid input detected at '^' marker.")
t.readln("")
t.read("my_switch#")
@with_protocol
def test_port_channel_is_automatically_created_when_adding_a_port_to_it(self, t):
enable(t)
t.write("configure terminal")
t.readln("Enter configuration commands, one per line. End with CNTL/Z.")
t.read("my_switch(config)#")
t.write("interface FastEthernet0/1")
t.read("my_switch(config-if)#")
t.write("channel-group 2 mode active")
t.readln("Creating a port-channel interface Port-channel 2")
t.read("my_switch(config-if)#")
t.write("exit")
t.read("my_switch(config)#")
t.write("exit")
t.read("my_switch#")
assert_interface_configuration(t, 'fa0/1', [
"interface FastEthernet0/1",
" channel-group 2 mode active",
"end"
])
assert_interface_configuration(t, 'po2', [
"interface Port-channel2",
"end"
])
t.write("show etherchannel summary")
t.readln("Flags: D - down P - bundled in port-channel")
t.readln(" I - stand-alone s - suspended")
t.readln(" H - Hot-standby (LACP only)")
t.readln(" R - Layer3 S - Layer2")
t.readln(" U - in use f - failed to allocate aggregator")
t.readln("")
t.readln(" M - not in use, minimum links not met")
t.readln(" u - unsuitable for bundling")
t.readln(" w - waiting to be aggregated")
t.readln(" d - default port")
t.readln("")
t.readln("")
t.readln("Number of channel-groups in use: 1")
t.readln("Number of aggregators: 1")
t.readln("")
t.readln("Group Port-channel Protocol Ports")
t.readln("------+-------------+-----------+-----------------------------------------------")
t.readln("2 Po2(SU) LACP Fa0/1(P)")
t.readln("")
t.read("my_switch#")
configuring(t, do="no interface port-channel 2")
configuring_interface(t, interface="fa0/1", do="no channel-group 2 mode on")
assert_interface_configuration(t, "fa0/1", [
"interface FastEthernet0/1",
"end"
])
@with_protocol
def test_port_channel_is_not_automatically_created_when_adding_a_port_to_it_if_its_already_created(self, t):
enable(t)
create_port_channel_interface(t, '14')
t.write("configure terminal")
t.readln("Enter configuration commands, one per line. End with CNTL/Z.")
t.read("my_switch(config)#")
t.write("interface FastEthernet0/1")
t.read("my_switch(config-if)#")
t.write("channel-group 14 mode active")
t.read("my_switch(config-if)#")
t.write("exit")
t.read("my_switch(config)#")
t.write("exit")
t.read("my_switch#")
assert_interface_configuration(t, "fa0/1", [
"interface FastEthernet0/1",
" channel-group 14 mode active",
"end"
])
configuring_interface(t, interface="fa0/1", do="no channel-group 14 mode on")
assert_interface_configuration(t, "fa0/1", [
"interface FastEthernet0/1",
"end"
])
configuring(t, do="no interface port-channel 14")
@with_protocol
def test_setting_secondary_ips(self, t):
enable(t)
create_interface_vlan(t, "2999")
configuring_interface_vlan(t, "2999", do="description hey ho")
configuring_interface_vlan(t, "2999", do="no ip redirects")
configuring_interface_vlan(t, "2999", do="ip address 1.1.1.1 255.255.255.0")
configuring_interface_vlan(t, "2999", do="ip address 2.2.2.1 255.255.255.0 secondary")
configuring_interface_vlan(t, "2999", do="ip address 4.4.4.1 255.255.255.0 secondary")
configuring_interface_vlan(t, "2999", do="ip address 3.3.3.1 255.255.255.0 secondary")
assert_interface_configuration(t, "Vlan2999", [
"interface Vlan2999",
" description hey ho",
" ip address 2.2.2.1 255.255.255.0 secondary",
" ip address 4.4.4.1 255.255.255.0 secondary",
" ip address 3.3.3.1 255.255.255.0 secondary",
" ip address 1.1.1.1 255.255.255.0",
" no ip redirects",
"end"])
configuring_interface_vlan(t, "2999", do="no ip address")
configuring_interface_vlan(t, "2999", do="ip redirects")
assert_interface_configuration(t, "Vlan2999", [
"interface Vlan2999",
" description hey ho",
" no ip address",
"end"])
configuring(t, do="no interface vlan 2999")
@with_protocol
def test_setting_access_group(self, t):
enable(t)
create_interface_vlan(t, "2999")
configuring_interface_vlan(t, "2999", do="ip access-group SHNITZLE in")
configuring_interface_vlan(t, "2999", do="ip access-group WHIZZLE out")
assert_interface_configuration(t, "Vlan2999", [
"interface Vlan2999",
" no ip address",
" ip access-group SHNITZLE in",
" ip access-group WHIZZLE out",
"end"])
configuring_interface_vlan(t, "2999", do="no ip access-group in")
configuring_interface_vlan(t, "2999", do="no ip access-group WHIZZLE out")
assert_interface_configuration(t, "Vlan2999", [
"interface Vlan2999",
" no ip address",
"end"])
configuring(t, do="no interface vlan 2999")
@with_protocol
def test_removing_ip_address(self, t):
enable(t)
t.write("configure terminal")
t.readln("Enter configuration commands, one per line. End with CNTL/Z.")
t.read("my_switch(config)#")
t.write("interface vlan2999")
t.read("my_switch(config-if)#")
t.write("ip address 1.1.1.1 255.255.255.0")
t.read("my_switch(config-if)#")
t.write("ip address 2.2.2.2 255.255.255.0 secondary")
t.read("my_switch(config-if)#")
t.write("no ip address 1.1.1.1 255.255.255.0")
t.readln("Must delete secondary before deleting primary")
t.read("my_switch(config-if)#")
t.write("no ip address 2.2.2.2 255.255.255.0 secondary")
t.read("my_switch(config-if)#")
t.write("no ip address 1.1.1.1 255.255.255.0")
t.read("my_switch(config-if)#")
t.write("exit")
t.read("my_switch(config)#")
t.write("exit")
t.read("my_switch#")
assert_interface_configuration(t, "Vlan2999", [
"interface Vlan2999",
" no ip address",
"end"])
configuring(t, do="no interface vlan 2999")
@with_protocol
def test_show_ip_interface(self, t):
enable(t)
create_vlan(t, "1000")
create_interface_vlan(t, "1000")
create_vlan(t, "2000")
create_vlan(t, "3000")
create_interface_vlan(t, "3000")
configuring_interface_vlan(t, "3000", do="ip address 1.1.1.1 255.255.255.0")
create_interface_vlan(t, "4000")
configuring_interface_vlan(t, "4000", do="ip vrf forwarding DEFAULT-LAN")
configuring_interface_vlan(t, "4000", do="ip address 2.2.2.2 255.255.255.0")
configuring_interface_vlan(t, "4000", do="ip address 4.2.2.2 255.255.255.0 secondary")
configuring_interface_vlan(t, "4000", do="ip address 3.2.2.2 255.255.255.0 secondary")
configuring_interface_vlan(t, "4000", do="ip address 3.2.2.2 255.255.255.128 secondary")
configuring_interface_vlan(t, "4000", do="ip access-group shizzle in")
configuring_interface_vlan(t, "4000", do="ip access-group whizzle out")
t.write("show ip interface")
t.readln("Vlan1000 is down, line protocol is down")
t.readln(" Internet protocol processing disabled")
t.readln("Vlan3000 is down, line protocol is down")
t.readln(" Internet address is 1.1.1.1/24")
t.readln(" Outgoing access list is not set")
t.readln(" Inbound access list is not set")
t.readln("Vlan4000 is down, line protocol is down")
t.readln(" Internet address is 2.2.2.2/24")
t.readln(" Secondary address 4.2.2.2/24")
t.readln(" Secondary address 3.2.2.2/25")
t.readln(" Outgoing access list is whizzle")
t.readln(" Inbound access list is shizzle")
t.readln(" VPN Routing/Forwarding \"DEFAULT-LAN\"")
t.readln("FastEthernet0/1 is down, line protocol is down")
t.readln(" Internet protocol processing disabled")
t.readln("FastEthernet0/2 is down, line protocol is down")
t.readln(" Internet protocol processing disabled")
t.readln("FastEthernet0/3 is down, line protocol is down")
t.readln(" Internet protocol processing disabled")
t.readln("FastEthernet0/4 is down, line protocol is down")
t.readln(" Internet protocol processing disabled")
t.readln("FastEthernet0/5 is down, line protocol is down")
t.readln(" Internet protocol processing disabled")
t.readln("FastEthernet0/6 is down, line protocol is down")
t.readln(" Internet protocol processing disabled")
t.readln("FastEthernet0/7 is down, line protocol is down")
t.readln(" Internet protocol processing disabled")
t.readln("FastEthernet0/8 is down, line protocol is down")
t.readln(" Internet protocol processing disabled")
t.readln("FastEthernet0/9 is down, line protocol is down")
t.readln(" Internet protocol processing disabled")
t.readln("FastEthernet0/10 is down, line protocol is down")
t.readln(" Internet protocol processing disabled")
t.readln("FastEthernet0/11 is down, line protocol is down")
t.readln(" Internet protocol processing disabled")
t.readln("FastEthernet0/12 is down, line protocol is down")
t.readln(" Internet protocol processing disabled")
t.read("my_switch#")
t.write("show ip interface vlan 4000")
t.readln("Vlan4000 is down, line protocol is down")
t.readln(" Internet address is 2.2.2.2/24")
t.readln(" Secondary address 4.2.2.2/24")
t.readln(" Secondary address 3.2.2.2/25")
t.readln(" Outgoing access list is whizzle")
t.readln(" Inbound access list is shizzle")
t.readln(" VPN Routing/Forwarding \"DEFAULT-LAN\"")
t.read("my_switch#")
t.write("show ip interface vlan1000")
t.readln("Vlan1000 is down, line protocol is down")
t.readln(" Internet protocol processing disabled")
t.read("my_switch#")
configuring(t, do="no interface vlan 1000")
configuring(t, do="no interface vlan 3000")
configuring(t, do="no interface vlan 4000")
remove_vlan(t, "1000")
remove_vlan(t, "2000")
remove_vlan(t, "3000")
@with_protocol
def test_assigning_a_secondary_ip_as_the_primary_removes_it_from_secondary_and_removes_the_primary(self, t):
enable(t)
create_interface_vlan(t, "4000")
configuring_interface_vlan(t, "4000", do="ip address 2.2.2.2 255.255.255.0")
configuring_interface_vlan(t, "4000", do="ip address 4.2.2.2 255.255.255.0 secondary")
configuring_interface_vlan(t, "4000", do="ip address 3.2.2.2 255.255.255.0 secondary")
configuring_interface_vlan(t, "4000", do="ip address 3.2.2.2 255.255.255.128")
assert_interface_configuration(t, "Vlan4000", [
"interface Vlan4000",
" ip address 4.2.2.2 255.255.255.0 secondary",
" ip address 3.2.2.2 255.255.255.128",
"end"])
configuring(t, do="no interface vlan 4000")
@with_protocol
def test_overlapping_ips(self, t):
enable(t)
create_vlan(t, "1000")
create_interface_vlan(t, "1000")
create_vlan(t, "2000")
create_interface_vlan(t, "2000")
configuring_interface_vlan(t, "1000", do="ip address 2.2.2.2 255.255.255.0")
configuring_interface_vlan(t, "1000", do="ip address 3.3.3.3 255.255.255.0 secondary")
t.write("configure terminal")
t.readln("Enter configuration commands, one per line. End with CNTL/Z.")
t.read("my_switch(config)#")
t.write("interface vlan2000")
t.read("my_switch(config-if)#")
t.write("ip address 2.2.2.75 255.255.255.128")
t.readln("% 2.2.2.0 overlaps with secondary address on Vlan1000")
t.read("my_switch(config-if)#")
t.write("ip address 3.3.3.4 255.255.255.128")
t.readln("% 3.3.3.0 is assigned as a secondary address on Vlan1000")
t.read("my_switch(config-if)#")
t.write("exit")
t.read("my_switch(config)#")
t.write("exit")
t.read("my_switch#")
configuring(t, do="no interface vlan 2000")
remove_vlan(t, "2000")
configuring(t, do="no interface vlan 1000")
remove_vlan(t, "1000")
@with_protocol
def test_unknown_ip_interface(self, t):
enable(t)
t.write("show ip interface Vlan2345")
t.readln(" ^")
t.readln("% Invalid input detected at '^' marker.")
t.readln("")
t.read("my_switch#")
@with_protocol
def test_removing_ip_needs_to_compare_objects_better(self, t):
enable(t)
create_vlan(t, "1000")
create_interface_vlan(t, "1000")
configuring_interface_vlan(t, "1000", do="ip address 1.1.1.1 255.255.255.0")
configuring_interface_vlan(t, "1000", do="ip address 1.1.1.2 255.255.255.0 secondary")
configuring_interface_vlan(t, "1000", do="ip address 1.1.1.3 255.255.255.0 secondary")
configuring_interface_vlan(t, "1000", do="no ip address 1.1.1.3 255.255.255.0 secondary")
t.write("show ip interface vlan 1000")
t.readln("Vlan1000 is down, line protocol is down")
t.readln(" Internet address is 1.1.1.1/24")
t.readln(" Secondary address 1.1.1.2/24")
t.readln(" Outgoing access list is not set")
t.readln(" Inbound access list is not set")
t.read("my_switch#")
configuring(t, do="no interface vlan 1000")
remove_vlan(t, "1000")
@with_protocol
def test_extreme_vlan_range(self, t):
enable(t)
t.write("configure terminal")
t.readln("Enter configuration commands, one per line. End with CNTL/Z.")
t.read("my_switch(config)#")
t.write("vlan -1")
t.readln("Command rejected: Bad VLAN list - character #1 ('-') delimits a VLAN number")
t.readln(" which is out of the range 1..4094.")
t.read("my_switch(config)#")
t.write("vlan 0")
t.readln("Command rejected: Bad VLAN list - character #X (EOL) delimits a VLAN")
t.readln("number which is out of the range 1..4094.")
t.read("my_switch(config)#")
t.write("vlan 1")
t.read("my_switch(config-vlan)#")
t.write("exit")
t.read("my_switch(config)#")
t.write("vlan 4094")
t.read("my_switch(config-vlan)#")
t.write("exit")
t.read("my_switch(config)#")
t.write("no vlan 4094")
t.read("my_switch(config)#")
t.write("vlan 4095")
t.readln("Command rejected: Bad VLAN list - character #X (EOL) delimits a VLAN")
t.readln("number which is out of the range 1..4094.")
t.read("my_switch(config)#")
t.write("exit")
t.read("my_switch#")
@with_protocol
def test_full_running_config_and_pipe_begin_support(self, t):
enable(t)
create_vlan(t, "1000", name="hello")
create_interface_vlan(t, "1000")
configuring_interface(t, "Fa0/2", do="switchport mode trunk")
configuring_interface(t, "Fa0/2", do="switchport trunk allowed vlan 125")
t.write("show running | beg vlan")
t.readln("vlan 1")
t.readln("!")
t.readln("vlan 1000")
t.readln(" name hello")
t.readln("!")
t.readln("interface FastEthernet0/1")
t.readln("!")
t.readln("interface FastEthernet0/2")
t.readln(" switchport trunk allowed vlan 125")
t.readln(" switchport mode trunk")
t.readln("!")
t.readln("interface FastEthernet0/3")
t.readln("!")
t.readln("interface FastEthernet0/4")
t.readln("!")
t.readln("interface FastEthernet0/5")
t.readln("!")
t.readln("interface FastEthernet0/6")
t.readln("!")
t.readln("interface FastEthernet0/7")
t.readln("!")
t.readln("interface FastEthernet0/8")
t.readln("!")
t.readln("interface FastEthernet0/9")
t.readln("!")
t.readln("interface FastEthernet0/10")
t.readln("!")
t.readln("interface FastEthernet0/11")
t.readln("!")
t.readln("interface FastEthernet0/12")
t.readln("!")
t.readln("interface Vlan1000")
t.readln(" no ip address")
t.readln("!")
t.readln("end")
t.readln("")
t.read("my_switch#")
configuring_interface(t, "Fa0/2", do="no switchport mode trunk")
configuring_interface(t, "Fa0/2", do="no switchport trunk allowed vlan")
configuring(t, do="no interface vlan 1000")
remove_vlan(t, "1000")
@with_protocol
def test_pipe_inc_support(self, t):
enable(t)
create_vlan(t, "1000", name="hello")
t.write("show running | inc vlan")
t.readln("vlan 1")
t.readln("vlan 1000")
t.read("my_switch#")
remove_vlan(t, "1000")
@with_protocol
def test_ip_vrf(self, t):
enable(t)
t.write("conf t")
t.readln("Enter configuration commands, one per line. End with CNTL/Z.")
t.read("my_switch(config)#")
t.write("ip vrf SOME-LAN")
t.read("my_switch(config-vrf)#")
t.write("exit")
t.read("my_switch(config)#")
t.write("no ip vrf SOME-LAN")
t.read("my_switch(config)#")
t.write("exit")
t.read("my_switch#")
@with_protocol
def test_ip_vrf_forwarding(self, t):
enable(t)
t.write("conf t")
t.readln("Enter configuration commands, one per line. End with CNTL/Z.")
t.read("my_switch(config)#")
t.write("ip vrf SOME-LAN")
t.read("my_switch(config-vrf)#")
t.write("exit")
t.read("my_switch(config)#")
t.write("interface Fa0/2")
t.read("my_switch(config-if)#")
t.write("ip vrf forwarding NOT-DEFAULT-LAN")
t.readln("% VRF NOT-DEFAULT-LAN not configured.")
t.read("my_switch(config-if)#")
t.write("ip vrf forwarding SOME-LAN")
t.read("my_switch(config-if)#")
t.write("exit")
t.read("my_switch(config)#")
t.write("exit")
t.read("my_switch#")
assert_interface_configuration(t, "Fa0/2", [
"interface FastEthernet0/2",
" ip vrf forwarding SOME-LAN",
"end"])<|fim▁hole|> t.write("conf t")
t.readln("Enter configuration commands, one per line. End with CNTL/Z.")
t.read("my_switch(config)#")
t.write("no ip vrf SOME-LAN")
t.read("my_switch(config)#")
t.write("exit")
t.read("my_switch#")
assert_interface_configuration(t, "Fa0/2", [
"interface FastEthernet0/2",
"end"])
@with_protocol
def test_ip_vrf_default_lan(self, t):
enable(t)
t.write("conf t")
t.readln("Enter configuration commands, one per line. End with CNTL/Z.")
t.read("my_switch(config)#")
t.write("interface Fa0/2")
t.read("my_switch(config-if)#")
t.write("ip vrf forwarding DEFAULT-LAN")
t.read("my_switch(config-if)#")
t.write("exit")
t.read("my_switch(config)#")
t.write("exit")
t.read("my_switch#")
assert_interface_configuration(t, "Fa0/2", [
"interface FastEthernet0/2",
" ip vrf forwarding DEFAULT-LAN",
"end"])
t.write("conf t")
t.readln("Enter configuration commands, one per line. End with CNTL/Z.")
t.read("my_switch(config)#")
t.write("interface Fa0/2")
t.read("my_switch(config-if)#")
t.write("no ip vrf forwarding")
t.read("my_switch(config-if)#")
t.write("exit")
t.read("my_switch(config)#")
t.write("exit")
t.read("my_switch#")
assert_interface_configuration(t, "Fa0/2", [
"interface FastEthernet0/2",
"end"])
@with_protocol
def test_ip_setting_vrf_forwarding_wipes_ip_addresses(self, t):
enable(t)
create_vlan(t, "4000")
create_interface_vlan(t, "4000")
configuring_interface_vlan(t, "4000", do="ip address 10.10.0.10 255.255.255.0")
configuring_interface_vlan(t, "4000", do="ip address 10.10.1.10 255.255.255.0 secondary")
assert_interface_configuration(t, "Vlan4000", [
"interface Vlan4000",
" ip address 10.10.1.10 255.255.255.0 secondary",
" ip address 10.10.0.10 255.255.255.0",
"end"])
configuring_interface_vlan(t, "4000", do="ip vrf forwarding DEFAULT-LAN")
assert_interface_configuration(t, "Vlan4000", [
"interface Vlan4000",
" ip vrf forwarding DEFAULT-LAN",
" no ip address",
"end"])
configuring(t, do="no interface vlan 4000")
remove_vlan(t, "4000")
@with_protocol
def test_ip_helper(self, t):
enable(t)
create_interface_vlan(t, "4000")
assert_interface_configuration(t, "Vlan4000", [
"interface Vlan4000",
" no ip address",
"end"])
t.write("configure terminal")
t.readln("Enter configuration commands, one per line. End with CNTL/Z.")
t.read("my_switch(config)#")
t.write("interface vlan 4000")
t.read("my_switch(config-if)#")
t.write("ip helper-address")
t.readln("% Incomplete command.")
t.readln("")
t.read("my_switch(config-if)#")
t.write("ip helper-address 1.1.1")
t.readln("% Incomplete command.")
t.readln("")
t.read("my_switch(config-if)#")
t.write("ip helper-address 1.a.1")
t.readln(" ^")
t.readln("% Invalid input detected at '^' marker.") # not incomplete
t.readln("")
t.read("my_switch(config-if)#")
t.write("ip helper-address invalid.ip")
t.readln(" ^")
t.readln("% Invalid input detected at '^' marker.")
t.readln("")
t.read("my_switch(config-if)#")
t.write("ip helper-address 10.10.0.1 EXTRA INFO")
t.readln(" ^")
t.readln("% Invalid input detected at '^' marker.")
t.readln("")
t.read("my_switch(config-if)#")
t.write("exit")
t.read("my_switch(config)#")
t.write("exit")
t.read("my_switch#")
configuring_interface_vlan(t, "4000", do="ip helper-address 10.10.10.1")
assert_interface_configuration(t, "Vlan4000", [
"interface Vlan4000",
" no ip address",
" ip helper-address 10.10.10.1",
"end"])
configuring_interface_vlan(t, "4000", do="ip helper-address 10.10.10.1")
configuring_interface_vlan(t, "4000", do="ip helper-address 10.10.10.2")
configuring_interface_vlan(t, "4000", do="ip helper-address 10.10.10.3")
assert_interface_configuration(t, "Vlan4000", [
"interface Vlan4000",
" no ip address",
" ip helper-address 10.10.10.1",
" ip helper-address 10.10.10.2",
" ip helper-address 10.10.10.3",
"end"])
configuring_interface_vlan(t, "4000", do="no ip helper-address 10.10.10.1")
assert_interface_configuration(t, "Vlan4000", [
"interface Vlan4000",
" no ip address",
" ip helper-address 10.10.10.2",
" ip helper-address 10.10.10.3",
"end"])
configuring_interface_vlan(t, "4000", do="no ip helper-address 10.10.10.1")
t.write("configure terminal")
t.readln("Enter configuration commands, one per line. End with CNTL/Z.")
t.read("my_switch(config)#")
t.write("interface vlan 4000")
t.read("my_switch(config-if)#")
t.write("no ip helper-address 10.10.0.1 EXTRA INFO")
t.readln(" ^")
t.readln("% Invalid input detected at '^' marker.")
t.readln("")
t.read("my_switch(config-if)#")
t.write("exit")
t.read("my_switch(config)#")
t.write("exit")
t.read("my_switch#")
configuring_interface_vlan(t, "4000", do="no ip helper-address")
assert_interface_configuration(t, "Vlan4000", [
"interface Vlan4000",
" no ip address",
"end"])
configuring(t, do="no interface vlan 4000")
@with_protocol
def test_ip_route(self, t):
enable(t)
configuring(t, do="ip route 1.1.1.0 255.255.255.0 2.2.2.2")
t.write("show ip route static | inc 2.2.2.2")
t.readln("S 1.1.1.0 [x/y] via 2.2.2.2")
t.read("my_switch#")
t.write("show running | inc 2.2.2.2")
t.readln("ip route 1.1.1.0 255.255.255.0 2.2.2.2")
t.read("my_switch#")
configuring(t, do="no ip route 1.1.1.0 255.255.255.0 2.2.2.2")
t.write("show ip route static")
t.readln("")
t.read("my_switch#")
t.write("exit")
@with_protocol
def test_write_memory(self, t):
enable(t)
t.write("write memory")
t.readln("Building configuration...")
t.readln("OK")
t.read("my_switch#")
@with_protocol
def test_show_version(self, t):
enable(t)
t.write("show version")
t.readln("Cisco IOS Software, C3750 Software (C3750-IPSERVICESK9-M), Version 12.2(58)SE2, RELEASE SOFTWARE (fc1)")
t.readln("Technical Support: http://www.cisco.com/techsupport")
t.readln("Copyright (c) 1986-2011 by Cisco Systems, Inc.")
t.readln("Compiled Thu 21-Jul-11 01:53 by prod_rel_team")
t.readln("")
t.readln("ROM: Bootstrap program is C3750 boot loader")
t.readln("BOOTLDR: C3750 Boot Loader (C3750-HBOOT-M) Version 12.2(44)SE5, RELEASE SOFTWARE (fc1)")
t.readln("")
t.readln("my_switch uptime is 1 year, 18 weeks, 5 days, 1 hour, 11 minutes")
t.readln("System returned to ROM by power-on")
t.readln("System image file is \"flash:c3750-ipservicesk9-mz.122-58.SE2.bin\"")
t.readln("")
t.readln("")
t.readln("This product contains cryptographic features and is subject to United")
t.readln("States and local country laws governing import, export, transfer and")
t.readln("use. Delivery of Cisco cryptographic products does not imply")
t.readln("third-party authority to import, export, distribute or use encryption.")
t.readln("Importers, exporters, distributors and users are responsible for")
t.readln("compliance with U.S. and local country laws. By using this product you")
t.readln("agree to comply with applicable laws and regulations. If you are unable")
t.readln("to comply with U.S. and local laws, return this product immediately.")
t.readln("")
t.readln("A summary of U.S. laws governing Cisco cryptographic products may be found at:")
t.readln("http://www.cisco.com/wwl/export/crypto/tool/stqrg.html")
t.readln("")
t.readln("If you require further assistance please contact us by sending email to")
t.readln("[email protected].")
t.readln("")
t.readln("cisco WS-C3750G-24TS-1U (PowerPC405) processor (revision H0) with 131072K bytes of memory.")
t.readln("Processor board ID FOC1530X2F7")
t.readln("Last reset from power-on")
t.readln("0 Virtual Ethernet interfaces")
t.readln("12 Gigabit Ethernet interfaces")
t.readln("The password-recovery mechanism is enabled.")
t.readln("")
t.readln("512K bytes of flash-simulated non-volatile configuration memory.")
t.readln("Base ethernet MAC Address : 00:00:00:00:00:00")
t.readln("Motherboard assembly number : 73-10219-09")
t.readln("Power supply part number : 341-0098-02")
t.readln("Motherboard serial number : FOC153019Z6")
t.readln("Power supply serial number : ALD153000BB")
t.readln("Model revision number : H0")
t.readln("Motherboard revision number : A0")
t.readln("Model number : WS-C3750G-24TS-S1U")
t.readln("System serial number : FOC1530X2F7")
t.readln("Top Assembly Part Number : 800-26859-03")
t.readln("Top Assembly Revision Number : C0")
t.readln("Version ID : V05")
t.readln("CLEI Code Number : COMB600BRA")
t.readln("Hardware Board Revision Number : 0x09")
t.readln("")
t.readln("")
t.readln("Switch Ports Model SW Version SW Image")
t.readln("------ ----- ----- ---------- ----------")
t.readln("* 1 12 WS-C3750G-24TS-1U 12.2(58)SE2 C3750-IPSERVICESK9-M")
t.readln("")
t.readln("")
t.readln("Configuration register is 0xF")
t.readln("")
t.read("my_switch#")
@with_protocol
def test_reset_port(self, t):
enable(t)
configuring_interface(t, "FastEthernet0/3", do="description shizzle the whizzle and drizzle with lizzle")
configuring_interface(t, "FastEthernet0/3", do="shutdown")
set_interface_on_vlan(t, "FastEthernet0/3", "123")
assert_interface_configuration(t, "FastEthernet0/3", [
"interface FastEthernet0/3",
" description shizzle the whizzle and drizzle with lizzle",
" switchport access vlan 123",
" switchport mode access",
" shutdown",
"end"])
configuring(t, "default interface FastEthernet0/3")
assert_interface_configuration(t, "FastEthernet0/3", [
"interface FastEthernet0/3",
"end"])
@with_protocol
def test_reset_port_invalid_interface_fails(self, t):
enable(t)
configuring_interface(t, "FastEthernet0/3", do="description shizzle the whizzle and drizzle with lizzle")
t.write("conf t")
t.readln("Enter configuration commands, one per line. End with CNTL/Z.")
t.read("my_switch(config)#")
t.write("default interface WrongInterfaceName0/3")
t.readln("\s*\^", regex=True)
t.readln("% Invalid input detected at '^' marker (not such interface)")
t.readln("")
t.read("my_switch(config)#")
configuring(t, "default interface FastEthernet0/3")
@with_protocol
def test_standby_version(self, t):
enable(t)
create_vlan(t, "2999")
create_interface_vlan(t, "2999")
configuring_interface_vlan(t, "2999", do='standby version 2')
assert_interface_configuration(t, "Vlan2999", [
"interface Vlan2999",
" no ip address",
" standby version 2",
"end"])
configuring_interface_vlan(t, "2999", do='no standby version 2')
assert_interface_configuration(t, "Vlan2999", [
"interface Vlan2999",
" no ip address",
"end"])
configuring_interface_vlan(t, "2999", do='standby version 2')
configuring_interface_vlan(t, "2999", do='standby version 1')
assert_interface_configuration(t, "Vlan2999", [
"interface Vlan2999",
" no ip address",
"end"])
t.write("configure terminal")
t.readln("Enter configuration commands, one per line. End with CNTL/Z.")
t.read("my_switch(config)#")
t.write("interface vlan 2999")
t.read("my_switch(config-if)#")
t.write("standby version")
t.readln("% Incomplete command.")
t.readln("")
t.read("my_switch(config-if)#")
t.write("standby version 3")
t.readln(" ^")
t.readln("% Invalid input detected at '^' marker.")
t.readln("")
t.read("my_switch(config-if)#")
t.write("standby version 2 2")
t.readln(" ^")
t.readln("% Invalid input detected at '^' marker.")
t.readln("")
t.read("my_switch(config-if)#")
t.write("exit")
t.read("my_switch(config)#")
t.write("exit")
t.read("my_switch#")
assert_interface_configuration(t, "Vlan2999", [
"interface Vlan2999",
" no ip address",
"end"])
configuring(t, do="no interface vlan 2999")
remove_vlan(t, "2999")
@with_protocol
def test_disable_ntp(self, t):
enable(t)
configuring_interface(t, "FastEthernet 0/3", do="ntp disable")
assert_interface_configuration(t, "FastEthernet0/3", [
"interface FastEthernet0/3",
" ntp disable",
"end"])
configuring_interface(t, "FastEthernet 0/3", do="no ntp disable")
assert_interface_configuration(t, "FastEthernet0/3", [
"interface FastEthernet0/3",
"end"])
class TestCiscoSwitchProtocolSSH(TestCiscoSwitchProtocol):
__test__ = True
tester_class = SshTester
class TestCiscoSwitchProtocolTelnet(TestCiscoSwitchProtocol):
__test__ = True
tester_class = TelnetTester<|fim▁end|> | |
<|file_name|>textModel.ts<|end_file_name|><|fim▁begin|>/*---------------------------------------------------------------------------------------------
* Copyright (c) Microsoft Corporation. All rights reserved.
* Licensed under the MIT License. See License.txt in the project root for license information.
*--------------------------------------------------------------------------------------------*/
'use strict';
import {OrderGuaranteeEventEmitter} from 'vs/base/common/eventEmitter';
import * as strings from 'vs/base/common/strings';
import {Position} from 'vs/editor/common/core/position';
import {Range} from 'vs/editor/common/core/range';
import * as editorCommon from 'vs/editor/common/editorCommon';
import {ModelLine} from 'vs/editor/common/model/modelLine';
import {guessIndentation} from 'vs/editor/common/model/indentationGuesser';
import {DEFAULT_INDENTATION, DEFAULT_TRIM_AUTO_WHITESPACE} from 'vs/editor/common/config/defaultConfig';
import {PrefixSumComputer} from 'vs/editor/common/viewModel/prefixSumComputer';
var LIMIT_FIND_COUNT = 999;
export const LONG_LINE_BOUNDARY = 1000;
export class TextModel extends OrderGuaranteeEventEmitter implements editorCommon.ITextModel {
private static MODEL_SYNC_LIMIT = 5 * 1024 * 1024; // 5 MB
private static MODEL_TOKENIZATION_LIMIT = 20 * 1024 * 1024; // 20 MB
public static DEFAULT_CREATION_OPTIONS: editorCommon.ITextModelCreationOptions = {
tabSize: DEFAULT_INDENTATION.tabSize,
insertSpaces: DEFAULT_INDENTATION.insertSpaces,
detectIndentation: false,
defaultEOL: editorCommon.DefaultEndOfLine.LF,
trimAutoWhitespace: DEFAULT_TRIM_AUTO_WHITESPACE,
};
/*protected*/ _lines:ModelLine[];
protected _EOL:string;
protected _isDisposed:boolean;
protected _isDisposing:boolean;
protected _options: editorCommon.ITextModelResolvedOptions;
protected _lineStarts: PrefixSumComputer;
private _versionId:number;
/**
* Unlike, versionId, this can go down (via undo) or go to previous values (via redo)
*/
private _alternativeVersionId: number;
private _BOM:string;
private _shouldSimplifyMode: boolean;
private _shouldDenyMode: boolean;
constructor(allowedEventTypes:string[], rawText:editorCommon.IRawText) {
allowedEventTypes.push(editorCommon.EventType.ModelRawContentChanged, editorCommon.EventType.ModelOptionsChanged);
super(allowedEventTypes);
this._shouldSimplifyMode = (rawText.length > TextModel.MODEL_SYNC_LIMIT);
this._shouldDenyMode = (rawText.length > TextModel.MODEL_TOKENIZATION_LIMIT);
this._options = rawText.options;
this._constructLines(rawText);
this._setVersionId(1);
this._isDisposed = false;
this._isDisposing = false;
}
public isTooLargeForHavingAMode(): boolean {
return this._shouldDenyMode;
}
public isTooLargeForHavingARichMode(): boolean {
return this._shouldSimplifyMode;
}
public getOptions(): editorCommon.ITextModelResolvedOptions {
return this._options;
}
public updateOptions(newOpts:editorCommon.ITextModelUpdateOptions): void {
let somethingChanged = false;
let changed:editorCommon.IModelOptionsChangedEvent = {
tabSize: false,
insertSpaces: false,
trimAutoWhitespace: false
};
if (typeof newOpts.insertSpaces !== 'undefined') {
if (this._options.insertSpaces !== newOpts.insertSpaces) {
somethingChanged = true;
changed.insertSpaces = true;
this._options.insertSpaces = newOpts.insertSpaces;
}
}
if (typeof newOpts.tabSize !== 'undefined') {
if (this._options.tabSize !== newOpts.tabSize) {
somethingChanged = true;
changed.tabSize = true;
this._options.tabSize = newOpts.tabSize;
}
}
if (typeof newOpts.trimAutoWhitespace !== 'undefined') {
if (this._options.trimAutoWhitespace !== newOpts.trimAutoWhitespace) {
somethingChanged = true;
changed.trimAutoWhitespace = true;
this._options.trimAutoWhitespace = newOpts.trimAutoWhitespace;
}
}
if (somethingChanged) {
this.emit(editorCommon.EventType.ModelOptionsChanged, changed);
}
}
public detectIndentation(defaultInsertSpaces:boolean, defaultTabSize:number): void {
let lines = this._lines.map(line => line.text);
let guessedIndentation = guessIndentation(lines, defaultTabSize, defaultInsertSpaces);
this.updateOptions({
insertSpaces: guessedIndentation.insertSpaces,
tabSize: guessedIndentation.tabSize
});
}
private _normalizeIndentationFromWhitespace(str:string): string {
let tabSize = this._options.tabSize;
let insertSpaces = this._options.insertSpaces;
let spacesCnt = 0;
for (let i = 0; i < str.length; i++) {
if (str.charAt(i) === '\t') {
spacesCnt += tabSize;
} else {
spacesCnt++;
}
}
let result = '';
if (!insertSpaces) {
let tabsCnt = Math.floor(spacesCnt / tabSize);
spacesCnt = spacesCnt % tabSize;
for (let i = 0; i < tabsCnt; i++) {
result += '\t';
}
}
for (let i = 0; i < spacesCnt; i++) {
result += ' ';
}
return result;
}
public normalizeIndentation(str:string): string {
let firstNonWhitespaceIndex = strings.firstNonWhitespaceIndex(str);
if (firstNonWhitespaceIndex === -1) {
firstNonWhitespaceIndex = str.length;
}
return this._normalizeIndentationFromWhitespace(str.substring(0, firstNonWhitespaceIndex)) + str.substring(firstNonWhitespaceIndex);
}
public getOneIndent(): string {
let tabSize = this._options.tabSize;
let insertSpaces = this._options.insertSpaces;
if (insertSpaces) {
let result = '';
for (let i = 0; i < tabSize; i++) {
result += ' ';
}
return result;
} else {
return '\t';
}
}
public getVersionId(): number {
return this._versionId;
}
public getAlternativeVersionId(): number {
return this._alternativeVersionId;
}
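	// Line-start offsets are built lazily into a prefix-sum structure, so the
	// offset <-> position conversions below avoid rescanning every line.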
private _ensureLineStarts(): void {
if (!this._lineStarts) {
const lineStartValues:number[] = [];
const eolLength = this._EOL.length;
for (let i = 0, len = this._lines.length; i < len; i++) {
lineStartValues.push(this._lines[i].text.length + eolLength);
}
this._lineStarts = new PrefixSumComputer(lineStartValues);
}
}
public getOffsetAt(rawPosition: editorCommon.IPosition): number {
let position = this.validatePosition(rawPosition);
this._ensureLineStarts();
return this._lineStarts.getAccumulatedValue(position.lineNumber - 2) + position.column - 1;
}
public getPositionAt(offset: number): Position {
offset = Math.floor(offset);
offset = Math.max(0, offset);
this._ensureLineStarts();
let out = this._lineStarts.getIndexOf(offset);
let lineLength = this._lines[out.index].text.length;
// Ensure we return a valid position
return new Position(out.index + 1, Math.min(out.remainder + 1, lineLength + 1));
}
_increaseVersionId(): void {
this._setVersionId(this._versionId + 1);
}
_setVersionId(newVersionId:number): void {
this._versionId = newVersionId;
this._alternativeVersionId = this._versionId;
}
_overwriteAlternativeVersionId(newAlternativeVersionId:number): void {
this._alternativeVersionId = newAlternativeVersionId;
}
public isDisposed(): boolean {
return this._isDisposed;
}
public dispose(): void {
this._isDisposed = true;
// Null out members, such that any use of a disposed model will throw exceptions sooner rather than later
this._lines = null;
this._EOL = null;
this._BOM = null;
super.dispose();
}
_createContentChangedFlushEvent(): editorCommon.IModelContentChangedFlushEvent {
return {
changeType: editorCommon.EventType.ModelRawContentChangedFlush,
detail: null,
// TODO@Alex -> remove these fields from here
versionId: -1,
isUndoing: false,
isRedoing: false
};
}
protected _emitContentChanged2(startLineNumber:number, startColumn:number, endLineNumber:number, endColumn:number, rangeLength:number, text:string, isUndoing:boolean, isRedoing:boolean): void {
var e:editorCommon.IModelContentChangedEvent2 = {
range: new Range(startLineNumber, startColumn, endLineNumber, endColumn),
rangeLength: rangeLength,
text: text,
eol: this._EOL,
versionId: this.getVersionId(),
isUndoing: isUndoing,
isRedoing: isRedoing
};
if (!this._isDisposing) {
this.emit(editorCommon.EventType.ModelContentChanged2, e);
}
}
_resetValue(e:editorCommon.IModelContentChangedFlushEvent, newValue:editorCommon.IRawText): void {
this._constructLines(newValue);
this._increaseVersionId();
e.detail = this.toRawText();
e.versionId = this._versionId;
}
public toRawText(): editorCommon.IRawText {
return {
BOM: this._BOM,
EOL: this._EOL,
lines: this.getLinesContent(),
length: this.getValueLength(),
options: this._options
};
}
public equals(other: editorCommon.IRawText): boolean {
if (this._BOM !== other.BOM) {
return false;
}
if (this._EOL !== other.EOL) {
return false;
}
if (this._lines.length !== other.lines.length) {
return false;
}
for (let i = 0, len = this._lines.length; i < len; i++) {
if (this._lines[i].text !== other.lines[i]) {
return false;
}
}
return true;
}
public setValue(value:string): void {
if (value === null) {
// There's nothing to do
return;
}
let rawText: editorCommon.IRawText = null;
rawText = TextModel.toRawText(value, {
tabSize: this._options.tabSize,
insertSpaces: this._options.insertSpaces,
trimAutoWhitespace: this._options.trimAutoWhitespace,
detectIndentation: false,
defaultEOL: this._options.defaultEOL
});
this.setValueFromRawText(rawText);
}
public setValueFromRawText(newValue:editorCommon.IRawText): void {
if (newValue === null) {
// There's nothing to do
return;
}
var oldFullModelRange = this.getFullModelRange();
var oldModelValueLength = this.getValueLengthInRange(oldFullModelRange);
var endLineNumber = this.getLineCount();
var endColumn = this.getLineMaxColumn(endLineNumber);
var e = this._createContentChangedFlushEvent();
this._resetValue(e, newValue);
this._emitModelContentChangedFlushEvent(e);
this._emitContentChanged2(1, 1, endLineNumber, endColumn, oldModelValueLength, this.getValue(), false, false);
}
public getValue(eol?:editorCommon.EndOfLinePreference, preserveBOM:boolean=false): string {
var fullModelRange = this.getFullModelRange();
var fullModelValue = this.getValueInRange(fullModelRange, eol);
if (preserveBOM) {
return this._BOM + fullModelValue;
}
return fullModelValue;
}
public getValueLength(eol?: editorCommon.EndOfLinePreference, preserveBOM: boolean = false): number {
var fullModelRange = this.getFullModelRange();
var fullModelValue = this.getValueLengthInRange(fullModelRange, eol);
if (preserveBOM) {
return this._BOM.length + fullModelValue;
}
return fullModelValue;
}
public getEmptiedValueInRange(rawRange:editorCommon.IRange, fillCharacter: string = '', eol:editorCommon.EndOfLinePreference=editorCommon.EndOfLinePreference.TextDefined): string {
var range = this.validateRange(rawRange);
if (range.isEmpty()) {
return '';
}
if (range.startLineNumber === range.endLineNumber) {
return this._repeatCharacter(fillCharacter, range.endColumn - range.startColumn);
}
var lineEnding = this._getEndOfLine(eol),
startLineIndex = range.startLineNumber - 1,
endLineIndex = range.endLineNumber - 1,
resultLines:string[] = [];
resultLines.push(this._repeatCharacter(fillCharacter, this._lines[startLineIndex].text.length - range.startColumn + 1));
for (var i = startLineIndex + 1; i < endLineIndex; i++) {
resultLines.push(this._repeatCharacter(fillCharacter, this._lines[i].text.length));
}
resultLines.push(this._repeatCharacter(fillCharacter, range.endColumn - 1));
return resultLines.join(lineEnding);
}
private _repeatCharacter(fillCharacter:string, count:number): string {
var r = '';
for (var i = 0; i < count; i++) {
r += fillCharacter;
}
return r;
}
public getValueInRange(rawRange:editorCommon.IRange, eol:editorCommon.EndOfLinePreference=editorCommon.EndOfLinePreference.TextDefined): string {
var range = this.validateRange(rawRange);
if (range.isEmpty()) {
return '';
}
if (range.startLineNumber === range.endLineNumber) {
return this._lines[range.startLineNumber - 1].text.substring(range.startColumn - 1, range.endColumn - 1);
}
var lineEnding = this._getEndOfLine(eol),
startLineIndex = range.startLineNumber - 1,
endLineIndex = range.endLineNumber - 1,
resultLines:string[] = [];
resultLines.push(this._lines[startLineIndex].text.substring(range.startColumn - 1));
for (var i = startLineIndex + 1; i < endLineIndex; i++) {
resultLines.push(this._lines[i].text);
}
resultLines.push(this._lines[endLineIndex].text.substring(0, range.endColumn - 1));
return resultLines.join(lineEnding);
}
public getValueLengthInRange(rawRange:editorCommon.IRange, eol:editorCommon.EndOfLinePreference=editorCommon.EndOfLinePreference.TextDefined): number {
var range = this.validateRange(rawRange);
if (range.isEmpty()) {
return 0;
}
if (range.startLineNumber === range.endLineNumber) {
return (range.endColumn - range.startColumn);
}
let startOffset = this.getOffsetAt(new Position(range.startLineNumber, range.startColumn));
let endOffset = this.getOffsetAt(new Position(range.endLineNumber, range.endColumn));
return endOffset - startOffset;
}
public isDominatedByLongLines(): boolean {<|fim▁hole|> var smallLineCharCount = 0,
longLineCharCount = 0,
i: number,
len: number,
lines = this._lines,
lineLength: number;
for (i = 0, len = this._lines.length; i < len; i++) {
lineLength = lines[i].text.length;
if (lineLength >= LONG_LINE_BOUNDARY) {
longLineCharCount += lineLength;
} else {
smallLineCharCount += lineLength;
}
}
return (longLineCharCount > smallLineCharCount);
}
public getLineCount(): number {
return this._lines.length;
}
public getLineContent(lineNumber:number): string {
if (lineNumber < 1 || lineNumber > this.getLineCount()) {
throw new Error('Illegal value ' + lineNumber + ' for `lineNumber`');
}
return this._lines[lineNumber - 1].text;
}
public getLinesContent(): string[] {
var r: string[] = [];
for (var i = 0, len = this._lines.length; i < len; i++) {
r[i] = this._lines[i].text;
}
return r;
}
public getEOL(): string {
return this._EOL;
}
public setEOL(eol: editorCommon.EndOfLineSequence): void {
var newEOL = (eol === editorCommon.EndOfLineSequence.CRLF ? '\r\n' : '\n');
if (this._EOL === newEOL) {
// Nothing to do
return;
}
var oldFullModelRange = this.getFullModelRange();
var oldModelValueLength = this.getValueLengthInRange(oldFullModelRange);
var endLineNumber = this.getLineCount();
var endColumn = this.getLineMaxColumn(endLineNumber);
this._EOL = newEOL;
this._lineStarts = null;
this._increaseVersionId();
var e = this._createContentChangedFlushEvent();
e.detail = this.toRawText();
e.versionId = this._versionId;
this._emitModelContentChangedFlushEvent(e);
this._emitContentChanged2(1, 1, endLineNumber, endColumn, oldModelValueLength, this.getValue(), false, false);
}
public getLineMinColumn(lineNumber:number): number {
return 1;
}
public getLineMaxColumn(lineNumber:number): number {
if (lineNumber < 1 || lineNumber > this.getLineCount()) {
throw new Error('Illegal value ' + lineNumber + ' for `lineNumber`');
}
return this._lines[lineNumber - 1].text.length + 1;
}
public getLineFirstNonWhitespaceColumn(lineNumber: number): number {
if (lineNumber < 1 || lineNumber > this.getLineCount()) {
throw new Error('Illegal value ' + lineNumber + ' for `lineNumber`');
}
var result = strings.firstNonWhitespaceIndex(this._lines[lineNumber - 1].text);
if (result === -1) {
return 0;
}
return result + 1;
}
public getLineLastNonWhitespaceColumn(lineNumber: number): number {
if (lineNumber < 1 || lineNumber > this.getLineCount()) {
throw new Error('Illegal value ' + lineNumber + ' for `lineNumber`');
}
var result = strings.lastNonWhitespaceIndex(this._lines[lineNumber - 1].text);
if (result === -1) {
return 0;
}
return result + 2;
}
public validateLineNumber(lineNumber:number): number {
if (lineNumber < 1) {
lineNumber = 1;
}
if (lineNumber > this._lines.length) {
lineNumber = this._lines.length;
}
return lineNumber;
}
public validatePosition(position:editorCommon.IPosition): Position {
var lineNumber = position.lineNumber ? position.lineNumber : 1;
var column = position.column ? position.column : 1;
if (lineNumber < 1) {
lineNumber = 1;
column = 1;
}
else if (lineNumber > this._lines.length) {
lineNumber = this._lines.length;
column = this.getLineMaxColumn(lineNumber);
}
else {
var maxColumn = this.getLineMaxColumn(lineNumber);
if (column < 1) {
column = 1;
}
else if (column > maxColumn) {
column = maxColumn;
}
}
return new Position(lineNumber, column);
}
public validateRange(range:editorCommon.IRange): Range {
var start = this.validatePosition(new Position(range.startLineNumber, range.startColumn));
var end = this.validatePosition(new Position(range.endLineNumber, range.endColumn));
return new Range(start.lineNumber, start.column, end.lineNumber, end.column);
}
public modifyPosition(rawPosition: editorCommon.IPosition, offset: number) : Position {
return this.getPositionAt(this.getOffsetAt(rawPosition) + offset);
}
public getFullModelRange(): Range {
var lineCount = this.getLineCount();
return new Range(1, 1, lineCount, this.getLineMaxColumn(lineCount));
}
_emitModelContentChangedFlushEvent(e:editorCommon.IModelContentChangedFlushEvent): void {
if (!this._isDisposing) {
this.emit(editorCommon.EventType.ModelRawContentChanged, e);
}
}
public static toRawText(rawText:string, opts:editorCommon.ITextModelCreationOptions): editorCommon.IRawText {
// Count the number of lines that end with \r\n
var carriageReturnCnt = 0,
lastCarriageReturnIndex = -1;
while ((lastCarriageReturnIndex = rawText.indexOf('\r', lastCarriageReturnIndex + 1)) !== -1) {
carriageReturnCnt++;
}
		// Split the text into lines
var lines = rawText.split(/\r\n|\r|\n/);
// Remove the BOM (if present)
var BOM = '';
if (strings.startsWithUTF8BOM(lines[0])) {
BOM = strings.UTF8_BOM_CHARACTER;
lines[0] = lines[0].substr(1);
}
var lineFeedCnt = lines.length - 1;
var EOL = '';
if (lineFeedCnt === 0) {
// This is an empty file or a file with precisely one line
EOL = (opts.defaultEOL === editorCommon.DefaultEndOfLine.LF ? '\n' : '\r\n');
} else if (carriageReturnCnt > lineFeedCnt / 2) {
// More than half of the file contains \r\n ending lines
EOL = '\r\n';
} else {
// At least one line more ends in \n
EOL = '\n';
}
let resolvedOpts: editorCommon.ITextModelResolvedOptions;
if (opts.detectIndentation) {
let guessedIndentation = guessIndentation(lines, opts.tabSize, opts.insertSpaces);
resolvedOpts = {
tabSize: guessedIndentation.tabSize,
insertSpaces: guessedIndentation.insertSpaces,
trimAutoWhitespace: opts.trimAutoWhitespace,
defaultEOL: opts.defaultEOL
};
} else {
resolvedOpts = {
tabSize: opts.tabSize,
insertSpaces: opts.insertSpaces,
trimAutoWhitespace: opts.trimAutoWhitespace,
defaultEOL: opts.defaultEOL
};
}
return {
BOM: BOM,
EOL: EOL,
lines: lines,
length: rawText.length,
options: resolvedOpts
};
}
_constructLines(rawText:editorCommon.IRawText): void {
var rawLines = rawText.lines,
modelLines: ModelLine[] = [],
i: number,
len: number;
for (i = 0, len = rawLines.length; i < len; i++) {
modelLines.push(new ModelLine(i + 1, rawLines[i]));
}
this._BOM = rawText.BOM;
this._EOL = rawText.EOL;
this._lines = modelLines;
this._lineStarts = null;
}
private _getEndOfLine(eol:editorCommon.EndOfLinePreference): string {
switch (eol) {
case editorCommon.EndOfLinePreference.LF:
return '\n';
case editorCommon.EndOfLinePreference.CRLF:
return '\r\n';
case editorCommon.EndOfLinePreference.TextDefined:
return this.getEOL();
}
throw new Error('Unknown EOL preference');
}
public findMatches(searchString:string, rawSearchScope:any, isRegex:boolean, matchCase:boolean, wholeWord:boolean, limitResultCount:number = LIMIT_FIND_COUNT): Range[] {
var regex = strings.createSafeRegExp(searchString, isRegex, matchCase, wholeWord);
if (!regex) {
return [];
}
var searchRange:Range;
if (Range.isIRange(rawSearchScope)) {
searchRange = rawSearchScope;
} else {
searchRange = this.getFullModelRange();
}
return this._doFindMatches(searchRange, regex, limitResultCount);
}
public findNextMatch(searchString:string, rawSearchStart:editorCommon.IPosition, isRegex:boolean, matchCase:boolean, wholeWord:boolean): Range {
var regex = strings.createSafeRegExp(searchString, isRegex, matchCase, wholeWord);
if (!regex) {
return null;
}
var searchStart = this.validatePosition(rawSearchStart),
lineCount = this.getLineCount(),
startLineNumber = searchStart.lineNumber,
text: string,
r: Range;
// Look in first line
text = this._lines[startLineNumber - 1].text.substring(searchStart.column - 1);
r = this._findMatchInLine(regex, text, startLineNumber, searchStart.column - 1);
if (r) {
return r;
}
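		// No match after the start position: continue on the following lines,
		// wrapping around to the top of the buffer.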
for (var i = 1; i <= lineCount; i++) {
var lineIndex = (startLineNumber + i - 1) % lineCount;
text = this._lines[lineIndex].text;
r = this._findMatchInLine(regex, text, lineIndex + 1, 0);
if (r) {
return r;
}
}
return null;
}
public findPreviousMatch(searchString:string, rawSearchStart:editorCommon.IPosition, isRegex:boolean, matchCase:boolean, wholeWord:boolean): Range {
var regex = strings.createSafeRegExp(searchString, isRegex, matchCase, wholeWord);
if (!regex) {
return null;
}
var searchStart = this.validatePosition(rawSearchStart),
lineCount = this.getLineCount(),
startLineNumber = searchStart.lineNumber,
text: string,
r: Range;
// Look in first line
text = this._lines[startLineNumber - 1].text.substring(0, searchStart.column - 1);
r = this._findLastMatchInLine(regex, text, startLineNumber);
if (r) {
return r;
}
for (var i = 1; i <= lineCount; i++) {
var lineIndex = (lineCount + startLineNumber - i - 1) % lineCount;
text = this._lines[lineIndex].text;
r = this._findLastMatchInLine(regex, text, lineIndex + 1);
if (r) {
return r;
}
}
return null;
}
private _doFindMatches(searchRange:Range, searchRegex:RegExp, limitResultCount:number): Range[] {
var result:Range[] = [],
text: string,
counter = 0;
// Early case for a search range that starts & stops on the same line number
if (searchRange.startLineNumber === searchRange.endLineNumber) {
text = this._lines[searchRange.startLineNumber - 1].text.substring(searchRange.startColumn - 1, searchRange.endColumn - 1);
counter = this._findMatchesInLine(searchRegex, text, searchRange.startLineNumber, searchRange.startColumn - 1, counter, result, limitResultCount);
return result;
}
// Collect results from first line
text = this._lines[searchRange.startLineNumber - 1].text.substring(searchRange.startColumn - 1);
counter = this._findMatchesInLine(searchRegex, text, searchRange.startLineNumber, searchRange.startColumn - 1, counter, result, limitResultCount);
// Collect results from middle lines
for (var lineNumber = searchRange.startLineNumber + 1; lineNumber < searchRange.endLineNumber && counter < limitResultCount; lineNumber++) {
counter = this._findMatchesInLine(searchRegex, this._lines[lineNumber - 1].text, lineNumber, 0, counter, result, limitResultCount);
}
// Collect results from last line
if (counter < limitResultCount) {
text = this._lines[searchRange.endLineNumber - 1].text.substring(0, searchRange.endColumn - 1);
counter = this._findMatchesInLine(searchRegex, text, searchRange.endLineNumber, 0, counter, result, limitResultCount);
}
return result;
}
private _findMatchInLine(searchRegex:RegExp, text:string, lineNumber:number, deltaOffset:number): Range {
var m = searchRegex.exec(text);
if (!m) {
return null;
}
return new Range(lineNumber, m.index + 1 + deltaOffset, lineNumber, m.index + 1 + m[0].length + deltaOffset);
}
private _findLastMatchInLine(searchRegex:RegExp, text:string, lineNumber:number): Range {
let bestResult: Range = null;
let m:RegExpExecArray;
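		// Keep the last match found; stop if the regex returns the same range
		// twice (guards against zero-width matches looping forever).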
while ((m = searchRegex.exec(text))) {
let result = new Range(lineNumber, m.index + 1, lineNumber, m.index + 1 + m[0].length);
if (result.equalsRange(bestResult)) {
break;
}
bestResult = result;
}
return bestResult;
}
private _findMatchesInLine(searchRegex:RegExp, text:string, lineNumber:number, deltaOffset:number, counter:number, result:Range[], limitResultCount:number): number {
var m:RegExpExecArray;
// Reset regex to search from the beginning
searchRegex.lastIndex = 0;
do {
m = searchRegex.exec(text);
if (m) {
var range = new Range(lineNumber, m.index + 1 + deltaOffset, lineNumber, m.index + 1 + m[0].length + deltaOffset);
// Exit early if the regex matches the same range
if (range.equalsRange(result[result.length - 1])) {
return counter;
}
result.push(range);
counter++;
if (counter >= limitResultCount) {
return counter;
}
}
} while(m);
return counter;
}
}
export class RawText {
public static fromString(rawText:string, opts:editorCommon.ITextModelCreationOptions): editorCommon.IRawText {
return TextModel.toRawText(rawText, opts);
}
public static fromStringWithModelOptions(rawText:string, model:editorCommon.IModel): editorCommon.IRawText {
let opts = model.getOptions();
return TextModel.toRawText(rawText, {
tabSize: opts.tabSize,
insertSpaces: opts.insertSpaces,
trimAutoWhitespace: opts.trimAutoWhitespace,
detectIndentation: false,
defaultEOL: opts.defaultEOL
});
}
}<|fim▁end|> | |
<|file_name|>col.js<|end_file_name|><|fim▁begin|>import React, { Component, } from 'react';
import {
StyleSheet,
View,
} from 'react-native';
export default class Col extends Component {
render() {
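    // The integer "span" prop becomes a flex weight, so sibling columns
    // split the available space proportionally.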
return (
      <View style={[styles.col, { flex: parseInt(this.props.span, 10) }, this.props.style]}>{this.props.children}</View>
)
}
}
const styles = StyleSheet.create({
col: {<|fim▁hole|><|fim▁end|> | }
}); |
<|file_name|>SpongeGoalType.java<|end_file_name|><|fim▁begin|>/*
* This file is part of Sponge, licensed under the MIT License (MIT).
*
* Copyright (c) SpongePowered <https://www.spongepowered.org>
* Copyright (c) contributors
*
* Permission is hereby granted, free of charge, to any person obtaining a copy
* of this software and associated documentation files (the "Software"), to deal
* in the Software without restriction, including without limitation the rights
* to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
* copies of the Software, and to permit persons to whom the Software is
* furnished to do so, subject to the following conditions:
*
* The above copyright notice and this permission notice shall be included in
* all copies or substantial portions of the Software.
*
* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
* IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
* FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
* AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
* LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
* OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
* THE SOFTWARE.
*/
package org.spongepowered.common.entity.ai.goal;
import org.spongepowered.api.entity.ai.goal.Goal;
import org.spongepowered.api.entity.ai.goal.GoalType;
import org.spongepowered.api.entity.living.Agent;
public final class SpongeGoalType implements GoalType {
private final Class<? extends Goal<? extends Agent>> goalClass;
public SpongeGoalType(final Class<? extends Goal<? extends Agent>> goalClass) {
this.goalClass = goalClass;
}
@Override
public Class<? extends Goal<?>> goalClass() {
return this.goalClass;<|fim▁hole|><|fim▁end|> | }
} |
<|file_name|>prime_numbers3.py<|end_file_name|><|fim▁begin|>from math import *
testing_number = int(1)
sum_of_logs = log(2)
n = int(input('Choose a number: '))
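# Trial-divide every odd candidate; Python's for/else runs the else branch
# only when the loop finished without a break, i.e. no divisor was found.
# By the prime number theorem the ratio printed at the end tends to 1 for large n.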
while testing_number < n:
testing_number = testing_number + 2
for checking_number in range(2, testing_number):
if testing_number % checking_number == 0:
break
else:<|fim▁hole|><|fim▁end|> | sum_of_logs = sum_of_logs + testing_number
print(sum_of_logs/n)
print('The sum of the logs is:', sum_of_logs) |
<|file_name|>issue-17651.rs<|end_file_name|><|fim▁begin|>// Copyright 2014 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
// Test that moves of unsized values within closures are caught
// and rejected.
fn main() {
(|| box *[0u].as_slice())();
//~^ ERROR cannot move out of dereference
//~^^ ERROR cannot move a value of type [uint]<|fim▁hole|><|fim▁end|> | } |
<|file_name|>main_generator.py<|end_file_name|><|fim▁begin|># -*- coding: utf-8 -*-
#!/usr/bin/python
#
# This is derived from a cadquery script for generating PDIP models in X3D format
#
# from https://bitbucket.org/hyOzd/freecad-macros
# author hyOzd
# This is a
# Dimensions are from Microchips Packaging Specification document:
# DS00000049BY. Body drawing is the same as QFP generator#
## requirements
## cadquery FreeCAD plugin
## https://github.com/jmwright/cadquery-freecad-module
## to run the script just do: freecad main_generator.py modelName
## e.g. c:\freecad\bin\freecad main_generator.py DIP8
## the script will generate STEP and VRML parametric models
## to be used with kicad StepUp script
#* These are a FreeCAD & cadquery tools *
#* to export generated models in STEP & VRML format. *
#* *
#* cadquery script for generating QFP/SOIC/SSOP/TSSOP models in STEP AP214 *
#* Copyright (c) 2015 *
#* Maurice https://launchpad.net/~easyw *
#* All trademarks within this guide belong to their legitimate owners. *
#* *
#* This program is free software; you can redistribute it and/or modify *
#* it under the terms of the GNU Lesser General Public License (LGPL) *
#* as published by the Free Software Foundation; either version 2 of *
#* the License, or (at your option) any later version. *
#* for detail see the LICENCE text file. *
#* *
#* This program is distributed in the hope that it will be useful, *
#* but WITHOUT ANY WARRANTY; without even the implied warranty of *
#* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the *
#* GNU Library General Public License for more details. *
#* *
#* You should have received a copy of the GNU Library General Public *
#* License along with this program; if not, write to the Free Software *
#* Foundation, Inc., *
#* 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA *
#* *
#****************************************************************************
__title__ = "make Valve 3D models"
__author__ = "Stefan, based on Valve script"
__Comment__ = 'make varistor 3D models exported to STEP and VRML for Kicad StepUP script'
___ver___ = "1.3.3 14/08/2015"
# maui import cadquery as cq
# maui from Helpers import show
from collections import namedtuple
import math
import sys, os
import datetime
from datetime import datetime
sys.path.append("../_tools")
import exportPartToVRML as expVRML
import shaderColors
# maui start
import FreeCAD, Draft, FreeCADGui
import ImportGui
import FreeCADGui as Gui
#from Gui.Command import *
outdir=os.path.dirname(os.path.realpath(__file__) + os.sep + '..' + os.sep + '_3Dmodels')
scriptdir=os.path.dirname(os.path.realpath(__file__))
sys.path.append(outdir)
sys.path.append(scriptdir)
if FreeCAD.GuiUp:
from PySide import QtCore, QtGui
# Licence information of the generated models.
#################################################################################################
STR_licAuthor = "kicad StepUp"
STR_licEmail = "ksu"
STR_licOrgSys = "kicad StepUp"
STR_licPreProc = "OCC"
STR_licOrg = "FreeCAD"
#################################################################################################
import cq_belfuse # modules parameters
from cq_belfuse import *
import cq_keystone # modules parameters
from cq_keystone import *
import cq_bulgin # modules parameters
from cq_bulgin import *
import cq_schurter # modules parameters
from cq_schurter import *
import cq_tme # modules parameters
from cq_tme import *
import cq_littlefuse # modules parameters
from cq_littlefuse import *
different_models = [
cq_belfuse(),
cq_keystone(),
cq_bulgin(),
cq_schurter(),
cq_tme(),
cq_littlefuse(),
]
def make_3D_model(models_dir, model_class, modelID):
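    # Builds a single part: opens a fresh FreeCAD document, lets the family
    # class create the geometry, then exports STEP/VRML/FCStd with license data.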
LIST_license = ["",]
CheckedmodelName = 'A_' + modelID.replace('.', '').replace('-', '_').replace('(', '').replace(')', '')
Newdoc = App.newDocument(CheckedmodelName)
App.setActiveDocument(CheckedmodelName)
Gui.ActiveDocument=Gui.getDocument(CheckedmodelName)
destination_dir = model_class.get_dest_3D_dir(modelID)
material_substitutions = model_class.make_3D_model(modelID)
modelName = model_class.get_model_name(modelID)
doc = FreeCAD.ActiveDocument
doc.Label = CheckedmodelName
objs=GetListOfObjects(FreeCAD, doc)
objs[0].Label = CheckedmodelName
restore_Main_Tools()
script_dir=os.path.dirname(os.path.realpath(__file__))
expVRML.say(models_dir)
out_dir=models_dir+os.sep+destination_dir
if not os.path.exists(out_dir):
os.makedirs(out_dir)
exportSTEP(doc, modelName, out_dir)
if LIST_license[0]=="":
LIST_license=Lic.LIST_int_license
LIST_license.append("")
Lic.addLicenseToStep(out_dir + os.sep, modelName+".step", LIST_license,\
STR_licAuthor, STR_licEmail, STR_licOrgSys, STR_licOrg, STR_licPreProc)
# scale and export Vrml model
scale=1/2.54
#exportVRML(doc,modelName,scale,out_dir)
del objs
objs=GetListOfObjects(FreeCAD, doc)
expVRML.say("######################################################################")
expVRML.say(objs)<|fim▁hole|> export_file_name=out_dir+os.sep+modelName+'.wrl'
colored_meshes = expVRML.getColoredMesh(Gui, export_objects , scale)
#expVRML.writeVRMLFile(colored_meshes, export_file_name, used_color_keys)# , LIST_license
expVRML.writeVRMLFile(colored_meshes, export_file_name, used_color_keys, LIST_license)
#scale=0.3937001
#exportVRML(doc,modelName,scale,out_dir)
# Save the doc in Native FC format
saveFCdoc(App, Gui, doc, modelName,out_dir)
#display BBox
Gui.activateWorkbench("PartWorkbench")
Gui.SendMsgToActiveView("ViewFit")
Gui.activeDocument().activeView().viewAxometric()
#FreeCADGui.ActiveDocument.activeObject.BoundingBox = True
def run():
## # get variant names from command line
return
#import step_license as L
import add_license as Lic
# when run from command line
if __name__ == "__main__" or __name__ == "main_generator":
FreeCAD.Console.PrintMessage('\r\nRunning...\r\n')
full_path=os.path.realpath(__file__)
expVRML.say(full_path)
scriptdir=os.path.dirname(os.path.realpath(__file__))
expVRML.say(scriptdir)
sub_path = full_path.split(scriptdir)
expVRML.say(sub_path)
sub_dir_name =full_path.split(os.sep)[-2]
expVRML.say(sub_dir_name)
sub_path = full_path.split(sub_dir_name)[0]
expVRML.say(sub_path)
models_dir=sub_path+"_3Dmodels"
model_to_build = ''
if len(sys.argv) < 3:
FreeCAD.Console.PrintMessage('No variant name is given, add a valid model name as an argument or the argument "all"\r\n')
sys.exit()
else:
model_to_build=sys.argv[2]
found_one = False
if len(model_to_build) > 0:
if model_to_build == 'all' or model_to_build == 'All' or model_to_build == 'ALL':
found_one = True
for n in different_models:
listall = n.get_list_all()
for i in listall:
make_3D_model(models_dir, n, i)
elif model_to_build == 'list':
found_one = True
FreeCAD.Console.PrintMessage('\r\n')
for n in different_models:
listall = n.get_list_all()
for i in listall:
FreeCAD.Console.PrintMessage(i + '\r\n')
else:
for n in different_models:
if n.model_exist(model_to_build):
found_one = True
make_3D_model(models_dir, n, model_to_build)
if not found_one:
print("Parameters for %s doesn't exist, skipping. " % model_to_build)<|fim▁end|> | expVRML.say("######################################################################")
export_objects, used_color_keys = expVRML.determineColors(Gui, objs, material_substitutions) |
<|file_name|>people.py<|end_file_name|><|fim▁begin|># Recall is a program for storing bookmarks of different things
# Copyright (C) 2012 Cal Paterson
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
import uuid
from bottle import Bottle, request, response, abort
import bcrypt
from recall.data import whitelist, blacklist
from recall import convenience as c
from recall import plugins, jobs, messages
app = Bottle()
app.install(plugins.exceptions)
app.install(plugins.ppjson)
app.install(plugins.auth)
app.install(plugins.cors)
app.error_handler = plugins.handler_dict
logger = c.logger("people")
@app.get("/")
def users():
abort(503, "Not yet implemented")
@app.get("/<who>/")
def user_(who):
try:
return whitelist(c.db().users.find_one({"email": who}), [
"email",
"firstName",
"pseudonym"
])
except TypeError:
logger.warn("Asked about {email}, but that is not a user".format(
email=who))
abort(404, "User not found")
@app.get("/<who>/self")
def _self(who, user):
if who != user["email"]:
response.status = 400
else:
return whitelist(user, ["pseudonym",
"firstName",
"surname",
"email",
"private_email"])
@app.post("/<who>/")
def request_invite(who):
# FIXME: Don't allow the pseudonym "public"
user = whitelist(request.json, [
"pseudonym",
"firstName",
"surname",
"private_email",
"token",
])
if "private_email" not in user:
abort(400, "You must provide a private_email field")
user["email_key"] = str(uuid.uuid4())
user["registered"] = c.unixtime()
user["email"] = who
c.db().users.ensure_index("email", unique=True)
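    # The unique index makes the insert below fail if this address has
    # already requested an invite.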
c.db().users.insert(user, safe=True)
response.status = 202
logger.info("{email} subscribed".format(email=who))
jobs.enqueue(messages.SendInvite(user))
@app.post("/<who>/<email_key>")
def verify_email(who, email_key):
if "RECALL_TEST_MODE" in c.settings or "RECALL_DEBUG_MODE" in c.settings:
salt = bcrypt.gensalt(1)<|fim▁hole|>
spec = {"email_key": email_key, "verified": {"$exists": False}}
update = {"$set": {"password_hash": password_hash,
"verified": c.unixtime()}}
success = c.db().users.update(spec, update, safe=True)["updatedExisting"]
if not success:
if c.db().users.find_one({"email_key": email_key}):
logger.warn("{email} tried to verify a second time".format(email=who))
abort(403, "Already verified")
else:
logger.warn("Someone tried to verify with a key, but it doesn't exist")
abort(404, "Don't know that key")
user = c.db().users.find_one({"email_key": email_key})
response.status = 201
return blacklist(user, ["_id", "email_key", "password_hash"])<|fim▁end|> | else:
salt = bcrypt.gensalt()
password_hash = bcrypt.hashpw(request.json["password"], salt) |
<|file_name|>switch.py<|end_file_name|><|fim▁begin|>"""
Support for Eneco Slimmer stekkers (Smart Plugs).
This provides controls for the z-wave smart plugs Toon can control.
"""
import logging
from homeassistant.components.switch import SwitchDevice
import custom_components.toon as toon_main
_LOGGER = logging.getLogger(__name__)
def setup_platform(hass, config, add_devices_callback, discovery_info=None):
"""Setup discovered Smart Plugs."""
_toon_main = hass.data[toon_main.TOON_HANDLE]
switch_items = []
for plug in _toon_main.toon.smartplugs:
switch_items.append(EnecoSmartPlug(hass, plug))
add_devices_callback(switch_items)
class EnecoSmartPlug(SwitchDevice):
"""Representation of a Smart Plug."""
def __init__(self, hass, plug):
"""Initialize the Smart Plug."""
self.smartplug = plug
self.toon_data_store = hass.data[toon_main.TOON_HANDLE]
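        # Readings are served from the shared Toon data store, which update()
        # refreshes before the properties below are read.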
@property
def should_poll(self):
"""No polling needed with subscriptions."""
return True
@property
def unique_id(self):
"""Return the ID of this switch."""
return self.smartplug.device_uuid
@property
def name(self):
"""Return the name of the switch if any."""
return self.smartplug.name
@property
def current_power_w(self):
"""Current power usage in W."""
return self.toon_data_store.get_data('current_power', self.name)
@property
def today_energy_kwh(self):
"""Today total energy usage in kWh."""<|fim▁hole|> @property
def is_on(self):
"""Return true if switch is on. Standby is on."""
return self.toon_data_store.get_data('current_state', self.name)
@property
def available(self):
"""True if switch is available."""
return self.smartplug.can_toggle
def turn_on(self, **kwargs):
"""Turn the switch on."""
return self.smartplug.turn_on()
def turn_off(self):
"""Turn the switch off."""
return self.smartplug.turn_off()
def update(self):
"""Update state."""
self.toon_data_store.update()<|fim▁end|> | return self.toon_data_store.get_data('today_energy', self.name)
|
<|file_name|>effectsList.js<|end_file_name|>
var EffectsList = [
['复古效果', 'sketch', 'dorsy', '2013-10-12'],
['黄色调效果', 'yellowStyle', 'dorsy', '2013-10-12'],
['缩小', 'mini', 'dorsy', '2013-10-12']
];
var EffectTmp = ' <li data-ename="{name}">\
        <img src="style/image/effect/{name}.png" />\
        <h3>{cnname}</h3>\
        <div class="itemInfo">\
            <span class="author lightFont">{author}</span>\
            <span class="date lightFont">{date}</span>\
        </div>\
    </li>\
';
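
// Illustrative helper (not part of the original file): one way to render the
// template above, filling the {name}/{cnname}/{author}/{date} placeholders
// from an EffectsList row. The function name is hypothetical.
function renderEffectItem(effect) {
    var data = {
        cnname: effect[0],
        name: effect[1],
        author: effect[2],
        date: effect[3]
    };
    return EffectTmp.replace(/\{(\w+)\}/g, function (match, key) {
        return data.hasOwnProperty(key) ? data[key] : match;
    });
}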
<|file_name|>__init__.py<|end_file_name|>
from __future__ import print_function

from .Console import *
<|file_name|>application_gateway_web_application_firewall_configuration.py<|end_file_name|>
# coding=utf-8
# --------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for
# license information.
#
# Code generated by Microsoft (R) AutoRest Code Generator.
# Changes may cause incorrect behavior and will be lost if the code is
# regenerated.
# --------------------------------------------------------------------------
from msrest.serialization import Model
class ApplicationGatewayWebApplicationFirewallConfiguration(Model):
"""Application gateway web application firewall configuration.
All required parameters must be populated in order to send to Azure.
:param enabled: Required. Whether the web application firewall is enabled
or not.
:type enabled: bool
:param firewall_mode: Required. Web application firewall mode. Possible
values include: 'Detection', 'Prevention'
:type firewall_mode: str or
~azure.mgmt.network.v2017_11_01.models.ApplicationGatewayFirewallMode
:param rule_set_type: Required. The type of the web application firewall
rule set. Possible values are: 'OWASP'.
:type rule_set_type: str
:param rule_set_version: Required. The version of the rule set type.
:type rule_set_version: str
:param disabled_rule_groups: The disabled rule groups.
:type disabled_rule_groups:
list[~azure.mgmt.network.v2017_11_01.models.ApplicationGatewayFirewallDisabledRuleGroup]
"""
_validation = {
'enabled': {'required': True},
'firewall_mode': {'required': True},
'rule_set_type': {'required': True},
'rule_set_version': {'required': True},
}
_attribute_map = {
'enabled': {'key': 'enabled', 'type': 'bool'},
'firewall_mode': {'key': 'firewallMode', 'type': 'str'},
'rule_set_type': {'key': 'ruleSetType', 'type': 'str'},
'rule_set_version': {'key': 'ruleSetVersion', 'type': 'str'},
'disabled_rule_groups': {'key': 'disabledRuleGroups', 'type': '[ApplicationGatewayFirewallDisabledRuleGroup]'},
}
    def __init__(self, **kwargs):
        super(ApplicationGatewayWebApplicationFirewallConfiguration, self).__init__(**kwargs)
        self.enabled = kwargs.get('enabled', None)
        self.firewall_mode = kwargs.get('firewall_mode', None)
        self.rule_set_type = kwargs.get('rule_set_type', None)
        self.rule_set_version = kwargs.get('rule_set_version', None)
        self.disabled_rule_groups = kwargs.get('disabled_rule_groups', None)
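
# Illustrative usage sketch (not part of the generated file); all four
# required fields must be supplied, and the values below are placeholders:
#
#   config = ApplicationGatewayWebApplicationFirewallConfiguration(
#       enabled=True,
#       firewall_mode='Prevention',
#       rule_set_type='OWASP',
#       rule_set_version='3.0',
#   )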
<|file_name|>proxy_service.cc<|end_file_name|>
// Copyright (c) 2012 The Chromium Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
#include "net/proxy/proxy_service.h"
#include <algorithm>
#include <cmath>
#include <utility>
#include "base/bind.h"
#include "base/bind_helpers.h"
#include "base/compiler_specific.h"
#include "base/location.h"
#include "base/logging.h"
#include "base/macros.h"
#include "base/memory/weak_ptr.h"
#include "base/metrics/histogram_macros.h"
#include "base/metrics/sparse_histogram.h"
#include "base/single_thread_task_runner.h"
#include "base/strings/string_util.h"
#include "base/thread_task_runner_handle.h"
#include "base/time/time.h"
#include "base/values.h"
#include "net/base/completion_callback.h"
#include "net/base/load_flags.h"
#include "net/base/net_errors.h"
#include "net/base/proxy_delegate.h"
#include "net/base/url_util.h"
#include "net/log/net_log.h"
#include "net/proxy/dhcp_proxy_script_fetcher.h"
#include "net/proxy/multi_threaded_proxy_resolver.h"
#include "net/proxy/proxy_config_service_fixed.h"
#include "net/proxy/proxy_resolver.h"
#include "net/proxy/proxy_resolver_factory.h"
#include "net/proxy/proxy_script_decider.h"
#include "net/proxy/proxy_script_fetcher.h"
#include "net/url_request/url_request_context.h"
#include "url/gurl.h"
#if defined(OS_WIN)
#include "net/proxy/proxy_config_service_win.h"
#include "net/proxy/proxy_resolver_winhttp.h"
#elif defined(OS_IOS)
#include "net/proxy/proxy_config_service_ios.h"
#include "net/proxy/proxy_resolver_mac.h"
#elif defined(OS_MACOSX)
#include "net/proxy/proxy_config_service_mac.h"
#include "net/proxy/proxy_resolver_mac.h"
#elif defined(OS_LINUX) && !defined(OS_CHROMEOS)
#include "net/proxy/proxy_config_service_linux.h"
#elif defined(OS_ANDROID)
#include "net/proxy/proxy_config_service_android.h"
#endif
using base::TimeDelta;
using base::TimeTicks;
namespace net {
namespace {
// When the IP address changes we don't immediately re-run proxy auto-config.
// Instead, we wait for |kDelayAfterNetworkChangesMs| before
// attempting to re-evaluate proxy auto-config.
//
// During this time window, any resolve requests sent to the ProxyService will
// be queued. Once we have waited the required amount of time, the proxy
// auto-config step will be run, and the queued requests resumed.
//
// The reason we play this game is that our signal for detecting network
// changes (NetworkChangeNotifier) may fire *before* the system's networking
// dependencies are fully configured. This is a problem since it means if
// we were to run proxy auto-config right away, it could fail due to spurious
// DNS failures. (see http://crbug.com/50779 for more details.)
//
// By adding the wait window, we give things a better chance to get properly
// set up. Network failures can happen at any time though, so we additionally
// poll the PAC script for changes, which will allow us to recover from these
// sorts of problems.
const int64_t kDelayAfterNetworkChangesMs = 2000;
// This is the default policy for polling the PAC script.
//
// In response to a failure, the poll intervals are:
// 0: 8 seconds (scheduled on timer)
// 1: 32 seconds
// 2: 2 minutes
// 3+: 4 hours
//
// In response to a success, the poll intervals are:
// 0+: 12 hours
//
// Only the 8 second poll is scheduled on a timer, the rest happen in response
// to network activity (and hence will take longer than the written time).
//
// Explanation for these values:
//
// TODO(eroman): These values are somewhat arbitrary, and need to be tuned
// using some histogram data. Trying to be conservative so as not to break
// existing setups when deployed. A simple exponential retry scheme would be
// more elegant, but places more load on the server.
//
// The motivation for trying quickly after failures (8 seconds) is to recover
// from spurious network failures, which are common after the IP address has
// just changed (like DNS failing to resolve). The next 32 second boundary is
// to try and catch other VPN weirdness which anecdotally I have seen take
// 10+ seconds for some users.
//
// The motivation for re-trying after a success is to check for possible
// content changes to the script, or to the WPAD auto-discovery results. We are
// not very aggressive with these checks so as to minimize the risk of
// overloading existing PAC setups. Moreover it is unlikely that PAC scripts
// change very frequently in existing setups. More research is needed to
// motivate what safe values are here, and what other user agents do.
//
// Comparison to other browsers:
//
// In Firefox the PAC URL is re-tried on failures according to
// network.proxy.autoconfig_retry_interval_min and
// network.proxy.autoconfig_retry_interval_max. The defaults are 5 seconds and
// 5 minutes respectively. It doubles the interval at each attempt.
//
// TODO(eroman): Figure out what Internet Explorer does.
class DefaultPollPolicy : public ProxyService::PacPollPolicy {
public:
DefaultPollPolicy() {}
Mode GetNextDelay(int initial_error,
TimeDelta current_delay,
TimeDelta* next_delay) const override {
if (initial_error != OK) {
// Re-try policy for failures.
const int kDelay1Seconds = 8;
const int kDelay2Seconds = 32;
const int kDelay3Seconds = 2 * 60; // 2 minutes
const int kDelay4Seconds = 4 * 60 * 60; // 4 Hours
// Initial poll.
if (current_delay < TimeDelta()) {
*next_delay = TimeDelta::FromSeconds(kDelay1Seconds);
return MODE_USE_TIMER;
}
switch (current_delay.InSeconds()) {
case kDelay1Seconds:
*next_delay = TimeDelta::FromSeconds(kDelay2Seconds);
return MODE_START_AFTER_ACTIVITY;
case kDelay2Seconds:
*next_delay = TimeDelta::FromSeconds(kDelay3Seconds);
return MODE_START_AFTER_ACTIVITY;
default:
*next_delay = TimeDelta::FromSeconds(kDelay4Seconds);
return MODE_START_AFTER_ACTIVITY;
}
} else {
      // Re-try policy for successes.
*next_delay = TimeDelta::FromHours(12);
return MODE_START_AFTER_ACTIVITY;
}
}
private:
DISALLOW_COPY_AND_ASSIGN(DefaultPollPolicy);
};
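
// Worked example of the policy above (for illustration): after an initial PAC
// fetch failure the delays step 8s -> 32s -> 2min -> 4h, and only the first
// 8 second poll is driven by a timer; after any success, the next check waits
// for 12 hours to elapse and is then triggered by network activity.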
// Config getter that always returns direct settings.
class ProxyConfigServiceDirect : public ProxyConfigService {
public:
// ProxyConfigService implementation:
void AddObserver(Observer* observer) override {}
void RemoveObserver(Observer* observer) override {}
ConfigAvailability GetLatestProxyConfig(ProxyConfig* config) override {
*config = ProxyConfig::CreateDirect();
config->set_source(PROXY_CONFIG_SOURCE_UNKNOWN);
return CONFIG_VALID;
}
};
// Proxy resolver that fails every time.
class ProxyResolverNull : public ProxyResolver {
public:
ProxyResolverNull() {}
// ProxyResolver implementation.
int GetProxyForURL(const GURL& url,
ProxyInfo* results,
const CompletionCallback& callback,
RequestHandle* request,
const BoundNetLog& net_log) override {
return ERR_NOT_IMPLEMENTED;
}
void CancelRequest(RequestHandle request) override { NOTREACHED(); }
LoadState GetLoadState(RequestHandle request) const override {
NOTREACHED();
return LOAD_STATE_IDLE;
}
};
// ProxyResolver that simulates a PAC script which returns
// |pac_string| for every single URL.
class ProxyResolverFromPacString : public ProxyResolver {
public:
explicit ProxyResolverFromPacString(const std::string& pac_string)
: pac_string_(pac_string) {}
int GetProxyForURL(const GURL& url,
ProxyInfo* results,
const CompletionCallback& callback,
RequestHandle* request,
const BoundNetLog& net_log) override {
results->UsePacString(pac_string_);
return OK;
}
void CancelRequest(RequestHandle request) override { NOTREACHED(); }
LoadState GetLoadState(RequestHandle request) const override {
NOTREACHED();
return LOAD_STATE_IDLE;
}
private:
const std::string pac_string_;
};
// Creates ProxyResolvers using a platform-specific implementation.
class ProxyResolverFactoryForSystem : public MultiThreadedProxyResolverFactory {
public:
explicit ProxyResolverFactoryForSystem(size_t max_num_threads)
: MultiThreadedProxyResolverFactory(max_num_threads,
false /*expects_pac_bytes*/) {}
scoped_ptr<ProxyResolverFactory> CreateProxyResolverFactory() override {
#if defined(OS_WIN)
return make_scoped_ptr(new ProxyResolverFactoryWinHttp());
#elif defined(OS_MACOSX)
return make_scoped_ptr(new ProxyResolverFactoryMac());
#else
NOTREACHED();
return NULL;
#endif
}
static bool IsSupported() {
#if defined(OS_WIN) || defined(OS_MACOSX)
return true;
#else
return false;
#endif
}
private:
DISALLOW_COPY_AND_ASSIGN(ProxyResolverFactoryForSystem);
};
class ProxyResolverFactoryForNullResolver : public ProxyResolverFactory {
public:
ProxyResolverFactoryForNullResolver() : ProxyResolverFactory(false) {}
// ProxyResolverFactory overrides.
int CreateProxyResolver(
const scoped_refptr<ProxyResolverScriptData>& pac_script,
scoped_ptr<ProxyResolver>* resolver,
const net::CompletionCallback& callback,
scoped_ptr<Request>* request) override {
resolver->reset(new ProxyResolverNull());
return OK;
}
private:
DISALLOW_COPY_AND_ASSIGN(ProxyResolverFactoryForNullResolver);
};
class ProxyResolverFactoryForPacResult : public ProxyResolverFactory {
public:
explicit ProxyResolverFactoryForPacResult(const std::string& pac_string)
: ProxyResolverFactory(false), pac_string_(pac_string) {}
// ProxyResolverFactory override.
int CreateProxyResolver(
const scoped_refptr<ProxyResolverScriptData>& pac_script,
scoped_ptr<ProxyResolver>* resolver,
const net::CompletionCallback& callback,
scoped_ptr<Request>* request) override {
resolver->reset(new ProxyResolverFromPacString(pac_string_));
return OK;
}
private:
const std::string pac_string_;
DISALLOW_COPY_AND_ASSIGN(ProxyResolverFactoryForPacResult);
};
// Returns NetLog parameters describing a proxy configuration change.
scoped_ptr<base::Value> NetLogProxyConfigChangedCallback(
const ProxyConfig* old_config,
const ProxyConfig* new_config,
NetLogCaptureMode /* capture_mode */) {
scoped_ptr<base::DictionaryValue> dict(new base::DictionaryValue());
// The "old_config" is optional -- the first notification will not have
// any "previous" configuration.
if (old_config->is_valid())
dict->Set("old_config", old_config->ToValue());
dict->Set("new_config", new_config->ToValue());
return std::move(dict);
}
scoped_ptr<base::Value> NetLogBadProxyListCallback(
const ProxyRetryInfoMap* retry_info,
NetLogCaptureMode /* capture_mode */) {
scoped_ptr<base::DictionaryValue> dict(new base::DictionaryValue());
base::ListValue* list = new base::ListValue();
for (ProxyRetryInfoMap::const_iterator iter = retry_info->begin();
iter != retry_info->end(); ++iter) {
list->Append(new base::StringValue(iter->first));
}
dict->Set("bad_proxy_list", list);
return std::move(dict);
}
// Returns NetLog parameters on a successful proxy resolution.
scoped_ptr<base::Value> NetLogFinishedResolvingProxyCallback(
const ProxyInfo* result,
NetLogCaptureMode /* capture_mode */) {
scoped_ptr<base::DictionaryValue> dict(new base::DictionaryValue());
dict->SetString("pac_string", result->ToPacString());
return std::move(dict);
}
#if defined(OS_CHROMEOS)
class UnsetProxyConfigService : public ProxyConfigService {
public:
UnsetProxyConfigService() {}
~UnsetProxyConfigService() override {}
void AddObserver(Observer* observer) override {}
void RemoveObserver(Observer* observer) override {}
ConfigAvailability GetLatestProxyConfig(ProxyConfig* config) override {
return CONFIG_UNSET;
}
};
#endif
} // namespace
// ProxyService::InitProxyResolver --------------------------------------------
// This glues together two asynchronous steps:
// (1) ProxyScriptDecider -- try to fetch/validate a sequence of PAC scripts
// to figure out what we should configure against.
// (2) Feed the fetched PAC script into the ProxyResolver.
//
// InitProxyResolver is a single-use class which encapsulates cancellation as
// part of its destructor. Start() or StartSkipDecider() should be called just
// once. The instance can be destroyed at any time, and the request will be
// cancelled.
class ProxyService::InitProxyResolver {
public:
InitProxyResolver()
: proxy_resolver_factory_(nullptr),
proxy_resolver_(NULL),
next_state_(STATE_NONE),
quick_check_enabled_(true) {}
~InitProxyResolver() {
// Note that the destruction of ProxyScriptDecider will automatically cancel
// any outstanding work.
}
// Begins initializing the proxy resolver; calls |callback| when done. A
// ProxyResolver instance will be created using |proxy_resolver_factory| and
// returned via |proxy_resolver| if the final result is OK.
int Start(scoped_ptr<ProxyResolver>* proxy_resolver,
ProxyResolverFactory* proxy_resolver_factory,
ProxyScriptFetcher* proxy_script_fetcher,
DhcpProxyScriptFetcher* dhcp_proxy_script_fetcher,
NetLog* net_log,
const ProxyConfig& config,
TimeDelta wait_delay,
const CompletionCallback& callback) {
DCHECK_EQ(STATE_NONE, next_state_);
proxy_resolver_ = proxy_resolver;
proxy_resolver_factory_ = proxy_resolver_factory;
decider_.reset(new ProxyScriptDecider(
proxy_script_fetcher, dhcp_proxy_script_fetcher, net_log));
decider_->set_quick_check_enabled(quick_check_enabled_);
config_ = config;
wait_delay_ = wait_delay;
callback_ = callback;
next_state_ = STATE_DECIDE_PROXY_SCRIPT;
return DoLoop(OK);
}
// Similar to Start(), however it skips the ProxyScriptDecider stage. Instead
// |effective_config|, |decider_result| and |script_data| will be used as the
// inputs for initializing the ProxyResolver. A ProxyResolver instance will
// be created using |proxy_resolver_factory| and returned via
// |proxy_resolver| if the final result is OK.
int StartSkipDecider(scoped_ptr<ProxyResolver>* proxy_resolver,
ProxyResolverFactory* proxy_resolver_factory,
const ProxyConfig& effective_config,
int decider_result,
ProxyResolverScriptData* script_data,
const CompletionCallback& callback) {
DCHECK_EQ(STATE_NONE, next_state_);
proxy_resolver_ = proxy_resolver;
proxy_resolver_factory_ = proxy_resolver_factory;
effective_config_ = effective_config;
script_data_ = script_data;
callback_ = callback;
if (decider_result != OK)
return decider_result;
next_state_ = STATE_CREATE_RESOLVER;
return DoLoop(OK);
}
// Returns the proxy configuration that was selected by ProxyScriptDecider.
// Should only be called upon completion of the initialization.
const ProxyConfig& effective_config() const {
DCHECK_EQ(STATE_NONE, next_state_);
return effective_config_;
}
// Returns the PAC script data that was selected by ProxyScriptDecider.
// Should only be called upon completion of the initialization.
const scoped_refptr<ProxyResolverScriptData>& script_data() {
DCHECK_EQ(STATE_NONE, next_state_);
return script_data_;
}
LoadState GetLoadState() const {
if (next_state_ == STATE_DECIDE_PROXY_SCRIPT_COMPLETE) {
// In addition to downloading, this state may also include the stall time
// after network change events (kDelayAfterNetworkChangesMs).
return LOAD_STATE_DOWNLOADING_PROXY_SCRIPT;
}
return LOAD_STATE_RESOLVING_PROXY_FOR_URL;
}
void set_quick_check_enabled(bool enabled) { quick_check_enabled_ = enabled; }
bool quick_check_enabled() const { return quick_check_enabled_; }
private:
enum State {
STATE_NONE,
STATE_DECIDE_PROXY_SCRIPT,
STATE_DECIDE_PROXY_SCRIPT_COMPLETE,
STATE_CREATE_RESOLVER,
STATE_CREATE_RESOLVER_COMPLETE,
};
int DoLoop(int result) {
DCHECK_NE(next_state_, STATE_NONE);
int rv = result;
do {
State state = next_state_;
next_state_ = STATE_NONE;
switch (state) {
case STATE_DECIDE_PROXY_SCRIPT:
DCHECK_EQ(OK, rv);
rv = DoDecideProxyScript();
break;
case STATE_DECIDE_PROXY_SCRIPT_COMPLETE:
rv = DoDecideProxyScriptComplete(rv);
break;
case STATE_CREATE_RESOLVER:
DCHECK_EQ(OK, rv);
rv = DoCreateResolver();
break;
case STATE_CREATE_RESOLVER_COMPLETE:
rv = DoCreateResolverComplete(rv);
break;
default:
NOTREACHED() << "bad state: " << state;
rv = ERR_UNEXPECTED;
break;
}
} while (rv != ERR_IO_PENDING && next_state_ != STATE_NONE);
return rv;
}
int DoDecideProxyScript() {
next_state_ = STATE_DECIDE_PROXY_SCRIPT_COMPLETE;
return decider_->Start(
config_, wait_delay_, proxy_resolver_factory_->expects_pac_bytes(),
base::Bind(&InitProxyResolver::OnIOCompletion, base::Unretained(this)));
}
int DoDecideProxyScriptComplete(int result) {
if (result != OK)
return result;
effective_config_ = decider_->effective_config();
script_data_ = decider_->script_data();
next_state_ = STATE_CREATE_RESOLVER;
return OK;
}
int DoCreateResolver() {
DCHECK(script_data_.get());
    // TODO(eroman): Should log this latency to the NetLog.
    next_state_ = STATE_CREATE_RESOLVER_COMPLETE;
    return proxy_resolver_factory_->CreateProxyResolver(
        script_data_, proxy_resolver_,
base::Bind(&InitProxyResolver::OnIOCompletion, base::Unretained(this)),
&create_resolver_request_);
}
int DoCreateResolverComplete(int result) {
if (result != OK)
proxy_resolver_->reset();
return result;
}
void OnIOCompletion(int result) {
DCHECK_NE(STATE_NONE, next_state_);
int rv = DoLoop(result);
if (rv != ERR_IO_PENDING)
DoCallback(rv);
}
void DoCallback(int result) {
DCHECK_NE(ERR_IO_PENDING, result);
callback_.Run(result);
}
ProxyConfig config_;
ProxyConfig effective_config_;
scoped_refptr<ProxyResolverScriptData> script_data_;
TimeDelta wait_delay_;
scoped_ptr<ProxyScriptDecider> decider_;
ProxyResolverFactory* proxy_resolver_factory_;
scoped_ptr<ProxyResolverFactory::Request> create_resolver_request_;
scoped_ptr<ProxyResolver>* proxy_resolver_;
CompletionCallback callback_;
State next_state_;
bool quick_check_enabled_;
DISALLOW_COPY_AND_ASSIGN(InitProxyResolver);
};
// ProxyService::ProxyScriptDeciderPoller -------------------------------------
// This helper class encapsulates the logic to schedule and run periodic
// background checks to see if the PAC script (or effective proxy configuration)
// has changed. If a change is detected, then the caller will be notified via
// the ChangeCallback.
class ProxyService::ProxyScriptDeciderPoller {
public:
typedef base::Callback<void(int, ProxyResolverScriptData*,
const ProxyConfig&)> ChangeCallback;
// Builds a poller helper, and starts polling for updates. Whenever a change
// is observed, |callback| will be invoked with the details.
//
// |config| specifies the (unresolved) proxy configuration to poll.
// |proxy_resolver_expects_pac_bytes| the type of proxy resolver we expect
// to use the resulting script data with
// (so it can choose the right format).
// |proxy_script_fetcher| this pointer must remain alive throughout our
// lifetime. It is the dependency that will be used
// for downloading proxy scripts.
// |dhcp_proxy_script_fetcher| similar to |proxy_script_fetcher|, but for
// the DHCP dependency.
// |init_net_error| This is the initial network error (possibly success)
// encountered by the first PAC fetch attempt. We use it
// to schedule updates more aggressively if the initial
// fetch resulted in an error.
// |init_script_data| the initial script data from the PAC fetch attempt.
// This is the baseline used to determine when the
// script's contents have changed.
// |net_log| the NetLog to log progress into.
ProxyScriptDeciderPoller(ChangeCallback callback,
const ProxyConfig& config,
bool proxy_resolver_expects_pac_bytes,
ProxyScriptFetcher* proxy_script_fetcher,
DhcpProxyScriptFetcher* dhcp_proxy_script_fetcher,
int init_net_error,
const scoped_refptr<ProxyResolverScriptData>&
init_script_data,
NetLog* net_log)
: change_callback_(callback),
config_(config),
proxy_resolver_expects_pac_bytes_(proxy_resolver_expects_pac_bytes),
proxy_script_fetcher_(proxy_script_fetcher),
dhcp_proxy_script_fetcher_(dhcp_proxy_script_fetcher),
last_error_(init_net_error),
last_script_data_(init_script_data),
last_poll_time_(TimeTicks::Now()),
weak_factory_(this) {
// Set the initial poll delay.
next_poll_mode_ = poll_policy()->GetNextDelay(
last_error_, TimeDelta::FromSeconds(-1), &next_poll_delay_);
TryToStartNextPoll(false);
}
void OnLazyPoll() {
// We have just been notified of network activity. Use this opportunity to
// see if we can start our next poll.
TryToStartNextPoll(true);
}
static const PacPollPolicy* set_policy(const PacPollPolicy* policy) {
const PacPollPolicy* prev = poll_policy_;
poll_policy_ = policy;
return prev;
}
void set_quick_check_enabled(bool enabled) { quick_check_enabled_ = enabled; }
bool quick_check_enabled() const { return quick_check_enabled_; }
private:
// Returns the effective poll policy (the one injected by unit-tests, or the
// default).
const PacPollPolicy* poll_policy() {
if (poll_policy_)
return poll_policy_;
return &default_poll_policy_;
}
void StartPollTimer() {
DCHECK(!decider_.get());
base::ThreadTaskRunnerHandle::Get()->PostDelayedTask(
FROM_HERE, base::Bind(&ProxyScriptDeciderPoller::DoPoll,
weak_factory_.GetWeakPtr()),
next_poll_delay_);
}
void TryToStartNextPoll(bool triggered_by_activity) {
switch (next_poll_mode_) {
case PacPollPolicy::MODE_USE_TIMER:
if (!triggered_by_activity)
StartPollTimer();
break;
case PacPollPolicy::MODE_START_AFTER_ACTIVITY:
if (triggered_by_activity && !decider_.get()) {
TimeDelta elapsed_time = TimeTicks::Now() - last_poll_time_;
if (elapsed_time >= next_poll_delay_)
DoPoll();
}
break;
}
}
void DoPoll() {
last_poll_time_ = TimeTicks::Now();
// Start the proxy script decider to see if anything has changed.
// TODO(eroman): Pass a proper NetLog rather than NULL.
decider_.reset(new ProxyScriptDecider(
proxy_script_fetcher_, dhcp_proxy_script_fetcher_, NULL));
decider_->set_quick_check_enabled(quick_check_enabled_);
int result = decider_->Start(
config_, TimeDelta(), proxy_resolver_expects_pac_bytes_,
base::Bind(&ProxyScriptDeciderPoller::OnProxyScriptDeciderCompleted,
base::Unretained(this)));
if (result != ERR_IO_PENDING)
OnProxyScriptDeciderCompleted(result);
}
void OnProxyScriptDeciderCompleted(int result) {
if (HasScriptDataChanged(result, decider_->script_data())) {
// Something has changed, we must notify the ProxyService so it can
// re-initialize its ProxyResolver. Note that we post a notification task
// rather than calling it directly -- this is done to avoid an ugly
// destruction sequence, since |this| might be destroyed as a result of
// the notification.
base::ThreadTaskRunnerHandle::Get()->PostTask(
FROM_HERE,
base::Bind(&ProxyScriptDeciderPoller::NotifyProxyServiceOfChange,
weak_factory_.GetWeakPtr(), result,
decider_->script_data(),
decider_->effective_config()));
return;
}
decider_.reset();
// Decide when the next poll should take place, and possibly start the
// next timer.
next_poll_mode_ = poll_policy()->GetNextDelay(
last_error_, next_poll_delay_, &next_poll_delay_);
TryToStartNextPoll(false);
}
bool HasScriptDataChanged(int result,
const scoped_refptr<ProxyResolverScriptData>& script_data) {
if (result != last_error_) {
// Something changed -- it was failing before and now it succeeded, or
// conversely it succeeded before and now it failed. Or it failed in
// both cases, however the specific failure error codes differ.
return true;
}
if (result != OK) {
// If it failed last time and failed again with the same error code this
// time, then nothing has actually changed.
return false;
}
// Otherwise if it succeeded both this time and last time, we need to look
// closer and see if we ended up downloading different content for the PAC
// script.
return !script_data->Equals(last_script_data_.get());
}
void NotifyProxyServiceOfChange(
int result,
const scoped_refptr<ProxyResolverScriptData>& script_data,
const ProxyConfig& effective_config) {
// Note that |this| may be deleted after calling into the ProxyService.
change_callback_.Run(result, script_data.get(), effective_config);
}
ChangeCallback change_callback_;
ProxyConfig config_;
bool proxy_resolver_expects_pac_bytes_;
ProxyScriptFetcher* proxy_script_fetcher_;
DhcpProxyScriptFetcher* dhcp_proxy_script_fetcher_;
int last_error_;
scoped_refptr<ProxyResolverScriptData> last_script_data_;
scoped_ptr<ProxyScriptDecider> decider_;
TimeDelta next_poll_delay_;
PacPollPolicy::Mode next_poll_mode_;
TimeTicks last_poll_time_;
// Polling policy injected by unit-tests. Otherwise this is NULL and the
// default policy will be used.
static const PacPollPolicy* poll_policy_;
const DefaultPollPolicy default_poll_policy_;
bool quick_check_enabled_;
base::WeakPtrFactory<ProxyScriptDeciderPoller> weak_factory_;
DISALLOW_COPY_AND_ASSIGN(ProxyScriptDeciderPoller);
};
// static
const ProxyService::PacPollPolicy*
ProxyService::ProxyScriptDeciderPoller::poll_policy_ = NULL;
// ProxyService::PacRequest ---------------------------------------------------
class ProxyService::PacRequest
: public base::RefCounted<ProxyService::PacRequest> {
public:
PacRequest(ProxyService* service,
const GURL& url,
const std::string& method,
int load_flags,
ProxyDelegate* proxy_delegate,
ProxyInfo* results,
const CompletionCallback& user_callback,
const BoundNetLog& net_log)
: service_(service),
user_callback_(user_callback),
results_(results),
url_(url),
method_(method),
load_flags_(load_flags),
proxy_delegate_(proxy_delegate),
resolve_job_(NULL),
config_id_(ProxyConfig::kInvalidConfigID),
config_source_(PROXY_CONFIG_SOURCE_UNKNOWN),
net_log_(net_log),
creation_time_(TimeTicks::Now()) {
DCHECK(!user_callback.is_null());
}
// Starts the resolve proxy request.
int Start() {
DCHECK(!was_cancelled());
DCHECK(!is_started());
DCHECK(service_->config_.is_valid());
config_id_ = service_->config_.id();
config_source_ = service_->config_.source();
return resolver()->GetProxyForURL(
url_, results_,
base::Bind(&PacRequest::QueryComplete, base::Unretained(this)),
&resolve_job_, net_log_);
}
bool is_started() const {
// Note that !! casts to bool. (VS gives a warning otherwise).
return !!resolve_job_;
}
void StartAndCompleteCheckingForSynchronous() {
int rv = service_->TryToCompleteSynchronously(url_, load_flags_,
proxy_delegate_, results_);
if (rv == ERR_IO_PENDING)
rv = Start();
if (rv != ERR_IO_PENDING)
QueryComplete(rv);
}
void CancelResolveJob() {
DCHECK(is_started());
// The request may already be running in the resolver.
resolver()->CancelRequest(resolve_job_);
resolve_job_ = NULL;
DCHECK(!is_started());
}
void Cancel() {
net_log_.AddEvent(NetLog::TYPE_CANCELLED);
if (is_started())
CancelResolveJob();
// Mark as cancelled, to prevent accessing this again later.
service_ = NULL;
user_callback_.Reset();
results_ = NULL;
net_log_.EndEvent(NetLog::TYPE_PROXY_SERVICE);
}
// Returns true if Cancel() has been called.
bool was_cancelled() const {
return user_callback_.is_null();
}
// Helper to call after ProxyResolver completion (both synchronous and
// asynchronous). Fixes up the result that is to be returned to user.
int QueryDidComplete(int result_code) {
DCHECK(!was_cancelled());
// This state is cleared when resolve_job_ is set to nullptr below.
bool script_executed = is_started();
// Clear |resolve_job_| so is_started() returns false while
// DidFinishResolvingProxy() runs.
resolve_job_ = nullptr;
// Note that DidFinishResolvingProxy might modify |results_|.
int rv = service_->DidFinishResolvingProxy(
url_, method_, load_flags_, proxy_delegate_, results_, result_code,
net_log_, creation_time_, script_executed);
// Make a note in the results which configuration was in use at the
// time of the resolve.
results_->config_id_ = config_id_;
results_->config_source_ = config_source_;
results_->did_use_pac_script_ = true;
results_->proxy_resolve_start_time_ = creation_time_;
results_->proxy_resolve_end_time_ = TimeTicks::Now();
// Reset the state associated with in-progress-resolve.
config_id_ = ProxyConfig::kInvalidConfigID;
config_source_ = PROXY_CONFIG_SOURCE_UNKNOWN;
return rv;
}
BoundNetLog* net_log() { return &net_log_; }
LoadState GetLoadState() const {
if (is_started())
return resolver()->GetLoadState(resolve_job_);
return LOAD_STATE_RESOLVING_PROXY_FOR_URL;
}
private:
friend class base::RefCounted<ProxyService::PacRequest>;
~PacRequest() {}
// Callback for when the ProxyResolver request has completed.
void QueryComplete(int result_code) {
result_code = QueryDidComplete(result_code);
// Remove this completed PacRequest from the service's pending list.
    // (which will probably cause deletion of |this|).
if (!user_callback_.is_null()) {
CompletionCallback callback = user_callback_;
service_->RemovePendingRequest(this);
callback.Run(result_code);
}
}
ProxyResolver* resolver() const { return service_->resolver_.get(); }
// Note that we don't hold a reference to the ProxyService. Outstanding
// requests are cancelled during ~ProxyService, so this is guaranteed
// to be valid throughout our lifetime.
ProxyService* service_;
CompletionCallback user_callback_;
ProxyInfo* results_;
GURL url_;
std::string method_;
int load_flags_;
ProxyDelegate* proxy_delegate_;
ProxyResolver::RequestHandle resolve_job_;
ProxyConfig::ID config_id_; // The config id when the resolve was started.
ProxyConfigSource config_source_; // The source of proxy settings.
BoundNetLog net_log_;
// Time when the request was created. Stored here rather than in |results_|
// because the time in |results_| will be cleared.
TimeTicks creation_time_;
};
// ProxyService ---------------------------------------------------------------
ProxyService::ProxyService(scoped_ptr<ProxyConfigService> config_service,
scoped_ptr<ProxyResolverFactory> resolver_factory,
NetLog* net_log)
: resolver_factory_(std::move(resolver_factory)),
next_config_id_(1),
current_state_(STATE_NONE),
net_log_(net_log),
stall_proxy_auto_config_delay_(
TimeDelta::FromMilliseconds(kDelayAfterNetworkChangesMs)),
quick_check_enabled_(true) {
NetworkChangeNotifier::AddIPAddressObserver(this);
NetworkChangeNotifier::AddDNSObserver(this);
ResetConfigService(std::move(config_service));
}
// static
scoped_ptr<ProxyService> ProxyService::CreateUsingSystemProxyResolver(
scoped_ptr<ProxyConfigService> proxy_config_service,
size_t num_pac_threads,
NetLog* net_log) {
DCHECK(proxy_config_service);
if (!ProxyResolverFactoryForSystem::IsSupported()) {
VLOG(1) << "PAC support disabled because there is no system implementation";
return CreateWithoutProxyResolver(std::move(proxy_config_service), net_log);
}
if (num_pac_threads == 0)
num_pac_threads = kDefaultNumPacThreads;
return make_scoped_ptr(new ProxyService(
std::move(proxy_config_service),
make_scoped_ptr(new ProxyResolverFactoryForSystem(num_pac_threads)),
net_log));
}
// static
scoped_ptr<ProxyService> ProxyService::CreateWithoutProxyResolver(
scoped_ptr<ProxyConfigService> proxy_config_service,
NetLog* net_log) {
return make_scoped_ptr(new ProxyService(
std::move(proxy_config_service),
make_scoped_ptr(new ProxyResolverFactoryForNullResolver), net_log));
}
// static
scoped_ptr<ProxyService> ProxyService::CreateFixed(const ProxyConfig& pc) {
// TODO(eroman): This isn't quite right, won't work if |pc| specifies
// a PAC script.
return CreateUsingSystemProxyResolver(
make_scoped_ptr(new ProxyConfigServiceFixed(pc)), 0, NULL);
}
// static
scoped_ptr<ProxyService> ProxyService::CreateFixed(const std::string& proxy) {
ProxyConfig proxy_config;
proxy_config.proxy_rules().ParseFromString(proxy);
return ProxyService::CreateFixed(proxy_config);
}
// static
scoped_ptr<ProxyService> ProxyService::CreateDirect() {
return CreateDirectWithNetLog(NULL);
}
scoped_ptr<ProxyService> ProxyService::CreateDirectWithNetLog(NetLog* net_log) {
// Use direct connections.
return make_scoped_ptr(new ProxyService(
make_scoped_ptr(new ProxyConfigServiceDirect),
make_scoped_ptr(new ProxyResolverFactoryForNullResolver), net_log));
}
// static
scoped_ptr<ProxyService> ProxyService::CreateFixedFromPacResult(
const std::string& pac_string) {
// We need the settings to contain an "automatic" setting, otherwise the
// ProxyResolver dependency we give it will never be used.
scoped_ptr<ProxyConfigService> proxy_config_service(
new ProxyConfigServiceFixed(ProxyConfig::CreateAutoDetect()));
return make_scoped_ptr(new ProxyService(
std::move(proxy_config_service),
make_scoped_ptr(new ProxyResolverFactoryForPacResult(pac_string)), NULL));
}
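
// Illustrative note (not part of the original source): a hypothetical test
// could pin every resolution to a fixed PAC result like so:
//
//   scoped_ptr<ProxyService> service =
//       ProxyService::CreateFixedFromPacResult("PROXY myproxy:80; DIRECT");
//
// Every subsequent ResolveProxy() call then sees that PAC string.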
int ProxyService::ResolveProxy(const GURL& raw_url,
const std::string& method,
int load_flags,
ProxyInfo* result,
const CompletionCallback& callback,
PacRequest** pac_request,
ProxyDelegate* proxy_delegate,
const BoundNetLog& net_log) {
DCHECK(!callback.is_null());
return ResolveProxyHelper(raw_url, method, load_flags, result, callback,
pac_request, proxy_delegate, net_log);
}
int ProxyService::ResolveProxyHelper(const GURL& raw_url,
const std::string& method,
int load_flags,
ProxyInfo* result,
const CompletionCallback& callback,
PacRequest** pac_request,
ProxyDelegate* proxy_delegate,
const BoundNetLog& net_log) {
DCHECK(CalledOnValidThread());
net_log.BeginEvent(NetLog::TYPE_PROXY_SERVICE);
// Notify our polling-based dependencies that a resolve is taking place.
// This way they can schedule their polls in response to network activity.
config_service_->OnLazyPoll();
if (script_poller_.get())
script_poller_->OnLazyPoll();
if (current_state_ == STATE_NONE)
ApplyProxyConfigIfAvailable();
// Strip away any reference fragments and the username/password, as they
// are not relevant to proxy resolution.
GURL url = SimplifyUrlForRequest(raw_url);
// Check if the request can be completed right away. (This is the case when
// using a direct connection for example).
int rv = TryToCompleteSynchronously(url, load_flags, proxy_delegate, result);
if (rv != ERR_IO_PENDING) {
rv = DidFinishResolvingProxy(
url, method, load_flags, proxy_delegate, result, rv, net_log,
callback.is_null() ? TimeTicks() : TimeTicks::Now(), false);
return rv;
}
if (callback.is_null())
return ERR_IO_PENDING;
scoped_refptr<PacRequest> req(new PacRequest(this, url, method, load_flags,
proxy_delegate, result, callback,
net_log));
if (current_state_ == STATE_READY) {
// Start the resolve request.
rv = req->Start();
if (rv != ERR_IO_PENDING)
return req->QueryDidComplete(rv);
} else {
req->net_log()->BeginEvent(NetLog::TYPE_PROXY_SERVICE_WAITING_FOR_INIT_PAC);
}
DCHECK_EQ(ERR_IO_PENDING, rv);
DCHECK(!ContainsPendingRequest(req.get()));
pending_requests_.insert(req);
// Completion will be notified through |callback|, unless the caller cancels
// the request using |pac_request|.
if (pac_request)
*pac_request = req.get();
return rv; // ERR_IO_PENDING
}
bool ProxyService::TryResolveProxySynchronously(const GURL& raw_url,
const std::string& method,
int load_flags,
ProxyInfo* result,
ProxyDelegate* proxy_delegate,
const BoundNetLog& net_log) {
CompletionCallback null_callback;
return ResolveProxyHelper(raw_url, method, load_flags, result, null_callback,
nullptr /* pac_request*/, proxy_delegate,
net_log) == OK;
}
int ProxyService::TryToCompleteSynchronously(const GURL& url,
int load_flags,
ProxyDelegate* proxy_delegate,
ProxyInfo* result) {
DCHECK_NE(STATE_NONE, current_state_);
if (current_state_ != STATE_READY)
return ERR_IO_PENDING; // Still initializing.
DCHECK_NE(config_.id(), ProxyConfig::kInvalidConfigID);
// If it was impossible to fetch or parse the PAC script, we cannot complete
// the request here and bail out.
if (permanent_error_ != OK)
return permanent_error_;
if (config_.HasAutomaticSettings())
return ERR_IO_PENDING; // Must submit the request to the proxy resolver.
// Use the manual proxy settings.
config_.proxy_rules().Apply(url, result);
result->config_source_ = config_.source();
result->config_id_ = config_.id();
return OK;
}
ProxyService::~ProxyService() {
NetworkChangeNotifier::RemoveIPAddressObserver(this);
NetworkChangeNotifier::RemoveDNSObserver(this);
config_service_->RemoveObserver(this);
  // Cancel any in-progress requests.
for (PendingRequests::iterator it = pending_requests_.begin();
it != pending_requests_.end();
++it) {
(*it)->Cancel();
}
}
void ProxyService::SuspendAllPendingRequests() {
for (PendingRequests::iterator it = pending_requests_.begin();
it != pending_requests_.end();
++it) {
PacRequest* req = it->get();
if (req->is_started()) {
req->CancelResolveJob();
req->net_log()->BeginEvent(
NetLog::TYPE_PROXY_SERVICE_WAITING_FOR_INIT_PAC);
}
}
}
void ProxyService::SetReady() {
DCHECK(!init_proxy_resolver_.get());
current_state_ = STATE_READY;
// Make a copy in case |this| is deleted during the synchronous completion
// of one of the requests. If |this| is deleted then all of the PacRequest
// instances will be Cancel()-ed.
PendingRequests pending_copy = pending_requests_;
for (PendingRequests::iterator it = pending_copy.begin();
it != pending_copy.end();
++it) {
PacRequest* req = it->get();
if (!req->is_started() && !req->was_cancelled()) {
req->net_log()->EndEvent(NetLog::TYPE_PROXY_SERVICE_WAITING_FOR_INIT_PAC);
// Note that we re-check for synchronous completion, in case we are
      // no longer using a ProxyResolver (can happen if we fell back to manual).
req->StartAndCompleteCheckingForSynchronous();
}
}
}
void ProxyService::ApplyProxyConfigIfAvailable() {
DCHECK_EQ(STATE_NONE, current_state_);
config_service_->OnLazyPoll();
// If we have already fetched the configuration, start applying it.
if (fetched_config_.is_valid()) {
InitializeUsingLastFetchedConfig();
return;
}
// Otherwise we need to first fetch the configuration.
current_state_ = STATE_WAITING_FOR_PROXY_CONFIG;
// Retrieve the current proxy configuration from the ProxyConfigService.
// If a configuration is not available yet, we will get called back later
// by our ProxyConfigService::Observer once it changes.
ProxyConfig config;
ProxyConfigService::ConfigAvailability availability =
config_service_->GetLatestProxyConfig(&config);
if (availability != ProxyConfigService::CONFIG_PENDING)
OnProxyConfigChanged(config, availability);
}
void ProxyService::OnInitProxyResolverComplete(int result) {
DCHECK_EQ(STATE_WAITING_FOR_INIT_PROXY_RESOLVER, current_state_);
DCHECK(init_proxy_resolver_.get());
DCHECK(fetched_config_.HasAutomaticSettings());
config_ = init_proxy_resolver_->effective_config();
// At this point we have decided which proxy settings to use (i.e. which PAC
// script if any). We start up a background poller to periodically revisit
// this decision. If the contents of the PAC script change, or if the
// result of proxy auto-discovery changes, this poller will notice it and
// will trigger a re-initialization using the newly discovered PAC.
script_poller_.reset(new ProxyScriptDeciderPoller(
base::Bind(&ProxyService::InitializeUsingDecidedConfig,
base::Unretained(this)),
fetched_config_, resolver_factory_->expects_pac_bytes(),
proxy_script_fetcher_.get(), dhcp_proxy_script_fetcher_.get(), result,
init_proxy_resolver_->script_data(), NULL));
script_poller_->set_quick_check_enabled(quick_check_enabled_);
init_proxy_resolver_.reset();
// When using the out-of-process resolver, creating the resolver can complete
// with the ERR_PAC_SCRIPT_TERMINATED result code, which indicates the
// resolver process crashed.
UMA_HISTOGRAM_BOOLEAN("Net.ProxyService.ScriptTerminatedOnInit",
result == ERR_PAC_SCRIPT_TERMINATED);
if (result != OK) {
if (fetched_config_.pac_mandatory()) {
VLOG(1) << "Failed configuring with mandatory PAC script, blocking all "
"traffic.";
config_ = fetched_config_;
result = ERR_MANDATORY_PROXY_CONFIGURATION_FAILED;
} else {
VLOG(1) << "Failed configuring with PAC script, falling-back to manual "
"proxy servers.";
config_ = fetched_config_;
config_.ClearAutomaticSettings();
result = OK;
}
}
permanent_error_ = result;
// TODO(eroman): Make this ID unique in the case where configuration changed
// due to ProxyScriptDeciderPoller.
config_.set_id(fetched_config_.id());
config_.set_source(fetched_config_.source());
// Resume any requests which we had to defer until the PAC script was
// downloaded.
SetReady();
}
int ProxyService::ReconsiderProxyAfterError(const GURL& url,
const std::string& method,
int load_flags,
int net_error,
ProxyInfo* result,
const CompletionCallback& callback,
PacRequest** pac_request,
ProxyDelegate* proxy_delegate,
const BoundNetLog& net_log) {
DCHECK(CalledOnValidThread());
// Check to see if we have a new config since ResolveProxy was called. We
// want to re-run ResolveProxy in two cases: 1) we have a new config, or 2) a
// direct connection failed and we never tried the current config.
DCHECK(result);
bool re_resolve = result->config_id_ != config_.id();
if (re_resolve) {
// If we have a new config or the config was never tried, we delete the
// list of bad proxies and we try again.
proxy_retry_info_.clear();
return ResolveProxy(url, method, load_flags, result, callback, pac_request,
proxy_delegate, net_log);
}
DCHECK(!result->is_empty());
ProxyServer bad_proxy = result->proxy_server();
// We don't have new proxy settings to try, try to fallback to the next proxy
// in the list.
bool did_fallback = result->Fallback(net_error, net_log);
  // Return synchronous failure if there is nothing left to fall back to.
// TODO(eroman): This is a yucky API, clean it up.
return did_fallback ? OK : ERR_FAILED;
}
bool ProxyService::MarkProxiesAsBadUntil(
const ProxyInfo& result,
base::TimeDelta retry_delay,
const std::vector<ProxyServer>& additional_bad_proxies,
const BoundNetLog& net_log) {
result.proxy_list_.UpdateRetryInfoOnFallback(&proxy_retry_info_, retry_delay,
false, additional_bad_proxies,
OK, net_log);
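  // The call above marks |result|'s current proxy (plus any
  // |additional_bad_proxies|) as bad; report true only when at least one
  // usable proxy remains in the list after those are excluded.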
return result.proxy_list_.size() > (additional_bad_proxies.size() + 1);
}
void ProxyService::ReportSuccess(const ProxyInfo& result,
ProxyDelegate* proxy_delegate) {
DCHECK(CalledOnValidThread());
const ProxyRetryInfoMap& new_retry_info = result.proxy_retry_info();
if (new_retry_info.empty())
return;
for (ProxyRetryInfoMap::const_iterator iter = new_retry_info.begin();
iter != new_retry_info.end(); ++iter) {
ProxyRetryInfoMap::iterator existing = proxy_retry_info_.find(iter->first);
if (existing == proxy_retry_info_.end()) {
proxy_retry_info_[iter->first] = iter->second;
if (proxy_delegate) {
const ProxyServer& bad_proxy =
ProxyServer::FromURI(iter->first, ProxyServer::SCHEME_HTTP);
const ProxyRetryInfo& proxy_retry_info = iter->second;
proxy_delegate->OnFallback(bad_proxy, proxy_retry_info.net_error);
}
}
else if (existing->second.bad_until < iter->second.bad_until)
existing->second.bad_until = iter->second.bad_until;
}
if (net_log_) {
net_log_->AddGlobalEntry(
NetLog::TYPE_BAD_PROXY_LIST_REPORTED,
base::Bind(&NetLogBadProxyListCallback, &new_retry_info));
}
}
void ProxyService::CancelPacRequest(PacRequest* req) {
DCHECK(CalledOnValidThread());
DCHECK(req);
req->Cancel();
RemovePendingRequest(req);
}
LoadState ProxyService::GetLoadState(const PacRequest* req) const {
CHECK(req);
if (current_state_ == STATE_WAITING_FOR_INIT_PROXY_RESOLVER)
return init_proxy_resolver_->GetLoadState();
return req->GetLoadState();
}
bool ProxyService::ContainsPendingRequest(PacRequest* req) {
return pending_requests_.count(req) == 1;
}
void ProxyService::RemovePendingRequest(PacRequest* req) {
DCHECK(ContainsPendingRequest(req));
pending_requests_.erase(req);
}
int ProxyService::DidFinishResolvingProxy(const GURL& url,
const std::string& method,
int load_flags,
ProxyDelegate* proxy_delegate,
ProxyInfo* result,
int result_code,
const BoundNetLog& net_log,
base::TimeTicks start_time,
bool script_executed) {
// Don't track any metrics if start_time is 0, which will happen when the user
// calls |TryResolveProxySynchronously|.
if (!start_time.is_null()) {
TimeDelta diff = TimeTicks::Now() - start_time;
if (script_executed) {
// This function "fixes" the result code, so make sure script terminated
// errors are tracked. Only track result codes that were a result of
// script execution.
UMA_HISTOGRAM_BOOLEAN("Net.ProxyService.ScriptTerminated",
result_code == ERR_PAC_SCRIPT_TERMINATED);
UMA_HISTOGRAM_CUSTOM_TIMES("Net.ProxyService.GetProxyUsingScriptTime",
diff, base::TimeDelta::FromMicroseconds(100),
base::TimeDelta::FromSeconds(20), 50);
UMA_HISTOGRAM_SPARSE_SLOWLY("Net.ProxyService.GetProxyUsingScriptResult",
std::abs(result_code));
}
UMA_HISTOGRAM_BOOLEAN("Net.ProxyService.ResolvedUsingScript",
script_executed);
UMA_HISTOGRAM_CUSTOM_TIMES("Net.ProxyService.ResolveProxyTime", diff,
base::TimeDelta::FromMicroseconds(100),
base::TimeDelta::FromSeconds(20), 50);
}
// Log the result of the proxy resolution.
if (result_code == OK) {
// Allow the proxy delegate to interpose on the resolution decision,
// possibly modifying the ProxyInfo.
if (proxy_delegate)
proxy_delegate->OnResolveProxy(url, method, load_flags, *this, result);
net_log.AddEvent(NetLog::TYPE_PROXY_SERVICE_RESOLVED_PROXY_LIST,
base::Bind(&NetLogFinishedResolvingProxyCallback, result));
// This check is done to only log the NetLog event when necessary, it's
// not a performance optimization.
if (!proxy_retry_info_.empty()) {
result->DeprioritizeBadProxies(proxy_retry_info_);
net_log.AddEvent(
NetLog::TYPE_PROXY_SERVICE_DEPRIORITIZED_BAD_PROXIES,
base::Bind(&NetLogFinishedResolvingProxyCallback, result));
}
} else {
net_log.AddEventWithNetErrorCode(
NetLog::TYPE_PROXY_SERVICE_RESOLVED_PROXY_LIST, result_code);
bool reset_config = result_code == ERR_PAC_SCRIPT_TERMINATED;
if (!config_.pac_mandatory()) {
// Fall-back to direct when the proxy resolver fails. This corresponds
// with a javascript runtime error in the PAC script.
//
// This implicit fall-back to direct matches Firefox 3.5 and
// Internet Explorer 8. For more information, see:
//
// http://www.chromium.org/developers/design-documents/proxy-settings-fallback
result->UseDirect();
result_code = OK;
// Allow the proxy delegate to interpose on the resolution decision,
// possibly modifying the ProxyInfo.
if (proxy_delegate)
proxy_delegate->OnResolveProxy(url, method, load_flags, *this, result);
} else {
result_code = ERR_MANDATORY_PROXY_CONFIGURATION_FAILED;
}
if (reset_config) {
ResetProxyConfig(false);
// If the ProxyResolver crashed, force it to be re-initialized for the
// next request by resetting the proxy config. If there are other pending
// requests, trigger the recreation immediately so those requests retry.
if (pending_requests_.size() > 1)
ApplyProxyConfigIfAvailable();
}
}
net_log.EndEvent(NetLog::TYPE_PROXY_SERVICE);
return result_code;
}
void ProxyService::SetProxyScriptFetchers(
ProxyScriptFetcher* proxy_script_fetcher,
scoped_ptr<DhcpProxyScriptFetcher> dhcp_proxy_script_fetcher) {
DCHECK(CalledOnValidThread());
State previous_state = ResetProxyConfig(false);
proxy_script_fetcher_.reset(proxy_script_fetcher);
dhcp_proxy_script_fetcher_ = std::move(dhcp_proxy_script_fetcher);
if (previous_state != STATE_NONE)
ApplyProxyConfigIfAvailable();
}
ProxyScriptFetcher* ProxyService::GetProxyScriptFetcher() const {
DCHECK(CalledOnValidThread());
return proxy_script_fetcher_.get();
}
ProxyService::State ProxyService::ResetProxyConfig(bool reset_fetched_config) {
DCHECK(CalledOnValidThread());
State previous_state = current_state_;
permanent_error_ = OK;
proxy_retry_info_.clear();
script_poller_.reset();
init_proxy_resolver_.reset();
SuspendAllPendingRequests();
resolver_.reset();
config_ = ProxyConfig();
if (reset_fetched_config)
fetched_config_ = ProxyConfig();
current_state_ = STATE_NONE;
return previous_state;
}
void ProxyService::ResetConfigService(
scoped_ptr<ProxyConfigService> new_proxy_config_service) {
DCHECK(CalledOnValidThread());
State previous_state = ResetProxyConfig(true);
// Release the old configuration service.
if (config_service_.get())
config_service_->RemoveObserver(this);
// Set the new configuration service.
config_service_ = std::move(new_proxy_config_service);
config_service_->AddObserver(this);
if (previous_state != STATE_NONE)
ApplyProxyConfigIfAvailable();
}
void ProxyService::ForceReloadProxyConfig() {
DCHECK(CalledOnValidThread());
ResetProxyConfig(false);
ApplyProxyConfigIfAvailable();
}
// static
scoped_ptr<ProxyConfigService> ProxyService::CreateSystemProxyConfigService(
const scoped_refptr<base::SingleThreadTaskRunner>& io_task_runner,
const scoped_refptr<base::SingleThreadTaskRunner>& file_task_runner) {
#if defined(OS_WIN)
return make_scoped_ptr(new ProxyConfigServiceWin());
#elif defined(OS_IOS)
return make_scoped_ptr(new ProxyConfigServiceIOS());
#elif defined(OS_MACOSX)
return make_scoped_ptr(new ProxyConfigServiceMac(io_task_runner));
#elif defined(OS_CHROMEOS)
LOG(ERROR) << "ProxyConfigService for ChromeOS should be created in "
<< "profile_io_data.cc::CreateProxyConfigService and this should "
<< "be used only for examples.";
return make_scoped_ptr(new UnsetProxyConfigService);
#elif defined(OS_LINUX)
scoped_ptr<ProxyConfigServiceLinux> linux_config_service(
new ProxyConfigServiceLinux());
// Assume we got called on the thread that runs the default glib
// main loop, so the current thread is where we should be running
// gconf calls from.
scoped_refptr<base::SingleThreadTaskRunner> glib_thread_task_runner =
base::ThreadTaskRunnerHandle::Get();
// Synchronously fetch the current proxy config (since we are running on
// glib_default_loop). Additionally register for notifications (delivered in
// either |glib_default_loop| or |file_task_runner|) to keep us updated when
// the proxy config changes.
linux_config_service->SetupAndFetchInitialConfig(
glib_thread_task_runner, io_task_runner, file_task_runner);
return std::move(linux_config_service);
#elif defined(OS_ANDROID)
return make_scoped_ptr(new ProxyConfigServiceAndroid(
io_task_runner, base::ThreadTaskRunnerHandle::Get()));
#else
LOG(WARNING) << "Failed to choose a system proxy settings fetcher "
"for this platform.";
return make_scoped_ptr(new ProxyConfigServiceDirect());
#endif
}
// static
const ProxyService::PacPollPolicy* ProxyService::set_pac_script_poll_policy(
const PacPollPolicy* policy) {
return ProxyScriptDeciderPoller::set_policy(policy);
}
// static
scoped_ptr<ProxyService::PacPollPolicy>
ProxyService::CreateDefaultPacPollPolicy() {
return scoped_ptr<PacPollPolicy>(new DefaultPollPolicy());
}
void ProxyService::OnProxyConfigChanged(
const ProxyConfig& config,
ProxyConfigService::ConfigAvailability availability) {
// Retrieve the current proxy configuration from the ProxyConfigService.
// If a configuration is not available yet, we will get called back later
// by our ProxyConfigService::Observer once it changes.
ProxyConfig effective_config;
switch (availability) {
case ProxyConfigService::CONFIG_PENDING:
// ProxyConfigService implementors should never pass CONFIG_PENDING.
NOTREACHED() << "Proxy config change with CONFIG_PENDING availability!";
return;
case ProxyConfigService::CONFIG_VALID:
effective_config = config;
break;
case ProxyConfigService::CONFIG_UNSET:
effective_config = ProxyConfig::CreateDirect();
break;
}
// Emit the proxy settings change to the NetLog stream.
if (net_log_) {
net_log_->AddGlobalEntry(NetLog::TYPE_PROXY_CONFIG_CHANGED,
base::Bind(&NetLogProxyConfigChangedCallback,
&fetched_config_, &effective_config));
}
// Set the new configuration as the most recently fetched one.
fetched_config_ = effective_config;
fetched_config_.set_id(1); // Needed for a later DCHECK of is_valid().
InitializeUsingLastFetchedConfig();
}
void ProxyService::InitializeUsingLastFetchedConfig() {
ResetProxyConfig(false);
DCHECK(fetched_config_.is_valid());
// Increment the ID to reflect that the config has changed.
fetched_config_.set_id(next_config_id_++);
if (!fetched_config_.HasAutomaticSettings()) {
config_ = fetched_config_;
SetReady();
return;
}
// Start downloading + testing the PAC scripts for this new configuration.
current_state_ = STATE_WAITING_FOR_INIT_PROXY_RESOLVER;
// If we changed networks recently, we should delay running proxy auto-config.
TimeDelta wait_delay =
stall_proxy_autoconfig_until_ - TimeTicks::Now();
init_proxy_resolver_.reset(new InitProxyResolver());
init_proxy_resolver_->set_quick_check_enabled(quick_check_enabled_);
int rv = init_proxy_resolver_->Start(
&resolver_, resolver_factory_.get(), proxy_script_fetcher_.get(),
dhcp_proxy_script_fetcher_.get(), net_log_, fetched_config_, wait_delay,
base::Bind(&ProxyService::OnInitProxyResolverComplete,
base::Unretained(this)));
if (rv != ERR_IO_PENDING)
OnInitProxyResolverComplete(rv);
}
void ProxyService::InitializeUsingDecidedConfig(
int decider_result,
ProxyResolverScriptData* script_data,
const ProxyConfig& effective_config) {
DCHECK(fetched_config_.is_valid());
DCHECK(fetched_config_.HasAutomaticSettings());
ResetProxyConfig(false);
current_state_ = STATE_WAITING_FOR_INIT_PROXY_RESOLVER;
init_proxy_resolver_.reset(new InitProxyResolver());
int rv = init_proxy_resolver_->StartSkipDecider(
&resolver_, resolver_factory_.get(), effective_config, decider_result,
script_data, base::Bind(&ProxyService::OnInitProxyResolverComplete,
base::Unretained(this)));
if (rv != ERR_IO_PENDING)
OnInitProxyResolverComplete(rv);
}
void ProxyService::OnIPAddressChanged() {
// See the comment block by |kDelayAfterNetworkChangesMs| for info.
stall_proxy_autoconfig_until_ =
TimeTicks::Now() + stall_proxy_auto_config_delay_;
State previous_state = ResetProxyConfig(false);
if (previous_state != STATE_NONE)
ApplyProxyConfigIfAvailable();
}
void ProxyService::OnDNSChanged() {
OnIPAddressChanged();
}
}  // namespace net
<|file_name|>test_account_invoice_line.py<|end_file_name|>
# -*- coding: utf-8 -*-
# Copyright 2015-2016 LasLabs Inc.
# License AGPL-3.0 or later (http://www.gnu.org/licenses/agpl.html).
import mock
from odoo.addons.connector_carepoint.models import account_invoice_line
from ..common import SetUpCarepointBase
model = 'odoo.addons.connector_carepoint.models.account_invoice_line'
class EndTestException(Exception):
pass
class AccountInvoiceLineTestBase(SetUpCarepointBase):
def setUp(self):
super(AccountInvoiceLineTestBase, self).setUp()
self.model = 'carepoint.account.invoice.line'
self.mock_env = self.get_carepoint_helper(
self.model
)
@property
def record(self):
""" Model record fixture """
return {
'rxdisp_id': 12345,
'primary_pay_date': '2016-01-23 01:23:45',
't_patient_pay_sub': '10.23',
}
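    # Note: the fixture above is a minimal stand-in for a CarePoint
    # dispensed-prescription row; the field values are illustrative only.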
class TestAccountInvoiceLineUnit(AccountInvoiceLineTestBase):
def setUp(self):
super(TestAccountInvoiceLineUnit, self).setUp()
self.Unit = account_invoice_line.AccountInvoiceLineUnit
self.unit = self.Unit(self.mock_env)
def test_import_invoice_lines_for_procurement_unit_for_adapter(self):
""" It should get unit for adapter """
with mock.patch.object(self.unit, 'unit_for') as mk:
mk.side_effect = EndTestException
with self.assertRaises(EndTestException):
self.unit._import_invoice_lines_for_procurement(True)
mk.assert_called_once_with(
account_invoice_line.CarepointCRUDAdapter
)
def test_import_invoice_lines_for_procurement_unit_for_importer(self):
""" It should get unit for importer """<|fim▁hole|> mk.assert_called_with(
account_invoice_line.AccountInvoiceLineImporter
)
def test_import_invoice_lines_for_procurement_search(self):
""" It should search adapter for unit """
expect = 'expect'
with mock.patch.object(self.unit, 'unit_for') as mk:
mk().search.side_effect = EndTestException
with self.assertRaises(EndTestException):
self.unit._import_invoice_lines_for_procurement(expect)
mk().search.assert_called_once_with(
rxdisp_id=expect,
)
def test_import_invoice_lines_for_procurement_imports(self):
""" It should run importer on records """
with mock.patch.object(self.unit, 'unit_for') as mk:
expect = mock.MagicMock()
adapter = mock.MagicMock()
adapter.search.return_value = [True]
mk.side_effect = [adapter, expect]
expect.run.side_effect = EndTestException
with self.assertRaises(EndTestException):
self.unit._import_invoice_lines_for_procurement(True)
expect.run.assert_called_once_with(
adapter.search()[0]
)
class TestAccountInvoiceLineImportMapper(AccountInvoiceLineTestBase):
def setUp(self):
super(TestAccountInvoiceLineImportMapper, self).setUp()
self.Unit = account_invoice_line.AccountInvoiceLineImportMapper
self.unit = self.Unit(self.mock_env)
def test_carepoint_id(self):
""" It should return correct attribute """
expect = {'carepoint_id': self.record['rxdisp_id']}
res = self.unit.carepoint_id(self.record)
self.assertDictEqual(expect, res)
def test_invoice_id_get_binder(self):
""" It should get binder for record type """
with mock.patch.object(self.unit, 'binder_for'):
self.unit.binder_for.side_effect = EndTestException
with self.assertRaises(EndTestException):
self.unit.invoice_id(self.record)
self.unit.binder_for.assert_called_once_with(
'carepoint.procurement.order'
)
def test_invoice_id_to_odoo(self):
""" It should get Odoo record for binding """
with mock.patch.object(self.unit, 'binder_for'):
self.unit.binder_for().to_odoo.side_effect = EndTestException
with self.assertRaises(EndTestException):
self.unit.invoice_id(self.record)
self.unit.binder_for().to_odoo.assert_called_once_with(
self.record['rxdisp_id'], browse=True,
)
def test_invoice_id_search(self):
""" It should search for invoice from origin """
with mock.patch.object(self.unit, 'binder_for'):
with mock.patch.object(self.unit.session, 'env') as env:
env['account.invoice'].search.side_effect = EndTestException
proc_id = self.unit.binder_for().to_odoo()
with self.assertRaises(EndTestException):
self.unit.invoice_id(self.record)
env['account.invoice'].search.assert_called_once_with(
[('origin', '=', proc_id.sale_line_id.order_id.name)],
limit=1,
)
def test_invoice_id_existing_invoice(self):
""" It should return existing matches invoice """
expect = mock.MagicMock()
with mock.patch.object(self.unit, 'binder_for'):
with mock.patch.object(self.unit.session, 'env') as env:
env['account.invoice'].search.return_value = [expect]
res = self.unit.invoice_id(self.record)
expect = {
'invoice_id': expect.id,
}
self.assertDictEqual(res, expect)
def test_invoice_id_new_invoice_prepare_invoice(self):
""" It should prepare invoice from sale order if not existing """
with mock.patch.object(self.unit, 'binder_for') as mk:
with mock.patch.object(self.unit.session, 'env') as env:
env['account.invoice'].search.return_value = []
prep = mk().to_odoo().sale_line_id.order_id._prepare_invoice
prep.side_effect = EndTestException
with self.assertRaises(EndTestException):
self.unit.invoice_id(self.record)
def test_invoice_id_new_invoice_updates_invoice_date(self):
""" It should inject remote date into invoice vals """
with mock.patch.object(self.unit, 'binder_for') as mk:
with mock.patch.object(self.unit.session, 'env') as env:
env['account.invoice'].search.return_value = []
prep = mk().to_odoo().sale_line_id.order_id._prepare_invoice
self.unit.invoice_id(self.record)
prep().update.assert_called_once_with({
'date_invoice': self.record['primary_pay_date'],
})
def test_invoice_id_new_invoice_create(self):
""" It should create invoice with proper vals """
with mock.patch.object(self.unit, 'binder_for') as mk:
with mock.patch.object(self.unit.session, 'env') as env:
env['account.invoice'].search.return_value = []
prep = mk().to_odoo().sale_line_id.order_id._prepare_invoice
self.unit.invoice_id(self.record)
env['account.invoice'].create.assert_called_once_with(prep())
def test_invoice_id_new_invoice_create_return(self):
""" It should return result of create in values """
with mock.patch.object(self.unit, 'binder_for'):
with mock.patch.object(self.unit.session, 'env') as env:
env['account.invoice'].search.return_value = []
res = self.unit.invoice_id(self.record)
expect = {'invoice_id': env['account.invoice'].create().id}
self.assertDictEqual(expect, res)
def test_sale_line_ids_get_binder(self):
""" It should get binder for record type """
with mock.patch.object(self.unit, 'binder_for'):
self.unit.binder_for.side_effect = EndTestException
with self.assertRaises(EndTestException):
self.unit.sale_line_ids(self.record)
self.unit.binder_for.assert_called_once_with(
'carepoint.procurement.order'
)
def test_sale_line_ids_to_odoo(self):
""" It should get Odoo record for binding """
with mock.patch.object(self.unit, 'binder_for'):
self.unit.binder_for().to_odoo.side_effect = EndTestException
with self.assertRaises(EndTestException):
self.unit.sale_line_ids(self.record)
self.unit.binder_for().to_odoo.assert_called_once_with(
self.record['rxdisp_id'], browse=True,
)
def test_sale_line_ids_return(self):
""" It should return proper values dict """
with mock.patch.object(self.unit, 'binder_for') as mk:
res = self.unit.sale_line_ids(self.record)
expect = {
'sale_line_ids': [(6, 0, [mk().to_odoo().sale_line_id.id])]
}
self.assertDictEqual(expect, res)
def test_invoice_line_data_get_binder(self):
""" It should get binder for record type """
with mock.patch.object(self.unit, 'binder_for'):
self.unit.binder_for.side_effect = EndTestException
with self.assertRaises(EndTestException):
self.unit.invoice_line_data(self.record)
self.unit.binder_for.assert_called_once_with(
'carepoint.procurement.order'
)
def test_invoice_line_data_to_odoo(self):
""" It should get Odoo record for binding """
with mock.patch.object(self.unit, 'binder_for'):
self.unit.binder_for().to_odoo.side_effect = EndTestException
with self.assertRaises(EndTestException):
self.unit.invoice_line_data(self.record)
self.unit.binder_for().to_odoo.assert_called_once_with(
self.record['rxdisp_id'], browse=True,
)
def test_invoice_line_data_sets_price_unit(self):
""" It should set the price_unit on sale line to circumvent lack
of price data in the remote sales records
"""
qty = 20
with mock.patch.object(self.unit, 'binder_for'):
line_id = self.unit.binder_for().to_odoo().sale_line_id
line_id.product_uom_qty = qty
self.unit.invoice_line_data(self.record)
self.assertEqual(
float(self.record['t_patient_pay_sub']) / qty,
line_id.price_unit
)
def test_invoice_line_data_prepares_invoice_line(self):
""" It should prepare invoice line based on sale line """
qty = 20
with mock.patch.object(self.unit, 'binder_for'):
line_id = self.unit.binder_for().to_odoo().sale_line_id
line_id.product_uom_qty = qty
self.unit.invoice_line_data(self.record)
line_id._prepare_invoice_line.assert_called_once_with(qty)
def test_invoice_line_data_return(self):
""" It should prepare invoice line based on sale line """
qty = 20
with mock.patch.object(self.unit, 'binder_for'):
line_id = self.unit.binder_for().to_odoo().sale_line_id
line_id.product_uom_qty = qty
res = self.unit.invoice_line_data(self.record)
self.assertEqual(line_id._prepare_invoice_line(), res)
class TestAccountInvoiceLineImporter(AccountInvoiceLineTestBase):
def setUp(self):
super(TestAccountInvoiceLineImporter, self).setUp()
self.Unit = account_invoice_line.AccountInvoiceLineImporter
self.unit = self.Unit(self.mock_env)
self.unit.carepoint_record = self.record
def test_import_dependencies(self):
""" It should import all depedencies """
with mock.patch.object(self.unit, '_import_dependency') as mk:
self.unit._import_dependencies()
mk.assert_has_calls([
mock.call(
self.record['rxdisp_id'],
'carepoint.procurement.order',
),
])
def test_after_import_get_binder_procurement(self):
""" It should get binder for record type """
with mock.patch.object(self.unit, 'binder_for'):
self.unit.binder_for.side_effect = EndTestException
with self.assertRaises(EndTestException):
self.unit._after_import(self.record)
self.unit.binder_for.assert_called_once_with(
'carepoint.procurement.order'
)
def test_after_import_to_odoo_procurement(self):
""" It should get Odoo record for binding """
with mock.patch.object(self.unit, 'binder_for'):
self.unit.binder_for().to_odoo.side_effect = EndTestException
with self.assertRaises(EndTestException):
self.unit._after_import(self.record)
self.unit.binder_for().to_odoo.assert_called_once_with(
self.record['rxdisp_id'], browse=True,
)
def test_after_import_get_binder_sale(self):
""" It should get binder for record type """
with mock.patch.object(self.unit, 'binder_for'):
self.unit.binder_for.side_effect = [mock.MagicMock(),
EndTestException]
with self.assertRaises(EndTestException):
self.unit._after_import(self.record)
self.unit.binder_for.assert_called_with(
'carepoint.sale.order'
)
def test_after_import_to_backend_sale(self):
""" It should get backend record for binding """
proc = mock.MagicMock()
with mock.patch.object(self.unit, 'binder_for'):
self.unit.binder_for().to_odoo.return_value = proc
self.unit.binder_for().to_backend.side_effect = EndTestException
with self.assertRaises(EndTestException):
self.unit._after_import(self.record)
self.unit.binder_for().to_backend.assert_called_with(
proc.sale_line_id.order_id.id,
)
def test_after_import_gets_proc_unit(self):
""" It should get unit for model """
with mock.patch.multiple(
self.unit, binder_for=mock.DEFAULT, unit_for=mock.DEFAULT
):
self.unit.unit_for.side_effect = EndTestException
with self.assertRaises(EndTestException):
self.unit._after_import(self.record)
self.unit.unit_for.assert_called_with(
account_invoice_line.ProcurementOrderUnit,
model='carepoint.procurement.order',
)
def test_after_import_gets_order_line_cnt(self):
""" It should get count of order lines for sale """
with mock.patch.multiple(
self.unit, binder_for=mock.DEFAULT, unit_for=mock.DEFAULT
):
self.unit.unit_for()._get_order_line_count.side_effect = \
EndTestException
with self.assertRaises(EndTestException):
self.unit._after_import(self.record)
self.unit.unit_for()._get_order_line_count.assert_called_with(
self.unit.binder_for().to_backend()
)
def test_after_import_gets_ref_for_cp_state(self):
""" It should get reference for carepoint state record """
with mock.patch.multiple(
self.unit, binder_for=mock.DEFAULT, unit_for=mock.DEFAULT,
session=mock.DEFAULT, _get_binding=mock.DEFAULT,
):
invoice_id = self.unit._get_binding().invoice_id
self.unit.unit_for()._get_order_line_count.return_value = 1
invoice_id.invoice_line_ids = [True]
self.unit.env.ref.side_effect = EndTestException
with self.assertRaises(EndTestException):
self.unit._after_import(self.record)
self.unit.env.ref.assert_called_with(
'connector_carepoint.state_%d' % (
self.unit.binder_for().to_odoo().sale_line_id.
order_id.carepoint_order_state_cn
)
)
def test_after_import_invoice_write_new_state(self):
""" It should write to invoice new states provided by remote system """
with mock.patch.multiple(
self.unit, binder_for=mock.DEFAULT, unit_for=mock.DEFAULT,
session=mock.DEFAULT, _get_binding=mock.DEFAULT,
):
invoice_id = self.unit._get_binding().invoice_id
self.unit.unit_for()._get_order_line_count.return_value = 1
invoice_id.invoice_line_ids = [True]
invoice_id.write.side_effect = EndTestException
with self.assertRaises(EndTestException):
self.unit._after_import(self.record)
invoice_id.write.assert_called_once_with({
'state': self.unit.env.ref().invoice_state,
})
def test_after_import_invoice_create_moves(self):
""" It should create accounting moves for newly paid invoices """
with mock.patch.multiple(
self.unit, binder_for=mock.DEFAULT, unit_for=mock.DEFAULT,
session=mock.DEFAULT, _get_binding=mock.DEFAULT,
):
invoice_id = self.unit._get_binding().invoice_id
self.unit.unit_for()._get_order_line_count.return_value = 1
invoice_id.invoice_line_ids = [True]
self.unit.env.ref().invoice_state = 'paid'
invoice_id.action_move_create.side_effect = EndTestException
with self.assertRaises(EndTestException):
self.unit._after_import(self.record)
def test_after_import_invoice_validate(self):
""" It should validate newly paid invoices """
with mock.patch.multiple(
self.unit, binder_for=mock.DEFAULT, unit_for=mock.DEFAULT,
session=mock.DEFAULT, _get_binding=mock.DEFAULT,
):
invoice_id = self.unit._get_binding().invoice_id
self.unit.unit_for()._get_order_line_count.return_value = 1
invoice_id.invoice_line_ids = [True]
self.unit.env.ref().invoice_state = 'paid'
invoice_id.invoice_validate.side_effect = EndTestException
with self.assertRaises(EndTestException):
self.unit._after_import(self.record)
    def test_after_import_invoice_pay_and_reconcile(self):
""" It should pay and reconcile invoices when residual on invoice """
with mock.patch.multiple(
self.unit, binder_for=mock.DEFAULT, unit_for=mock.DEFAULT,
session=mock.DEFAULT, _get_binding=mock.DEFAULT,
):
invoice_id = self.unit._get_binding().invoice_id
invoice_id.residual = 1
self.unit.unit_for()._get_order_line_count.return_value = 1
invoice_id.invoice_line_ids = [True]
self.unit.env.ref().invoice_state = 'paid'
invoice_id.pay_and_reconcile.side_effect = EndTestException
with self.assertRaises(EndTestException):
self.unit._after_import(self.record)
invoice_id.pay_and_reconcile.assert_called_once_with(
self.unit.backend_record.default_payment_journal,
date=invoice_id.date_invoice,
)<|fim▁end|> | with mock.patch.object(self.unit, 'unit_for') as mk:
mk.side_effect = [None, EndTestException]
with self.assertRaises(EndTestException):
self.unit._import_invoice_lines_for_procurement(True) |
<|file_name|>pathstyles.py<|end_file_name|><|fim▁begin|>from cadnano.gui.views.styles import *
from PyQt5.QtGui import QColor, QFont, QFontMetricsF
# Path Sizing
VIRTUALHELIXHANDLEITEM_RADIUS = 30
VIRTUALHELIXHANDLEITEM_STROKE_WIDTH = 2
PATH_BASE_WIDTH = 20 # used to size bases (grid squares, handles, etc)
PATH_HELIX_HEIGHT = 2 * PATH_BASE_WIDTH # staple + scaffold
PATH_HELIX_PADDING = 50 # gap between PathHelix objects in path view
PATH_GRID_STROKE_WIDTH = 0.5
SLICE_HANDLE_STROKE_WIDTH = 1
PATH_STRAND_STROKE_WIDTH = 3
PATH_STRAND_HIGHLIGHT_STROKE_WIDTH = 8
PATH_SELECTBOX_STROKE_WIDTH = 1.5
PCH_BORDER_PADDING = 1
PATH_BASE_HL_STROKE_WIDTH = 2 # PathTool highlight box
MINOR_GRID_STROKE_WIDTH = 0.5
MAJOR_GRID_STROKE_WIDTH = 0.5
OLIGO_LEN_BELOW_WHICH_HIGHLIGHT = 20
OLIGO_LEN_ABOVE_WHICH_HIGHLIGHT = 49
# Path Drawing
PATH_XOVER_LINE_SCALE_X = 0.035
PATH_XOVER_LINE_SCALE_Y = 0.035
# Path Colors
SCAFFOLD_BKG_FILL = QColor(230, 230, 230)
ACTIVE_SLICE_HANDLE_FILL = QColor(255, 204, 153, 128) # ffcc99
ACTIVE_SLICE_HANDLE_STROKE = QColor(204, 102, 51, 128) # cc6633
MINOR_GRID_STROKE = QColor(204, 204, 204)  # cccccc
MAJOR_GRID_STROKE = QColor(153, 153, 153)  # 999999
SCAF_STROKE = QColor(0, 102, 204) # 0066cc
HANDLE_FILL = QColor(0, 102, 204) # 0066cc
PXI_SCAF_STROKE = QColor(0, 102, 204, 153)
PXI_STAP_STROKE = QColor(204, 0, 0, 153)
PXI_DISAB_STROKE = QColor(204, 204, 204, 255)
RED_STROKE = QColor(204, 0, 0)
ERASE_FILL = QColor(204, 0, 0, 63)
FORCE_FILL = QColor(0, 255, 255, 63)
BREAK_FILL = QColor(204, 0, 0, 255)
COLORBOX_FILL = QColor(204, 0, 0)
COLORBOX_STROKE = QColor(102, 102, 102)
STAP_COLORS = [QColor(204, 0, 0),
QColor(247, 67, 8),
QColor(247, 147, 30),
QColor(170, 170, 0),
QColor(87, 187, 0),
QColor(0, 114, 0),
QColor(3, 182, 162),
QColor(23, 0, 222),
QColor(115, 0, 222),
QColor(184, 5, 108),
QColor(51, 51, 51),
QColor(136, 136, 136)]
SCAF_COLORS = [QColor(0, 102, 204)]
# QColor(64, 138, 212),
# QColor(0, 38, 76),
# QColor(23, 50, 76),
# QColor(0, 76, 153)]
DEFAULT_STAP_COLOR = "#888888"
DEFAULT_SCAF_COLOR = "#0066cc"
SELECTED_COLOR = QColor(255, 51, 51)
# brightColors = [QColor() for i in range(10)]
# for i in range(len(brightColors)):
# brightColors[i].setHsvF(i/12.0, 1.0, 1.0)
# bright_palette = Palette(brightColors)
# cadnn1_palette = Palette(cadnn1Colors)
# default_palette = cadnn1_palette
SELECTIONBOX_PEN_WIDTH = 2.5
# Loop/Insertion path details
INSERTWIDTH = 2
SKIPWIDTH = 2
# Add Sequence Tool
INVALID_DNA_COLOR = QColor(204, 0, 0)
UNDERLINE_INVALID_DNA = True
#Z values
#bottom
ZACTIVESLICEHANDLE = 10<|fim▁hole|>ZPATHHELIX = 30
ZPATHSELECTION = 40
ZXOVERITEM = 90
ZPATHTOOL = 130
ZSTRANDITEM = 140
ZENDPOINTITEM = 150
ZINSERTHANDLE = 160
#top
# sequence stuff Font stuff
SEQUENCEFONT = None
SEQUENCEFONTH = 15
SEQUENCEFONTCHARWIDTH = 12
SEQUENCEFONTCHARHEIGHT = 12
SEQUENCEFONTEXTRAWIDTH = 3
SEQUENCETEXTXCENTERINGOFFSET = 0
def setFontMetrics():
""" Application must be running before you mess
too much with Fonts in Qt5
"""
global SEQUENCEFONT
global SEQUENCEFONTMETRICS
global SEQUENCEFONTCHARWIDTH
global SEQUENCEFONTCHARHEIGHT
global SEQUENCEFONTEXTRAWIDTH
global SEQUENCETEXTXCENTERINGOFFSET
global SEQUENCETEXTYCENTERINGOFFSET
SEQUENCEFONT = QFont("Monaco")
if hasattr(QFont, 'Monospace'):
SEQUENCEFONT.setStyleHint(QFont.Monospace)
SEQUENCEFONT.setFixedPitch(True)
SEQUENCEFONTH = int(PATH_BASE_WIDTH / 3.)
SEQUENCEFONT.setPixelSize(SEQUENCEFONTH)
SEQUENCEFONTMETRICS = QFontMetricsF(SEQUENCEFONT)
SEQUENCEFONTCHARWIDTH = SEQUENCEFONTMETRICS.width("A")
SEQUENCEFONTCHARHEIGHT = SEQUENCEFONTMETRICS.height()
SEQUENCEFONTEXTRAWIDTH = PATH_BASE_WIDTH - SEQUENCEFONTCHARWIDTH
SEQUENCEFONT.setLetterSpacing(QFont.AbsoluteSpacing,
SEQUENCEFONTEXTRAWIDTH)
SEQUENCETEXTXCENTERINGOFFSET = SEQUENCEFONTEXTRAWIDTH / 4.
SEQUENCETEXTYCENTERINGOFFSET = PATH_BASE_WIDTH * 0.6
#end def
XOVER_LABEL_FONT = QFont(THE_FONT, THE_FONT_SIZE, QFont.Bold)
VIRTUALHELIXHANDLEITEM_FONT = QFont(THE_FONT, 3*THE_FONT_SIZE, QFont.Bold)
XOVER_LABEL_COLOR = QColor(0,0,0)<|fim▁end|> | ZPATHHELIXGROUP = 20 |
<|file_name|>issue-42679.rs<|end_file_name|><|fim▁begin|>// run-pass
#![feature(box_syntax)]
#![feature(box_patterns)]
#[derive(Debug, PartialEq)]<|fim▁hole|>enum Test {
Foo(usize),
Bar(isize),
}
fn main() {
let a = box Test::Foo(10);
let b = box Test::Bar(-20);
match (a, b) {
(_, box Test::Foo(_)) => unreachable!(),
(box Test::Foo(x), b) => {
assert_eq!(x, 10);
assert_eq!(b, box Test::Bar(-20));
},
_ => unreachable!(),
}
}<|fim▁end|> | |
<|file_name|>test_bsp.cpp<|end_file_name|><|fim▁begin|>//
// This source file is part of appleseed.
// Visit https://appleseedhq.net/ for additional information and resources.
//
// This software is released under the MIT license.
//
// Copyright (c) 2010-2013 Francois Beaune, Jupiter Jazz Limited
// Copyright (c) 2014-2018 Francois Beaune, The appleseedhq Organization
//
// Permission is hereby granted, free of charge, to any person obtaining a copy
// of this software and associated documentation files (the "Software"), to deal
// in the Software without restriction, including without limitation the rights
// to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
// copies of the Software, and to permit persons to whom the Software is
// furnished to do so, subject to the following conditions:
//
// The above copyright notice and this permission notice shall be included in
// all copies or substantial portions of the Software.
//
// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
// IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
// FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
// AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
// LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
// OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
// THE SOFTWARE.
//
// appleseed.foundation headers.
#include "foundation/core/concepts/noncopyable.h"
#include "foundation/math/aabb.h"
#include "foundation/math/bsp.h"
#include "foundation/math/intersection/rayaabb.h"
#include "foundation/math/ray.h"
#include "foundation/math/split.h"
#include "foundation/math/vector.h"
#include "foundation/utility/test.h"
// Standard headers.
#include <algorithm>
#include <cstddef>
#include <limits>
#include <memory>
#include <utility>
#include <vector>
using namespace foundation;
using namespace std;
TEST_SUITE(Foundation_Math_BSP_Node)
{
typedef bsp::Node<double> NodeType;
TEST_CASE(TestLeafNode)
{
NodeType node;
node.make_leaf();
EXPECT_TRUE(node.is_leaf());
node.set_leaf_index(42);
EXPECT_TRUE(node.is_leaf());
EXPECT_EQ(42, node.get_leaf_index());
const size_t LeafIndex = (size_t(1) << 31) - 1;
node.set_leaf_index(LeafIndex);
EXPECT_TRUE(node.is_leaf());
EXPECT_EQ(LeafIndex, node.get_leaf_index());
node.set_leaf_size(33);
EXPECT_TRUE(node.is_leaf());
EXPECT_EQ(LeafIndex, node.get_leaf_index());
}
TEST_CASE(TestInteriorNode)
{
NodeType node;
node.make_interior();
EXPECT_TRUE(node.is_interior());
node.set_child_node_index(42);
EXPECT_TRUE(node.is_interior());
EXPECT_EQ(42, node.get_child_node_index());
const size_t ChildIndex = (size_t(1) << 29) - 1;
node.set_child_node_index(ChildIndex);
EXPECT_TRUE(node.is_interior());
EXPECT_EQ(ChildIndex, node.get_child_node_index());
node.set_split_dim(1);
EXPECT_TRUE(node.is_interior());
EXPECT_EQ(ChildIndex, node.get_child_node_index());
EXPECT_EQ(1, node.get_split_dim());
node.set_split_abs(66.0);
EXPECT_TRUE(node.is_interior());
EXPECT_EQ(ChildIndex, node.get_child_node_index());
EXPECT_EQ(1, node.get_split_dim());
EXPECT_EQ(66.0, node.get_split_abs());
}
}
TEST_SUITE(Foundation_Math_BSP_Intersector)
{
class Leaf
: public NonCopyable
{
public:
void clear()
{
m_boxes.clear();
}
size_t get_size() const
{
return m_boxes.size();
}
void insert(const AABB3d& box)
{
m_boxes.push_back(box);
}
const AABB3d& get_box(const size_t i) const
{
return m_boxes[i];
}
AABB3d get_bbox() const
{
AABB3d bbox;
bbox.invalidate();
for (size_t i = 0; i < m_boxes.size(); ++i)
bbox.insert(m_boxes[i]);
return bbox;
}
size_t get_memory_size() const
{
return 0;
}
private:
vector<AABB3d> m_boxes;
};
struct LeafFactory
: public NonCopyable
{
Leaf* create_leaf()
{
return new Leaf();
}
};
struct LeafSplitter
: public NonCopyable
{
typedef bsp::LeafInfo<double, 3> LeafInfoType;
typedef Split<double> SplitType;
bool m_first_leaf;
LeafSplitter()
: m_first_leaf(true)
{
}
double get_priority(
const Leaf& leaf,
const LeafInfoType& leaf_info)
{
const double priority = m_first_leaf ? 1.0 : 0.0;
m_first_leaf = false;
return priority;
}
bool split(
const Leaf& leaf,
const LeafInfoType& leaf_info,
SplitType& split)
{
split.m_dimension = 0;
split.m_abscissa = 0.0;
return true;
}
void sort(
const Leaf& leaf,
const LeafInfoType& leaf_info,
const SplitType& split,
Leaf& left_leaf,
const LeafInfoType& left_leaf_info,
Leaf& right_leaf,
const LeafInfoType& right_leaf_info)
{
for (size_t i = 0; i < leaf.get_size(); ++i)
{
const AABB3d& box = leaf.get_box(i);
if (box.max[split.m_dimension] <= split.m_abscissa)
{
left_leaf.insert(box);
}
else if (box.min[split.m_dimension] >= split.m_abscissa)
{
right_leaf.insert(box);
}
else
{
left_leaf.insert(box);
right_leaf.insert(box);
}
}
}
};
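    // Boxes straddling the split plane are inserted into both child leaves
    // above, which is why a ray embedded in the split plane is expected to
    // visit two leaves in the test cases below.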
class LeafVisitor
: public NonCopyable
{
public:
LeafVisitor()
: m_visited_leaf_count(0)
, m_closest_hit(numeric_limits<double>::max())
{
}
double visit(
const Leaf* leaf, // todo: why not a reference?
const Ray3d& ray,
const RayInfo3d& ray_info)
{
++m_visited_leaf_count;
for (size_t i = 0; i < leaf->get_size(); ++i)
{
const AABB3d& box = leaf->get_box(i);
double distance;
if (intersect(ray, ray_info, box, distance))
m_closest_hit = min(m_closest_hit, distance);
}
return m_closest_hit;
}
size_t get_visited_leaf_count() const
{
return m_visited_leaf_count;
}
double get_closest_hit() const
{
return m_closest_hit;
}
private:
size_t m_visited_leaf_count;
double m_closest_hit;
};
struct Fixture
{
typedef bsp::Tree<double, 3, Leaf> Tree;
typedef bsp::Intersector<double, Tree, LeafVisitor, Ray3d> Intersector;
Tree m_tree;
LeafVisitor m_leaf_visitor; // todo: Visitor or LeafVisitor?
Intersector m_intersector;
bsp::TraversalStatistics m_traversal_stats;
Fixture()
{
unique_ptr<Leaf> root_leaf(new Leaf());
root_leaf->insert(AABB3d(Vector3d(-1.0, -0.5, -0.2), Vector3d(0.0, 0.5, 0.2)));
root_leaf->insert(AABB3d(Vector3d(0.0, -0.5, -0.7), Vector3d(1.0, 0.5, 0.7)));
bsp::Builder<Tree, LeafFactory, LeafSplitter> builder;
LeafFactory leaf_factory;
LeafSplitter leaf_splitter;<|fim▁hole|> }
};
#ifdef FOUNDATION_BSP_ENABLE_TRAVERSAL_STATS
#define TRAVERSAL_STATISTICS , m_traversal_stats
#else
#define TRAVERSAL_STATISTICS
#endif
#pragma warning (push)
#pragma warning (disable : 4723) // potential division by 0
TEST_CASE_F(Intersect_GivenRayEmbeddedInSplitPlane_VisitsBothLeaves, Fixture)
{
Ray3d ray(Vector3d(0.0, 0.0, 1.0), Vector3d(0.0, 0.0, -1.0));
m_intersector.intersect(m_tree, ray, RayInfo3d(ray), m_leaf_visitor TRAVERSAL_STATISTICS);
EXPECT_EQ(2, m_leaf_visitor.get_visited_leaf_count());
EXPECT_FEQ(1.0 - 0.7, m_leaf_visitor.get_closest_hit());
}
TEST_CASE_F(Intersect_GivenRayPiercingLeftNode_VisitsLeftNode, Fixture)
{
Ray3d ray(Vector3d(-0.5, 0.0, 1.0), Vector3d(0.0, 0.0, -1.0));
m_intersector.intersect(m_tree, ray, RayInfo3d(ray), m_leaf_visitor TRAVERSAL_STATISTICS);
EXPECT_EQ(1, m_leaf_visitor.get_visited_leaf_count());
EXPECT_FEQ(1.0 - 0.2, m_leaf_visitor.get_closest_hit());
}
TEST_CASE_F(Intersect_GivenRayPiercingRightNode_VisitsRightNode, Fixture)
{
Ray3d ray(Vector3d(0.5, 0.0, 1.0), Vector3d(0.0, 0.0, -1.0));
m_intersector.intersect(m_tree, ray, RayInfo3d(ray), m_leaf_visitor TRAVERSAL_STATISTICS);
EXPECT_EQ(1, m_leaf_visitor.get_visited_leaf_count());
EXPECT_FEQ(1.0 - 0.7, m_leaf_visitor.get_closest_hit());
}
#pragma warning (pop)
#undef TRAVERSAL_STATISTICS
}<|fim▁end|> |
builder.build(m_tree, move(root_leaf), leaf_factory, leaf_splitter); |
<|file_name|>DT_Utils.py<|end_file_name|><|fim▁begin|># Copyright 2017 Priscilla Boyd. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""
The DT_Utils module provides helper functions for Decision Tree algorithms implementation, model creation and
analysis.
"""
import pickle
from matplotlib import pyplot as plt
from sklearn.metrics import mean_squared_error
from tools.Utils import create_folder_if_not_exists
# noinspection PyTypeChecker<|fim▁hole|>def score_dt(model_name, model, X, y, y_actual, output_folder):
"""
Score a decision tree model.
:param string model_name: title for the model used on the output filename
:param dataframe model: model reference
:param dataframe X: examples
:param dataframe y: targets
:param dataframe y_actual: target results
:param string output_folder: location of the output / results
"""
print("Scoring model...")
model_score = model.score(X, y)
mse = mean_squared_error(y, y_actual)
mse_score = model_name, "- Mean Squared Error:", mse
accuracy = model_name, "- Accuracy score (%):", "{:.2%}".format(model_score)
# write to file
path = output_folder + '/models'
create_folder_if_not_exists(path)
filename = path + '/score_' + model_name + '.txt'
    with open(filename, 'w') as scores:
        print(mse_score, file=scores)
        print(accuracy, file=scores)
print("Scores saved location:", filename)
def plot_dt(model_name, y_actual, y_test, output_folder):
"""
Plot decision tree, y (training) vs y (test/actual).
:param string model_name: title for the model used on the output filename
:param dataframe y_actual: target results
:param dataframe y_test: test targets
:param string output_folder: location of the output / results
"""
# initialise plot path
path = output_folder + '/models'
print("Plotting results...")
plt.scatter(y_actual, y_test, label='Duration')
plt.title('Decision Tree')
plt.plot([0, 1], [0, 1], '--k', transform=plt.gca().transAxes)
plt.xlabel('y (actual)')
plt.ylabel('y (test)')
plt.legend()
plot_path = path + '/plot_' + model_name + '.png'
plt.savefig(plot_path)
print("Plot saved location:", plot_path)
def save_dt_model(model_name, model, folder):
"""
Save model using Pickle binary format.
:param dataframe model: model reference
:param string model_name: title for the model used on the output filename
:param string folder: location of model output
"""
print("Saving model...")
model_file = folder + '/models/' + model_name + '.pkl'
    with open(model_file, 'wb') as path:
        pickle.dump(model, path)
print("Model saved location:", model_file)
def load_dt_model(pickle_model):
"""
Retrieve model using Pickle binary format.
:param string pickle_model: location of Pickle model
:return: Pickle model for re-use
:rtype: object
"""
return pickle.loads(pickle_model)<|fim▁end|> | |
<|file_name|>spoke.py<|end_file_name|><|fim▁begin|>#!/usr/bin/env python
"""spoke -- Git plugin for GitHub integration
Copyright (C) 2012 Alex Headley <[email protected]>
This program is free software; you can redistribute it and/or
modify it under the terms of the GNU General Public License
as published by the Free Software Foundation; either version 2
of the License, or (at your option) any later version.
This program is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
You should have received a copy of the GNU General Public License
along with this program; if not, write to the Free Software
Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
"""
import argparse
import os
from pprint import pprint
import functools
import inspect
import textwrap
import tempfile
import time
import subprocess
import git
import pygithub3
def guess_type(obj):
ok_types = [int, str, bool]
obj_type = type(obj)
if obj_type in ok_types:
return obj_type
else:
if obj_type == list or obj_type == tuple:
if len(obj):
obj_e_type = type(obj[0])
if obj_e_type in ok_types and \
all(type(e) == obj_e_type for e in obj[1:]):
return obj_e_type
return str
def guess_action(obj):
return {
bool: 'store_false' if obj else 'store_true',
}.get(guess_type(obj), 'store')
def guess_nargs(obj):
if guess_type(obj) == bool:
return 0
else:
try:
len(obj)
except TypeError:
return 1
else:
return '+'
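# Rough behaviour of the guess_* helpers above (illustrative):
#   guess_type([1, 2, 3])  -> int    (homogeneous list of an ok type)
#   guess_action(True)     -> 'store_false'
#   guess_nargs(False)     -> 0      (bool flags take no argument)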
def get_console_size():
with os.popen('stty size', 'r') as p:
return map(int, p.read().strip().split())
class ArgFunc(object):
@staticmethod
def define_args(**kwargs):
def wrapper(func):
for (arg, attrs) in kwargs.iteritems():
if 'default' in attrs and 'name' not in attrs:
attrs['name'] = '--' + arg.replace('_', '-')
if 'dest' not in attrs and 'name' in attrs:
attrs['dest'] = arg
func._argfunc_attrs = kwargs
return func
return wrapper
@staticmethod
def auto_define_args(func):
(args, pargs, kwargs, defaults) = inspect.getargspec(func)
if args[0] == 'self' or args[0] == 'cls':
args = args[1:]
defaults = defaults if defaults is not None else []
arg_no_defaults = args[:-len(defaults)]
arg_defaults = zip(args[-len(defaults):], defaults)
attrs = {}
for arg in arg_no_defaults:
arg_attrs = {
'metavar': arg.upper(),
}
attrs[arg] = arg_attrs
for (arg, default) in arg_defaults:
arg_attrs = {
'name': '--' + arg.replace('_', '-'),
'action': guess_action(default),
'default': default,
'dest': arg,
}
attrs[arg] = arg_attrs
if pargs is not None:
attrs[pargs] = {
'name': pargs,
'nargs': '*',
}
if kwargs is not None:
pass
func._argfunc_attrs = attrs
return func
def add_func(self, parser, func):
if hasattr(func, '_argfunc_attrs'):
for (arg, attrs) in func._argfunc_attrs.iteritems():
fixed_attrs = attrs.copy()
if 'name' in attrs:
command_name = fixed_attrs.pop('name')
fixed_attrs['dest'] = arg
else:
command_name = arg
parser.add_argument(command_name, **fixed_attrs)
def add_obj(self, parser, obj):
        for name in (a for a in dir(obj) \
                if callable(getattr(obj, a)) and hasattr(getattr(obj, a), '_argfunc_attrs')):
            self.add_func(parser, getattr(obj, name))
class GithubActor(object):
"""
"""
CONFIG_NS = 'hub'
GIT_REMOTE_NAME = 'github'
FALLBACK_EDITOR = 'nano'
_current_repo = None
_current_user = None
_github = None
def __init__(self, output=None):
self._current_repo = self._init_repo()
creds = self._get_github_credentials(self._current_repo)
self._current_user = creds[0]
self._github = self._init_github(creds[0], creds[1], self._current_repo)
if output is not None:
self._output = output
def _output(self, obj, *pargs, **kwargs):
if issubclass(obj.__class__, basestring):
print unicode(obj).format(*pargs, **kwargs)
else:
try:
pprint(obj, indent=2)
except Exception:
print repr(obj)
def _init_repo(self):
try:
repo = git.Repo(os.getcwd())
except git.exc.InvalidGitRepositoryError:
repo = None
return repo
def _init_github(self, username, password, repo=None):
repo_name = self._get_repo_name(repo)
return pygithub3.Github(login=username, password=password,
user=username, repo=repo_name)
@property
def _current_repo_name(self):
return self._get_repo_name(self._current_repo)
def _get_repo_name(self, repo):
if repo is not None:
return os.path.basename(repo.working_tree_dir)
else:
return None
def _get_github_credentials(self, repo=None):
if repo is None:
user_cfg_file = os.path.expanduser('~/.gitconfig')
if os.path.exists(user_cfg_file):
cfg = git.config.GitConfigParser(user_cfg_file)
else:
raise ValueError("""Can\'t find a gitconfig file for github login info.
Set the login info with:
git config --global --add {0}.username <username>
git config --global --add {0}.password <password>
""".format(self.CONFIG_NS))
else:
cfg = repo.config_reader()
return (cfg.get_value(self.CONFIG_NS, 'username'),
cfg.get_value(self.CONFIG_NS, 'password'))
def _get_padding(self, f, iterable):
return max(len(f(i)) for i in iterable)
def _require_in_repo(func):
@functools.wraps(func)
def wrapper(self, *pargs, **kwargs):
if self._current_repo is None:
self._output('You need to be in a repo for this command')
else:
return func(self, *pargs, **kwargs)
try:
wrapper._argfunc_attrs = func._argfunc_attrs
except AttributeError:
pass
return wrapper
@ArgFunc.auto_define_args
def develop(self, org=None, **kwargs):
"""Clone a repo so you can start working on it, forking to your account
if needed
"""
target_user = kwargs.get('user', self._current_user)
target_repo = kwargs.get('repo', self._current_repo_name)
if os.path.exists(os.path.join(os.getcwd(), target_repo)):
raise ValueError('Looks like the repo already exists at {0}'.format(
os.path.join(os.getcwd(), target_repo)))
if target_user != self._current_user:
#need to fork first
self._output('Looks like someone else\'s repo, forking...')
try:
fork = self._github.repos.forks.create(
user=target_user,
repo=target_repo,
org=org,
)
except AssertionError:
pass
self._output('Waiting for GitHub to stop forking around...')
time.sleep(5)
self._output('Getting repo info...')
gh_repo = self._github.repos.get(
user=self._current_user,
repo=target_repo,
)
repo_path = os.path.join(os.getcwd(), gh_repo.name)
self._output('Cloning repo {0} ...', gh_repo.full_name)
git.repo.base.Repo.clone_from(gh_repo.ssh_url, repo_path)<|fim▁hole|> self._output('Repo cloned to {0}, enjoy!', repo_path)
@ArgFunc.auto_define_args
def repos_show(self, **kwargs):
"""Show a repo's info from GitHub
"""
display_tpl = '\n'.join([
'{repo.full_name: <48} {repo.language: <16} {repo.forks_count: >3} ' \
'Fork(s) {repo.watchers_count: >4} Watcher(s)',
'{repo.description}',
'{repo.html_url: <64} {repo.homepage}',
])
gh_repo = self._github.repos.get(
user=kwargs.get('user', self._current_user),
repo=kwargs.get('repo', self._current_repo_name))
self._output(display_tpl, repo=gh_repo)
@ArgFunc.define_args(
repo_type={'choices': ('all', 'owner', 'public', 'private', 'member'), 'default': 'all'},
)
def repos_list(self, repo_type='all', **kwargs):
"""List your or another user's repos
"""
repos = self._github.repos.list(
user=kwargs.get('user', self._current_user),
type=repo_type).all()
padding = self._get_padding(lambda r: r.name, repos)
for repo in repos:
fork_icon = 'V' if repo.fork else '|'
self._output(' {fork_icon} {name: <{padding}} -- {description}',
fork_icon=fork_icon, padding=padding, **vars(repo))
@ArgFunc.auto_define_args
def repos_create(self, description='', homepage='', private=False,
has_issues=False, has_wiki=False, has_downloads=False, in_org=None,
**kwargs):
"""Create a new repo on GitHub
"""
data = locals().copy()
del data['self'], data['kwargs'], data['in_org']
data['name'] = kwargs.get('repo', self._current_repo_name)
        new_repo = self._github.repos.create(data, in_org)
        self._output('Repo created: {0}', new_repo.html_url)
@ArgFunc.auto_define_args
def repos_fork(self, org=None, **kwargs):
"""Fork a repo on GitHub to your account (or organization)
"""
try:
self._github.repos.forks.create(
user=kwargs.get('user', self._current_user),
repo=kwargs.get('repo', self._current_repo_name),
org=org)
except AssertionError:
pass
@ArgFunc.auto_define_args
def repos_clone(self, **kwargs):
"""Clone a repo from GitHub
"""
repo_name = kwargs.get('repo', None)
if repo_name is None:
raise ValueError('Use --repo to tell me the repo name')
try:
github_repo = self._github.repos.get(
user=kwargs.get('user', self._current_user),
repo=repo_name)
except Exception as e:
#TODO make this not dumb
raise e
repo_path = os.path.join(os.getcwd(), repo_name)
if github_repo.permissions['push']:
git.repo.base.Repo.clone_from(github_repo.ssh_url, repo_path)
else:
git.repo.base.Repo.clone_from(github_repo.git_url, repo_path)
self._output('Cloned {user}/{repo} to {path}',
user=kwargs.get('user', self._current_user),
repo=repo_name,
path=repo_path)
@_require_in_repo
@ArgFunc.auto_define_args
def repos_addremote(self, remote_name=GIT_REMOTE_NAME, **kwargs):
"""Add a remote for the corresponding repo on GitHub
"""
actual_repo = self._current_repo
if remote_name in (rm.name for rm in actual_repo.remotes):
self._output('Looks like the "{0}" remote already exists',
remote_name)
else:
github_repo = self._github.repos.get(
user=kwargs.get('user', self._current_user),
repo=kwargs.get('repo', self._current_repo_name))
if github_repo.permissions['push']:
#read-write, use ssh url
actual_repo.create_remote(remote_name, github_repo.ssh_url)
else:
#read only, use git url
actual_repo.create_remote(remote_name, github_repo.git_url)
self._output('"{0}" remote added', remote_name)
@ArgFunc.auto_define_args
def pr_show(self, pr_number, DUMMYOPT=None, **kwargs):
"""Display a pull request
"""
pr = self._github.pull_requests.get(pr_number,
user=kwargs.get('user', self._current_user),
repo=kwargs.get('repo', self._current_repo_name))
self._output(vars(pr))
@ArgFunc.define_args(
state={'choices': ('open', 'closed'), 'default': 'open'},
)
def pr_list(self, state='open', **kwargs):
"""List the open pull requests for a repo
Note that the --state option is currently non-functional
"""
pull_requests = self._github.pull_requests.list(
user=kwargs.get('user', kwargs.get('user', self._current_user)),
repo=kwargs.get('repo', self._current_repo_name)).all()
padding = self._get_padding(lambda pr: pr.user['login'], pull_requests)
for pr in pull_requests:
commit_count = len(self._github.pull_requests.list_commits(pr.number,
user=kwargs.get('user', kwargs.get('user', self._current_user)),
repo=kwargs.get('repo', self._current_repo_name)).all())
self._output('#{number:0>4} {commit_count:0>2}c @{user[login]: <{padding}} {title} -- <{html_url}>',
padding=padding, commit_count=commit_count, **vars(pr))
@ArgFunc.auto_define_args
def pr_merge(self, pr_number, commit_message='', **kwargs):
"""Do a simple merge of a pull request (Merge Button)
"""
        self._github.pull_requests.merge(pr_number, commit_message,
user=kwargs.get('user', self._current_user),
repo=kwargs.get('repo', self._current_repo_name))
self._output('Pull request #{0:0>4} merged!', pr_number)
@_require_in_repo
@ArgFunc.auto_define_args
def pr_addremote(self, pr_number, remote_name=None, **kwargs):
"""Add a remote for the source repo in a PR
"""
if remote_name is None:
remote_name = 'pr-{n:0>4}'.format(n=pr_number)
repo = self._current_repo
pr = self._github.pull_requests.get(pr_number,
user=kwargs.get('user', self._current_user),
repo=kwargs.get('repo', self._current_repo_name))
if remote_name in (rm.name for rm in repo.remotes):
self._output('Looks like the "{0}" remote already exists',
remote_name)
else:
repo.create_remote(remote_name, pr.head['repo']['git_url'])
self._output('"{0}" remote added', remote_name)
@ArgFunc.auto_define_args
def issues_show(self, issue_number, DUMMYOPT=None, **kwargs):
"""Display a specific issue
"""
issue = self._github.issues.get(issue_number,
user=kwargs.get('user', self._current_user),
repo=kwargs.get('repo', self._current_repo_name))
msg = [
'#{i.number:0>4} ({i.state}) -- {i.title}',
'@{i.user.login}:',
]
if issue.body:
msg.append(self._wrap_text_body(issue.body))
self._output('\n'.join(msg), i=issue)
comments = self._github.issues.comments.list(issue_number,
user=kwargs.get('user', self._current_user),
repo=kwargs.get('repo', self._current_repo_name)).all()
for comment in comments:
self._output('@{c.user.login}:\n{wrapped_body}',
c=comment, wrapped_body=self._wrap_text_body(comment.body))
def _wrap_text_body(self, text, padding=8):
"""Wrap :text: so that there are :padding: spaces on either side, based on
terminal width
"""
console_width = max(get_console_size()[1], padding * 3)
return '\n'.join(' ' * padding + line \
for line in textwrap.wrap(text.strip(), console_width - (padding * 2)))
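    # e.g. with the default padding of 8 and an 80-column terminal, the text
    # is wrapped to 64 characters and each line is indented by 8 spaces.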
@ArgFunc.auto_define_args
def issues_list(self, milestone='none', state='open', assignee='none', labels='',
sort='created', **kwargs):
"""List a repo's issues
"""
issues = self._github.issues.list_by_repo(
user=kwargs.get('user', self._current_user),
repo=kwargs.get('repo', self._current_repo_name),
state=state,
assignee=assignee,
milestone=milestone,
labels=labels,
sort=sort,
)
for page in issues:
for issue in page:
self._output('#{issue.number:0>4} ({issue.state}) @{issue.user.login: <16} -- {issue.title}',
issue=issue)
@ArgFunc.auto_define_args
def issues_create(self, title=None, body=None, assignee=None, milestone=None,
labels=None, **kwargs):
"""Open a new issue
"""
data = locals().copy()
del data['self'], data['kwargs']
if data['labels'] is not None:
data['labels'] = [l.strip() for l in data['labels'].split(',')]
if data['body'] is None:
(_, path) = tempfile.mkstemp()
with open(path, 'w') as handle:
handle.write('# Put the body of your issue here\n' \
'# Lines starting with \'#\' are ignored\n' \
'# If you didn\'t provide a title, the first line here will be used\n')
subprocess.call([self._get_editor(), path])
with open(path, 'r') as handle:
body = [line.rstrip() for line in handle.readlines() \
if not line.startswith('#') and line.strip()]
if not data['title']:
data['title'] = body[0].strip()
data['body'] = '\n'.join(body[1:])
else:
data['body'] = '\n'.join(body)
os.unlink(path)
issue = self._github.issues.create(data,
user=kwargs.get('user', self._current_user),
repo=kwargs.get('repo', self._current_repo_name))
self._output('Issue #{issue.number:0>4} created: {issue.html_url}',
issue=issue)
def _get_editor(self):
"""Get the editor from env variables
Looks at $EDITOR, then $VISUAL, then falls back to :FALLBACK_EDITOR:
"""
return os.environ.get('EDITOR',
os.environ.get('VISUAL',
self.FALLBACK_EDITOR))
@ArgFunc.auto_define_args
def issues_comment(self, issue_number, message=None, close=False, **kwargs):
"""Add a comment to an issue
"""
if message is None:
(_, path) = tempfile.mkstemp()
with open(path, 'w') as handle:
handle.write('# Write your comment here\n' \
'# Lines starting with \'#\' are ignored\n')
subprocess.call([self._get_editor(), path])
with open(path, 'r') as handle:
message = '\n'.join(line.rstrip() for line in handle.readlines() \
if not line.startswith('#') and line.strip())
os.unlink(path)
comment = self._github.issues.comments.create(issue_number, message,
user=kwargs.get('user', self._current_user),
repo=kwargs.get('repo', self._current_repo_name))
self._output('Comment {comment.id} added!', comment=comment)
if close:
self._github.issues.update(issue_number, {'state': 'closed'},
user=kwargs.get('user', self._current_user),
repo=kwargs.get('repo', self._current_repo_name))
self._output('Issue closed')
def build_parser(actor):
af = ArgFunc()
parser = argparse.ArgumentParser(description='git-hub - Do stuff with GitHub',
prog='git-hub')
parser.add_argument('--verbose', help='Display more output', action='store_true')
command_parsers = parser.add_subparsers(title='GitHub commands',
dest='command')
parent_parser = argparse.ArgumentParser(add_help=False)
parent_parser.add_argument('-u', '--user', help='Override target username')
parent_parser.add_argument('-r', '--repo', help='Override target repo name')
#oh god wat
command_verbs = dict((c, [v.split('_', 1)[1] for v in dir(actor) \
if v.startswith(c+'_') and callable(getattr(actor, v))]) \
for c in set(c.split('_')[0] for c in dir(actor) \
if not c.startswith('_') and callable(getattr(actor, c))))
for command in command_verbs:
for verb in command_verbs[command]:
command_verb = command + '_' + verb
cv_func = getattr(actor, command_verb)
attrs = {'parents': [parent_parser]}
try:
attrs['help'] = cv_func.__doc__.split('\n')[0].strip()
except AttributeError:
pass
verb_parser = command_parsers.add_parser(
command_verb.replace('_', '-'), **attrs)
af.add_func(verb_parser, cv_func)
develop_parser = command_parsers.add_parser('develop',
help=actor.develop.__doc__.split('\n')[0].strip(),
parents=[parent_parser])
af.add_func(develop_parser, actor.develop)
return parser
def main():
actor = GithubActor()
parser = build_parser(actor)
result = parser.parse_args()
command_verb = result.command.replace('-', '_')
del result.command
action = getattr(actor, command_verb)
return action(**vars(result))
if __name__ == '__main__':
main()<|fim▁end|> | |
<|file_name|>types.test.ts<|end_file_name|><|fim▁begin|>/**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
import { AdhocColumn } from '@superset-ui/core';
import {
isAdhocColumn,
isColumnMeta,
isSavedExpression,
ColumnMeta,
} from '../src';
const ADHOC_COLUMN: AdhocColumn = {
hasCustomLabel: true,
label: 'Adhoc column',
sqlExpression: 'case when 1 = 1 then 1 else 2 end',
};
const COLUMN_META: ColumnMeta = {
column_name: 'my_col',
};
const SAVED_EXPRESSION: ColumnMeta = {
column_name: 'Saved expression',
expression: 'case when 1 = 1 then 1 else 2 end',
};
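// The guards under test are assumed to discriminate structurally: an
// AdhocColumn carries `sqlExpression`, a ColumnMeta carries `column_name`,
// and a saved expression is a ColumnMeta whose `expression` is set.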
describe('isColumnMeta', () => {
it('returns false for AdhocColumn', () => {
expect(isColumnMeta(ADHOC_COLUMN)).toEqual(false);
});
it('returns true for ColumnMeta', () => {
expect(isColumnMeta(COLUMN_META)).toEqual(true);
});<|fim▁hole|>describe('isAdhocColumn', () => {
it('returns true for AdhocColumn', () => {
expect(isAdhocColumn(ADHOC_COLUMN)).toEqual(true);
});
it('returns false for ColumnMeta', () => {
expect(isAdhocColumn(COLUMN_META)).toEqual(false);
});
});
describe('isSavedExpression', () => {
it('returns false for AdhocColumn', () => {
expect(isSavedExpression(ADHOC_COLUMN)).toEqual(false);
});
it('returns false for ColumnMeta without expression', () => {
expect(isSavedExpression(COLUMN_META)).toEqual(false);
});
it('returns true for ColumnMeta with expression', () => {
expect(isSavedExpression(SAVED_EXPRESSION)).toEqual(true);
});
});<|fim▁end|> | });
|
<|file_name|>convert.py<|end_file_name|><|fim▁begin|># -*- coding: utf-8 -*-
"""
Created on Fri Dec 04 13:10:20 2015
@author: bayevskihk
"""
import sys
def main(argv):
number = len(argv)
data_addr = 0
last_addr = 0
if(number != 2):
return
else:
try:
data_addr = int(argv[0])
last_addr = int(argv[1])
except:
print ("Wrong arguments")
return
if(parse("build/obj/text.vh", "build/text.rom", data_addr) < 0):
print ("Wrong text file")
return
if(parse("build/obj/data.vh", "build/data.rom", last_addr - data_addr) < 0):
print ("Wrong text file")
return
print ("Convertion was successfull")
def parse(file_name, rom_name, addr_last):
hex_file = open(file_name, 'r')<|fim▁hole|># rom_file.truncate()
hex_parts = hex_file.readline()
line = ""
try:
hex_parts.index("@")
except:
return -1
attached = 0
words = 0
rom_file.write("@00000000\n");
while(1):
hex_parts = hex_file.readline()
# hex_parts = hex_parts.translate({None: "\n"})
hex_parts = hex_parts.split();
if(len(hex_parts) < 4):
break
for part in hex_parts:
if(len(part) == 0):
continue
line += part
attached += 1
if(attached == 4):
attached = 0
words += 1
rom_file.write(line + "\n")
line = ""
for i in range(addr_last - words):
rom_file.write("00000000\n")
rom_file.close()
return 0
if __name__ == '__main__':
main(sys.argv[1:])<|fim▁end|> | rom_file = open(rom_name, 'w') |
<|file_name|>no_0114_flatten_binary_tree_to_linked_list.rs<|end_file_name|><|fim▁begin|>// Definition for a binary tree node.
#[derive(Debug, PartialEq, Eq)]
pub struct TreeNode {
pub val: i32,
pub left: Option<Rc<RefCell<TreeNode>>>,
pub right: Option<Rc<RefCell<TreeNode>>>,
}
impl TreeNode {
#[inline]
pub fn new(val: i32) -> Self {
TreeNode {
val,
left: None,
right: None,
}
}
}
use std::cell::RefCell;
use std::rc::Rc;
struct Solution;
impl Solution {
pub fn flatten(root: &mut Option<Rc<RefCell<TreeNode>>>) {
let mut curr = root.as_ref().map(|n| n.clone());<|fim▁hole|> while let Some(curr_node) = curr {
let mut curr_node = curr_node.borrow_mut();
if let Some(next_node) = curr_node.left.take() {
                // Find the predecessor: the rightmost node of the left subtree
// let mut predecessor = next_node.borrow_mut();
// while predecessor.right.is_some() {
// // ERROR! cannot assign to `predecessor` because it is borrowed
// predecessor = predecessor.right.as_mut().unwrap().borrow_mut();
// }
let mut predecessor = next_node.clone();
let mut predecessor_right = predecessor.borrow().right.clone();
while let Some(node) = predecessor_right {
predecessor_right = node.borrow().right.clone();
predecessor = node;
}
                // Attach the right subtree as the predecessor's right subtree
predecessor.borrow_mut().right = curr_node.right.take();
                // Make the current node's left subtree its right subtree
curr_node.right = Some(next_node);
}
            // Continue with the right child
curr = curr_node.right.clone();
}
}
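    // Sketch of the approach above (Morris-style, O(1) extra space): for each
    // node with a left child, splice the node's right subtree onto the left
    // subtree's rightmost node, rotate the left subtree into the right slot,
    // then keep walking right. Every edge is visited at most twice, so the
    // whole pass is O(n).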
pub fn flatten_failed(root: &mut Option<Rc<RefCell<TreeNode>>>) {
if root.is_none() {
return;
}
let mut mut_root = root.as_mut().unwrap().borrow_mut();
let mut left = mut_root.left.take();
Self::flatten_failed(&mut left);
let mut right = mut_root.right.take();
Self::flatten_failed(&mut right);
// while let Some(node) = left.take() {
// left = node.borrow_mut().right.take();
// mut_root.right = Some(node);
// mut_root = mut_root.right.as_mut().unwrap().borrow_mut();
// }
}
}
#[cfg(test)]
mod tests {
use super::*;
#[test]
fn test_flatten() {
let mut root = build_one_node(TreeNode {
val: 1,
left: build_one_node(TreeNode {
val: 2,
left: build_one_node(TreeNode::new(3)),
right: build_one_node(TreeNode::new(4)),
}),
right: build_one_node(TreeNode {
val: 5,
left: None,
right: build_one_node(TreeNode::new(6)),
}),
});
Solution::flatten(&mut root);
let want = build_one_node(TreeNode {
val: 1,
left: None,
right: build_one_node(TreeNode {
val: 2,
left: None,
right: build_one_node(TreeNode {
val: 3,
left: None,
right: build_one_node(TreeNode {
val: 4,
left: None,
right: build_one_node(TreeNode {
val: 5,
left: None,
right: build_one_node(TreeNode::new(6)),
}),
}),
}),
}),
});
assert_eq!(root, want);
}
fn build_one_node(node: TreeNode) -> Option<Rc<RefCell<TreeNode>>> {
Some(Rc::new(RefCell::new(node)))
}
}<|fim▁end|> | |
<|file_name|>feature.py<|end_file_name|><|fim▁begin|>#!/usr/bin/env python
# -*- coding: utf-8 -*-
from collections import Counter
import numpy as np
STARTING_LABEL = '*' # Label of t=-1
STARTING_LABEL_INDEX = 0
def default_feature_func(_, X, t):
"""
Returns a list of feature strings.
(Default feature function)
:param X: An observation vector
:param t: time
:return: A list of feature strings
"""
length = len(X)
features = list()
features.append('U[0]:%s' % X[t][0])
features.append('POS_U[0]:%s' % X[t][1])
if t < length-1:
features.append('U[+1]:%s' % (X[t+1][0]))
features.append('B[0]:%s %s' % (X[t][0], X[t+1][0]))
features.append('POS_U[1]:%s' % X[t+1][1])
features.append('POS_B[0]:%s %s' % (X[t][1], X[t+1][1]))
if t < length-2:
features.append('U[+2]:%s' % (X[t+2][0]))
features.append('POS_U[+2]:%s' % (X[t+2][1]))
features.append('POS_B[+1]:%s %s' % (X[t+1][1], X[t+2][1]))
features.append('POS_T[0]:%s %s %s' % (X[t][1], X[t+1][1], X[t+2][1]))
if t > 0:
features.append('U[-1]:%s' % (X[t-1][0]))
features.append('B[-1]:%s %s' % (X[t-1][0], X[t][0]))
features.append('POS_U[-1]:%s' % (X[t-1][1]))
features.append('POS_B[-1]:%s %s' % (X[t-1][1], X[t][1]))
if t < length-1:
features.append('POS_T[-1]:%s %s %s' % (X[t-1][1], X[t][1], X[t+1][1]))
if t > 1:
features.append('U[-2]:%s' % (X[t-2][0]))
features.append('POS_U[-2]:%s' % (X[t-2][1]))
features.append('POS_B[-2]:%s %s' % (X[t-2][1], X[t-1][1]))
features.append('POS_T[-2]:%s %s %s' % (X[t-2][1], X[t-1][1], X[t][1]))
return features
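# For example (illustrative), X = [('He', 'PRP'), ('runs', 'VBZ')] and t = 0
# yield: 'U[0]:He', 'POS_U[0]:PRP', 'U[+1]:runs', 'B[0]:He runs',
# 'POS_U[1]:VBZ' and 'POS_B[0]:PRP VBZ'.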
class FeatureSet():
feature_dic = dict()
observation_set = set()
empirical_counts = Counter()
num_features = 0
label_dic = {STARTING_LABEL: STARTING_LABEL_INDEX}
label_array = [STARTING_LABEL]
feature_func = default_feature_func
def __init__(self, feature_func=None):
# Sets a custom feature function.
if feature_func is not None:
self.feature_func = feature_func
def scan(self, data):
"""
Constructs a feature set, a label set,
and a counter of empirical counts of each feature from the input data.
:param data: A list of (X, Y) pairs. (X: observation vector , Y: label vector)
"""
# Constructs a feature set, and counts empirical counts.
for X, Y in data:
prev_y = STARTING_LABEL_INDEX
for t in range(len(X)):
# Gets a label id
try:
y = self.label_dic[Y[t]]
except KeyError:
y = len(self.label_dic)
self.label_dic[Y[t]] = y
self.label_array.append(Y[t])
# Adds features
self._add(prev_y, y, X, t)
prev_y = y
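    # Illustrative usage (toy data): FeatureSet().scan([([('He', 'PRP'),
    # ('runs', 'VBZ')], ['O', 'O'])]) registers the 'O' label and counts the
    # unigram/bigram features above. Note that feature_dic/label_dic are
    # class attributes shared across instances, so reuse one FeatureSet per run.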
def load(self, feature_dic, num_features, label_array):
self.num_features = num_features
self.label_array = label_array
        self.label_dic = {label: i for i, label in enumerate(label_array)}
self.feature_dic = self.deserialize_feature_dic(feature_dic)
def __len__(self):
return self.num_features
def _add(self, prev_y, y, X, t):
"""
Generates features, constructs feature_dic.
:param prev_y: previous label
:param y: present label
:param X: observation vector
:param t: time
"""
for feature_string in self.feature_func(X, t):
if feature_string in self.feature_dic.keys():
if (prev_y, y) in self.feature_dic[feature_string].keys():
self.empirical_counts[self.feature_dic[feature_string][(prev_y, y)]] += 1
else:
feature_id = self.num_features
self.feature_dic[feature_string][(prev_y, y)] = feature_id
self.empirical_counts[feature_id] += 1
self.num_features += 1
if (-1, y) in self.feature_dic[feature_string].keys():
self.empirical_counts[self.feature_dic[feature_string][(-1, y)]] += 1
else:
feature_id = self.num_features
self.feature_dic[feature_string][(-1, y)] = feature_id
self.empirical_counts[feature_id] += 1
self.num_features += 1
else:
self.feature_dic[feature_string] = dict()
# Bigram feature
feature_id = self.num_features
self.feature_dic[feature_string][(prev_y, y)] = feature_id
self.empirical_counts[feature_id] += 1
self.num_features += 1
# Unigram feature
feature_id = self.num_features
self.feature_dic[feature_string][(-1, y)] = feature_id
self.empirical_counts[feature_id] += 1
self.num_features += 1
def get_feature_vector(self, prev_y, y, X, t):
"""
Returns a list of feature ids of given observation and transition.
:param prev_y: previous label
:param y: present label
:param X: observation vector
:param t: time
:return: A list of feature ids
"""
feature_ids = list()
for feature_string in self.feature_func(X, t):
try:
feature_ids.append(self.feature_dic[feature_string][(prev_y, y)])
except KeyError:
pass
return feature_ids
def get_labels(self):
"""
Returns a label dictionary and array.
"""
return self.label_dic, self.label_array
def calc_inner_products(self, params, X, t):
"""
Calculates inner products of the given parameters and feature vectors of the given observations at time t.
:param params: parameter vector
:param X: observation vector
:param t: time
        :return: A list of ((prev_y, y), score) pairs
"""
inner_products = Counter()
for feature_string in self.feature_func(X, t):
try:
for (prev_y, y), feature_id in self.feature_dic[feature_string].items():
inner_products[(prev_y, y)] += params[feature_id]
except KeyError:
pass
return [((prev_y, y), score) for (prev_y, y), score in inner_products.items()]
def get_empirical_counts(self):
empirical_counts = np.ndarray((self.num_features,))
for feature_id, counts in self.empirical_counts.items():
empirical_counts[feature_id] = counts<|fim▁hole|> def get_feature_list(self, X, t):
feature_list_dic = dict()
for feature_string in self.feature_func(X, t):
for (prev_y, y), feature_id in self.feature_dic[feature_string].items():
                if (prev_y, y) in feature_list_dic:
feature_list_dic[(prev_y, y)].add(feature_id)
else:
feature_list_dic[(prev_y, y)] = {feature_id}
return [((prev_y, y), feature_ids) for (prev_y, y), feature_ids in feature_list_dic.items()]
def serialize_feature_dic(self):
serialized = dict()
        for feature_string in self.feature_dic:
serialized[feature_string] = dict()
for (prev_y, y), feature_id in self.feature_dic[feature_string].items():
serialized[feature_string]['%d_%d' % (prev_y, y)] = feature_id
return serialized
def deserialize_feature_dic(self, serialized):
feature_dic = dict()
        for feature_string in serialized:
feature_dic[feature_string] = dict()
for transition_string, feature_id in serialized[feature_string].items():
prev_y, y = transition_string.split('_')
feature_dic[feature_string][(int(prev_y), int(y))] = feature_id
return feature_dic<|fim▁end|> | return empirical_counts
|
<|file_name|>EaselPlugin.js<|end_file_name|><|fim▁begin|>(function (global, factory) {
typeof exports === 'object' && typeof module !== 'undefined' ? factory(exports) :
typeof define === 'function' && define.amd ? define(['exports'], factory) :
(global = global || self, factory(global.window = global.window || {}));
}(this, (function (exports) { 'use strict';
/*!
* EaselPlugin 3.4.2<|fim▁hole|> * Subject to the terms at https://greensock.com/standard-license or for
* Club GreenSock members, the agreement issued with that membership.
* @author: Jack Doyle, [email protected]
*/
var gsap,
_coreInitted,
_win,
_createJS,
_ColorFilter,
_ColorMatrixFilter,
_colorProps = "redMultiplier,greenMultiplier,blueMultiplier,alphaMultiplier,redOffset,greenOffset,blueOffset,alphaOffset".split(","),
_windowExists = function _windowExists() {
return typeof window !== "undefined";
},
_getGSAP = function _getGSAP() {
return gsap || _windowExists() && (gsap = window.gsap) && gsap.registerPlugin && gsap;
},
_getCreateJS = function _getCreateJS() {
return _createJS || _win && _win.createjs || _win || {};
},
_warn = function _warn(message) {
return console.warn(message);
},
_cache = function _cache(target) {
var b = target.getBounds && target.getBounds();
if (!b) {
b = target.nominalBounds || {
x: 0,
y: 0,
width: 100,
height: 100
};
target.setBounds && target.setBounds(b.x, b.y, b.width, b.height);
}
target.cache && target.cache(b.x, b.y, b.width, b.height);
_warn("EaselPlugin: for filters to display in EaselJS, you must call the object's cache() method first. GSAP attempted to use the target's getBounds() for the cache but that may not be completely accurate. " + target);
},
_parseColorFilter = function _parseColorFilter(target, v, plugin) {
if (!_ColorFilter) {
_ColorFilter = _getCreateJS().ColorFilter;
if (!_ColorFilter) {
_warn("EaselPlugin error: The EaselJS ColorFilter JavaScript file wasn't loaded.");
}
}
var filters = target.filters || [],
i = filters.length,
c,
s,
e,
a,
p,
pt;
while (i--) {
if (filters[i] instanceof _ColorFilter) {
s = filters[i];
break;
}
}
if (!s) {
s = new _ColorFilter();
filters.push(s);
target.filters = filters;
}
e = s.clone();
if (v.tint != null) {
c = gsap.utils.splitColor(v.tint);
a = v.tintAmount != null ? +v.tintAmount : 1;
e.redOffset = +c[0] * a;
e.greenOffset = +c[1] * a;
e.blueOffset = +c[2] * a;
e.redMultiplier = e.greenMultiplier = e.blueMultiplier = 1 - a;
} else {
for (p in v) {
if (p !== "exposure") if (p !== "brightness") {
e[p] = +v[p];
}
}
}
if (v.exposure != null) {
e.redOffset = e.greenOffset = e.blueOffset = 255 * (+v.exposure - 1);
e.redMultiplier = e.greenMultiplier = e.blueMultiplier = 1;
} else if (v.brightness != null) {
a = +v.brightness - 1;
e.redOffset = e.greenOffset = e.blueOffset = a > 0 ? a * 255 : 0;
e.redMultiplier = e.greenMultiplier = e.blueMultiplier = 1 - Math.abs(a);
}
i = 8;
while (i--) {
p = _colorProps[i];
if (s[p] !== e[p]) {
pt = plugin.add(s, p, s[p], e[p]);
if (pt) {
pt.op = "easel_colorFilter";
}
}
}
plugin._props.push("easel_colorFilter");
if (!target.cacheID) {
_cache(target);
}
},
_idMatrix = [1, 0, 0, 0, 0, 0, 1, 0, 0, 0, 0, 0, 1, 0, 0, 0, 0, 0, 1, 0],
_lumR = 0.212671,
_lumG = 0.715160,
_lumB = 0.072169,
_applyMatrix = function _applyMatrix(m, m2) {
if (!(m instanceof Array) || !(m2 instanceof Array)) {
return m2;
}
var temp = [],
i = 0,
z = 0,
y,
x;
for (y = 0; y < 4; y++) {
for (x = 0; x < 5; x++) {
z = x === 4 ? m[i + 4] : 0;
temp[i + x] = m[i] * m2[x] + m[i + 1] * m2[x + 5] + m[i + 2] * m2[x + 10] + m[i + 3] * m2[x + 15] + z;
}
i += 5;
}
return temp;
},
_setSaturation = function _setSaturation(m, n) {
if (isNaN(n)) {
return m;
}
var inv = 1 - n,
r = inv * _lumR,
g = inv * _lumG,
b = inv * _lumB;
return _applyMatrix([r + n, g, b, 0, 0, r, g + n, b, 0, 0, r, g, b + n, 0, 0, 0, 0, 0, 1, 0], m);
},
_colorize = function _colorize(m, color, amount) {
if (isNaN(amount)) {
amount = 1;
}
var c = gsap.utils.splitColor(color),
r = c[0] / 255,
g = c[1] / 255,
b = c[2] / 255,
inv = 1 - amount;
return _applyMatrix([inv + amount * r * _lumR, amount * r * _lumG, amount * r * _lumB, 0, 0, amount * g * _lumR, inv + amount * g * _lumG, amount * g * _lumB, 0, 0, amount * b * _lumR, amount * b * _lumG, inv + amount * b * _lumB, 0, 0, 0, 0, 0, 1, 0], m);
},
_setHue = function _setHue(m, n) {
if (isNaN(n)) {
return m;
}
n *= Math.PI / 180;
var c = Math.cos(n),
s = Math.sin(n);
return _applyMatrix([_lumR + c * (1 - _lumR) + s * -_lumR, _lumG + c * -_lumG + s * -_lumG, _lumB + c * -_lumB + s * (1 - _lumB), 0, 0, _lumR + c * -_lumR + s * 0.143, _lumG + c * (1 - _lumG) + s * 0.14, _lumB + c * -_lumB + s * -0.283, 0, 0, _lumR + c * -_lumR + s * -(1 - _lumR), _lumG + c * -_lumG + s * _lumG, _lumB + c * (1 - _lumB) + s * _lumB, 0, 0, 0, 0, 0, 1, 0, 0, 0, 0, 0, 1], m);
},
_setContrast = function _setContrast(m, n) {
if (isNaN(n)) {
return m;
}
n += 0.01;
return _applyMatrix([n, 0, 0, 0, 128 * (1 - n), 0, n, 0, 0, 128 * (1 - n), 0, 0, n, 0, 128 * (1 - n), 0, 0, 0, 1, 0], m);
},
_parseColorMatrixFilter = function _parseColorMatrixFilter(target, v, plugin) {
if (!_ColorMatrixFilter) {
_ColorMatrixFilter = _getCreateJS().ColorMatrixFilter;
if (!_ColorMatrixFilter) {
_warn("EaselPlugin: The EaselJS ColorMatrixFilter JavaScript file wasn't loaded.");
}
}
var filters = target.filters || [],
i = filters.length,
matrix,
startMatrix,
s,
pg;
while (--i > -1) {
if (filters[i] instanceof _ColorMatrixFilter) {
s = filters[i];
break;
}
}
if (!s) {
s = new _ColorMatrixFilter(_idMatrix.slice());
filters.push(s);
target.filters = filters;
}
startMatrix = s.matrix;
matrix = _idMatrix.slice();
if (v.colorize != null) {
matrix = _colorize(matrix, v.colorize, Number(v.colorizeAmount));
}
if (v.contrast != null) {
matrix = _setContrast(matrix, Number(v.contrast));
}
if (v.hue != null) {
matrix = _setHue(matrix, Number(v.hue));
}
if (v.saturation != null) {
matrix = _setSaturation(matrix, Number(v.saturation));
}
i = matrix.length;
while (--i > -1) {
if (matrix[i] !== startMatrix[i]) {
pg = plugin.add(startMatrix, i, startMatrix[i], matrix[i]);
if (pg) {
pg.op = "easel_colorMatrixFilter";
}
}
}
plugin._props.push("easel_colorMatrixFilter");
if (!target.cacheID) {
      _cache(target);
}
plugin._matrix = startMatrix;
},
_initCore = function _initCore(core) {
gsap = core || _getGSAP();
if (_windowExists()) {
_win = window;
}
if (gsap) {
_coreInitted = 1;
}
};
var EaselPlugin = {
version: "3.4.2",
name: "easel",
init: function init(target, value, tween, index, targets) {
if (!_coreInitted) {
_initCore();
if (!gsap) {
_warn("Please gsap.registerPlugin(EaselPlugin)");
}
}
this.target = target;
var p, pt, tint, colorMatrix, end, labels, i;
for (p in value) {
end = value[p];
if (p === "colorFilter" || p === "tint" || p === "tintAmount" || p === "exposure" || p === "brightness") {
if (!tint) {
_parseColorFilter(target, value.colorFilter || value, this);
tint = true;
}
} else if (p === "saturation" || p === "contrast" || p === "hue" || p === "colorize" || p === "colorizeAmount") {
if (!colorMatrix) {
_parseColorMatrixFilter(target, value.colorMatrixFilter || value, this);
colorMatrix = true;
}
} else if (p === "frame") {
if (typeof end === "string" && end.charAt(1) !== "=" && (labels = target.labels)) {
for (i = 0; i < labels.length; i++) {
if (labels[i].label === end) {
end = labels[i].position;
}
}
}
pt = this.add(target, "gotoAndStop", target.currentFrame, end, index, targets, Math.round);
if (pt) {
pt.op = p;
}
} else if (target[p] != null) {
this.add(target, p, "get", end);
}
}
},
render: function render(ratio, data) {
var pt = data._pt;
while (pt) {
pt.r(ratio, pt.d);
pt = pt._next;
}
if (data.target.cacheID) {
data.target.updateCache();
}
},
register: _initCore
};
EaselPlugin.registerCreateJS = function (createjs) {
_createJS = createjs;
};
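  // Usage sketch (hedged; `circle` is assumed to be an EaselJS DisplayObject
  // that already exists in the host page, with EaselJS loaded):
  //
  //   gsap.registerPlugin(EaselPlugin);
  //   gsap.to(circle, { duration: 2, easel: { tint: "#00FF00" } });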
_getGSAP() && gsap.registerPlugin(EaselPlugin);
exports.EaselPlugin = EaselPlugin;
exports.default = EaselPlugin;
Object.defineProperty(exports, '__esModule', { value: true });
})));<|fim▁end|> | * https://greensock.com
*
* @license Copyright 2008-2020, GreenSock. All rights reserved. |
<|file_name|>WordCombinations.py<|end_file_name|><|fim▁begin|># not optimized yet, should be optimized by dp<|fim▁hole|>
def combine(word, k):
if k < 1 or k > len(word):
return []
if k == 1:
return [word[i] for i in range(len(word))]
res = combine(word[:-1], k)
tmp = combine(word[:-1], k - 1)
for t in tmp:
res.append(t + word[-1])
return res
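# Hedged sketch of the "dp" optimization mentioned above: memoize on (word, k)
# (both hashable); tuples keep cached values immutable. `combine_memo` is a
# hypothetical helper added for illustration only.
from functools import lru_cache
@lru_cache(maxsize=None)
def combine_memo(word, k):
    if k < 1 or k > len(word):
        return ()
    if k == 1:
        return tuple(word)
    without_last = combine_memo(word[:-1], k)
    with_last = tuple(t + word[-1] for t in combine_memo(word[:-1], k - 1))
    return without_last + with_last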
res = combine('love', 2)
print(res)
for r in res:
    print(r)<|fim▁end|>
<|file_name|>osx_say.py<|end_file_name|><|fim▁begin|>import os
import re
import subprocess
from utils import whereis_exe
class osx_voice():
def __init__(self, voice_line):
mess = voice_line.split(' ')
cleaned = [ part for part in mess if len(part)>0 ]
self.name = cleaned[0]
self.locality = cleaned[1]
self.desc = cleaned[2].replace('# ', '')
def __str__(self):
return self.name + ' ' + self.locality + ' ' + self.desc
def fetch_voices():
osx_voices = []
if whereis_exe("say"):
voices_raw = os.popen("say -v ?").read()<|fim▁hole|> except IndexError:
pass
return osx_voices
def speak(text, voice, rate):
if whereis_exe("say"):
subprocess.call(["say", text, "-v", voice, "-r", rate])<|fim▁end|> | voice_lines = voices_raw.split('\n')
for line in voice_lines:
try:
osx_voices.append(osx_voice(line)) |
<|file_name|>exit_codes.py<|end_file_name|><|fim▁begin|>SUCCESS = 0
FAILURE = 1 # NOTE: click.abort() uses this
<|fim▁hole|><|fim▁end|> | # for when tests are already running
ALREADY_RUNNING = 2 |
<|file_name|>preprocess.py<|end_file_name|><|fim▁begin|>from keras.applications import imagenet_utils
from keras.applications import mobilenet
def dummyPreprocessInput(image):
image -= 127.5<|fim▁hole|>
def getPreprocessFunction(preprocessType):
if preprocessType == "dummy":
return dummyPreprocessInput
elif preprocessType == "mobilenet":
return mobilenet.preprocess_input
elif preprocessType == "imagenet":
return imagenet_utils.preprocess_input
else:
raise Exception(preprocessType + " not supported")<|fim▁end|> | return image |
<|file_name|>factories.py<|end_file_name|><|fim▁begin|>from datetime import datetime
import factory
from zds.forum.factories import PostFactory, TopicFactory
from zds.gallery.factories import GalleryFactory, UserGalleryFactory
from zds.utils.factories import LicenceFactory, SubCategoryFactory
from zds.utils.models import Licence
from zds.tutorialv2.models.database import PublishableContent, Validation, ContentReaction
from zds.tutorialv2.models.versioned import Container, Extract
from zds.tutorialv2.publication_utils import publish_content
from zds.tutorialv2.utils import init_new_repo
text_content = "Ceci est un texte bidon, **avec markown**"
tricky_text_content = (
"Ceci est un texte contenant plein d'images, pour la publication. Le modifier affectera le test !\n\n"
"# Les images\n\n"
"Image: \n\n"
"Image: \n\n"
"Image: \n\n"
"Image: \n\n"
"Image: \n\n"
"Image: \n\n"
"Image: \n\n"
"Image: \n\n"
"Image: \n\n"
"Image: \n\n"
"# Et donc ...\n\n"
"Voilà :)"
)
class PublishableContentFactory(factory.django.DjangoModelFactory):
"""
Factory that creates a PublishableContent.
"""<|fim▁hole|>
title = factory.Sequence("Mon contenu No{}".format)
description = factory.Sequence("Description du contenu No{}".format)
type = "TUTORIAL"
creation_date = datetime.now()
pubdate = datetime.now()
@classmethod
def _generate(cls, create, attrs):
# These parameters are only used inside _generate() and won't be saved in the database,
# which is why we use attrs.pop() (they are removed from attrs).
light = attrs.pop("light", True)
author_list = attrs.pop("author_list", None)
add_license = attrs.pop("add_license", True)
add_category = attrs.pop("add_category", True)
# This parameter will be saved in the database,
# which is why we use attrs.get() (it stays in attrs).
licence = attrs.get("licence", None)
auths = author_list or []
if add_license:
given_licence = licence or Licence.objects.first()
if isinstance(given_licence, str) and given_licence:
given_licence = Licence.objects.filter(title=given_licence).first() or Licence.objects.first()
licence = given_licence or LicenceFactory()
text = text_content
if not light:
text = tricky_text_content
publishable_content = super()._generate(create, attrs)
publishable_content.gallery = GalleryFactory()
publishable_content.licence = licence
for auth in auths:
publishable_content.authors.add(auth)
if add_category:
publishable_content.subcategory.add(SubCategoryFactory())
publishable_content.save()
for author in publishable_content.authors.all():
UserGalleryFactory(user=author, gallery=publishable_content.gallery, mode="W")
init_new_repo(publishable_content, text, text)
return publishable_content
class ContainerFactory(factory.Factory):
"""
Factory that creates a Container.
"""
class Meta:
model = Container
title = factory.Sequence(lambda n: "Mon container No{}".format(n + 1))
@classmethod
def _generate(cls, create, attrs):
# These parameters are only used inside _generate() and won't be saved in the database,
# which is why we use attrs.pop() (they are removed from attrs).
db_object = attrs.pop("db_object", None)
light = attrs.pop("light", True)
# This parameter will be saved in the database,
# which is why we use attrs.get() (it stays in attrs).
parent = attrs.get("parent", None)
# Needed because we use container.title later
container = super()._generate(create, attrs)
text = text_content
if not light:
text = tricky_text_content
sha = parent.repo_add_container(container.title, text, text)
container = parent.children[-1]
if db_object:
db_object.sha_draft = sha
db_object.save()
return container
class ExtractFactory(factory.Factory):
"""
Factory that creates a Extract.
"""
class Meta:
model = Extract
title = factory.Sequence(lambda n: "Mon extrait No{}".format(n + 1))
@classmethod
def _generate(cls, create, attrs):
# These parameters are only used inside _generate() and won't be saved in the database,
# which is why we use attrs.pop() (they are removed from attrs).
light = attrs.pop("light", True)
db_object = attrs.pop("db_object", None)
# This parameter will be saved in the database,
# which is why we use attrs.get() (it stays in attrs).
container = attrs.get("container", None)
# Needed because we use extract.title later
extract = super()._generate(create, attrs)
parent = container
text = text_content
if not light:
text = tricky_text_content
sha = parent.repo_add_extract(extract.title, text)
extract = parent.children[-1]
if db_object:
db_object.sha_draft = sha
db_object.save()
return extract
class ContentReactionFactory(factory.django.DjangoModelFactory):
"""
Factory that creates a ContentReaction.
"""
class Meta:
model = ContentReaction
ip_address = "192.168.3.1"
text = "Bonjour, je me présente, je m'appelle l'homme au texte bidonné"
@classmethod
def _generate(cls, create, attrs):
note = super()._generate(create, attrs)
note.pubdate = datetime.now()
note.save()
note.related_content.last_note = note
note.related_content.save()
return note
class BetaContentFactory(PublishableContentFactory):
"""
Factory that creates a PublishableContent with a beta version and a beta topic.
"""
@classmethod
def _generate(cls, create, attrs):
# This parameter is only used inside _generate() and won't be saved in the database,
# which is why we use attrs.pop() (it is removed from attrs).
beta_forum = attrs.pop("forum", None)
# Creates the PublishableContent (see PublishableContentFactory._generate() for more info)
publishable_content = super()._generate(create, attrs)
if publishable_content.authors.count() > 0 and beta_forum is not None:
beta_topic = TopicFactory(
title="[beta]" + publishable_content.title, author=publishable_content.authors.first(), forum=beta_forum
)
publishable_content.sha_beta = publishable_content.sha_draft
publishable_content.beta_topic = beta_topic
publishable_content.save()
PostFactory(topic=beta_topic, position=1, author=publishable_content.authors.first())
beta_topic.save()
return publishable_content
class PublishedContentFactory(PublishableContentFactory):
"""
Factory that creates a PublishableContent and the publish it.
"""
@classmethod
def _generate(cls, create, attrs):
# This parameter is only used inside _generate() and won't be saved in the database,
# which is why we use attrs.pop() (it is removed from attrs).
is_major_update = attrs.pop("is_major_update", True)
# Creates the PublishableContent (see PublishableContentFactory._generate() for more info)
content = super()._generate(create, attrs)
published = publish_content(content, content.load_version(), is_major_update)
content.sha_public = content.sha_draft
content.public_version = published
content.save()
return content
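# Hedged usage sketch (test-side; names are illustrative): a typical call is
#   content = PublishedContentFactory(type="ARTICLE", author_list=[some_user])
# after which `content.sha_public` and `content.public_version` are populated.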
class ValidationFactory(factory.django.DjangoModelFactory):
"""
Factory that creates a Validation.
"""
class Meta:
model = Validation<|fim▁end|> |
class Meta:
model = PublishableContent |
<|file_name|>ServiceWorkerManager.ts<|end_file_name|><|fim▁begin|>import KeyValueDatabase from "shared/idb/KeyValueDatabase";
import { PreferenceArray } from "shared/preferences";
import { EventEmitterInterface } from "shared/types/helpers";
import { delay } from "shared/util";
import GlobalEvents from "ui/platform/GlobalEvents";
import EventEmitter from "vendor/events";
import Page from "./dom/Page";
import Env from "./Env";
const UPDATE_INTERVAL = 15 * 60 * 1000;
export default interface ServiceWorkerManager
extends EventEmitterInterface<{
updateAvailable: (handler: (respondedWith: Promise<boolean>) => void) => void;
}> {}
export default class ServiceWorkerManager extends EventEmitter {
private _page: Page;
private _globalEvents: GlobalEvents;
private _env: Env;
private _db: KeyValueDatabase;
private _registration: null | Promise<ServiceWorkerRegistration | null>;
private _started: boolean;
private _updateAvailableNotified: boolean;
private _lastUpdateChecked: number;
private _currentUpdateCheck: null | Promise<void>;
private _preferencesSaved: boolean;
private _updateCheckInterval: number;
constructor(deps: { page: Page; globalEvents: GlobalEvents; env: Env; db: KeyValueDatabase }) {
super();<|fim▁hole|> this._page = deps.page;
this._globalEvents = deps.globalEvents;
this._env = deps.env;
this._db = deps.db;
this._registration = null;
this._started = false;
this._updateAvailableNotified = false;
this._lastUpdateChecked = Date.now();
this._currentUpdateCheck = null;
this._preferencesSaved = false;
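        // The 10 s interval below only polls; an actual registration.update()
        // runs at most once per UPDATE_INTERVAL (15 min) via _updateChecker.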
this._updateCheckInterval = this._page.setInterval(this._updateChecker, 10000);
this._globalEvents.on(`foreground`, this._foregrounded);
this._globalEvents.on(`background`, this._backgrounded);
this._globalEvents.on("shutdown", this._appClosed);
}
get controller() {
const sw = this._page.navigator().serviceWorker;
return sw && sw.controller;
}
_canSavePreferences() {
return !!this.controller && !this._preferencesSaved;
}
_savePreferences(preferences: PreferenceArray) {
const { controller } = this;
if (this._canSavePreferences()) {
this._preferencesSaved = true;
try {
controller!.postMessage({
action: `savePreferences`,
preferences,
});
} catch (e) {
// NOOP
}
}
}
_appClosed = (preferences: PreferenceArray) => {
this._savePreferences(preferences);
};
_updateChecker = () => {
if (
this._registration &&
Date.now() - this._lastUpdateChecked > UPDATE_INTERVAL &&
!this._updateAvailableNotified &&
!this._currentUpdateCheck
) {
this._checkForUpdates();
}
};
_backgrounded = () => {
this._page.clearInterval(this._updateCheckInterval);
this._updateCheckInterval = -1;
};
_foregrounded = () => {
this._updateCheckInterval = this._page.setInterval(this._updateChecker, 10000);
this._updateChecker();
};
_checkForUpdates() {
this._lastUpdateChecked = Date.now();
this._currentUpdateCheck = (async () => {
try {
const reg = await this._registration;
await reg!.update();
} catch (e) {
// Noop
} finally {
this._currentUpdateCheck = null;
}
})();
}
checkForUpdates() {
if (this._registration && !this._updateAvailableNotified && this._currentUpdateCheck) {
this._checkForUpdates();
}
}
_updateAvailable = async (worker: ServiceWorker) => {
this._updateAvailableNotified = true;
let nextAskTimeout = 15 * 1000;
// eslint-disable-next-line no-constant-condition
while (true) {
let shouldRefreshPromise: Promise<boolean> | undefined;
try {
this.emit("updateAvailable", respondedWith => {
shouldRefreshPromise = respondedWith;
});
const shouldRefresh = await shouldRefreshPromise;
if (shouldRefresh === true) {
worker.postMessage({ action: `skipWaiting` });
return;
}
} catch (e) {
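                // NOTE (assumption): uiLog is an app-global logging helper
                // declared elsewhere; it is not imported in this file.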
uiLog(e.message);
}
await delay(nextAskTimeout);
nextAskTimeout += 10000;
nextAskTimeout = Math.min(nextAskTimeout, 60 * 1000);
}
};
_updateFound = (worker: ServiceWorker) => {
worker.addEventListener(`statechange`, () => {
if (worker.state === `installed`) {
void this._updateAvailable(worker);
}
});
};
start() {
if (this._started || !this._page.navigator().serviceWorker) return;
this._started = true;
this._registration = (async () => {
try {
const reg = await this._page.navigator().serviceWorker.register(process.env.SERVICE_WORKER_PATH!);
if (!this.controller) return reg;
if (reg.waiting) {
void this._updateAvailable(reg.waiting);
} else if (reg.installing) {
this._updateFound(reg.installing);
} else {
reg.addEventListener(`updatefound`, () => {
void this._updateFound(reg!.installing!);
});
}
return reg;
} catch (e) {
if (!this._env.isDevelopment()) {
throw e;
} else {
self.console.log(e.message);
}
return null;
}
})();
let reloading = false;
this._page.navigator().serviceWorker.addEventListener(`controllerchange`, () => {
if (reloading) return;
reloading = true;
this._page.location().reload();
});
}
loadPreferences() {
if (!this._started) {
this.start();
}
return this._db.getAll();
}
}<|fim▁end|> | |
<|file_name|>jsdoc.js<|end_file_name|><|fim▁begin|>'use strict';
module.exports = {
plugins: ['jsdoc'],
rules: {
'jsdoc/check-access': 'error',
'jsdoc/check-alignment': 'error',
'jsdoc/check-examples': 'error',
'jsdoc/check-indentation': 'error',
'jsdoc/check-param-names': 'error',
'jsdoc/check-property-names': 'error',
'jsdoc/check-syntax': 'error',
'jsdoc/check-tag-names': 'error',
'jsdoc/check-types': 'error',
'jsdoc/check-values': 'error',
'jsdoc/empty-tags': 'error',
'jsdoc/implements-on-classes': 'error',
'jsdoc/match-description': 'error',
'jsdoc/newline-after-description': 'error',
'jsdoc/no-bad-blocks': 'error',
'jsdoc/no-defaults': 'error',
'jsdoc/no-types': 'off',
'jsdoc/no-undefined-types': 'error',
'jsdoc/require-description': 'off',
'jsdoc/require-description-complete-sentence': 'error',
'jsdoc/require-example': 'off',
'jsdoc/require-file-overview': 'off',
'jsdoc/require-hyphen-before-param-description': 'error',
'jsdoc/require-jsdoc': 'off',
'jsdoc/require-param': 'error',
'jsdoc/require-param-description': 'error',
'jsdoc/require-param-name': 'error',
'jsdoc/require-param-type': 'error',
'jsdoc/require-property': 'error',
'jsdoc/require-property-description': 'error',
'jsdoc/require-property-name': 'error',
'jsdoc/require-property-type': 'error',<|fim▁hole|> 'jsdoc/require-returns': 'error',
'jsdoc/require-returns-check': 'error',
'jsdoc/require-returns-description': 'error',
'jsdoc/require-returns-type': 'error',
'jsdoc/valid-types': 'error'
}
};<|fim▁end|> | |
<|file_name|>generator.py<|end_file_name|><|fim▁begin|>#!/usr/bin/env python
# -*- coding: utf8 -*-
# *****************************************************************
# ** PTS -- Python Toolkit for working with SKIRT **
# ** © Astronomical Observatory, Ghent University **
# *****************************************************************
## \package pts.modeling.fitting.modelgenerators.generator Contains the abstract ModelGenerator class.
# -----------------------------------------------------------------
# Ensure Python 3 compatibility
from __future__ import absolute_import, division, print_function
# Import standard modules
from abc import abstractmethod, ABCMeta
from collections import OrderedDict
# Import the relevant PTS classes and modules
from ....core.tools.logging import log
from ..component import FittingComponent
# -----------------------------------------------------------------
class ModelGenerator(FittingComponent):
"""
This class...
"""
__metaclass__ = ABCMeta
# -----------------------------------------------------------------
def __init__(self):
"""
The constructor ...
:return:
"""
# Call the constructor of the base class
super(ModelGenerator, self).__init__()
# The dictionary with the parameter ranges
self.ranges = OrderedDict()
# The dictionary with the list of the model parameters
self.parameters = OrderedDict()
# -----------------------------------------------------------------
@property
def parameter_names(self):
"""
This function ...
:return:
"""
return self.ranges.keys()
# -----------------------------------------------------------------
@property
def nparameters(self):
"""
This function ...
:return:
"""
return len(self.ranges)
# -----------------------------------------------------------------
@property
def nmodels(self):
"""
This function ...
:return:
"""
        # dict views aren't indexable in Python 3; take the first key instead
        return len(self.parameters[next(iter(self.ranges))])
# -----------------------------------------------------------------
@property
def parameter_minima(self):
"""
This function ...
:return:
"""
minima = []
for name in self.ranges: minima.append(self.ranges[name].min)
# Return the minimal parameter values
return minima
# -----------------------------------------------------------------
@property
def parameter_maxima(self):
"""
This function ...
:return:
"""
maxima = []
for name in self.ranges: maxima.append(self.ranges[name].max)
# Return the maximal parameter values
return maxima
# -----------------------------------------------------------------
def add_parameter(self, name, par_range):
"""
This function ...
:param name:
:param par_range:
:return:
"""
self.ranges[name] = par_range
# -----------------------------------------------------------------
def run(self):
"""
This function ...
:return:
"""
# 1. Call the setup function
self.setup()
# 2. Load the necessary input
self.load_input()
# 3. Initialize the animations
self.initialize_animations()
# 4. Generate the model parameters
self.generate()
# -----------------------------------------------------------------
def setup(self):
"""
This function ...
:return:
"""
# Call the setup of the base class
super(ModelGenerator, self).setup()
# -----------------------------------------------------------------
def load_input(self):
"""
This function ...
:return:
"""<|fim▁hole|> # -----------------------------------------------------------------
def initialize_animations(self):
"""
This function ...
:return:
"""
# Inform the user
log.info("Initializing the animations ...")
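        # NOTE (assumption): ScatterAnimation and DistributionAnimation come
        # from PTS's animation utilities and self.distributions is prepared by
        # a subclass; neither is defined in this excerpt.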
# Initialize the scatter animation
self.scatter_animation = ScatterAnimation(self.ranges["FUV young"], self.ranges["FUV ionizing"],
self.ranges["Dust mass"])
self.scatter_animation.x_label = "FUV luminosity of young stars"
self.scatter_animation.y_label = "FUV luminosity of ionizing stars"
self.scatter_animation.z_label = "Dust mass"
# Initialize the young FUV luminosity distribution animation
self.fuv_young_animation = DistributionAnimation(self.ranges["FUV young"][0], self.ranges["FUV young"][1],
"FUV luminosity of young stars", "New models")
self.fuv_young_animation.add_reference_distribution("Previous models", self.distributions["FUV young"])
# Initialize the ionizing FUV luminosity distribution animation
self.fuv_ionizing_animation = DistributionAnimation(self.ranges["FUV ionizing"][0],
self.ranges["FUV ionizing"][1],
"FUV luminosity of ionizing stars", "New models")
self.fuv_ionizing_animation.add_reference_distribution("Previous models", self.distributions["FUV ionizing"])
# Initialize the dust mass distribution animation
self.dust_mass_animation = DistributionAnimation(self.ranges["Dust mass"][0], self.ranges["Dust mass"][1],
"Dust mass", "New models")
self.dust_mass_animation.add_reference_distribution("Previous models", self.distributions["Dust mass"])
# -----------------------------------------------------------------
@abstractmethod
def generate(self):
"""
This function ...
:return:
"""
pass
# -----------------------------------------------------------------
def update_animations(self, young_luminosity, ionizing_luminosity, dust_mass):
"""
This function ...
:param young_luminosity:
:param ionizing_luminosity:
:param dust_mass:
:return:
"""
# Add the point (and thus a frame) to the animation of parameter points
self.scatter_animation.add_point(young_luminosity, ionizing_luminosity, dust_mass)
# Update the distribution animations
if self.nmodels > 1:
# Add a frame to the animation of the distribution of the FUV luminosity of young starss
self.fuv_young_animation.add_value(young_luminosity)
# Add a frame to the animation of the distribution of the FUV luminosity of ionizing stars
self.fuv_ionizing_animation.add_value(ionizing_luminosity)
# Add a frame to the animation of the distribution of the dust mass
self.dust_mass_animation.add_value(dust_mass)
# -----------------------------------------------------------------<|fim▁end|> |
pass
|
<|file_name|>bucket_reader.go<|end_file_name|><|fim▁begin|>package outlet
import (
"fmt"
"l2met/bucket"
"l2met/store"
"time"
)
type BucketReader struct {
Store store.Store
Interval time.Duration
Partition string
Ttl uint64
NumOutlets int
NumScanners int
Inbox chan *bucket.Bucket
Outbox chan *bucket.Bucket
}
<|fim▁hole|> rdr.NumScanners = c
rdr.NumOutlets = c
rdr.Interval = i
rdr.Store = st
return rdr
}
func (r *BucketReader) Start(out chan *bucket.Bucket) {
r.Outbox = out
go r.scan()
for i := 0; i < r.NumOutlets; i++ {
go r.outlet()
}
}
func (r *BucketReader) scan() {
for t := range time.Tick(r.Interval) {
buckets, err := r.Store.Scan(t)
if err != nil {
fmt.Printf("at=bucket.scan error=%s\n", err)
continue
}
for bucket := range buckets {
r.Inbox <- bucket
}
}
}
func (r *BucketReader) outlet() {
for b := range r.Inbox {
r.Store.Get(b)
r.Outbox <- b
}
}<|fim▁end|> | func NewBucketReader(sz, c int, i time.Duration, st store.Store) *BucketReader {
rdr := new(BucketReader)
rdr.Partition = "bucket-reader"
rdr.Inbox = make(chan *bucket.Bucket, sz) |
<|file_name|>lib.rs<|end_file_name|><|fim▁begin|>//! [](https://travis-ci.org/lawliet89/rowdy)
//! [](https://dependencyci.com/github/lawliet89/rowdy)
//! [](https://crates.io/crates/rowdy)
//! [](https://github.com/lawliet89/rowdy)
//! [](https://docs.rs/rowdy)
//!
//! Documentation: [Stable](https://docs.rs/rowdy) | [Master](https://lawliet89.github.io/rowdy/)
//!
//! `rowdy` is a [Rocket](https://rocket.rs/)-based JSON Web Token authentication server
//! modelled on Docker Registry's
//! [authentication protocol](https://docs.docker.com/registry/spec/auth/).
//!
//! # Features
//!
//! - `simple_authenticator`: A simple CSV based authenticator
//! - `ldap_authenticator`: An LDAP based authenticator
//!
//! By default, the `simple_authenticator` feature is turned on.
//!
//! # `rowdy` Authentication Flow
//!
//! The authentication flow is inspired by
//! [Docker Registry](https://docs.docker.com/registry/spec/auth/) authentication specification.
//!
//! ## JSON Web Tokens
//!
//! Authentication makes use of two types of [JSON Web Tokens (JWT)](https://jwt.io/):
//! Access and Refresh tokens.
//!
//! ### Access Token
//!
//! The access token is a short lived JWT that allows users to access resources within the scope
//! that they are allowed to. The access token itself contains enough information for services
//! to verify the user and their permissions in a stateless manner.
//!
//! ### Refresh Token
//!
//! The refresh token allows users to retrieve a new access token without needing to
//! re-authenticate. As such, the refresh token is longer lived, but can be revoked.
//!
//! ## Authentication Flow
//!
//! 1. Client attempts to access a resource on a protected service.
//! 1. Service responds with a `401 Unauthorized` authentication challenge with information on
//! how to authenticate
//! provided in the `WWW-Authenticate` response header.
//! 1. Using the information from the previous step, the client authenticates with the
//! authentication server. The client
//! will receive, among other information, opaque access and refresh tokens.
//! 1. The client retries the original request with the Bearer token embedded in the request’s
//! Authorization header.
//! 1. The service authorizes the client by validating the Bearer token and the claim set
//! embedded within it and
//! proceeds as usual.
//!
//! ### Authentication Challenge
//!
//! Services will challenge users who do not provide a valid token via the HTTP response
//! `401 Unauthorized`. Details for
//! authentication are provided in the `WWW-Authenticate` header.
//!
//! ```text
//! Www-Authenticate: Bearer realm="https://www.auth.com",service="https://www.example.com",scope="all"
//! ```
//!
//! The `realm` field indicates the authentication server endpoint which clients should proceed to
//! authenticate against.
//!
//! The `service` field indicates the `service` value that clients should use when attempting to
//! authenticate at `realm`.
//!
//! The `scope` field indicates the `scope` value that clients should use when attempting to
//! authenticate at `realm`.
//!
//! ### Retrieving an Access Token (and optionally Refresh Token) from the Authentication Server
//!
//! An HTTP `GET` request should be made to the `realm` endpoint provided above. The endpoint
//! supports the following query parameters:
//!
//! - `service`: The service that the client is authenticating for. This should be the same as
//! the `service` value in the previous step
//! - `scope`: The scope that the client wishes to authenticate for.
//! This should be the same as the `scope` value in the previous step.
//! - `offline_token`: Set to `true` if a refresh token is also required. Defaults to `false`.
//! Cannot be set to `true` when using a refresh token to retrieve a new access token.
//!
//! When authenticating for the first time, clients should send the user's username and password
//! in the form of `Basic` authentication. If the client already has a prior refresh token and
//! would like to obtain a new access token, the client should send the refresh token in the form
//! of `Bearer` authentication.
//!
//! If successful, the authentication server will return a `200 OK` response with a
//! JSON body containing the following fields:
//!
//! - `token`: An opaque Access (`Bearer`) token that clients should supply to subsequent requests
//! in the `Authorization` header.
//! - `expires_in`: The duration in seconds since the token was issued that it will remain valid.
//! - `issued_at`: RFC3339-serialized UTC standard time at which a given token was issued.
//! - `refresh_token`: An opaque `Refresh` token which can be used to get additional access
//! tokens for the same subject with different scopes. This token should be kept secure by
//! the client and only sent to the authorization server which issues access tokens.
//! This field will only be set when `offline_token=true` is provided in the request.
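//!
//! For illustration, a successful response body might look like this (all
//! values below are made up):
//!
//! ```json
//! {
//!   "token": "eyJhbGciOiJSUzI1NiIs...",
//!   "expires_in": 86400,
//!   "issued_at": "2017-07-24T12:34:56Z",
//!   "refresh_token": "eyJhbGciOiJSUzI1NiIs..."
//! }
//! ```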
//!
//! If this fails, the server will return with the appropriate `4xx` response.
//!
//! ### Using the Access Token
//!
//! Once the client has a token, it will try the request again with the token placed in the
//! HTTP Authorization header like so:
//!
//! ```text
//! Authorization: Bearer <token>
//! ```
//!
//! ### Using the Refresh Token to Retrieve a New Access Token
//!
//! When the client's Access token expires, and it has previously asked for a Refresh Token,
//! the client can make a `GET` request to the same endpoint that the client used to retrieve the
//! access token (the `realm` URL in an authentication challenge).
//!
//! The steps are described in the section "Retrieving an Access Token" above. The process is the
//! same as the initial authentication except that instead of using `Basic` authentication,
//! the client should instead send the refresh token retrieved prior as `Bearer` authentication.
//! Also, `offline_token` cannot be requested for when requesting for a new access token using a
//! refresh token. (HTTP 401 will be returned if this happens.)
//!
//! ### Example
//!
//! This example uses `curl` to make request to the some (hypothetical) protected endpoint.
//! It requires [`jq`](https://stedolan.github.io/jq/) to parse JSON.
//!
//! ```bash
//! PROTECTED_RESOURCE="https://www.example.com/protected/resource/"
//!
//! # Save the response headers of our first request to the endpoint to get the Www-Authenticate
//! # header
//! RESPONSE_HEADER=$(tempfile);
//! curl --dump-header "${RESPONSE_HEADER}" "${PROTECTED_RESOURCE}"
//!
//! # Extract the realm, the service, and the scope from the Www-Authenticate header
//! WWWAUTH=$(cat "${RESPONSE_HEADER}" | grep "Www-Authenticate")
//! REALM=$(echo "${WWWAUTH}" | grep -o '\(realm\)="[^"]*"' | cut -d '"' -f 2)
//! SERVICE=$(echo "${WWWAUTH}" | grep -o '\(service\)="[^"]*"' | cut -d '"' -f 2)
//! SCOPE=$(echo "${WWWAUTH}" | grep -o '\(scope\)="[^"]*"' | cut -d '"' -f 2)
//!
//! # Build the URL to query the auth server
//! AUTH_URL="${REALM}?service=${SERVICE}&scope=${SCOPE}&offline_token=true"
//!
//! # Query the auth server to get a token -- replace the username and password
//! # below with the value from 1password
//! TOKEN=$(curl -s --user "mozart:password" "${AUTH_URL}")
//!
//! # Get the access token from the JSON string: {"token": "...."}
//! ACCESS_TOKEN=$(echo ${TOKEN} | jq .token | tr -d '"')
//!
//! # Query the resource again, but this time with a bearer token
//! curl -v -H "Authorization: Bearer ${ACCESS_TOKEN}" "${PROTECTED_RESOURCE}"
//!
//! # Get the refresh token
//! REFRESH_TOKEN=$(echo "${TOKEN}" | jq .refresh_token | tr -d '"')
//!
//! # Get a new access token
//! NEW_TOKEN=$(curl --header "Authorization: Bearer ${REFRESH_TOKEN}" "${AUTH_URL}")
//!
//! # Parse the new access token
//! NEW_ACCESS_TOKEN=$(echo "${TOKEN}" | jq .token | tr -d '"')
//!
//! # Query the resource again, but this time with a new access token
//! curl -v -H "Authorization: Bearer ${NEW_ACCESS_TOKEN}" "${PROTECTED_RESOURCE}"
//! ```
//!
//! ## Scope
//!
//! Not in use at the moment. Just use `all`.
//!
#![feature(proc_macro_hygiene, decl_macro)]
// See https://github.com/rust-unofficial/patterns/blob/master/anti_patterns/deny-warnings.md
#![allow(
legacy_directory_ownership,
missing_copy_implementations,
missing_debug_implementations,
unknown_lints,
unsafe_code,
intra_doc_link_resolution_failure
)]
#![deny(
const_err,
dead_code,
deprecated,
exceeding_bitshifts,
improper_ctypes,
missing_docs,
mutable_transmutes,
no_mangle_const_items,
non_camel_case_types,
non_shorthand_field_patterns,
non_upper_case_globals,
overflowing_literals,
path_statements,
plugin_as_library,
stable_features,
trivial_casts,
trivial_numeric_casts,
unconditional_recursion,
unknown_crate_types,
unreachable_code,
unused_allocation,
unused_assignments,
unused_attributes,
unused_comparisons,
unused_extern_crates,
unused_features,
unused_imports,
unused_import_braces,
unused_qualifications,
unused_must_use,
unused_mut,
unused_parens,
unused_results,
unused_unsafe,
unused_variables,
variant_size_differences,
warnings,
while_true
)]
#![doc(test(attr(allow(unused_variables), deny(warnings))))]
use biscuit as jwt;
use hyper;
#[macro_use]
extern crate lazy_static;
#[macro_use]
extern crate log;
#[macro_use]
extern crate rocket;
// we are using the "log_!" macros which are redefined from `log`'s
use rocket_cors as cors;
#[macro_use]
extern crate serde_derive;
use serde_json;
#[cfg(test)]
extern crate serde_test;
#[macro_use]
mod macros;
#[cfg(test)]
#[macro_use]
mod test;
pub mod auth;
mod routes;
pub mod serde_custom;
pub mod token;
pub use self::routes::routes;
use std::error;
use std::fmt;
use std::io;
use std::ops::Deref;
use std::str::FromStr;
use ring::rand::SystemRandom;
use rocket::http::Status;
use rocket::response::{Responder, Response};
use rocket::Request;
use serde::de;
use serde::{Deserialize, Deserializer, Serialize, Serializer};
pub use serde_json::Map as JsonMap;
pub use serde_json::Value as JsonValue;
/// Top level error enum
#[derive(Debug)]
pub enum Error {
/// A generic/unknown error
GenericError(String),
/// A bad request resulting from bad request parameters/headers
BadRequest(String),
/// Authentication error
Auth(auth::Error),
/// CORS error
CORS(cors::Error),
/// Token Error
Token(token::Error),
/// IO errors
IOError(io::Error),
    /// An error launching Rocket
LaunchError(rocket::error::LaunchError),
/// Unsupported operation
UnsupportedOperation,
}
impl_from_error!(auth::Error, Error::Auth);
impl_from_error!(cors::Error, Error::CORS);
impl_from_error!(token::Error, Error::Token);
impl_from_error!(String, Error::GenericError);
impl_from_error!(io::Error, Error::IOError);
impl_from_error!(rocket::error::LaunchError, Error::LaunchError);
impl error::Error for Error {
fn description(&self) -> &str {
match *self {
Error::UnsupportedOperation => "This operation is not supported",
Error::Auth(ref e) => e.description(),
Error::CORS(ref e) => e.description(),
Error::Token(ref e) => e.description(),
Error::IOError(ref e) => e.description(),<|fim▁hole|> Error::LaunchError(ref e) => e.description(),
Error::GenericError(ref e) | Error::BadRequest(ref e) => e,
}
}
fn cause(&self) -> Option<&dyn error::Error> {
match *self {
Error::Auth(ref e) => Some(e),
Error::CORS(ref e) => Some(e),
Error::Token(ref e) => Some(e),
Error::IOError(ref e) => Some(e),
Error::LaunchError(ref e) => Some(e),
Error::UnsupportedOperation | Error::GenericError(_) | Error::BadRequest(_) => {
Some(self)
}
}
}
}
impl fmt::Display for Error {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
match *self {
Error::UnsupportedOperation => write!(f, "{}", error::Error::description(self)),
Error::Auth(ref e) => fmt::Display::fmt(e, f),
Error::CORS(ref e) => fmt::Display::fmt(e, f),
Error::Token(ref e) => fmt::Display::fmt(e, f),
Error::IOError(ref e) => fmt::Display::fmt(e, f),
Error::GenericError(ref e) => fmt::Display::fmt(e, f),
Error::LaunchError(ref e) => fmt::Display::fmt(e, f),
Error::BadRequest(ref e) => fmt::Display::fmt(e, f),
}
}
}
impl<'r> Responder<'r> for Error {
fn respond_to(self, request: &Request<'_>) -> Result<Response<'r>, Status> {
match self {
Error::Auth(e) => e.respond_to(request),
Error::CORS(e) => e.respond_to(request),
Error::Token(e) => e.respond_to(request),
Error::BadRequest(e) => {
error_!("{}", e);
Err(Status::BadRequest)
}
e => {
error_!("{}", e);
Err(Status::InternalServerError)
}
}
}
}
/// Wrapper around `hyper::Url` with `Serialize` and `Deserialize` implemented
#[derive(Clone, Eq, PartialEq, Hash, Debug)]
pub struct Url(hyper::Url);
impl_deref!(Url, hyper::Url);
impl FromStr for Url {
type Err = hyper::error::ParseError;
fn from_str(s: &str) -> Result<Self, Self::Err> {
Ok(Url(hyper::Url::from_str(s)?))
}
}
impl fmt::Display for Url {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
write!(f, "{}", self.0.as_str())
}
}
impl Serialize for Url {
fn serialize<S>(&self, serializer: S) -> Result<S::Ok, S::Error>
where
S: Serializer,
{
serializer.serialize_str(self.0.as_str())
}
}
impl<'de> Deserialize<'de> for Url {
fn deserialize<D>(deserializer: D) -> Result<Self, D::Error>
where
D: Deserializer<'de>,
{
struct UrlVisitor;
impl<'de> de::Visitor<'de> for UrlVisitor {
type Value = Url;
fn expecting(&self, formatter: &mut fmt::Formatter<'_>) -> fmt::Result {
formatter.write_str("a valid URL string")
}
fn visit_string<E>(self, value: String) -> Result<Self::Value, E>
where
E: de::Error,
{
Ok(Url(
hyper::Url::from_str(&value).map_err(|e| E::custom(e.to_string()))?
))
}
fn visit_str<E>(self, value: &str) -> Result<Self::Value, E>
where
E: de::Error,
{
Ok(Url(
hyper::Url::from_str(value).map_err(|e| E::custom(e.to_string()))?
))
}
}
deserializer.deserialize_string(UrlVisitor)
}
}
/// A sequence of bytes, either as an array of unsigned 8 bit integers, or a string which will be
/// treated as UTF-8.
/// This enum is (de)serialized [`untagged`](https://serde.rs/enum-representations.html).
#[derive(Clone, Eq, PartialEq, Serialize, Deserialize, Debug)]
#[serde(untagged)]
pub enum ByteSequence {
/// A string which will be converted to UTF-8 and then to bytes.
String(String),
    /// A sequence of unsigned 8-bit integers which will be treated as bytes.
Bytes(Vec<u8>),
}
impl ByteSequence {
/// Returns the byte sequence.
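    ///
    /// A hedged sketch (not compiled here) of the two accepted JSON forms:
    ///
    /// ```rust,ignore
    /// let s: ByteSequence = serde_json::from_str("\"secret\"").unwrap();
    /// let b: ByteSequence = serde_json::from_str("[115, 101, 99, 114, 101, 116]").unwrap();
    /// assert_eq!(s.as_bytes(), b.as_bytes());
    /// ```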
pub fn as_bytes(&self) -> Vec<u8> {
match *self {
ByteSequence::String(ref string) => string.to_string().into_bytes(),
ByteSequence::Bytes(ref bytes) => bytes.to_vec(),
}
}
}
/// Application configuration. Usually deserialized from JSON for use.
///
/// The type parameter `B` is the [`auth::AuthenticatorConfiguration`] and by its associated
/// type, the `Authenticator` that is going to be used for HTTP Basic Authentication.
///
/// # Examples
/// ```
/// extern crate rowdy;
/// extern crate serde_json;
///
/// use rowdy::Configuration;
/// use rowdy::auth::NoOpConfiguration;
///
/// # fn main() {
/// // We are using the `NoOp` authenticator
/// let json = r#"{
/// "token" : {
/// "issuer": "https://www.acme.com",
/// "allowed_origins": { "Some": ["https://www.example.com", "https://www.foobar.com"] },
/// "audience": ["https://www.example.com", "https://www.foobar.com"],
/// "signature_algorithm": "RS256",
/// "secret": {
/// "rsa_private": "test/fixtures/rsa_private_key.der",
/// "rsa_public": "test/fixtures/rsa_public_key.der"
/// },
/// "expiry_duration": 86400
/// },
/// "basic_authenticator": {}
/// }"#;
/// let config: Configuration<NoOpConfiguration> = serde_json::from_str(json).unwrap();
/// let rocket = config.ignite().unwrap().mount("/", rowdy::routes());
/// // then `rocket.launch()`!
/// # }
/// ```
#[derive(Serialize, Deserialize, Debug, Clone)]
pub struct Configuration<B> {
/// Token configuration. See the type documentation for deserialization examples
pub token: token::Configuration,
/// The configuration for the authenticator that will handle HTTP Basic Authentication.
pub basic_authenticator: B,
}
impl<B: auth::AuthenticatorConfiguration<auth::Basic>> Configuration<B> {
/// Ignites the rocket with various configuration objects, but does not mount any routes.
/// Remember to mount routes and call `launch` on the returned Rocket object.
/// See the struct documentation for an example.
pub fn ignite(&self) -> Result<rocket::Rocket, Error> {
let token_getter_cors_options = self.token.cors_option();
let basic_authenticator = self.basic_authenticator.make_authenticator()?;
let basic_authenticator: Box<auth::BasicAuthenticator> = Box::new(basic_authenticator);
// Prepare the keys
let keys = self.token.keys()?;
Ok(rocket::ignite()
.manage(self.token.clone())
.manage(basic_authenticator)
.manage(keys)
.attach(token_getter_cors_options))
}
}
/// Convenience function to ignite and launch rowdy. This function will never return
///
/// # Panics
/// Panics if during the Rocket igition, something goes wrong.
///
/// # Example
/// ```rust,no_run
/// extern crate rowdy;
/// extern crate serde_json;
///
/// use rowdy::Configuration;
/// use rowdy::auth::NoOpConfiguration;
///
/// # fn main() {
/// // We are using the `NoOp` authenticator
/// let json = r#"{
/// "token" : {
/// "issuer": "https://www.acme.com",
/// "allowed_origins": ["https://www.example.com", "https://www.foobar.com"],
/// "audience": ["https://www.example.com", "https://www.foobar.com"],
/// "signature_algorithm": "RS256",
/// "secret": {
/// "rsa_private": "test/fixtures/rsa_private_key.der",
/// "rsa_public": "test/fixtures/rsa_public_key.der"
/// },
/// "expiry_duration": 86400
/// },
/// "basic_authenticator": {}
/// }"#;
/// let config: Configuration<NoOpConfiguration> = serde_json::from_str(json).unwrap();
///
/// rowdy::launch(config);
/// # }
/// ```
pub fn launch<B: auth::AuthenticatorConfiguration<auth::Basic>>(
config: Configuration<B>,
) -> rocket::error::LaunchError {
let rocket = config.ignite().unwrap_or_else(|e| panic!("{}", e));
rocket.mount("/", routes()).launch()
}
/// Return a pseudo-random number generator
pub(crate) fn rng() -> &'static SystemRandom {
use std::ops::Deref;
lazy_static! {
static ref RANDOM: SystemRandom = SystemRandom::new();
}
RANDOM.deref()
}
#[cfg(test)]
mod tests {
use std::str::FromStr;
use serde_test::{assert_tokens, Token};
use super::*;
#[derive(Serialize, Deserialize, Debug, PartialEq, Eq)]
struct TestUrl {
url: Url,
}
#[derive(Clone, Serialize, Deserialize, Debug, Eq, PartialEq)]
struct TestClaims {
company: String,
department: String,
}
impl Default for TestClaims {
fn default() -> Self {
TestClaims {
company: "ACME".to_string(),
department: "Toilet Cleaning".to_string(),
}
}
}
#[test]
fn url_serialization_token_round_trip() {
let test = TestUrl {
url: not_err!(Url::from_str("https://www.example.com/")),
};
assert_tokens(
&test,
&[
Token::Struct {
name: "TestUrl",
len: 1,
},
Token::Str("url"),
Token::Str("https://www.example.com/"),
Token::StructEnd,
],
);
}
}<|fim▁end|> | |
<|file_name|>emitResultTests.ts<|end_file_name|><|fim▁begin|>import { expect } from "chai";
import { EmitResult } from "../../../compiler";
import { Project } from "../../../Project";
import * as testHelpers from "../../testHelpers";
describe(nameof(EmitResult), () => {
it("should get the emit result when there are no errors", async () => {
const fileSystem = testHelpers.getFileSystemHostWithFiles([]);
const project = new Project({ compilerOptions: { noLib: true, outDir: "dist" }, fileSystem });
project.createSourceFile("file1.ts", "const num1 = 1;");<|fim▁hole|> expect(result.compilerObject).to.not.be.undefined;
expect(result.getEmitSkipped()).to.be.false;
expect(result.getDiagnostics().length).to.equal(0);
});
it("should get the emit result when there are errors", async () => {
const fileSystem = testHelpers.getFileSystemHostWithFiles([]);
const project = new Project({ compilerOptions: { noLib: true, outDir: "dist", noEmitOnError: true }, fileSystem });
project.createSourceFile("file1.ts", "const num1;");
const result = await project.emit();
expect(result.getEmitSkipped()).to.be.true;
const diagnostics = result.getDiagnostics();
const filteredDiagnostics = diagnostics.map(d => d.getMessageText()).filter(d => (d as string).indexOf("Cannot find global type"));
expect(filteredDiagnostics.length).to.equal(1);
expect(filteredDiagnostics[0]).to.equal("'const' declarations must be initialized.");
});
});<|fim▁end|> | project.createSourceFile("file2.ts", "const num2 = 2;");
const result = await project.emit(); |
<|file_name|>main.rs<|end_file_name|><|fim▁begin|>use std::fmt::Display;
#[derive(Debug)]
struct ImportantExcerpt<'a> {
part: &'a str // if hold ref, need lifetime anno
}
impl<'a> ImportantExcerpt<'a> {
fn level(&self) -> i32 {
3
}
// one of the param is self, so all return
// lifetime is same to self
fn announce_and_return_part(&self, announcement: &str) -> &str {
println!("Attention please: {}", announcement);
self.part
}
}
fn main() {
// --- lifetime prevent dangling ref
/*
let r;
{ // x not live long enough
let x = 5;
r = &x;
}
println!("r: {}", r);
*/
// ---
let string1 = String::from("abcd");
let string2 = "xyz";
let result = longest(string1.as_str(), string2);
println!("Then longest string is {}", result);
// --- how borrow checker helps after adding lifetime
// annotations
/*
let s1 = String::from("long string is long");
let result;
{
let s2 = String::from("xyz");
result = longest(s1.as_str(), s2.as_str());
}
println!("Then longest string is {}", result);
*/
let novel = String::from("Call me Ishmael. Some years ago...");
let first_sentence = novel.split('.')
.next()
.expect("Could not find a '.'");
let i = ImportantExcerpt {part: first_sentence};<|fim▁hole|> // entire duration of the program
let s: &'static str = "I have a static lifetime.";
}
// the compiler doesn't know whether the result is a ref to str1 or str2,
// so the borrow checker doesn't have enough info to check
// anything here. We programmers need to express our intentions
// manually, to let the borrow checker help us
// fn longest(str1: &str, str2: &str) -> &str {
// since scope is always nested, will be the shortest of str1 and str2
// need to make the lifetime of result relative to at least
// one of the args
fn longest<'a>(str1: &'a str, str2: &'a str) -> &'a str {
if str1.len() > str2.len() {
str1
} else {
str2
}
}
fn longest_with_an_announcement<'a, T>(x: &'a str, y: &'a str, ann: T) -> &'a str
where T: Display
{
println!("Announcement! {}", ann);
if x.len() > y.len() {
x
} else {
y
}
}<|fim▁end|> | |
<|file_name|>index.d.ts<|end_file_name|><|fim▁begin|>// Type definitions for twit 2.2
// Project: https://github.com/ttezel/twit
// Definitions by: Volox <https://github.com/Volox>
// sapphiredev <https://github.com/sapphiredev>
// abraham <https://github.com/abraham>
// siwalik <https://github.com/siwalikm>
// plhery <https://github.com/plhery>
// Definitions: https://github.com/DefinitelyTyped/DefinitelyTyped
// TypeScript Version: 2.3
/// <reference types="node" />
/// <reference types="geojson" />
declare module 'twit' {
import { IncomingMessage } from 'http';
import { EventEmitter } from 'events';
namespace Twit {
export type StreamEndpoint = 'statuses/filter' | 'statuses/sample' | 'statuses/firehose' | 'user' | 'site';
export namespace Twitter {
export type ResultType = 'mixed' | 'popular' | 'recent';
/**
* @see https://dev.twitter.com/overview/api/tweets#obj-contributors
*/
export interface Contributors {
id: number,
id_str: string,
screen_name: string,
}
/**
* @see https://dev.twitter.com/overview/api/entities
*/
export interface HashtagEntity {
indices: [number, number],
text: string,
}
export interface Size {
h: number,
w: number,
resize: 'crop' | 'fit',
}
export interface Sizes {
thumb: Size,
large: Size,
medium: Size,
small: Size,
}
export interface MediaEntity {
id: number,
id_str: string,
indices: [number, number],
url: string,
display_url: string,
expanded_url: string,
media_url: string,
media_url_https: string,
sizes: Sizes,
source_status_id: number,
source_status_id_str: string,
type: string,
}
export interface UrlEntity {
url: string,
display_url: string,
expanded_url: string,
indices: [number, number],
}
export interface UserMentionEntity {
id: number,
id_str: string,
indices: [number, number],
name: string,
screen_name: string,
}
export interface Entities {
hashtags: HashtagEntity[],
media: MediaEntity[],
urls: UrlEntity[],
user_mentions: UserMentionEntity[],
}
/**
* @see https://dev.twitter.com/overview/api/users
*/
export interface User {
contributors_enabled: boolean,
created_at: string,
default_profile: string,
default_profile_image: string,
description: string,
entities: Entities,
favourites_count: number,
follow_request_sent?: boolean,
following?: boolean,
followers_count: number,
friends_count: number,
geo_enabled?: boolean,
id: number,
id_str: string,
is_translator?: boolean,
lang: string,
listed_count: number,
location: string,
name: string,
notifications?: boolean,
profile_background_color: string,
profile_background_image_url: string,
profile_background_image_url_https: string,
profile_background_tile: boolean,
profile_banner_url: string,
profile_image_url: string,
profile_image_url_https: string,
profile_link_color: string,
profile_sidebar_border_color: string,
profile_sidebar_fill_color: string,
profile_text_color: string,
profile_use_background_image: boolean,
protected: boolean,
screen_name: string,
show_all_inline_media: boolean,
status?: Status,
statuses_count: number,
time_zone?: string,
url: string,
utc_offset?: number,
verified: boolean,
withheld_in_countries: string,
withheld_scope: string,
}
/**
* @see https://dev.twitter.com/overview/api/places
*/
export interface PlaceAttribute {
street_address: string,
locality: string,
region: string,
iso3: string,
postal_code: string,
phone: string,
twitter: string,
url: string,
'app:id': string,
}
export interface Place {
geometry: GeoJSON.Point,
attributes: PlaceAttribute,
bounding_box: GeoJSON.Polygon,
contained_within: Place[],
country: string,<|fim▁hole|> place_type: string,
url: string,
}
/**
* @see https://dev.twitter.com/overview/api/tweets
*/
export interface Status {
id: number,
id_str: string,
annotations?: Object,
contributors?: Contributors[],
coordinates?: GeoJSON.Point,
created_at: string,
current_user_retweet?: {
id: number,
id_str: string,
},
entities: Entities,
favorite_count?: number,
favorited?: boolean,
filter_level: 'none' | 'low' | 'medium',
geo?: Object,
in_reply_to_screen_name?: string,
in_reply_to_status_id?: number,
in_reply_to_status_id_str?: string,
in_reply_to_user_id?: number,
in_reply_to_user_id_str?: string,
lang?: string,
place?: Place,
possibly_sensitive?: boolean,
quoted_status_id?: number,
quoted_status_id_str?: string,
quoted_status?: Status,
scopes?: Object,
retweet_count: number,
retweeted: boolean,
retweeted_status?: Status,
source?: string,
text?: string,
full_text?: string,
truncated: boolean,
user: User,
withheld_copyright?: boolean,
withheld_in_countries?: string[],
withheld_scope?: string,
display_text_range?: [number, number],
}
export interface Metadata {
max_id?: number,
since_id?: number,
refresh_url?: string,
next_results?: string,
count?: number,
completed_in?: number,
since_id_str?: string,
query?: string,
max_id_str?: string
}
export interface Errors {
errors: {
code: number
message: string
}[]
}
export interface SearchResults {
statuses: Twitter.Status[],
search_metadata: Twitter.Metadata,
}
}
export type Response = object
interface MediaParam {
file_path: string
}
interface Params {
// search/tweets
q?: string,
geocode?: string,
lang?: string,
locale?: string,
result_type?: Twitter.ResultType,
count?: number,
results_per_page?: number,
until?: string,
since_id?: string,
max_id?: string,
include_entities?: boolean,
// Other params from various endpoints
track?: string | string[],
media_id?: string,
media_ids?: string[],
alt_text?: {
text?: string
},
media_data?: Buffer | string,
screen_name?: string,
id?: string,
slug?: string,
owner_screen_name?: string,
status?: string,
user_id?: number | string,
lat?: number,
long?: number,
follow?: boolean | string,
include_email?: boolean,
cursor?: number | string,
tweet_mode?: string,
trim_user?: boolean,
exclude_replies?: boolean,
include_rts?: boolean,
skip_status?: boolean,
url?: string,
include_user_entities?: boolean,
stringify_ids?: boolean,
}
export interface PromiseResponse {
data: Response,
resp: IncomingMessage,
}
export interface Callback {
(err: Error, result: Response, response: IncomingMessage): void
}
export interface ConfigKeys {
consumer_key: string,
consumer_secret: string,
access_token?: string,
access_token_secret?: string,
}
export interface Options extends ConfigKeys {
app_only_auth?: boolean,
timeout_ms?: number,
trusted_cert_fingerprints?: string[],
}
export interface Stream extends EventEmitter {
start(): void;
stop(): void;
}
}
class Twit {
/**
* @see https://github.com/ttezel/twit#var-t--new-twitconfig
*/
constructor(config: Twit.Options);
/**
* @see https://github.com/ttezel/twit#tgetpath-params-callback
*/
get(path: string, callback: Twit.Callback): void;
get(path: string, params: Twit.Params, callback: Twit.Callback): void;
get(path: string, params?: Twit.Params): Promise<Twit.PromiseResponse>;
/**
* @see https://github.com/ttezel/twit#tpostpath-params-callback
*/
post(path: string, callback: Twit.Callback): void;
post(path: string, params: Twit.Params, callback: Twit.Callback): void;
post(path: string, params?: Twit.Params): Promise<Twit.PromiseResponse>;
/**
* @see https://github.com/ttezel/twit#tpostmediachunkedparams-callback
*/
postMediaChunked(media: Twit.MediaParam, callback: Twit.Callback): void;
/**
* @see https://github.com/ttezel/twit#tgetauth
*/
getAuth(): Twit.Options
/**
* @see https://github.com/ttezel/twit#tsetauthtokens
*/
setAuth(tokens: Twit.ConfigKeys): void
/**
* @see https://github.com/ttezel/twit#tstreampath-params
*/
stream(path: Twit.StreamEndpoint, params?: Twit.Params): Twit.Stream;
}
export = Twit;
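    // Usage sketch (ours -- not part of the upstream typings; the endpoint and
    // params mirror the search/tweets fields documented in Twit.Params above):
    //   import Twit = require('twit');
    //   const t = new Twit({ consumer_key: 'ck', consumer_secret: 'cs', app_only_auth: true });
    //   t.get('search/tweets', { q: 'banana', count: 10 })
    //    .then(({ data, resp }) => console.log(data));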
}<|fim▁end|> | country_code: string,
full_name: string,
id: string,
name: string, |
<|file_name|>InheritingTextFormat.java<|end_file_name|><|fim▁begin|>/*
* Copyright 2001-2013 Aspose Pty Ltd. All Rights Reserved.
*
* This file is part of Aspose.Pdf. The source code in this file
* is only intended as a supplement to the documentation, and is provided
* "as is", without warranty of any kind, either expressed or implied.
*/
package programmersguide.workingwithasposepdfgenerator.workingwithtext.textformatting.inheritingtextformat.java;
import com.aspose.pdf.*;<|fim▁hole|>public class InheritingTextFormat
{
public static void main(String[] args) throws Exception
{
// The path to the documents directory.
String dataDir = "src/programmersguide/workingwithasposepdfgenerator/workingwithtext(generator)/textformatting/inheritingtextformat/data/";
}
}<|fim▁end|> | |
<|file_name|>calendar.js<|end_file_name|><|fim▁begin|>(function($, utils, $HELPER){
var $app = window.vgrome;
$app.controller('CalendarCtrl', ['$scope', '$compile', 'lang', 'apiProvider', '$controller',
function ($scope, $compile, $lang, $apiProvider, $controller) {
$.extend(this, $controller('EntityCtrl', {$scope: $scope}));
$scope.trans = $lang.translate;
window.calendarCtrl = $scope;
$scope.mode = 'index';
$scope.focusFields = { events: [], tasks: [] };
$scope.focusObject = 'Calendar';
$scope.focusId = '';
$scope.listViewData = { events: [], tasks: [] };
$scope.listViewHeaderColumns = {};
$scope.enableShowRefList = enableShowRefList = function(originObject, originId) {
$scope.listViewData = { events: [], tasks: [] };
$scope.listViewHeaderColumns = { events: userdata.search_config['Events'], tasks: userdata.search_config['Calendar'] };
$apiProvider.findListRef(originObject, originId, 'Calendar', $scope.listViewHeaderColumns, function(result) {
utils.log(originObject+':explode ref list('+$scope.focusObject+'):' + $scope.focusId);
if(result.success) {<|fim▁hole|> _.each($scope.listViewHeaderColumns['tasks'], function(field) {
var recordValue = record[field.name];
if($HELPER.inArray(field.name, ['date_start', 'due_date'])) {
if(field.name == 'date_start') {
recordValue += ' ' + record['time_start'];
} else if(field.name == 'due_date') {
recordValue += ' ' + record['time_end'];
}
}
data[field.name] = $HELPER.formatValueByField(field, recordValue);
if(arrayContains(field.type.name, ['reference', 'owner'])) {
data[field.name + '_display'] = record[field.name + '_display'];
}
});
data['id'] = record.id;
$scope.listViewData['tasks'].push(data);
});
_.each(result.records['events'], function(record) {
var data = {};
_.each($scope.listViewHeaderColumns['events'], function(field) {
var recordValue = record[field.name];
if($HELPER.inArray(field.name, ['date_start', 'due_date'])) {
if(field.name == 'date_start') {
recordValue += ' ' + record['time_start'];
} else if(field.name == 'due_date') {
recordValue += ' ' + record['time_end'];
}
}
data[field.name] = $HELPER.formatValueByField(field, recordValue);
if(arrayContains(field.type.name, ['reference', 'owner'])) {
data[field.name + '_display'] = record[field.name + '_display'];
}
});
data['id'] = record.id;
$scope.listViewData['events'].push(data);
});
$scope.switchMode('ref-list');
utils.hideLoading();
} else {
utils.handleError(result, 'Calendar');
utils.hideLoading();
}
});
};
$scope.submitCreate = submitCreate = function() {
var templateFields = $scope.focusFields;
var post = {};
_.each(templateFields, function(block){
_.each(block.items, function(field){
if(field.uitype.name == 'boolean') {
var value = $('.create-section #inp_'+$scope.focusObject+'_'+field.name).prop( "checked" );
value = value ? 1 : 0;
} else {
var value = $('.create-section #inp_'+$scope.focusObject+'_'+field.name).val();
}
if(field.uitype.name == 'datetime' && arrayContains(field.name, ['date_start', 'due_date'])) {
if(field.name == 'date_start') {
var subData = { time: $('.create-section #inp_'+$scope.focusObject+'_time_start').val() };
} else if(field.name == 'due_date') {
var subData = { time: $('.create-section #inp_'+$scope.focusObject+'_time_end').val() };
}
value = $HELPER.formatToVTFormat(field, value, subData);
} else {
value = $HELPER.formatToVTFormat(field, value);
}
post[field.name] = value;
});
});
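        // Note added for clarity: for the date_start / due_date datetime fields,
        // $HELPER.formatToVTFormat was given a {time: ...} sub-object above, so the
        // value stored in post is a [date, time] pair; split it back into VTiger's
        // separate date and time columns.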
post['time_start'] = post['date_start'][1];
post['date_start'] = post['date_start'][0];
post['time_end'] = post['due_date'][1];
post['due_date'] = post['due_date'][0];
if($scope.focusObject == 'Calendar') {
if(empty(post['activitytype'])) {
post['activitytype'] = 'Task';
}
if(empty(post['visibility'])) {
post['visibility'] = 'Public';
}
} else if($scope.focusObject == 'Events') {
if(empty(post['activitytype'])) {
post['activitytype'] = 'Call';
}
if(empty(post['visibility'])) {
post['visibility'] = 'Public';
}
}
        // Calculate duration hours and minutes
utils.showLoading();
$apiProvider.createObject($scope.focusObject, post, function(result){
if(result.success) {
var record = result.record;
utils.hideLoading();
$scope.enableShowDetail(record.id);
$scope.needToReloadRefList = true;
} else {
utils.handleError(result, 'Calendar');
utils.hideLoading();
}
});
};
$scope.submitEdit = submitEdit = function(){
var templateFields = $scope.focusFields;
var post = {};
_.each(templateFields, function(block){
_.each(block.items, function(field){
if(field.uitype.name == 'boolean') {
var value = $('.edit-section #inp_'+$scope.focusObject+'_'+field.name).prop( "checked" );
value = value ? 1 : 0;
} else {
var value = $('.edit-section #inp_'+$scope.focusObject+'_'+field.name).val();
}
if(field.uitype.name == 'datetime' && arrayContains(field.name, ['date_start', 'due_date'])) {
if(field.name == 'date_start') {
var subData = { time: $('.edit-section #inp_'+$scope.focusObject+'_time_start').val() };
} else if(field.name == 'due_date') {
var subData = { time: $('.edit-section #inp_'+$scope.focusObject+'_time_end').val() };
}
value = $HELPER.formatToVTFormat(field, value, subData);
} else {
value = $HELPER.formatToVTFormat(field, value);
}
if(value != '') {
post[field.name] = value;
}
});
});
post['time_start'] = post['date_start'][1];
post['date_start'] = post['date_start'][0];
post['time_end'] = post['due_date'][1];
post['due_date'] = post['due_date'][0];
if($scope.focusObject == 'Calendar') {
if(empty(post['activitytype'])) {
post['activitytype'] = 'Task';
}
if(empty(post['visibility'])) {
post['visibility'] = 'Public';
}
} else if($scope.focusObject == 'Events') {
if(empty(post['activitytype'])) {
post['activitytype'] = 'Call';
}
if(empty(post['visibility'])) {
post['visibility'] = 'Public';
}
}
if($scope.focusObject == 'Calendar' || $scope.focusObject == 'Events') {
if(!empty(window.summaryData) && window.summaryData.type == 'Leads') {
post['parent_id'] = summaryData.id;
}
}
utils.showLoading();
$apiProvider.updateObject($scope.focusObject, $scope.focusId, post, function(result){
if(result.success) {
var record = result.record;
utils.hideLoading();
$scope.enableShowDetail(record.id);
} else {
utils.handleError(result, $scope.focusObject);
utils.hideLoading();
}
});
};
$scope.backHistory = backHistory = function () {
if(window.summaryData) {
$scope.mode = 'ref-list';
if(summaryData && $scope.needToReloadRefList) {
utils.showLoading('Reloading data...');
$scope.enableShowRefList(summaryData.type, summaryData.id);
$scope.needToReloadRefList = false;
}
} else {
$scope.mode = 'index';
}
};
}]);
})(jQuery, window.UTILS, window.VTEHelperInstance);<|fim▁end|> | _.each(result.records['tasks'], function(record) {
var data = {}; |
<|file_name|>746.js<|end_file_name|><|fim▁begin|>var __v=[
{
"Id": 2329,<|fim▁hole|> "Sort": 0
}
]<|fim▁end|> | "Chapter": 746,
"Name": "nodejs", |
<|file_name|>mod.rs<|end_file_name|><|fim▁begin|><|fim▁hole|><|fim▁end|> | // english/mod.rs
pub mod greetings; // looks for greetings.rs in the current directory, or mod.rs in a greetings/ subdirectory
pub mod farewells; // looks for farewells.rs in the current directory, or mod.rs in a farewells/ subdirectory |
<|file_name|>generi_in_istituzioni.py<|end_file_name|><|fim▁begin|># -*- coding: utf-8 -*-
from collections import OrderedDict
import locale
from optparse import make_option
from verify.management.commands import VerifyBaseCommand
from verify.models import *
from verify.politici_models import *
from django.db.models import Q, Count
__author__ = 'guglielmo'
class Command(VerifyBaseCommand):
"""
    Report of overall gender statistics, at the national level,
    for all bodies of all institutions.
    It can be limited to one or more institutions, by passing a list of institution_id
"""
args = '<institution_id institution_id ...>'
help = "Check that all locations have only male components (list locations with female components)."
option_list = VerifyBaseCommand.option_list
def execute_verification(self, *args, **options):
self.csv_headers = ["ISTITUZIONE", "INCARICO", "N_DONNE", "N_UOMINI", "N_TOTALI", "PERC_DONNE", "PERC_UOMINI"]
institutions = OpInstitution.objects.using('politici').all()
if args:
institutions = institutions.filter(id__in=args)
self.logger.info(
"Verification {0} launched with institutions limited to {1}".format(
self.__class__.__module__, ",".join(institutions.values_list('id', flat=True))
)
)
else:
self.logger.info(
"Verification {0} launched for all institutions".format(
self.__class__.__module__
)
)
self.ok_locs = []
self.ko_locs = []
for institution in institutions:
charge_types_ids = OpInstitutionCharge.objects.using('politici').\
filter(date_end__isnull=True,
content__deleted_at__isnull=True).\
filter(institution=institution).\
values_list('charge_type', flat=True).\
distinct()
charge_types = OpChargeType.objects.using('politici').\
filter(id__in=charge_types_ids)
for charge_type in charge_types:
self.logger.info(
"Counting {0} in {1}".format(
charge_type.name, institution.name<|fim▁hole|> )
qs = OpInstitutionCharge.objects.using('politici').\
filter(date_end__isnull=True,
content__deleted_at__isnull=True).\
filter(institution=institution,
charge_type=charge_type)
n_tot = qs.count()
n_fem = qs.filter(politician__sex__iexact='f').count()
n_mal = n_tot - n_fem
merged = [institution.name, charge_type.name, n_fem, n_mal, n_tot,]
merged.append(locale.format("%.2f",100. * n_fem / float(n_tot) ))
merged.append(locale.format("%.2f",100. * n_mal / float(n_tot) ))
self.ko_locs.append(merged)
outcome = Verification.OUTCOME.failed
self.logger.info(
"Report for {0} institutions generated.".format(
len(self.ko_locs)
)
)
return outcome<|fim▁end|> | ) |
<|file_name|>data_source_details_influx.go<|end_file_name|><|fim▁begin|>// Copyright (c) 2016, 2018, 2021, Oracle and/or its affiliates. All rights reserved.
// This software is dual-licensed to you under the Universal Permissive License (UPL) 1.0 as shown at https://oss.oracle.com/licenses/upl or Apache License 2.0 as shown at http://www.apache.org/licenses/LICENSE-2.0. You may choose either license.
// Code generated. DO NOT EDIT.
// Oracle Cloud AI Services API
//
// OCI AI Service solutions can help Enterprise customers integrate AI into their products immediately by using our proven,
// pre-trained/custom models or containers, and without a need to set up in house team of AI and ML experts.
// This allows enterprises to focus on business drivers and development work rather than AI/ML operations, shortening the time to market.
//
package aianomalydetection
import (
"encoding/json"
"github.com/oracle/oci-go-sdk/v46/common"
)
// DataSourceDetailsInflux Data Source details for influx.
type DataSourceDetailsInflux struct {
VersionSpecificDetails InfluxDetails `mandatory:"true" json:"versionSpecificDetails"`<|fim▁hole|>
// Username for connection to Influx
UserName *string `mandatory:"true" json:"userName"`
// Password Secret Id for the influx connection
PasswordSecretId *string `mandatory:"true" json:"passwordSecretId"`
// Measurement name for influx
MeasurementName *string `mandatory:"true" json:"measurementName"`
// public IP address and port to influx DB
Url *string `mandatory:"true" json:"url"`
}
func (m DataSourceDetailsInflux) String() string {
return common.PointerString(m)
}
// MarshalJSON marshals to json representation
func (m DataSourceDetailsInflux) MarshalJSON() (buff []byte, e error) {
type MarshalTypeDataSourceDetailsInflux DataSourceDetailsInflux
s := struct {
DiscriminatorParam string `json:"dataSourceType"`
MarshalTypeDataSourceDetailsInflux
}{
"INFLUX",
(MarshalTypeDataSourceDetailsInflux)(m),
}
return json.Marshal(&s)
}
// UnmarshalJSON unmarshals from json
func (m *DataSourceDetailsInflux) UnmarshalJSON(data []byte) (e error) {
model := struct {
VersionSpecificDetails influxdetails `json:"versionSpecificDetails"`
UserName *string `json:"userName"`
PasswordSecretId *string `json:"passwordSecretId"`
MeasurementName *string `json:"measurementName"`
Url *string `json:"url"`
}{}
e = json.Unmarshal(data, &model)
if e != nil {
return
}
var nn interface{}
nn, e = model.VersionSpecificDetails.UnmarshalPolymorphicJSON(model.VersionSpecificDetails.JsonData)
if e != nil {
return
}
if nn != nil {
m.VersionSpecificDetails = nn.(InfluxDetails)
} else {
m.VersionSpecificDetails = nil
}
m.UserName = model.UserName
m.PasswordSecretId = model.PasswordSecretId
m.MeasurementName = model.MeasurementName
m.Url = model.Url
return
}<|fim▁end|> | |
<|file_name|>with-better-colors.js<|end_file_name|><|fim▁begin|>import expect from 'expect'
import colorsCSS from 'colors.css'
import Color from '../src/with-better-colors'
describe('colors.css support', () => {
it('works as expected', () => {
for (const name in colorsCSS) {
if (colorsCSS.hasOwnProperty(name)) {<|fim▁hole|> .toBe(Color(colorsCSS[name]).toString())
}
}
})
})<|fim▁end|> | expect(Color(name).toString()) |
<|file_name|>Lab4_2.java<|end_file_name|><|fim▁begin|>public class Lab4_2
{
public static void main(String[] arg)
{
System.out.print(" * |");
for(int x=1;x<=12;x++) {
if (x<10) System.out.print(' ');
if (x<100) System.out.print(' ');
System.out.print(x+" ");
}
System.out.println();
for(int x=1;x<=21;x++) {
if (x<10) System.out.print('-');
System.out.print("--");
}
System.out.println();<|fim▁hole|> for(int i=1;i<=12;i++)
{
if (i<10) System.out.print(' ');
System.out.print(i+" |");
for(int j=1;j<=12;j++) {
if (i*j<10) System.out.print(' ');
if (j*i<100) System.out.print(' ');
System.out.print(j*i+" ");
}
System.out.println();
}
System.out.println();
System.out.println();
System.out.print(" + |");
for(int x=1;x<=12;x++) {
if (x<10) System.out.print(' ');
if (x<100) System.out.print(' ');
System.out.print(x+" ");
}
System.out.println();
for(int x=1;x<=21;x++) {
if (x<10) System.out.print('-');
System.out.print("--");
}
System.out.println();
for(int i=1;i<=12;i++)
{
if (i<10) System.out.print(' ');
System.out.print(i+" |");
for(int j=1;j<=12;j++) {
if (i+j<10) System.out.print(' ');
if (j+i<100) System.out.print(' ');
System.out.print(j+i+" ");
}
System.out.println();
}
}
}<|fim▁end|> | |
<|file_name|>eigen.py<|end_file_name|><|fim▁begin|># This file is not meant for public use and will be removed in SciPy v2.0.0.
# Use the `scipy.sparse.linalg` namespace for importing the functions
# included below.
import warnings
from . import _eigen
__all__ = [ # noqa: F822
'ArpackError', 'ArpackNoConvergence',
'eigs', 'eigsh', 'lobpcg', 'svds', 'arpack', 'test'
]
eigen_modules = ['arpack']
def __dir__():<|fim▁hole|>
def __getattr__(name):
if name not in __all__ and name not in eigen_modules:
raise AttributeError(
"scipy.sparse.linalg.eigen is deprecated and has no attribute "
f"{name}. Try looking in scipy.sparse.linalg instead.")
if name in eigen_modules:
msg = (f'The module `scipy.sparse.linalg.eigen.{name}` is '
'deprecated. All public names must be imported directly from '
'the `scipy.sparse.linalg` namespace.')
else:
msg = (f"Please use `{name}` from the `scipy.sparse.linalg` namespace,"
" the `scipy.sparse.linalg.eigen` namespace is deprecated.")
warnings.warn(msg, category=DeprecationWarning, stacklevel=2)
return getattr(_eigen, name)<|fim▁end|> | return __all__ |
<|file_name|>quadratic.rs<|end_file_name|><|fim▁begin|>// Copyright (c) 2015, Mikhail Vorotilov
// All rights reserved.
//
// Redistribution and use in source and binary forms, with or without
// modification, are permitted provided that the following conditions are met:
//
// * Redistributions of source code must retain the above copyright notice, this
// list of conditions and the following disclaimer.
//
// * Redistributions in binary form must reproduce the above copyright notice,
// this list of conditions and the following disclaimer in the documentation
// and/or other materials provided with the distribution.
//
// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
// AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
// IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
// DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE
// FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
// DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
// SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
// CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY,
// OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
use super::super::FloatType;
use super::super::Roots;
/// Solves a quadratic equation a2*x^2 + a1*x + a0 = 0.
///
/// In case two roots are present, the first returned root is less than the second one.
///
/// # Examples
///
/// ```
/// use roots::Roots;
/// use roots::find_roots_quadratic;
///
/// let no_roots = find_roots_quadratic(1f32, 0f32, 1f32);
/// // Returns Roots::No([]) as 'x^2 + 1 = 0' has no roots
///
/// let one_root = find_roots_quadratic(1f64, 0f64, 0f64);
/// // Returns Roots::One([0f64]) as 'x^2 = 0' has one root 0
///
/// let two_roots = find_roots_quadratic(1f32, 0f32, -1f32);
/// // Returns Roots::Two([-1f32,1f32]) as 'x^2 - 1 = 0' has roots -1 and 1
/// ```
pub fn find_roots_quadratic<F: FloatType>(a2: F, a1: F, a0: F) -> Roots<F> {
// Handle non-standard cases
if a2 == F::zero() {
// a2 = 0; a1*x+a0=0; solve linear equation
super::linear::find_roots_linear(a1, a0)
} else {
let _2 = F::from(2i16);
let _4 = F::from(4i16);
// Rust lacks a simple way to convert an integer constant to generic type F
let discriminant = a1 * a1 - _4 * a2 * a0;
if discriminant < F::zero() {
Roots::No([])
} else {
let a2x2 = _2 * a2;
if discriminant == F::zero() {
Roots::One([-a1 / a2x2])
} else {
// To improve precision, do not use the smallest divisor.
// See https://people.csail.mit.edu/bkph/articles/Quadratics.pdf
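                // Our gloss on the reference above: taking
                //   q = -(a1 + sign(a1) * sqrt(D)) / 2
                // and returning the roots as q / a2 and a0 / q avoids subtracting two
                // nearly equal numbers when 4*a2*a0 is small compared to a1*a1.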
let sq = discriminant.sqrt();
let (same_sign, diff_sign) = if a1 < F::zero() {
(-a1 + sq, -a1 - sq)
} else {
(-a1 - sq, -a1 + sq)
};
let (x1, x2) = if same_sign.abs() > a2x2.abs() {
let a0x2 = _2 * a0;
if diff_sign.abs() > a2x2.abs() {
// 2*a2 is the smallest divisor, do not use it
(a0x2 / same_sign, a0x2 / diff_sign)
} else {
// diff_sign is the smallest divisor, do not use it
(a0x2 / same_sign, same_sign / a2x2)<|fim▁hole|> };
// Order roots
if x1 < x2 {
Roots::Two([x1, x2])
} else {
Roots::Two([x2, x1])
}
}
}
}
}
#[cfg(test)]
mod test {
use super::super::super::*;
#[test]
fn test_find_roots_quadratic() {
assert_eq!(find_roots_quadratic(0f32, 0f32, 0f32), Roots::One([0f32]));
assert_eq!(find_roots_quadratic(1f32, 0f32, 1f32), Roots::No([]));
assert_eq!(find_roots_quadratic(1f64, 0f64, -1f64), Roots::Two([-1f64, 1f64]));
}
#[test]
fn test_find_roots_quadratic_small_a2() {
assert_eq!(
find_roots_quadratic(1e-20f32, -1f32, -1e-30f32),
Roots::Two([-1e-30f32, 1e20f32])
);
assert_eq!(
find_roots_quadratic(-1e-20f32, 1f32, 1e-30f32),
Roots::Two([-1e-30f32, 1e20f32])
);
assert_eq!(find_roots_quadratic(1e-20f32, -1f32, 1f32), Roots::Two([1f32, 1e20f32]));
assert_eq!(find_roots_quadratic(-1e-20f32, 1f32, 1f32), Roots::Two([-1f32, 1e20f32]));
assert_eq!(find_roots_quadratic(-1e-20f32, 1f32, -1f32), Roots::Two([1f32, 1e20f32]));
}
#[test]
fn test_find_roots_quadratic_big_a1() {
assert_eq!(find_roots_quadratic(1f32, -1e15f32, -1f32), Roots::Two([-1e-15f32, 1e15f32]));
assert_eq!(find_roots_quadratic(-1f32, 1e15f32, 1f32), Roots::Two([-1e-15f32, 1e15f32]));
}
}<|fim▁end|> | }
} else {
// 2*a2 is the greatest divisor, use it
(diff_sign / a2x2, same_sign / a2x2) |
<|file_name|>RemoveRelationKnowhow.java<|end_file_name|><|fim▁begin|>/*
* RemoveRelationKnowhow.java
* Created on 2013/06/28
*
* Copyright (C) 2011-2013 Nippon Telegraph and Telephone Corporation
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package tubame.knowhow.plugin.ui.view.remove;
import tubame.common.util.CmnStringUtil;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import tubame.knowhow.plugin.logic.KnowhowManagement;
import tubame.knowhow.plugin.model.view.CategoryViewType;
import tubame.knowhow.plugin.model.view.KnowhowDetailType;
import tubame.knowhow.plugin.model.view.KnowhowViewType;
import tubame.knowhow.plugin.model.view.PortabilityKnowhowListViewOperation;
import tubame.knowhow.plugin.ui.editor.multi.MaintenanceKnowhowMultiPageEditor;
import tubame.knowhow.plugin.ui.editor.multi.docbook.KnowhowDetailEditor;
import tubame.knowhow.util.PluginUtil;
/**
 * Performs deletion of know-how information related to a removed item.<br/>
 * For the item being deleted, this removes the know-how attached to it: the
 * know-how detail entry whose key matches the item's own reference key,
 * and the matching key in the parent category's reference list.<br/>
*/
public class RemoveRelationKnowhow implements RemoveRelationItemStrategy {
/** Logger */
private static final Logger LOGGER = LoggerFactory
.getLogger(RemoveRelationKnowhow.class);
/** Know-how entry view item */
private KnowhowViewType knowhowViewType;
/** Deleted items */
private PortabilityKnowhowListViewOperation portabilityKnowhowListViewOperation;
/**
* Constructor.<br/>
*
* @param portabilityKnowhowListViewOperation
* Deleted items
*/
public RemoveRelationKnowhow(
PortabilityKnowhowListViewOperation portabilityKnowhowListViewOperation) {
this.portabilityKnowhowListViewOperation = portabilityKnowhowListViewOperation;
this.knowhowViewType = (KnowhowViewType) portabilityKnowhowListViewOperation
.getKnowhowViewType();
}
<|fim▁hole|> */
@Override
public void removeRelationItem() {
RemoveRelationKnowhow.LOGGER.debug(CmnStringUtil.EMPTY);
removeKnowhowDetail();
removeEntryViewItem();
}
/**
     * Deletes the reference to this item's own key from the parent category
     * registered in the entry view.<br/>
*
*/
private void removeEntryViewItem() {
CategoryViewType categoryViewType = (CategoryViewType) portabilityKnowhowListViewOperation
.getParent().getKnowhowViewType();
String removeTargetKey = null;
for (String knowhowRefKey : categoryViewType.getKnowhowRefKeies()) {
if (knowhowViewType.getRegisterKey().equals(knowhowRefKey)) {
removeTargetKey = knowhowRefKey;
}
}
if (removeTargetKey != null) {
categoryViewType.getKnowhowRefKeies().remove(removeTargetKey);
}
}
/**
     * Deletes the entry matching this item's key from the know-how detail data
     * list.<br/>
     * That is, removes the know-how detail data whose id matches this item's
     * know-how detail reference key.<br/>
*
*/
private void removeKnowhowDetail() {
KnowhowDetailType removeTargetItem = null;
for (KnowhowDetailType knowhowDetailType : KnowhowManagement
.getKnowhowDetailTypes()) {
if (knowhowDetailType.getKnowhowDetailId().equals(
knowhowViewType.getKnowhowDetailRefKey())) {
removeTargetItem = knowhowDetailType;
}
}
if (removeTargetItem != null) {
KnowhowManagement.getKnowhowDetailTypes().remove(removeTargetItem);
clearKnowhoweDetaileditor(removeTargetItem);
}
}
/**
     * Clears the know-how detail page editor if it is showing the removed item.<br/>
*
* @param removeTargetItem
* Deleted items
*/
private void clearKnowhoweDetaileditor(KnowhowDetailType removeTargetItem) {
MaintenanceKnowhowMultiPageEditor knowhowMultiPageEditor = PluginUtil
.getKnowhowEditor();
KnowhowDetailEditor detailEditor = knowhowMultiPageEditor
.getKnowhowDetailEditor();
if (detailEditor.getKnowhowDetailType() != null) {
if (removeTargetItem.getKnowhowDetailId().equals(
detailEditor.getKnowhowDetailType().getKnowhowDetailId())) {
knowhowMultiPageEditor.clearKnowhowDetail();
}
}
}
}<|fim▁end|> | /**
* {@inheritDoc} |
<|file_name|>test_tensorflow.py<|end_file_name|><|fim▁begin|>#!/usr/bin/env python3
# -*- coding: utf-8 -*-
"""
Created on Fri May 3 15:48:55 2019
@author: epnevmatikakis
"""
from caiman.paths import caiman_datadir
from caiman.utils.utils import load_graph
import os
import numpy as np
try:
os.environ["KERAS_BACKEND"] = "tensorflow"
from tensorflow.keras.models import model_from_json
use_keras = True
except(ModuleNotFoundError):
import tensorflow as tf
use_keras = False
def test_tf():
os.environ['CUDA_VISIBLE_DEVICES'] = '-1'
try:<|fim▁hole|> if use_keras:
model_file = model_name + ".json"
with open(model_file, 'r') as json_file:
print('USING MODEL:' + model_file)
loaded_model_json = json_file.read()
loaded_model = model_from_json(loaded_model_json)
loaded_model.load_weights(model_name + '.h5')
loaded_model.compile('sgd', 'mse')
else:
model_file = model_name + ".h5.pb"
loaded_model = load_graph(model_file)
except:
raise Exception('NN model could not be loaded. use_keras = ' + str(use_keras))
A = np.random.randn(10, 50, 50, 1)
try:
if use_keras:
predictions = loaded_model.predict(A, batch_size=32)
else:
tf_in = loaded_model.get_tensor_by_name('prefix/conv2d_20_input:0')
tf_out = loaded_model.get_tensor_by_name('prefix/output_node0:0')
with tf.Session(graph=loaded_model) as sess:
predictions = sess.run(tf_out, feed_dict={tf_in: A})
pass
except:
raise Exception('NN model could not be deployed. use_keras = ' + str(use_keras))<|fim▁end|> | model_name = os.path.join(caiman_datadir(), 'model', 'cnn_model') |
<|file_name|>regexDetection.ts<|end_file_name|><|fim▁begin|>/// <reference path="fourslash.ts" />
//// /*1*/15 / /*2*/Math.min(61 / /*3*/42, 32 / 15) / /*4*/15;
goTo.marker("1");
verify.not.quickInfoIs("RegExp");
goTo.marker("2");
verify.not.quickInfoIs("RegExp");
goTo.marker("3");
verify.not.quickInfoIs("RegExp");<|fim▁hole|>verify.not.quickInfoIs("RegExp");<|fim▁end|> | goTo.marker("4"); |
<|file_name|>into_iter.rs<|end_file_name|><|fim▁begin|>#![feature(core)]
extern crate core;
#[cfg(test)]
mod tests {
use core::slice::Iter;
// pub trait FixedSizeArray<T> {
// /// Converts the array to immutable slice
// fn as_slice(&self) -> &[T];
// /// Converts the array to mutable slice
// fn as_mut_slice(&mut self) -> &mut [T];
// }
// macro_rules! array_impls {
// ($($N:expr)+) => {
// $(
// #[unstable(feature = "core")]
// impl<T> FixedSizeArray<T> for [T; $N] {
// #[inline]
// fn as_slice(&self) -> &[T] {
// &self[..]
// }
// #[inline]
// fn as_mut_slice(&mut self) -> &mut [T] {
// &mut self[..]
// }
// }
//
// #[unstable(feature = "array_as_ref",
// reason = "should ideally be implemented for all fixed-sized arrays")]
// impl<T> AsRef<[T]> for [T; $N] {
// #[inline]
// fn as_ref(&self) -> &[T] {
// &self[..]
// }
// }
//
// #[unstable(feature = "array_as_ref",
// reason = "should ideally be implemented for all fixed-sized arrays")]
// impl<T> AsMut<[T]> for [T; $N] {
// #[inline]<|fim▁hole|> // fn as_mut(&mut self) -> &mut [T] {
// &mut self[..]
// }
// }
//
// #[stable(feature = "rust1", since = "1.0.0")]
// impl<T:Copy> Clone for [T; $N] {
// fn clone(&self) -> [T; $N] {
// *self
// }
// }
//
// #[stable(feature = "rust1", since = "1.0.0")]
// impl<T: Hash> Hash for [T; $N] {
// fn hash<H: hash::Hasher>(&self, state: &mut H) {
// Hash::hash(&self[..], state)
// }
// }
//
// #[stable(feature = "rust1", since = "1.0.0")]
// impl<T: fmt::Debug> fmt::Debug for [T; $N] {
// fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
// fmt::Debug::fmt(&&self[..], f)
// }
// }
//
// #[stable(feature = "rust1", since = "1.0.0")]
// impl<'a, T> IntoIterator for &'a [T; $N] {
// type Item = &'a T;
// type IntoIter = Iter<'a, T>;
//
// fn into_iter(self) -> Iter<'a, T> {
// self.iter()
// }
// }
//
// #[stable(feature = "rust1", since = "1.0.0")]
// impl<'a, T> IntoIterator for &'a mut [T; $N] {
// type Item = &'a mut T;
// type IntoIter = IterMut<'a, T>;
//
// fn into_iter(self) -> IterMut<'a, T> {
// self.iter_mut()
// }
// }
//
// // NOTE: some less important impls are omitted to reduce code bloat
// __impl_slice_eq1! { [A; $N], [B; $N] }
// __impl_slice_eq2! { [A; $N], [B] }
// __impl_slice_eq2! { [A; $N], &'b [B] }
// __impl_slice_eq2! { [A; $N], &'b mut [B] }
// // __impl_slice_eq2! { [A; $N], &'b [B; $N] }
// // __impl_slice_eq2! { [A; $N], &'b mut [B; $N] }
//
// #[stable(feature = "rust1", since = "1.0.0")]
// impl<T:Eq> Eq for [T; $N] { }
//
// #[stable(feature = "rust1", since = "1.0.0")]
// impl<T:PartialOrd> PartialOrd for [T; $N] {
// #[inline]
// fn partial_cmp(&self, other: &[T; $N]) -> Option<Ordering> {
// PartialOrd::partial_cmp(&&self[..], &&other[..])
// }
// #[inline]
// fn lt(&self, other: &[T; $N]) -> bool {
// PartialOrd::lt(&&self[..], &&other[..])
// }
// #[inline]
// fn le(&self, other: &[T; $N]) -> bool {
// PartialOrd::le(&&self[..], &&other[..])
// }
// #[inline]
// fn ge(&self, other: &[T; $N]) -> bool {
// PartialOrd::ge(&&self[..], &&other[..])
// }
// #[inline]
// fn gt(&self, other: &[T; $N]) -> bool {
// PartialOrd::gt(&&self[..], &&other[..])
// }
// }
//
// #[stable(feature = "rust1", since = "1.0.0")]
// impl<T:Ord> Ord for [T; $N] {
// #[inline]
// fn cmp(&self, other: &[T; $N]) -> Ordering {
// Ord::cmp(&&self[..], &&other[..])
// }
// }
// )+
// }
// }
// array_impls! {
// 0 1 2 3 4 5 6 7 8 9
// 10 11 12 13 14 15 16 17 18 19
// 20 21 22 23 24 25 26 27 28 29
// 30 31 32
// }
// impl<'a, T> IntoIterator for &'a [T; $N] {
// type Item = &'a T;
// type IntoIter = Iter<'a, T>;
//
// fn into_iter(self) -> Iter<'a, T> {
// self.iter()
// }
// }
type T = i32;
#[test]
fn into_iter_test1() {
let array: [T; 0] = [];
let array_ref: &[T; 0] = &array;
let len: usize = array_ref.len();
let mut iter: Iter<T> = array_ref.into_iter();
for i in 0..len {
let x: Option<&T> = iter.next();
match x {
Some(v) => assert_eq!(*v, i as T),
None => assert!(false)
}
}
assert_eq!(iter.next(), None::<&T>);
}
#[test]
fn into_iter_test2() {
let array: [T; 0] = [];
let array_ref: &[T; 0] = &array;
let len: usize = array_ref.len();
let mut i: usize = 0;
for v in array_ref {
assert_eq!(*v, i as T);
i += 1;
}
assert_eq!(len, i);
}
}<|fim▁end|> | |
<|file_name|>test_09_ensemble_prediction.py<|end_file_name|><|fim▁begin|># -*- coding: utf-8 -*-
#
# Copyright 2015-2021 BigML
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
""" Creating ensembles predictions
"""
from .world import world, setup_module, teardown_module
from . import create_source_steps as source_create
from . import create_dataset_steps as dataset_create
from . import create_ensemble_steps as ensemble_create
from . import create_prediction_steps as prediction_create
class TestEnsemblePrediction(object):
def setup(self):
"""
Debug information
"""
print("\n-------------------\nTests in: %s\n" % __name__)
def teardown(self):
"""
Debug information
"""<|fim▁hole|> def test_scenario1(self):
"""
Scenario: Successfully creating a prediction from an ensemble:
Given I create a data source uploading a "<data>" file
And I wait until the source is ready less than <time_1> secs
And I create a dataset
And I wait until the dataset is ready less than <time_2> secs
And I create an ensemble of <number_of_models> models and <tlp> tlp
And I wait until the ensemble is ready less than <time_3> secs
When I create an ensemble prediction for "<data_input>"
And I wait until the prediction is ready less than <time_4> secs
Then the prediction for "<objective>" is "<prediction>"
Examples:
| data | time_1 | time_2 | time_3 | time_4 | number_of_models | tlp | data_input | objective | prediction |
| ../data/iris.csv | 10 | 10 | 50 | 20 | 5 | 1 | {"petal width": 0.5} | 000004 | Iris-versicolor |
| ../data/iris_sp_chars.csv | 10 | 10 | 50 | 20 | 5 | 1 | {"pétal&width\\u0000": 0.5} | 000004 | Iris-versicolor |
| ../data/grades.csv | 10 | 10 | 150 | 20 | 10 | 1 | {"Assignment": 81.22, "Tutorial": 91.95, "Midterm": 79.38, "TakeHome": 105.93} | 000005 | 88.205575 |
| ../data/grades.csv | 10 | 10 | 150 | 20 | 10 | 1 | {"Assignment": 97.33, "Tutorial": 106.74, "Midterm": 76.88, "TakeHome": 108.89} | 000005 | 84.29401 |
"""
print(self.test_scenario1.__doc__)
examples = [
['data/iris.csv', '30', '30', '50', '20', '5', '1', '{"petal width": 0.5}', '000004', 'Iris-versicolor'],
['data/iris_sp_chars.csv', '30', '30', '50', '20', '5', '1', '{"pétal&width\\u0000": 0.5}', '000004', 'Iris-versicolor'],
['data/grades.csv', '30', '30', '150', '20', '10', '1', '{"Assignment": 81.22, "Tutorial": 91.95, "Midterm": 79.38, "TakeHome": 105.93}', '000005', '84.556'],
['data/grades.csv', '30', '30', '150', '20', '10', '1', '{"Assignment": 97.33, "Tutorial": 106.74, "Midterm": 76.88, "TakeHome": 108.89}', '000005', '73.13558']]
for example in examples:
print("\nTesting with:\n", example)
source_create.i_upload_a_file(self, example[0])
source_create.the_source_is_finished(self, example[1])
dataset_create.i_create_a_dataset(self)
dataset_create.the_dataset_is_finished_in_less_than(self, example[2])
ensemble_create.i_create_an_ensemble(self, example[5], example[6])
ensemble_create.the_ensemble_is_finished_in_less_than(self, example[3])
prediction_create.i_create_an_ensemble_prediction(self, example[7])
prediction_create.the_prediction_is_finished_in_less_than(self, example[4])
prediction_create.the_prediction_is(self, example[8], example[9])<|fim▁end|> | print("\nEnd of tests in: %s\n-------------------\n" % __name__)
|
<|file_name|>readonlyradiobutton.py<|end_file_name|><|fim▁begin|># -*- coding: utf-8 -*-
#
# (c) Copyright 2001-2009 Hewlett-Packard Development Company, L.P.
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA<|fim▁hole|>
# Qt
from PyQt4.QtCore import *
from PyQt4.QtGui import *
class ReadOnlyRadioButton(QRadioButton):
def __init__(self, parent):
QRadioButton.__init__(self, parent)
self.setFocusPolicy(Qt.NoFocus)
self.clearFocus()
def mousePressEvent(self, e):
if e.button() == Qt.LeftButton:
return
        QRadioButton.mousePressEvent(self, e)
def mouseReleaseEvent(self, e):
if e.button() == Qt.LeftButton:
return
        QRadioButton.mouseReleaseEvent(self, e)
def mouseMoveEvent(self, e):
return
def keyPressEvent(self, e):
if e.key() not in (Qt.Key_Up, Qt.Key_Left, Qt.Key_Right,
Qt.Key_Down, Qt.Key_Escape):
return
        QRadioButton.keyPressEvent(self, e)
def keyReleaseEvent(self, e):
return<|fim▁end|> | #
# Authors: Don Welch
# |
<|file_name|>test_ssh_default_creds.py<|end_file_name|><|fim▁begin|>from routersploit.modules.creds.cameras.arecont.ssh_default_creds import Exploit
<|fim▁hole|> """ Test scenario - testing against SSH server """
exploit = Exploit()
assert exploit.target == ""
assert exploit.port == 22
assert exploit.threads == 1
assert exploit.defaults == ["admin:", ":"]
assert exploit.stop_on_success is True
assert exploit.verbosity is True
exploit.target = target.host
exploit.port = target.port
assert exploit.check() is False
assert exploit.check_default() is None
assert exploit.run() is None<|fim▁end|> | def test_check_success(target): |
<|file_name|>juce_GlyphArrangement.cpp<|end_file_name|><|fim▁begin|>/*
==============================================================================
This file is part of the JUCE library.
Copyright (c) 2013 - Raw Material Software Ltd.
Permission is granted to use this software under the terms of either:
a) the GPL v2 (or any later version)
b) the Affero GPL v3
Details of these licenses can be found at: www.gnu.org/licenses
JUCE is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR
A PARTICULAR PURPOSE. See the GNU General Public License for more details.
------------------------------------------------------------------------------
To release a closed-source product which uses JUCE, commercial licenses are
available: visit www.juce.com for more information.
==============================================================================
*/
PositionedGlyph::PositionedGlyph() noexcept
: character (0), glyph (0), x (0), y (0), w (0), whitespace (false)
{
}
PositionedGlyph::PositionedGlyph (const Font& font_, const juce_wchar character_, const int glyph_,
const float x_, const float y_, const float w_, const bool whitespace_)
: font (font_), character (character_), glyph (glyph_),
x (x_), y (y_), w (w_), whitespace (whitespace_)
{
}
PositionedGlyph::PositionedGlyph (const PositionedGlyph& other)
: font (other.font), character (other.character), glyph (other.glyph),
x (other.x), y (other.y), w (other.w), whitespace (other.whitespace)
{
}
PositionedGlyph::~PositionedGlyph() {}
PositionedGlyph& PositionedGlyph::operator= (const PositionedGlyph& other)
{
font = other.font;
character = other.character;
glyph = other.glyph;
x = other.x;
y = other.y;
w = other.w;
whitespace = other.whitespace;
return *this;
}
static inline void drawGlyphWithFont (const Graphics& g, int glyph, const Font& font, const AffineTransform& t)
{
LowLevelGraphicsContext& context = g.getInternalContext();
context.setFont (font);
context.drawGlyph (glyph, t);
}
void PositionedGlyph::draw (const Graphics& g) const
{
if (! isWhitespace())
drawGlyphWithFont (g, glyph, font, AffineTransform::translation (x, y));
}
void PositionedGlyph::draw (const Graphics& g, const AffineTransform& transform) const
{
if (! isWhitespace())
drawGlyphWithFont (g, glyph, font, AffineTransform::translation (x, y).followedBy (transform));
}
void PositionedGlyph::createPath (Path& path) const
{
if (! isWhitespace())
{
if (Typeface* const t = font.getTypeface())
{
Path p;
t->getOutlineForGlyph (glyph, p);
path.addPath (p, AffineTransform::scale (font.getHeight() * font.getHorizontalScale(), font.getHeight())
.translated (x, y));
}
}
}
bool PositionedGlyph::hitTest (float px, float py) const
{
if (getBounds().contains (px, py) && ! isWhitespace())
{
if (Typeface* const t = font.getTypeface())
{
Path p;
t->getOutlineForGlyph (glyph, p);
AffineTransform::translation (-x, -y)
.scaled (1.0f / (font.getHeight() * font.getHorizontalScale()), 1.0f / font.getHeight())
.transformPoint (px, py);
return p.contains (px, py);
}
}
return false;
}
void PositionedGlyph::moveBy (const float deltaX,
const float deltaY)
{
x += deltaX;
y += deltaY;
}
//==============================================================================
GlyphArrangement::GlyphArrangement()
{
glyphs.ensureStorageAllocated (128);
}
GlyphArrangement::GlyphArrangement (const GlyphArrangement& other)
: glyphs (other.glyphs)
{
}
GlyphArrangement& GlyphArrangement::operator= (const GlyphArrangement& other)
{
glyphs = other.glyphs;
return *this;
}
GlyphArrangement::~GlyphArrangement()
{
}
//==============================================================================
void GlyphArrangement::clear()
{
glyphs.clear();
}
PositionedGlyph& GlyphArrangement::getGlyph (const int index) const noexcept
{
return glyphs.getReference (index);
}
//==============================================================================
void GlyphArrangement::addGlyphArrangement (const GlyphArrangement& other)
{
glyphs.addArray (other.glyphs);
}
void GlyphArrangement::addGlyph (const PositionedGlyph& glyph)
{
glyphs.add (glyph);
}
void GlyphArrangement::removeRangeOfGlyphs (int startIndex, const int num)
{
glyphs.removeRange (startIndex, num < 0 ? glyphs.size() : num);
}
//==============================================================================
void GlyphArrangement::addLineOfText (const Font& font,
const String& text,
const float xOffset,
const float yOffset)
{
addCurtailedLineOfText (font, text, xOffset, yOffset, 1.0e10f, false);
}
void GlyphArrangement::addCurtailedLineOfText (const Font& font,
const String& text,
const float xOffset,
const float yOffset,
const float maxWidthPixels,
const bool useEllipsis)
{
if (text.isNotEmpty())
{
Array <int> newGlyphs;
Array <float> xOffsets;
font.getGlyphPositions (text, newGlyphs, xOffsets);
const int textLen = newGlyphs.size();
glyphs.ensureStorageAllocated (glyphs.size() + textLen);
String::CharPointerType t (text.getCharPointer());
for (int i = 0; i < textLen; ++i)
{
const float nextX = xOffsets.getUnchecked (i + 1);
if (nextX > maxWidthPixels + 1.0f)
{
// curtail the string if it's too wide..
if (useEllipsis && textLen > 3 && glyphs.size() >= 3)
insertEllipsis (font, xOffset + maxWidthPixels, 0, glyphs.size());
break;
}
else
{
const float thisX = xOffsets.getUnchecked (i);
const bool isWhitespace = t.isWhitespace();
glyphs.add (PositionedGlyph (font, t.getAndAdvance(),
newGlyphs.getUnchecked(i),
xOffset + thisX, yOffset,
nextX - thisX, isWhitespace));
}
}
}
}
int GlyphArrangement::insertEllipsis (const Font& font, const float maxXPos,
const int startIndex, int endIndex)
{
int numDeleted = 0;
if (glyphs.size() > 0)
{
Array<int> dotGlyphs;
Array<float> dotXs;
font.getGlyphPositions ("..", dotGlyphs, dotXs);
const float dx = dotXs[1];
float xOffset = 0.0f, yOffset = 0.0f;
while (endIndex > startIndex)
{
const PositionedGlyph& pg = glyphs.getReference (--endIndex);
xOffset = pg.x;
yOffset = pg.y;
glyphs.remove (endIndex);
++numDeleted;
if (xOffset + dx * 3 <= maxXPos)
break;
}
for (int i = 3; --i >= 0;)
{
glyphs.insert (endIndex++, PositionedGlyph (font, '.', dotGlyphs.getFirst(),
xOffset, yOffset, dx, false));
--numDeleted;
xOffset += dx;
if (xOffset > maxXPos)
break;
}
}
return numDeleted;
}
void GlyphArrangement::addJustifiedText (const Font& font,
const String& text,
float x, float y,
const float maxLineWidth,
Justification horizontalLayout)
{
int lineStartIndex = glyphs.size();
addLineOfText (font, text, x, y);
const float originalY = y;
while (lineStartIndex < glyphs.size())
{
int i = lineStartIndex;
if (glyphs.getReference(i).getCharacter() != '\n'
&& glyphs.getReference(i).getCharacter() != '\r')
++i;
const float lineMaxX = glyphs.getReference (lineStartIndex).getLeft() + maxLineWidth;
int lastWordBreakIndex = -1;
while (i < glyphs.size())
{
const PositionedGlyph& pg = glyphs.getReference (i);
const juce_wchar c = pg.getCharacter();
if (c == '\r' || c == '\n')
{
++i;
if (c == '\r' && i < glyphs.size()
&& glyphs.getReference(i).getCharacter() == '\n')
++i;
break;
}
else if (pg.isWhitespace())
{
lastWordBreakIndex = i + 1;
}
else if (pg.getRight() - 0.0001f >= lineMaxX)
{
if (lastWordBreakIndex >= 0)
i = lastWordBreakIndex;
break;
}
++i;
}
const float currentLineStartX = glyphs.getReference (lineStartIndex).getLeft();
float currentLineEndX = currentLineStartX;
for (int j = i; --j >= lineStartIndex;)
{
if (! glyphs.getReference (j).isWhitespace())
{
currentLineEndX = glyphs.getReference (j).getRight();
break;
}
}
float deltaX = 0.0f;
if (horizontalLayout.testFlags (Justification::horizontallyJustified))
spreadOutLine (lineStartIndex, i - lineStartIndex, maxLineWidth);
else if (horizontalLayout.testFlags (Justification::horizontallyCentred))
deltaX = (maxLineWidth - (currentLineEndX - currentLineStartX)) * 0.5f;
else if (horizontalLayout.testFlags (Justification::right))
deltaX = maxLineWidth - (currentLineEndX - currentLineStartX);
moveRangeOfGlyphs (lineStartIndex, i - lineStartIndex,
x + deltaX - currentLineStartX, y - originalY);
lineStartIndex = i;
y += font.getHeight();
}
}
void GlyphArrangement::addFittedText (const Font& f,
const String& text,
const float x, const float y,
const float width, const float height,
Justification layout,
int maximumLines,
const float minimumHorizontalScale)
{
// doesn't make much sense if this is outside a sensible range of 0.5 to 1.0
jassert (minimumHorizontalScale > 0 && minimumHorizontalScale <= 1.0f);
if (text.containsAnyOf ("\r\n"))
{
GlyphArrangement ga;
ga.addJustifiedText (f, text, x, y, width, layout);
const Rectangle<float> bb (ga.getBoundingBox (0, -1, false));
float dy = y - bb.getY();
if (layout.testFlags (Justification::verticallyCentred)) dy += (height - bb.getHeight()) * 0.5f;
else if (layout.testFlags (Justification::bottom)) dy += (height - bb.getHeight());
ga.moveRangeOfGlyphs (0, -1, 0.0f, dy);
glyphs.addArray (ga.glyphs);
return;
}
int startIndex = glyphs.size();
addLineOfText (f, text.trim(), x, y);
if (glyphs.size() > startIndex)
{
float lineWidth = glyphs.getReference (glyphs.size() - 1).getRight()
- glyphs.getReference (startIndex).getLeft();
if (lineWidth <= 0)
return;
if (lineWidth * minimumHorizontalScale < width)
{
if (lineWidth > width)
stretchRangeOfGlyphs (startIndex, glyphs.size() - startIndex,
width / lineWidth);
justifyGlyphs (startIndex, glyphs.size() - startIndex,
x, y, width, height, layout);
}
else if (maximumLines <= 1)
{
fitLineIntoSpace (startIndex, glyphs.size() - startIndex,
x, y, width, height, f, layout, minimumHorizontalScale);
}
else
{
Font font (f);
String txt (text.trim());
const int length = txt.length();
const int originalStartIndex = startIndex;
int numLines = 1;
if (length <= 12 && ! txt.containsAnyOf (" -\t\r\n"))
maximumLines = 1;
maximumLines = jmin (maximumLines, length);
while (numLines < maximumLines)
{
++numLines;
const float newFontHeight = height / (float) numLines;
if (newFontHeight < font.getHeight())
{
font.setHeight (jmax (8.0f, newFontHeight));
removeRangeOfGlyphs (startIndex, -1);
addLineOfText (font, txt, x, y);
lineWidth = glyphs.getReference (glyphs.size() - 1).getRight()
- glyphs.getReference (startIndex).getLeft();
}
if (numLines > lineWidth / width || newFontHeight < 8.0f)
break;
}
if (numLines < 1)
numLines = 1;
float lineY = y;
float widthPerLine = lineWidth / numLines;
for (int line = 0; line < numLines; ++line)
{<|fim▁hole|> if (line == numLines - 1)
{
widthPerLine = width;
i = glyphs.size();
}
else
{
while (i < glyphs.size())
{
lineWidth = (glyphs.getReference (i).getRight() - lineStartX);
if (lineWidth > widthPerLine)
{
// got to a point where the line's too long, so skip forward to find a
// good place to break it..
const int searchStartIndex = i;
while (i < glyphs.size())
{
if ((glyphs.getReference (i).getRight() - lineStartX) * minimumHorizontalScale < width)
{
if (glyphs.getReference (i).isWhitespace()
|| glyphs.getReference (i).getCharacter() == '-')
{
++i;
break;
}
}
else
{
// can't find a suitable break, so try looking backwards..
i = searchStartIndex;
for (int back = 1; back < jmin (7, i - startIndex - 1); ++back)
{
if (glyphs.getReference (i - back).isWhitespace()
|| glyphs.getReference (i - back).getCharacter() == '-')
{
i -= back - 1;
break;
}
}
break;
}
++i;
}
break;
}
++i;
}
int wsStart = i;
while (wsStart > 0 && glyphs.getReference (wsStart - 1).isWhitespace())
--wsStart;
int wsEnd = i;
while (wsEnd < glyphs.size() && glyphs.getReference (wsEnd).isWhitespace())
++wsEnd;
removeRangeOfGlyphs (wsStart, wsEnd - wsStart);
i = jmax (wsStart, startIndex + 1);
}
i -= fitLineIntoSpace (startIndex, i - startIndex,
x, lineY, width, font.getHeight(), font,
layout.getOnlyHorizontalFlags() | Justification::verticallyCentred,
minimumHorizontalScale);
startIndex = i;
lineY += font.getHeight();
if (startIndex >= glyphs.size())
break;
}
justifyGlyphs (originalStartIndex, glyphs.size() - originalStartIndex,
x, y, width, height, layout.getFlags() & ~Justification::horizontallyJustified);
}
}
}
//==============================================================================
void GlyphArrangement::moveRangeOfGlyphs (int startIndex, int num, const float dx, const float dy)
{
jassert (startIndex >= 0);
if (dx != 0.0f || dy != 0.0f)
{
if (num < 0 || startIndex + num > glyphs.size())
num = glyphs.size() - startIndex;
while (--num >= 0)
glyphs.getReference (startIndex++).moveBy (dx, dy);
}
}
int GlyphArrangement::fitLineIntoSpace (int start, int numGlyphs, float x, float y, float w, float h, const Font& font,
Justification justification, float minimumHorizontalScale)
{
int numDeleted = 0;
const float lineStartX = glyphs.getReference (start).getLeft();
float lineWidth = glyphs.getReference (start + numGlyphs - 1).getRight() - lineStartX;
if (lineWidth > w)
{
if (minimumHorizontalScale < 1.0f)
{
stretchRangeOfGlyphs (start, numGlyphs, jmax (minimumHorizontalScale, w / lineWidth));
lineWidth = glyphs.getReference (start + numGlyphs - 1).getRight() - lineStartX - 0.5f;
}
if (lineWidth > w)
{
numDeleted = insertEllipsis (font, lineStartX + w, start, start + numGlyphs);
numGlyphs -= numDeleted;
}
}
justifyGlyphs (start, numGlyphs, x, y, w, h, justification);
return numDeleted;
}
void GlyphArrangement::stretchRangeOfGlyphs (int startIndex, int num,
const float horizontalScaleFactor)
{
jassert (startIndex >= 0);
if (num < 0 || startIndex + num > glyphs.size())
num = glyphs.size() - startIndex;
if (num > 0)
{
const float xAnchor = glyphs.getReference (startIndex).getLeft();
while (--num >= 0)
{
PositionedGlyph& pg = glyphs.getReference (startIndex++);
pg.x = xAnchor + (pg.x - xAnchor) * horizontalScaleFactor;
pg.font.setHorizontalScale (pg.font.getHorizontalScale() * horizontalScaleFactor);
pg.w *= horizontalScaleFactor;
}
}
}
Rectangle<float> GlyphArrangement::getBoundingBox (int startIndex, int num, const bool includeWhitespace) const
{
jassert (startIndex >= 0);
if (num < 0 || startIndex + num > glyphs.size())
num = glyphs.size() - startIndex;
Rectangle<float> result;
while (--num >= 0)
{
const PositionedGlyph& pg = glyphs.getReference (startIndex++);
if (includeWhitespace || ! pg.isWhitespace())
result = result.getUnion (pg.getBounds());
}
return result;
}
void GlyphArrangement::justifyGlyphs (const int startIndex, const int num,
const float x, const float y, const float width, const float height,
Justification justification)
{
jassert (num >= 0 && startIndex >= 0);
if (glyphs.size() > 0 && num > 0)
{
const Rectangle<float> bb (getBoundingBox (startIndex, num, ! justification.testFlags (Justification::horizontallyJustified
| Justification::horizontallyCentred)));
float deltaX = 0.0f, deltaY = 0.0f;
if (justification.testFlags (Justification::horizontallyJustified)) deltaX = x - bb.getX();
else if (justification.testFlags (Justification::horizontallyCentred)) deltaX = x + (width - bb.getWidth()) * 0.5f - bb.getX();
else if (justification.testFlags (Justification::right)) deltaX = x + width - bb.getRight();
else deltaX = x - bb.getX();
if (justification.testFlags (Justification::top)) deltaY = y - bb.getY();
else if (justification.testFlags (Justification::bottom)) deltaY = y + height - bb.getBottom();
else deltaY = y + (height - bb.getHeight()) * 0.5f - bb.getY();
moveRangeOfGlyphs (startIndex, num, deltaX, deltaY);
if (justification.testFlags (Justification::horizontallyJustified))
{
int lineStart = 0;
float baseY = glyphs.getReference (startIndex).getBaselineY();
int i;
for (i = 0; i < num; ++i)
{
const float glyphY = glyphs.getReference (startIndex + i).getBaselineY();
if (glyphY != baseY)
{
spreadOutLine (startIndex + lineStart, i - lineStart, width);
lineStart = i;
baseY = glyphY;
}
}
if (i > lineStart)
spreadOutLine (startIndex + lineStart, i - lineStart, width);
}
}
}
void GlyphArrangement::spreadOutLine (const int start, const int num, const float targetWidth)
{
if (start + num < glyphs.size()
&& glyphs.getReference (start + num - 1).getCharacter() != '\r'
&& glyphs.getReference (start + num - 1).getCharacter() != '\n')
{
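// count the spaces that can absorb the extra width; trailing spaces are
// tracked separately and excluded below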
int numSpaces = 0;
int spacesAtEnd = 0;
for (int i = 0; i < num; ++i)
{
if (glyphs.getReference (start + i).isWhitespace())
{
++spacesAtEnd;
++numSpaces;
}
else
{
spacesAtEnd = 0;
}
}
numSpaces -= spacesAtEnd;
if (numSpaces > 0)
{
const float startX = glyphs.getReference (start).getLeft();
const float endX = glyphs.getReference (start + num - 1 - spacesAtEnd).getRight();
const float extraPaddingBetweenWords
= (targetWidth - (endX - startX)) / (float) numSpaces;
float deltaX = 0.0f;
for (int i = 0; i < num; ++i)
{
glyphs.getReference (start + i).moveBy (deltaX, 0.0f);
if (glyphs.getReference (start + i).isWhitespace())
deltaX += extraPaddingBetweenWords;
}
}
}
}
//==============================================================================
inline void GlyphArrangement::drawGlyphUnderline (const Graphics& g, const PositionedGlyph& pg,
const int i, const AffineTransform& transform) const
{
const float lineThickness = (pg.font.getDescent()) * 0.3f;
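// extend the underline to the start of the next glyph if it sits on the same baseline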
float nextX = pg.x + pg.w;
if (i < glyphs.size() - 1 && glyphs.getReference (i + 1).y == pg.y)
nextX = glyphs.getReference (i + 1).x;
Path p;
p.addRectangle (pg.x, pg.y + lineThickness * 2.0f, nextX - pg.x, lineThickness);
g.fillPath (p, transform);
}
void GlyphArrangement::draw (const Graphics& g) const
{
for (int i = 0; i < glyphs.size(); ++i)
{
const PositionedGlyph& pg = glyphs.getReference (i);
if (pg.font.isUnderlined())
drawGlyphUnderline (g, pg, i, AffineTransform::identity);
pg.draw (g);
}
}
void GlyphArrangement::draw (const Graphics& g, const AffineTransform& transform) const
{
for (int i = 0; i < glyphs.size(); ++i)
{
const PositionedGlyph& pg = glyphs.getReference (i);
if (pg.font.isUnderlined())
drawGlyphUnderline (g, pg, i, transform);
pg.draw (g, transform);
}
}
void GlyphArrangement::createPath (Path& path) const
{
for (int i = 0; i < glyphs.size(); ++i)
glyphs.getReference (i).createPath (path);
}
int GlyphArrangement::findGlyphIndexAt (const float x, const float y) const
{
for (int i = 0; i < glyphs.size(); ++i)
if (glyphs.getReference (i).hitTest (x, y))
return i;
return -1;
}<|fim▁end|> | int i = startIndex;
float lineStartX = glyphs.getReference (startIndex).getLeft();
|
<|file_name|>RelationPlayers.java<|end_file_name|><|fim▁begin|>/*
* Grakn - A Distributed Semantic Database
* Copyright (C) 2016 Grakn Labs Limited
*
* Grakn is free software: you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
* the Free Software Foundation, either version 3 of the License, or
* (at your option) any later version.
*
* Grakn is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
* You should have received a copy of the GNU General Public License
* along with Grakn. If not, see <http://www.gnu.org/licenses/gpl.txt>.
*
*/
package ai.grakn.generator;
import ai.grakn.concept.Label;
import ai.grakn.graql.admin.RelationPlayer;
import ai.grakn.graql.admin.VarPatternAdmin;
import static ai.grakn.graql.Graql.var;
/**<|fim▁hole|>
public RelationPlayers() {
super(RelationPlayer.class);
}
@Override
public RelationPlayer generate() {
if (random.nextBoolean()) {
return RelationPlayer.of(gen(VarPatternAdmin.class));
} else {
VarPatternAdmin varPattern;
if (random.nextBoolean()) {
varPattern = var().label(gen(Label.class)).admin();
} else {
varPattern = gen(VarPatternAdmin.class);
}
return RelationPlayer.of(varPattern, gen(VarPatternAdmin.class));
}
}
}<|fim▁end|> | * @author Felix Chapman
*/
public class RelationPlayers extends AbstractGenerator<RelationPlayer> { |
<|file_name|>bsfs.js<|end_file_name|><|fim▁begin|>var test = require('tape');
var BSFS = require('../lib/bsfs');
indexedDB.deleteDatabase('bsfs-tests');
function randomId () { return Math.random().toString(36).substr(2) }
test('bsfs exists', function (t) {
t.plan(1);
var bsfs = new BSFS('bsfs-tests');
if (!bsfs._ready) return bsfs.once('ready', ready);
else process.nextTick(ready);
function ready () {
t.equals(typeof bsfs, 'object');
}
});
test('bsfs has file router', function (t) {
t.plan(1);
var bsfs = new BSFS('bsfs-tests');
if (!bsfs._ready) return bsfs.once('ready', ready);
else process.nextTick(ready);
function ready () {
t.equal(typeof bsfs._fileRouter, 'object');
}
});
test('write without path throws', function (t) {
t.plan(1);
var bsfs = new BSFS('bsfs-tests');
if (!bsfs._ready) return bsfs.once('ready', ready);
else process.nextTick(ready);
function ready () {
t.throws(function() {<|fim▁hole|> bsfs.createWriteStream(null, function() {})
});
}
});
test('write file', function (t) {
t.plan(1);
var bsfs = new BSFS('bsfs-tests');
if (!bsfs._ready) return bsfs.once('ready', ready);
else process.nextTick(ready);
function ready () {
var path = '/tmp/test-' + randomId();
var content = 'Hello cruel world ' + randomId();
var w = bsfs.createWriteStream(path, function(err, meta) {
t.equal(err, null);
});
w.end(content);
}
});
test('write then read file by key', function (t) {
t.plan(1);
var path = '/tmp/test-' + randomId();
var content = 'Hello cruel world ' + randomId();
var bsfs = new BSFS('bsfs-tests');
if (!bsfs._ready) return bsfs.once('ready', ready);
else process.nextTick(ready);
function readBack (key) {
var r = bsfs.createReadStreamFromKey(key);
var readContent = '';
r.on('data', function (chunk) {
readContent += chunk;
});
r.on('end', function () {
t.equal(content, readContent);
});
}
function ready () {
var w = bsfs.createWriteStream(path, function(err, meta) {
readBack(meta.key);
});
w.end(content);
}
});
test('write then read file by name', function (t) {
t.plan(1);
var content = 'Hello cruel world ' + randomId();
var path = '/tmp/test-' + randomId();
var bsfs = new BSFS('bsfs-tests');
if (!bsfs._ready) return bsfs.once('ready', ready);
else process.nextTick(ready);
function readBack (path) {
var r = bsfs.createReadStream(path);
var readContent = '';
r.on('data', function (chunk) {
readContent += chunk;
});
r.on('end', function () {
t.equal(content, readContent);
});
}
function ready () {
var w = bsfs.createWriteStream(path, function(err, meta) {
readBack(path);
});
w.end(content);
}
});
test('access', function (t) {
t.test('is silent (for now)', function (t) {
t.plan(3);
var bsfs = new BSFS('bsfs-tests');
if (!bsfs._ready) return bsfs.once('ready', ready);
else process.nextTick(ready);
function ready () {
bsfs.access(null, function (err) {
t.ifError(err);
});
bsfs.access('/tmp', function (err) {
t.ifError(err);
});
bsfs.access('/tmp', 2, function (err) {
t.ifError(err);
});
}
});
t.test('throws on invalid callback argument', function (t) {
t.plan(1);
var bsfs = new BSFS('bsfs-tests');
if (!bsfs._ready) return bsfs.once('ready', ready);
else process.nextTick(ready);
function ready () {
t.throws(function () {
bsfs.access('/tmp/', 0, 'potatoe');
})
}
});
});
test('accessSync', function (t) {
t.test('is silent (for now)', function (t) {
t.plan(2);
var bsfs = new BSFS('bsfs-tests');
if (!bsfs._ready) return bsfs.once('ready', ready);
else process.nextTick(ready);
function ready () {
t.ifError(bsfs.accessSync(randomId()));
t.ifError(bsfs.accessSync());
}
})
});
test('exists', function (t) {
t.test('is true for all paths (for now)', function (t) {
t.plan(1);
var bsfs = new BSFS('bsfs-tests');
if (!bsfs._ready) return bsfs.once('ready', ready);
else process.nextTick(ready);
function ready () {
bsfs.exists(randomId(), function (exists) {
t.ok(exists);
});
}
});
});
test('existsSync', function (t) {
t.test('throws on null path', function (t) {
t.plan(1);
var bsfs = new BSFS('bsfs-tests');
if (!bsfs._ready) return bsfs.once('ready', ready);
else process.nextTick(ready);
function ready () {
t.throws(function () { bsfs.existsSync(); });
}
});
t.test('is true for all paths (for now)', function (t) {
t.plan(2);
var bsfs = new BSFS('bsfs-tests');
if (!bsfs._ready) return bsfs.once('ready', ready);
else process.nextTick(ready);
function ready () {
t.ok(bsfs.existsSync(randomId()));
t.ok(bsfs.existsSync());
}
});
});
test('appendFile without path throws', function (t) {
t.plan(1);
var bsfs = new BSFS('bsfs-tests');
if (!bsfs._ready) return bsfs.once('ready', ready);
else process.nextTick(ready);
function ready () {
t.throws(function () {
bsfs.appendFile(null, function () {});
});
}
});<|fim▁end|> | |
<|file_name|>utils_test.go<|end_file_name|><|fim▁begin|>/*
Copyright 2017 The Kubernetes Authors.
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*/
package preflight
import (
"testing"
"github.com/pkg/errors"
utilsexec "k8s.io/utils/exec"
fakeexec "k8s.io/utils/exec/testing"
)
func TestGetKubeletVersion(t *testing.T) {
// TODO: Re-enable this test
// fakeexec.FakeCmd supports only combined output.
// Hence .Output() returns a "not supported" error and we cannot use it for the test ATM.
t.Skip()
cases := []struct {
output string
expected string
err error
valid bool
}{
{"Kubernetes v1.7.0", "1.7.0", nil, true},
{"Kubernetes v1.8.0-alpha.2.1231+afabd012389d53a", "1.8.0-alpha.2.1231+afabd012389d53a", nil, true},
{"something-invalid", "", nil, false},
{"command not found", "", errors.New("kubelet not found"), false},
{"", "", nil, false},
}
for _, tc := range cases {
t.Run(tc.output, func(t *testing.T) {
fcmd := fakeexec.FakeCmd{
CombinedOutputScript: []fakeexec.FakeCombinedOutputAction{
func() ([]byte, error) { return []byte(tc.output), tc.err },
},
}
fexec := &fakeexec.FakeExec{
CommandScript: []fakeexec.FakeCommandAction{
func(cmd string, args ...string) utilsexec.Cmd { return fakeexec.InitFakeCmd(&fcmd, cmd, args...) },
},
}<|fim▁hole|> case err == nil && !tc.valid:
t.Errorf("GetKubeletVersion: error expected for key %q, but result is %q", tc.output, ver)
case ver != nil && ver.String() != tc.expected:
t.Errorf("GetKubeletVersion: unexpected version result for key %q. Expected: %q Actual: %q", tc.output, tc.expected, ver)
}
})
}
}<|fim▁end|> | ver, err := GetKubeletVersion(fexec)
switch {
case err != nil && tc.valid:
t.Errorf("GetKubeletVersion: unexpected error for %q. Error: %v", tc.output, err) |
<|file_name|>plugin.min.js<|end_file_name|><|fim▁begin|>tinymce.PluginManager.add("insertdatetime", function (e) {
function t(t, n) {
function i(e, t) {
if (e = "" + e, e.length < t)for (var n = 0; n < t - e.length; n++)e = "0" + e;
return e
}
return n = n || new Date, t = t.replace("%D", "%m/%d/%Y"), t = t.replace("%r", "%I:%M:%S %p"), t = t.replace("%Y", "" + n.getFullYear()), t = t.replace("%y", "" + n.getYear()), t = t.replace("%m", i(n.getMonth() + 1, 2)), t = t.replace("%d", i(n.getDate(), 2)), t = t.replace("%H", "" + i(n.getHours(), 2)), t = t.replace("%M", "" + i(n.getMinutes(), 2)), t = t.replace("%S", "" + i(n.getSeconds(), 2)), t = t.replace("%I", "" + ((n.getHours() + 11) % 12 + 1)), t = t.replace("%p", "" + (n.getHours() < 12 ? "AM" : "PM")), t = t.replace("%B", "" + e.translate(s[n.getMonth()])), t = t.replace("%b", "" + e.translate(o[n.getMonth()])), t = t.replace("%A", "" + e.translate(r[n.getDay()])), t = t.replace("%a", "" + e.translate(a[n.getDay()])), t = t.replace("%%", "%")
}<|fim▁hole|>
function n(n) {
var i = t(n);
if (e.settings.insertdatetime_element) {
var a;
a = /%[HMSIp]/.test(n) ? t("%Y-%m-%dT%H:%M") : t("%Y-%m-%d"), i = '<time datetime="' + a + '">' + i + "</time>";
var r = e.dom.getParent(e.selection.getStart(), "time");
if (r)return e.dom.setOuterHTML(r, i), void 0
}
e.insertContent(i)
}
var i, a = "Sun Mon Tue Wed Thu Fri Sat Sun".split(" "), r = "Sunday Monday Tuesday Wednesday Thursday Friday Saturday Sunday".split(" "), o = "Jan Feb Mar Apr May Jun Jul Aug Sep Oct Nov Dec".split(" "), s = "January February March April May June July August September October November December".split(" "), l = [];
e.addCommand("mceInsertDate", function () {
n(e.getParam("insertdatetime_dateformat", e.translate("%Y-%m-%d")))
}), e.addCommand("mceInsertTime", function () {
n(e.getParam("insertdatetime_timeformat", e.translate("%H:%M:%S")))
}), e.addButton("inserttime", {
type: "splitbutton", title: "Insert time", onclick: function () {
n(i || "%H:%M:%S")
}, menu: l
}), tinymce.each(e.settings.insertdatetime_formats || ["%H:%M:%S", "%Y-%m-%d", "%I:%M:%S %p", "%D"], function (e) {
l.push({
text: t(e), onclick: function () {
i = e, n(e)
}
})
}), e.addMenuItem("insertdatetime", {icon: "date", text: "Insert date/time", menu: l, context: "insert"})
});<|fim▁end|> | |
<|file_name|>ServiceType.java<|end_file_name|><|fim▁begin|>package com.mrmq.poker.common.glossary;
public enum ServiceType {
ADMIN("admin"),
POKER("poker");
String value;
private ServiceType(String value) {
this.value = value;
}
public String getValue(){
return value;<|fim▁hole|>}<|fim▁end|> | } |
<|file_name|>calculadora.js<|end_file_name|><|fim▁begin|>function init(){
listar();
calcular();
}
function calcular(){
var tabla=$('#tbllistado').DataTable({
"paging": false,
"bInfo" : false,
"searching": false,
"bLengthChange": false //used to hide the property
});
/* Function that detects the click and the id of the row */
/*
$('#tbllistado').on('click', 'tr', function () {
var id = tabla.row(this).id();
alert('Clicked row id ' + id);
});*/
var total = 0;
var totalFeriado = 0;
/* Function that detects the change event and the value for the quantity */
$('#tbllistado').on('change', '.normal', function () {
var id = $(this).val();
/* Disable the input */
$(this).prop('disabled', true);
var neto = parseInt($(this).parent().parent().find(".neto").html(), 10);
var monto = id*neto;
total = total+monto;
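// holiday total: a 20% surcharge on top of the normal total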
totalFeriado = total+(total*0.20);
$("#total").html('Normal: '+total+' / Feriado: '+totalFeriado);
});
}
<|fim▁hole|>}
init();<|fim▁end|> | function listar(){
$.post('controllers/tarifas.php?op=listarHTML',function(respuesta){
$("#contenido").html(respuesta);
}); |
<|file_name|>index.ts<|end_file_name|><|fim▁begin|><|fim▁hole|><|fim▁end|> | export * from './date-picker.module';
export * from './date-picker.component'; |
<|file_name|>generation.py<|end_file_name|><|fim▁begin|># Copyright 2012 Energid Technologies
from energid_nlp import logic
GENERATION_PROPOSITION = '$generate'
class Error(Exception):
pass
class Generator:
def __init__(self, kb):
self.kb = kb
def generate_prim(self, concept):
if isinstance(concept, logic.Description):
return self.generate_prim(concept.base)
elif isinstance(concept, logic.Expr):
return self.generate_prim(concept.op)
else:
result = '%s' % (concept,)<|fim▁hole|> return str(result)
def generate(self, concept):
if (isinstance(concept, str) or
isinstance(concept, logic.Description) or
isinstance(concept, logic.Expr)):
template = self.kb.slot_value(concept, GENERATION_PROPOSITION)
if template is None:
return self.generate_prim(concept)
else:
return self.generate_template(concept, template.op)
else:
return self.generate_prim(concept)
def generate_template(self, concept, template):
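# Expand each "{slot}" reference in the template with the generated value
# of that slot; an unclosed "{" raises Error.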
result_string = ''
start = 0
while start < len(template):
slot_start = template.find('{', start)
if slot_start == -1:
# No more slot refs
result_string = result_string + template[start:]
break
result_string = result_string + template[start:slot_start]
slot_end = template.find('}', slot_start + 1)
if slot_end == -1:
raise Error("Generation template %r for %s has an unclosed '{'" % (
template, concept))
slot_name = template[slot_start + 1:slot_end]
slot_value = self.kb.slot_value(concept, slot_name)
if slot_value is not None:
result_string = result_string + self.generate(slot_value)
start = slot_end + 1
# Strip whitespace out of result, in case slots in template
# couldn't be filled.
return result_string.strip()<|fim▁end|> | |
<|file_name|>response_status.py<|end_file_name|><|fim▁begin|># coding: utf-8
"""
Wavefront REST API Documentation
<p>The Wavefront REST API enables you to interact with Wavefront servers using standard REST API tools. You can use the REST API to automate commonly executed operations such as automatically tagging sources.</p><p>When you make REST API calls outside the Wavefront REST API documentation you must add the header \"Authorization: Bearer <<API-TOKEN>>\" to your HTTP requests.</p> # noqa: E501
OpenAPI spec version: v2
Contact: [email protected]
Generated by: https://github.com/swagger-api/swagger-codegen.git<|fim▁hole|>
import pprint
import re # noqa: F401
import six
from wavefront_api_client.configuration import Configuration
class ResponseStatus(object):
"""NOTE: This class is auto generated by the swagger code generator program.
Do not edit the class manually.
"""
"""
Attributes:
swagger_types (dict): The key is attribute name
and the value is attribute type.
attribute_map (dict): The key is attribute name
and the value is json key in definition.
"""
swagger_types = {
'code': 'int',
'message': 'str',
'result': 'str'
}
attribute_map = {
'code': 'code',
'message': 'message',
'result': 'result'
}
def __init__(self, code=None, message=None, result=None, _configuration=None): # noqa: E501
"""ResponseStatus - a model defined in Swagger""" # noqa: E501
if _configuration is None:
_configuration = Configuration()
self._configuration = _configuration
self._code = None
self._message = None
self._result = None
self.discriminator = None
self.code = code
if message is not None:
self.message = message
self.result = result
@property
def code(self):
"""Gets the code of this ResponseStatus. # noqa: E501
HTTP Response code corresponding to this response # noqa: E501
:return: The code of this ResponseStatus. # noqa: E501
:rtype: int
"""
return self._code
@code.setter
def code(self, code):
"""Sets the code of this ResponseStatus.
HTTP Response code corresponding to this response # noqa: E501
:param code: The code of this ResponseStatus. # noqa: E501
:type: int
"""
if self._configuration.client_side_validation and code is None:
raise ValueError("Invalid value for `code`, must not be `None`") # noqa: E501
self._code = code
@property
def message(self):
"""Gets the message of this ResponseStatus. # noqa: E501
Descriptive message of the status of this response # noqa: E501
:return: The message of this ResponseStatus. # noqa: E501
:rtype: str
"""
return self._message
@message.setter
def message(self, message):
"""Sets the message of this ResponseStatus.
Descriptive message of the status of this response # noqa: E501
:param message: The message of this ResponseStatus. # noqa: E501
:type: str
"""
self._message = message
@property
def result(self):
"""Gets the result of this ResponseStatus. # noqa: E501
:return: The result of this ResponseStatus. # noqa: E501
:rtype: str
"""
return self._result
@result.setter
def result(self, result):
"""Sets the result of this ResponseStatus.
:param result: The result of this ResponseStatus. # noqa: E501
:type: str
"""
if self._configuration.client_side_validation and result is None:
raise ValueError("Invalid value for `result`, must not be `None`") # noqa: E501
allowed_values = ["OK", "ERROR"] # noqa: E501
if (self._configuration.client_side_validation and
result not in allowed_values):
raise ValueError(
"Invalid value for `result` ({0}), must be one of {1}" # noqa: E501
.format(result, allowed_values)
)
self._result = result
def to_dict(self):
"""Returns the model properties as a dict"""
result = {}
for attr, _ in six.iteritems(self.swagger_types):
value = getattr(self, attr)
if isinstance(value, list):
result[attr] = list(map(
lambda x: x.to_dict() if hasattr(x, "to_dict") else x,
value
))
elif hasattr(value, "to_dict"):
result[attr] = value.to_dict()
elif isinstance(value, dict):
result[attr] = dict(map(
lambda item: (item[0], item[1].to_dict())
if hasattr(item[1], "to_dict") else item,
value.items()
))
else:
result[attr] = value
if issubclass(ResponseStatus, dict):
for key, value in self.items():
result[key] = value
return result
def to_str(self):
"""Returns the string representation of the model"""
return pprint.pformat(self.to_dict())
def __repr__(self):
"""For `print` and `pprint`"""
return self.to_str()
def __eq__(self, other):
"""Returns true if both objects are equal"""
if not isinstance(other, ResponseStatus):
return False
return self.to_dict() == other.to_dict()
def __ne__(self, other):
"""Returns true if both objects are not equal"""
if not isinstance(other, ResponseStatus):
return True
return self.to_dict() != other.to_dict()<|fim▁end|> | """
|
<|file_name|>alpha.go<|end_file_name|><|fim▁begin|><|fim▁hole|>Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*/
package cmd
import (
"github.com/spf13/cobra"
"k8s.io/cli-runtime/pkg/genericclioptions"
"k8s.io/kubectl/pkg/util/templates"
cmdutil "k8s.io/kubernetes/pkg/kubectl/cmd/util"
"k8s.io/kubernetes/pkg/kubectl/util/i18n"
)
// NewCmdAlpha creates a command that acts as an alternate root command for features in alpha
func NewCmdAlpha(f cmdutil.Factory, streams genericclioptions.IOStreams) *cobra.Command {
cmd := &cobra.Command{
Use: "alpha",
Short: i18n.T("Commands for features in alpha"),
Long: templates.LongDesc(i18n.T("These commands correspond to alpha features that are not enabled in Kubernetes clusters by default.")),
}
// Alpha commands should be added here. As features graduate from alpha they should move
// from here to the CommandGroups defined by NewKubeletCommand() in cmd.go.
//cmd.AddCommand(NewCmdDebug(f, in, out, err))
// NewKubeletCommand() will hide the alpha command if it has no subcommands. Overriding
// the help function ensures a reasonable message if someone types the hidden command anyway.
if !cmd.HasSubCommands() {
cmd.SetHelpFunc(func(*cobra.Command, []string) {
cmd.Println(i18n.T("No alpha commands are available in this version of kubectl"))
})
}
return cmd
}<|fim▁end|> | /*
Copyright 2017 The Kubernetes Authors.
|
<|file_name|>insert_input.py<|end_file_name|><|fim▁begin|># -*- coding: utf8 -*-
# Author: Adrien Bibal
# Date: 2014
# Insert the student answer in the correction framework file.
import sys
import codecs
input_file = sys.stdin # input = file containing the student answer.
oz_file = codecs.open("/task/task.oz", "r", "utf8") # Open the "correction framework file".
new_file = codecs.open("new_file.oz", "w","utf8") # Open the final file.
for line in oz_file:
# "@@q1@@" is the arbitrary marker used to say "insert the student answer here".
if "@@q1@@" in line :
for input_line in input_file :
if '\0' in input_line :
input_line = input_line.strip('\0')
new_file.write(input_line) # Copy each line from the student answer to the final file.
else :
new_file.write(line) # Copy each line from the "correction framework file" to the final file.
<|fim▁hole|><|fim▁end|> | oz_file.close()
new_file.close() |
<|file_name|>test.py<|end_file_name|><|fim▁begin|>#! /usr/bin/env python
"""
Simulate DSR over a network of nodes.
Revision Info
=============
* $LastChangedBy: mandke $
* $LastChangedDate: 2011-10-26 21:51:40 -0500 (Wed, 26 Oct 2011) $
* $LastChangedRevision: 5314 $
:author: Ketan Mandke <[email protected]>
:copyright:
Copyright 2009-2011 The University of Texas at Austin
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
"""
__docformat__ = "restructuredtext en"
from SimPy.Simulation import *
from scapy.all import *
from wins import *
from wins.ieee80211 import *
from copy import copy, deepcopy
from wins.backend import RNG_init
from wins.backend import *
from wins.mac import RBAR, ARF
from wins.net import DSR
from wins.traffic import Agent
import sys
from optparse import OptionParser
import numpy as np
import struct
import gc
import time
RNG_INIT = 1
EXIT_WITH_TRACE = 1
class Node(Element):
name = "node"
tracename = "NODE"
def __init__(self, **kwargs):
Element.__init__(self, **kwargs)
def configure(self, pos=None, # motion \
useshared=False, # arp \
cfocorrection=True, # phy \
usecsma=False, # mac \
rreqrate=None, datarate=None, # net \
dest=None, plen=None, delay=None, mode=None, # agent \
**kwargs):
cif = self.newchild('cif', Dot11NRadio)
phy = self.newchild('phy', Dot11NPHY, radio=cif, cfocorrection=cfocorrection)
mac = self.newchild('mac', DCF, usecsma=usecsma, phy=phy)
net = self.newchild('net', DSR, rreqrate=rreqrate, datarate=datarate)
arp = self.newchild('arp', ARP, useshared=useshared)
agt = self.newchild('agent', Agent, dest=dest, plen=plen, \
delay=delay, mode=mode)
mobi = self.newchild('motion', Motion, pos=pos)
# connect ports
agt.connect(net)
arp.connect(net, mac)
mac.connect(phy)
phy.connect(cif)
def read_topo(options, topofile):
"""Read topology layout from file."""
f = file(topofile, 'r')
s = f.readline()
topo = {'border':None, 'layout':None}
done = not (s)
while not done:
# convert s to dict (check for border and layout)
try:
d = eval(s)
assert isinstance(d, dict)
assert ('border' in d) and ('layout' in d)
except:
d = None
# add dict to topo
if d: topo.update(d)
# get next input
s = f.readline()
done = not(s)
f.close()
return topo
def read_route(options, routefile):
"""Read routing tables from file."""
f = file(routefile, 'r')
s = f.readline()
routedata = {}
done = not (s)
while not done:
# convert s to dict
try:
d = eval(s)
assert isinstance(d, dict)
for x,y in d.items():
# maps src x -> routing table y
assert isinstance(y, dict)
for a,b in y.items():
# maps dst a -> info b (for route table y)
assert ('index' in b)
assert ('list' in b)
except:
d = None
# add dict to routedata
if d: routedata.update(d)
# get next input
s = f.readline()
done = not(s)
f.close()
return routedata
def get_topology(options, numnodes):
"""Get/create topology."""
# load topology from file
if options.usetopo:
topofile = options.usetopo
topo = read_topo(options, topofile)
border = topo['border']
layout = topo['layout']
xmin, xmax, ymin, ymax = border[:4]
assert (len(layout)>=numnodes)
return topo
# create new topology
assert (options.xmin<=options.xmax)
assert (options.ymin<=options.ymax)
xmin, xmax = options.xmin, options.xmax
ymin, ymax = options.ymin, options.ymax
border = (xmin, xmax, ymin, ymax)
# use uniform distribution for layout
xpos = np.random.uniform(xmin, xmax, numnodes)
ypos = np.random.uniform(ymin, ymax, numnodes)
layout = [(xpos[k],ypos[k]) for k in range(numnodes)]
# verify layout parameters
assert (len(layout)>=numnodes)
topo = {'border':border, 'layout':layout}
return topo
def set_routing(options, nodelist):
"""Set routing tables if needed."""
if not options.useroute: return
routefile = options.useroute
rdata = read_route(options, routefile)
for n in nodelist:
addr = n.net.address
if addr not in rdata: continue
for dst, data in rdata[addr].items():
paths = data['list']
for c,ts,nh in paths:
n.net.addroute(dst, nexthop=nh, cost=c)
return rdata
def run_experiment(options):
# record start time
starttime = time.time()
# initialize RNG
if RNG_INIT: RNG_init()
# set SIMULATION parameters
mon = Element(tracename="MON")
verbose = options.verbose
stoptime = 2.0
if not (options.stop<0): stoptime = options.stop
stoptime *= 1.05 # allow events around stoptime to finish
simargs = {'verbose':verbose}
# set EXPERIMENT parameters
ntx, nrx = 1, 1
numnodes = options.numnodes
nconnect = options.nconnect
assert (nconnect>0)
assert (numnodes>=2*nconnect)
# set CHANNEL parameters
alpha = options.alpha
modeltype = options.tgnmodel # default -> LOS Channel
usedoppler = options.usedoppler
usefading = options.usefading
envspeed = options.envspeed
chargs = {'modeltype':modeltype, 'n':alpha, \
'usedoppler':usedoppler, 'usefading':usefading, \
'environmentspeed': envspeed}
chargs.update(simargs)
# set AGENT parameters
mode = options.agent_mode
plen = Agent.DefaultPacketLength
rate = options.rate # transmission rate in packets/second
delay = None
if mode is None: mode = "cbr"
if options.plen>0: plen = options.plen
if (rate>0): delay = 1.0/rate
# set agent delay if not already specified
if delay is None:
cm = Dot11NChannel(**chargs)
chan = Dot11N_Channel(cm.modelnum, nrx, ntx, cm.flags)
delay = 2*chan.coherencetime()
if rate is None: rate = 1.0/delay
agtargs = {'plen': plen, 'mode':mode, 'delay':delay}
# set DSR parameters
rreqrate, datarate = None, None
if 0<=options.rreqrate<8*ntx: rreqrate=options.rreqrate
if 0<=options.datarate<8*ntx: datarate=options.datarate
netargs = {'rreqrate':rreqrate, 'datarate':datarate}
# set other protocol parameters (MAC, ARP, etc.)
useshared = True
arpargs = {'useshared':useshared}
usecsma = False
macargs = {'usecsma':usecsma}
# set phy parameters
Dot11NPHY.usewaveform = options.usewaveform
Dot11NRadio.Ntx, Dot11NRadio.Nrx = ntx, nrx
Dot11NRadio.fomax = options.fomax
cfocorrection = True
if options.disable_cfo_correction: cfocorrection = False
phyargs = {'cfocorrection':cfocorrection}
# set node parameters
nodeargs = {}
nodeargs.update(agtargs)
nodeargs.update(netargs)
nodeargs.update(arpargs)
nodeargs.update(macargs)
nodeargs.update(phyargs)
nodeargs.update(simargs)
############################
# Set Up Simulation
############################
initialize()
# create channel
bidirectional = options.bidirectional
ch = Channel(model=Dot11NChannel, bidirectional=bidirectional, **simargs)
# get topology
topo = get_topology(options, numnodes)
border = topo['border']
layout = topo['layout']
# create nodes
nodelist = []
for k in range(numnodes):
pos = layout[k]
n = Node(pos=pos, **nodeargs)
nodelist.append(n)
n.motion.log("pos", pos=["%.3f"%(p) for p in n.motion.position] )
# connect source/destination pairs
assert (nconnect<len(nodelist))
for k in range(nconnect):
src = nodelist[k] # first N are sources
dst = nodelist[-k-1] # last N are destinations
src.agent.dest = dst.net.address
# set routing tables
set_routing(options, nodelist)
# connect all nodes via channel
for n in nodelist:
for m in nodelist:
if (n is not m):
ch.add_edge(n.cif, m.cif, **chargs)
# create monitor
if options.monitor:
mon = Monitor(period=stoptime/1e4)
mon.start()
############################
# Run Simulation
############################
if options.usetopo:
mon.log("topo", topofile=options.usetopo)
mon.log("model", **chargs)
mon.log("rate", rate="%.5g"%(rate) )
simerror = None
if EXIT_WITH_TRACE:
try:
simulate(until=stoptime)
except Exception, e:
mon.log("SIMERR", error=str(e))
simerror = e
else:
simulate(until=stoptime)
# log remaining trace information
mon.log("stoptime", stoptime="%.6f"%(stoptime))
n = gc.collect()
mon.log("GC", collected=n)
totaltime = time.time() - starttime
t = time.gmtime(totaltime)
mon.log("runtime", runtime="%02d:%02d:%02d (h/m/s)"%(t.tm_hour, t.tm_min, t.tm_sec) )
############################
# Teardown/Cleanup
############################
# print output
sys.stdout.flush()
if options.trace: ch.trace.output()<|fim▁hole|> # write topofile
if options.savetopo:
f = file(options.savetopo, 'w')
f.write("%s\n"%(topo) )
f.close()
# write routefile
if options.saveroute:
# write data
f = file(options.saveroute, 'w')
for n in nodelist:
addr = n.net.address
rdata = {addr: n.net.table.data.copy()}
f.write("%s\n"%(rdata))
f.close()
# if Exception occurred during simulation ...
if simerror: raise simerror
def main():
usage = "%prog [OPTIONS]"
parser = OptionParser(usage=usage)
# simulation parameters
parser.add_option("-v", "--verbose", dest="verbose", type="int", \
default=ROUTING_VERBOSE+1, help="Set verbose level [default=%default].")
parser.add_option("-t", "--trace", dest="trace", action="store_true", \
default=False, help="Output formatted trace to stdout")
parser.add_option("-o", "--output", dest="output", \
default=None, help="Name of output file for trace")
parser.add_option("-s", "--stop", dest="stop", \
type="float", default=2.0, \
help="Run simulation until stop time [default=%default]")
parser.add_option("-m", "--monitor", dest="monitor", action="store_true", \
default=False, help="Enable simulation montior")
# experiment parameters
parser.add_option("-n", "--num-nodes", dest="numnodes", type="int", \
default=50, help="Set number of nodes [default=%default]")
parser.add_option("-c", "--num-connections", dest="nconnect", type="int", \
default=1, help="Set number of active connections [default=%default]")
# agent parameters
parser.add_option("-r", "--rate", dest="rate", type="float", \
default=None, help="Packets/second generated by a source [default=%default]")
parser.add_option("-l", "--packet-length", dest="plen", type="int", \
default=1024, help="Set packet size in bytes [default=%default]")
parser.add_option("", "--agent-mode", dest="agent_mode", \
default=None, help="Specify traffic mode [options=%s]."%(Agent.TrafficModes))
# net parameters
parser.add_option("", "--rreqrate", dest="rreqrate", type="int", \
default=None, help="Set rate index for RREQ in DSR [default=%default]")
parser.add_option("", "--datarate", dest="datarate", type="int", \
default=None, help="Set rate index for non-RREQ packets in DSR [default=%default]")
# mac parameters
# phy parameters
parser.add_option("", "--mcs", dest="mcs", type="int", \
default=0, help="Set rate index for MCS [default=%default]")
parser.add_option("", "--fomax", dest="fomax", \
type="float", default=0.0, \
help="Specify maximum frequency offset in ppm [default=%default]")
parser.add_option("", "--use-waveform", dest="usewaveform", action="store_true", \
default=False, help="Enable waveform-level simulation [default=%default]")
parser.add_option("", "--disable-cfo-correction", \
dest="disable_cfo_correction", action="store_true", \
default=False, help="Disable CFO correction in waveform-level simulation [default=%default]")
# channel parameters
parser.add_option("", "--tgn-model", dest="tgnmodel", \
default=None, help="Specify TGn model.")
parser.add_option("", "--alpha", dest="alpha", type="float", \
default=2.0, help="Specify pathloss exponent [default=%default]")
parser.add_option("", "--use-doppler", dest="usedoppler", action="store_true", \
default=False, help="Enable doppler filter for fading in TGn channel model.")
parser.add_option("", "--disable-fading", dest="usefading", action="store_false", \
default=True, help="Normalize channel and remove impact of fading on pathloss in TGn channel model.")
parser.add_option("-E", "--environment-speed", dest="envspeed", type="float", \
default=1.2, help="Environmental speed in (km/hr) [default=%default]")
parser.add_option("", "--bidirectional-channel", dest="bidirectional", action="store_true", \
default=False, help="Use bidirectional links in channel [default=%default]")
# topology/layout parameters
parser.add_option("", "--xmin", dest="xmin", type="float", \
default=0.0, help="Set x-axis left boundary [default=%default]")
parser.add_option("", "--xmax", dest="xmax", type="float", \
default=500.0, help="Set x-axis right boundary [default=%default]")
parser.add_option("", "--ymin", dest="ymin", type="float", \
default=0.0, help="Set y-axis lower boundary [default=%default]")
parser.add_option("", "--ymax", dest="ymax", type="float", \
default=500.0, help="Set y-axis upper boundary [default=%default]")
parser.add_option("", "--use-topo", dest="usetopo", \
default=None, help="Specify topology file instead of generating random topology.")
parser.add_option("", "--save-topo", dest="savetopo", \
default=None, help="Save topology to file.")
# routing parameters
parser.add_option("", "--use-route", dest="useroute", \
default=None, help="Specify routing file to initialize route tables.")
parser.add_option("", "--save-route", dest="saveroute", \
default=None, help="Save route tables to file.")
(options, args) = parser.parse_args()
if len(args)>0:
print "Invalid number of arguments."
parser.print_help()
raise SystemExit
run_experiment(options)
if __name__ == '__main__':
main()<|fim▁end|> |
# write tracefile
if options.output is not None: ch.trace.write(options.output)
|
<|file_name|>sql_column_metadata_codec.py<|end_file_name|><|fim▁begin|>from hazelcast.protocol.builtin import FixSizedTypesCodec, CodecUtil
from hazelcast.serialization.bits import *
from hazelcast.protocol.client_message import END_FRAME_BUF, END_FINAL_FRAME_BUF, SIZE_OF_FRAME_LENGTH_AND_FLAGS, create_initial_buffer_custom
from hazelcast.sql import SqlColumnMetadata
from hazelcast.protocol.builtin import StringCodec
_TYPE_ENCODE_OFFSET = 2 * SIZE_OF_FRAME_LENGTH_AND_FLAGS
_TYPE_DECODE_OFFSET = 0
_NULLABLE_ENCODE_OFFSET = _TYPE_ENCODE_OFFSET + INT_SIZE_IN_BYTES
_NULLABLE_DECODE_OFFSET = _TYPE_DECODE_OFFSET + INT_SIZE_IN_BYTES
_INITIAL_FRAME_SIZE = _NULLABLE_ENCODE_OFFSET + BOOLEAN_SIZE_IN_BYTES - SIZE_OF_FRAME_LENGTH_AND_FLAGS
class SqlColumnMetadataCodec:
@staticmethod
def encode(buf, sql_column_metadata, is_final=False):
initial_frame_buf = create_initial_buffer_custom(_INITIAL_FRAME_SIZE)
FixSizedTypesCodec.encode_int(initial_frame_buf, _TYPE_ENCODE_OFFSET, sql_column_metadata.type)
FixSizedTypesCodec.encode_boolean(initial_frame_buf, _NULLABLE_ENCODE_OFFSET, sql_column_metadata.nullable)
buf.extend(initial_frame_buf)
StringCodec.encode(buf, sql_column_metadata.name)
if is_final:
buf.extend(END_FINAL_FRAME_BUF)
else:
buf.extend(END_FRAME_BUF)
@staticmethod
def decode(msg):
msg.next_frame()
initial_frame = msg.next_frame()
type = FixSizedTypesCodec.decode_int(initial_frame.buf, _TYPE_DECODE_OFFSET)
is_nullable_exists = False
nullable = False
if len(initial_frame.buf) >= _NULLABLE_DECODE_OFFSET + BOOLEAN_SIZE_IN_BYTES:
nullable = FixSizedTypesCodec.decode_boolean(initial_frame.buf, _NULLABLE_DECODE_OFFSET)
is_nullable_exists = True
name = StringCodec.decode(msg)
CodecUtil.fast_forward_to_end_frame(msg)<|fim▁hole|> return SqlColumnMetadata(name, type, is_nullable_exists, nullable)<|fim▁end|> | |
<|file_name|>parse_iso_20022_files.py<|end_file_name|><|fim▁begin|>import logging
from django.core.management.base import BaseCommand
from payment.postfinance_connector import ISO2022Parser
log = logging.getLogger('tq')
class Command(BaseCommand):
help = '(re)parse ISO 20022 files, ignoring duplicates'
def add_arguments(self, parser):
parser.add_argument(
'--dry-run',
action='store_true',
dest='dry_run',
default=False,
help='dry run',
)<|fim▁hole|> '--reparse',
action='store_true',
dest='reparse',
default=False,
help='parse file also if already processed',
)
def handle(self, *args, **options):
log.info('run management command: {}'.format(__file__))
parser = ISO2022Parser()
count = parser.parse(reparse=options['reparse'], dry_run=options['dry_run'])
log.info('found and parsed {} new transactions'.format(count))<|fim▁end|> | parser.add_argument( |
<|file_name|>Device.cpp<|end_file_name|><|fim▁begin|>#include "Common.h"
#include "Script.h"
#include <atlimage.h>
#include <hidsdi.h>
#include <SetupAPI.h>
#define HID_SFIP 'SFIP'
#define HID_X52P 'X52P'
#define HID_UNKN 'UNKN'
// CHECKME : Do we still need all this code since we now have a GetSerialNumber() in the DO API?
DeviceManager *DevMan = DeviceManager::GetInstance();
SaitekDevice HID[HID_COUNT];
int HIDCount = HID_EMPTY;
int ToDeviceShortName(const char *type) {
if (strcmp(type, "SFIP") == 0)
return HID_SFIP;
if (strcmp(type, "X52P") == 0)
return HID_X52P;
return HID_UNKN;
}
int GetDeviceShortName(GUID type) {
if (type == DeviceType_Fip)
return HID_SFIP;
if (type == DeviceType_X52Pro)
return HID_X52P;
return HID_UNKN;
}
const char *GetDeviceStringName(GUID type) {
if (type == DeviceType_Fip)
return "SFIP";
if (type == DeviceType_X52Pro)
return "X52P";
return "UNKN";
}
int HIDLookupByType(const char *type, int index) {
int count = 1; // Index starts at 1 in Lua
int dev = ToDeviceShortName(type);
for (int i = 0; i < HIDCount; i++) {
if (GetDeviceShortName(HID[i].type) == dev/* && HID[i].isActive*/) {
if (count++ == index)
return i;
}
}
return HID_NOTFOUND;
}
int HIDLookupByIndex(int index) {
int count = 1; // Index starts at 1 in Lua
int dev = GetDeviceShortName(HID[index].type);
for (int i = 0; i < HIDCount; i++) {
if (GetDeviceShortName(HID[i].type) == dev/* && HID[i].isActive*/) {
if (index == i)
return count;
count++;
}
}
return HID_NOTFOUND;
}
static void CALLBACK DO_Enumerate(void* hDevice, void* pCtxt) {
DevMan->HandleDeviceChange(hDevice, true);
}
static void CALLBACK DO_DeviceChange(void* hDevice, bool bAdded, void* pCtxt) {
int index = DevMan->HandleDeviceChange(hDevice, bAdded);
LuaMan->CallDeviceChangeCallbacks(index, bAdded);
}
void DeviceManager::Initialize() {
_lockInit.Acquire();
if (_initializedCounter > 0) {
_initializedCounter++;
_lockInit.Release();
return;
}
// Initialize...
memset(HID, 0, sizeof(SaitekDevice) * HID_COUNT);
HIDCount = HID_EMPTY;
//Initialize DirectInput
HRESULT hdi = DirectInput8Create(GetModuleHandle(NULL), DIRECTINPUT_VERSION, IID_IDirectInput8, (LPVOID *)&this->_di, NULL);
if (!SUCCEEDED(hdi)) {
_lockInit.Release();
return;
}
//Initialize Saitek DirectOutput
_do = new CDirectOutput();
HRESULT hdo = _do->Initialize(L"" LUALIB_IDENT);
if (!SUCCEEDED(hdo)) {
_di->Release();
_lockInit.Release();
return;
}
// Register callbacks
HRESULT h1 = _do->Enumerate((Pfn_DirectOutput_EnumerateCallback)DO_Enumerate, NULL);
HRESULT h2 = _do->RegisterDeviceCallback((Pfn_DirectOutput_DeviceChange)DO_DeviceChange, NULL);
// Everything OK
_initializedCounter = 1;
_lockInit.Release();
}
void DeviceManager::Release() {
_lockInit.Acquire();
if (_initializedCounter-- > 1) {
_lockInit.Release();
return;
}
_do->Deinitialize();
_di->Release();
_initializedCounter = 0;
_lockInit.Release();
}
void DeviceManager::GetDeviceInfo(void *hDevice, DeviceData &dd) {
GUID gi;
HRESULT hr = DO()->GetDeviceInstance(hDevice, &gi);
GetDeviceInfo(gi, dd);
}
void DeviceManager::GetDeviceInfo(const GUID &iid, DeviceData &dd) {
SP_DEVINFO_DATA DeviceInfoData;
SP_DEVICE_INTERFACE_DATA did;
struct { DWORD cbSize; TCHAR DevicePath[256]; } ciddd;
TCHAR s[64];
GUID HidGuid;
//Empty
dd.instanceID[0] = 0;
dd.name[0] = 0;
dd.serialNumber[0] = 0;
// Try to create a device
LPDIRECTINPUTDEVICE8 pDevice;
HRESULT hd = DevMan->DI()->CreateDevice(iid, &pDevice, NULL);
if (FAILED(hd)) {
return;
}
// Get the GUID and Path
DIPROPGUIDANDPATH h;
h.diph.dwSize = sizeof(DIPROPGUIDANDPATH);
h.diph.dwHeaderSize = sizeof(DIPROPHEADER);
h.diph.dwObj = 0;
h.diph.dwHow = DIPH_DEVICE;
HRESULT hp = pDevice->GetProperty(DIPROP_GUIDANDPATH, (LPDIPROPHEADER)&h);
if (FAILED(hp))
return;
// Change # to \ to match structure of instance ID
for (size_t i = 0; i < wcslen(h.wszPath); i++) {
if (h.wszPath[i] == L'#') {
h.wszPath[i] = L'\\';
}
}
// Prepare enumeration
HidD_GetHidGuid(&HidGuid);
HDEVINFO hdi = SetupDiGetClassDevs(&HidGuid, NULL, NULL, DIGCF_PRESENT|DIGCF_DEVICEINTERFACE);
DeviceInfoData.cbSize = sizeof(SP_DEVINFO_DATA);
did.cbSize = sizeof(SP_DEVICE_INTERFACE_DATA);
ciddd.cbSize = sizeof(SP_INTERFACE_DEVICE_DETAIL_DATA);
for (int i = 0; SetupDiEnumDeviceInterfaces(hdi, 0, &HidGuid, i, &did); i++) {
if (!SetupDiGetDeviceInterfaceDetail(hdi, &did, PSP_INTERFACE_DEVICE_DETAIL_DATA(&ciddd), sizeof(ciddd.DevicePath), 0, &DeviceInfoData))
continue;
if (!SetupDiGetDeviceInstanceId(hdi, &DeviceInfoData, s, sizeof(s), 0))
continue;
_wcslwr_s(s);
if(!wcsstr(h.wszPath, s))
continue;
strncpy_s(dd.instanceID, CT2A(s), sizeof(dd.instanceID) - 1);
HANDLE h = CreateFile(ciddd.DevicePath, GENERIC_READ, FILE_SHARE_READ, NULL, OPEN_EXISTING, NULL, NULL);
if (HidD_GetProductString(h, s, sizeof(s)))
strncpy_s(dd.name, CT2A(s), sizeof(dd.name) - 1);
if (HidD_GetSerialNumberString(h, s, sizeof(s)))
strncpy_s(dd.serialNumber, CT2A(s), sizeof(dd.serialNumber) - 1);
CloseHandle(h);
}
SetupDiDestroyDeviceInfoList(hdi);
}
int DeviceManager::Prepare(void *hDevice) {
if (HIDCount == HID_COUNT)
return HID_NOTFOUND;
int index = HIDCount++;
HID[index].hDevice = hDevice;
return index;
}
void DeviceManager::Set(int index) {
void *hDevice = HID[index].hDevice;
GUID gt, gi;
DeviceData dd;
GetDeviceInfo(hDevice, dd);
DO()->GetDeviceType(hDevice, &gt);
DO()->GetDeviceInstance(hDevice, &gi);
HID[index].type = gt;
HID[index].instance = gi;
strcpy_s(HID[index].instanceID, dd.instanceID);
strcpy_s(HID[index].name, dd.name);
strcpy_s(HID[index].serialNumber, dd.serialNumber);
}
int DeviceManager::HandleDeviceChange(void *hDevice, bool bAdded) {
int index = LookupByHandle(hDevice);
_lockHID.Acquire();
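// When a device (re)appears, first try to re-bind a previously seen, now
// inactive entry by matching type/instance ID/serial before allocating a new slot.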
if (bAdded) {
if (index == HID_NOTFOUND)
index = LookupByDeviceInfo(hDevice, false);
if (index == HID_NOTFOUND)
index = Prepare(hDevice);
}
if (index != HID_NOTFOUND) {
HID[index].isActive = bAdded;
if (bAdded) {
HID[index].hDevice = hDevice;
Set(index);<|fim▁hole|>
return index;
}
int DeviceManager::LookupByHandle(void* hDevice) {
_lockHID.Acquire();
for (int i = 0; i < HIDCount; i++) {
if (hDevice == HID[i].hDevice) {
_lockHID.Release();
return i;
}
}
_lockHID.Release();
return HID_NOTFOUND;
}
int DeviceManager::LookupByDeviceInfo(void *hDevice, bool isActive) {
GUID gt;
DeviceData dd;
DO()->GetDeviceType(hDevice, &gt);
GetDeviceInfo(hDevice, dd);
return LookupByDeviceInfo(gt, dd, isActive);
}
int DeviceManager::LookupByDeviceInfo(GUID &type, DeviceData &dd, bool isActive) {
for (int i = 0; i < HIDCount; i++) {
if (HID[i].isActive == isActive && HID[i].type == type &&
strcmp(dd.instanceID, HID[i].instanceID) == 0 &&
strcmp(dd.serialNumber, HID[i].serialNumber) == 0) {
return i;
}
}
return HID_NOTFOUND;
}<|fim▁end|> | }
}
_lockHID.Release(); |