prompt
large_stringlengths 70
991k
| completion
large_stringlengths 0
1.02k
|
---|---|
<|file_name|>DBIndex.java<|end_file_name|><|fim▁begin|>package com.github.esadmin.meta.model;
import java.util.Date;
import java.util.HashSet;
import java.util.Set;
import javax.persistence.CascadeType;
import javax.persistence.Column;
import javax.persistence.Entity;
import javax.persistence.FetchType;
import javax.persistence.JoinColumn;
import javax.persistence.JoinTable;
import javax.persistence.ManyToMany;
import javax.persistence.OneToMany;
import javax.persistence.OrderBy;
import javax.persistence.Table;
import org.guess.core.orm.IdEntity;
import org.hibernate.annotations.Cache;
import org.hibernate.annotations.CacheConcurrencyStrategy;
import com.fasterxml.jackson.annotation.JsonIgnoreProperties;
/**
* 索引对象Entity
* @author Joe.zhang
* @version 2015-12-08
*/
@Entity
@Table(name = "meta_dbindex")
@JsonIgnoreProperties(value = {"hibernateLazyInitializer","handler", "columns"})
@Cache(usage = CacheConcurrencyStrategy.READ_WRITE)
public class DBIndex extends IdEntity {
/**
* 数据表
*/
@ManyToMany(cascade = { CascadeType.PERSIST, CascadeType.MERGE }, targetEntity = DBTable.class)
@JoinTable(name = "meta_table_index", joinColumns = { @JoinColumn(name = "index_id") }, inverseJoinColumns = { @JoinColumn(name = "table_id") })
@JsonIgnoreProperties(value = { "hibernateLazyInitializer","handler","datasource"})
@Cache(usage = CacheConcurrencyStrategy.READ_WRITE)
private Set<DBTable> tables = new HashSet<DBTable>(0);
/**
* 索引库名
*/
@Column(name="index_name")
private String index_name;
/**
* 索引表名
*/
@Column(name="type_name")
private String type_name;
/**
* 索引类别
*/
@Column(name="index_type")
private Integer indexType;
/**
* 建立者
*/
@Column(name="createby_id")
private Long createbyId;
/**
* 更新者
*/
@Column(name="updateby_id")
private Long updatebyId;
/**
* 建立世间
*/
@Column(name="create_date")
private Date createDate;
/**
* 更新世间
*/
@Column(name="update_date")
private Date updateDate;
/**
* 备注
*/
@Column(name="remark")
private String remark;
@OneToMany(targetEntity = DbColumn.class, fetch = FetchType.LAZY, cascade = CascadeType.ALL,mappedBy="dbindex")
@OrderBy("id ASC")
private Set<DbColumn> columns;
@Column(name="check_label")
private Integer checkLabel;
public Integer getCheckLabel() {
return checkLabel;
}
public void setCheckLabel(Integer checkLabel) {
this.checkLabel = checkLabel;
}
public Set<DBTable> getTables() {
return tables;
}
public void setTables(Set<DBTable> tables) {
this.tables = tables;
}
public String getIndex_name() {
return index_name;
}
public void setIndex_name(String index_name) {
this.index_name = index_name;
}
public String getType_name() {
return type_name;
}
public void setType_name(String type_name) {
this.type_name = type_name;
}
public Integer getIndexType() {
return indexType;
}
public void setIndexType(Integer indexType) {
this.indexType = indexType;
}
public Long getCreatebyId() {
return createbyId;
}
public void setCreatebyId(Long createbyId) {
this.createbyId = createbyId;
}
public Set<DbColumn> getColumns() {
return columns;
}
public void setColumns(Set<DbColumn> columns) {
this.columns = columns;
}
public Long getUpdatebyId() {
return updatebyId;
}
public void setUpdatebyId(Long updatebyId) {
this.updatebyId = updatebyId;
}
public Date getCreateDate() {
return createDate;
}
public void setCreateDate(Date createDate) {
this.createDate = createDate;
}
public Date getUpdateDate() {
return updateDate;
}
public void setUpdateDate(Date updateDate) {
this.updateDate = updateDate;
}
public String getRemark() {
return remark;
}
public void setRemark(String remark) {
this.remark = remark;
}<|fim▁hole|>}<|fim▁end|> | |
<|file_name|>upstart_test.go<|end_file_name|><|fim▁begin|>// Copyright 2012, 2013 Canonical Ltd.
// Licensed under the AGPLv3, see LICENCE file for details.
package upstart_test
import (
"fmt"
"io/ioutil"
"os"
"path/filepath"
"testing"
gc "launchpad.net/gocheck"
jc "launchpad.net/juju-core/testing/checkers"
"launchpad.net/juju-core/testing/testbase"
"launchpad.net/juju-core/upstart"
"launchpad.net/juju-core/utils"
)
func Test(t *testing.T) { gc.TestingT(t) }
type UpstartSuite struct {
testbase.LoggingSuite
testPath string
service *upstart.Service
}
var _ = gc.Suite(&UpstartSuite{})
func (s *UpstartSuite) SetUpTest(c *gc.C) {
s.testPath = c.MkDir()
s.PatchEnvPathPrepend(s.testPath)
s.PatchValue(&upstart.InstallStartRetryAttempts, utils.AttemptStrategy{})
s.service = &upstart.Service{Name: "some-service", InitDir: c.MkDir()}
_, err := os.Create(filepath.Join(s.service.InitDir, "some-service.conf"))
c.Assert(err, gc.IsNil)
}
var checkargs = `
#!/bin/bash --norc
if [ "$1" != "--system" ]; then
exit 255
fi
if [ "$2" != "some-service" ]; then
exit 255
fi
if [ "$3" != "" ]; then
exit 255
fi
`[1:]
// MakeTool writes an executable fake upstart tool called name into the
// suite's temp PATH directory. The script body is prefixed with checkargs,
// which exits 255 unless the tool is invoked as "<tool> --system some-service".
func (s *UpstartSuite) MakeTool(c *gc.C, name, script string) {
path := filepath.Join(s.testPath, name)
err := ioutil.WriteFile(path, []byte(checkargs+script), 0755)
c.Assert(err, gc.IsNil)
}
func (s *UpstartSuite) StoppedStatus(c *gc.C) {
s.MakeTool(c, "status", `echo "some-service stop/waiting"`)
}
func (s *UpstartSuite) RunningStatus(c *gc.C) {
s.MakeTool(c, "status", `echo "some-service start/running, process 123"`)
}
func (s *UpstartSuite) TestInitDir(c *gc.C) {
svc := upstart.NewService("blah")
c.Assert(svc.InitDir, gc.Equals, "/etc/init")
}
func (s *UpstartSuite) TestInstalled(c *gc.C) {
c.Assert(s.service.Installed(), gc.Equals, true)
err := os.Remove(filepath.Join(s.service.InitDir, "some-service.conf"))
c.Assert(err, gc.IsNil)
c.Assert(s.service.Installed(), gc.Equals, false)
}
// TestRunning checks that Service.Running reports true only when the fake
// "status" tool prints a start/running line, and false both for a non-zero
// exit and for unrecognised output.
func (s *UpstartSuite) TestRunning(c *gc.C) {
s.MakeTool(c, "status", "exit 1")
c.Assert(s.service.Running(), gc.Equals, false)
s.MakeTool(c, "status", `echo "GIBBERISH NONSENSE"`)
c.Assert(s.service.Running(), gc.Equals, false)
s.RunningStatus(c)
c.Assert(s.service.Running(), gc.Equals, true)
}
func (s *UpstartSuite) TestStart(c *gc.C) {
s.RunningStatus(c)
s.MakeTool(c, "start", "exit 99")
c.Assert(s.service.Start(), gc.IsNil)
s.StoppedStatus(c)
c.Assert(s.service.Start(), gc.ErrorMatches, ".*exit status 99.*")
s.MakeTool(c, "start", "exit 0")
c.Assert(s.service.Start(), gc.IsNil)
}
func (s *UpstartSuite) TestStop(c *gc.C) {
s.StoppedStatus(c)
s.MakeTool(c, "stop", "exit 99")
c.Assert(s.service.Stop(), gc.IsNil)
s.RunningStatus(c)
c.Assert(s.service.Stop(), gc.ErrorMatches, ".*exit status 99.*")
s.MakeTool(c, "stop", "exit 0")
c.Assert(s.service.Stop(), gc.IsNil)
}
func (s *UpstartSuite) TestRemoveMissing(c *gc.C) {
err := os.Remove(filepath.Join(s.service.InitDir, "some-service.conf"))
c.Assert(err, gc.IsNil)
c.Assert(s.service.StopAndRemove(), gc.IsNil)
}
func (s *UpstartSuite) TestRemoveStopped(c *gc.C) {
s.StoppedStatus(c)
c.Assert(s.service.StopAndRemove(), gc.IsNil)
_, err := os.Stat(filepath.Join(s.service.InitDir, "some-service.conf"))
c.Assert(err, jc.Satisfies, os.IsNotExist)
}
func (s *UpstartSuite) TestRemoveRunning(c *gc.C) {
s.RunningStatus(c)
s.MakeTool(c, "stop", "exit 99")
c.Assert(s.service.StopAndRemove(), gc.ErrorMatches, ".*exit status 99.*")
_, err := os.Stat(filepath.Join(s.service.InitDir, "some-service.conf"))
c.Assert(err, gc.IsNil)
s.MakeTool(c, "stop", "exit 0")
c.Assert(s.service.StopAndRemove(), gc.IsNil)
_, err = os.Stat(filepath.Join(s.service.InitDir, "some-service.conf"))
c.Assert(err, jc.Satisfies, os.IsNotExist)
}
func (s *UpstartSuite) TestStopAndRemove(c *gc.C) {
s.RunningStatus(c)
s.MakeTool(c, "stop", "exit 99")
// StopAndRemove will fail, as it calls stop.
c.Assert(s.service.StopAndRemove(), gc.ErrorMatches, ".*exit status 99.*")
_, err := os.Stat(filepath.Join(s.service.InitDir, "some-service.conf"))
c.Assert(err, gc.IsNil)
// Plain old Remove will succeed.
c.Assert(s.service.Remove(), gc.IsNil)
_, err = os.Stat(filepath.Join(s.service.InitDir, "some-service.conf"))
c.Assert(err, jc.Satisfies, os.IsNotExist)
}
// TestInstallErrors verifies that Conf.Install and Conf.InstallCommands
// both reject an incomplete Conf, reporting each missing mandatory field
// (Name, InitDir, Desc, Cmd) in turn as the fields are filled in.
func (s *UpstartSuite) TestInstallErrors(c *gc.C) {
conf := &upstart.Conf{}
// check asserts that both entry points fail with the same message.
check := func(msg string) {
c.Assert(conf.Install(), gc.ErrorMatches, msg)
_, err := conf.InstallCommands()
c.Assert(err, gc.ErrorMatches, msg)
}
check("missing Name")
conf.Name = "some-service"
check("missing InitDir")
conf.InitDir = c.MkDir()
check("missing Desc")
conf.Desc = "this is an upstart service"
check("missing Cmd")
}
const expectStart = `description "this is an upstart service"
author "Juju Team <[email protected]>"
start on runlevel [2345]
stop on runlevel [!2345]
respawn
normal exit 0
`
func (s *UpstartSuite) dummyConf(c *gc.C) *upstart.Conf {
return &upstart.Conf{
Service: *s.service,
Desc: "this is an upstart service",
Cmd: "do something",
}
}<|fim▁hole|>
cmds, err := conf.InstallCommands()
c.Assert(err, gc.IsNil)
c.Assert(cmds, gc.DeepEquals, []string{
"cat >> " + expectPath + " << 'EOF'\n" + expectContent + "EOF\n",
"start some-service",
})
s.MakeTool(c, "start", "exit 99")
err = conf.Install()
c.Assert(err, gc.ErrorMatches, ".*exit status 99.*")
s.MakeTool(c, "start", "exit 0")
err = conf.Install()
c.Assert(err, gc.IsNil)
content, err := ioutil.ReadFile(expectPath)
c.Assert(err, gc.IsNil)
c.Assert(string(content), gc.Equals, expectContent)
}
func (s *UpstartSuite) TestInstallSimple(c *gc.C) {
conf := s.dummyConf(c)
s.assertInstall(c, conf, "\n\nexec do something\n")
}
func (s *UpstartSuite) TestInstallOutput(c *gc.C) {
conf := s.dummyConf(c)
conf.Out = "/some/output/path"
s.assertInstall(c, conf, "\n\nexec do something >> /some/output/path 2>&1\n")
}
func (s *UpstartSuite) TestInstallEnv(c *gc.C) {
conf := s.dummyConf(c)
conf.Env = map[string]string{"FOO": "bar baz", "QUX": "ping pong"}
s.assertInstall(c, conf, `env FOO="bar baz"
env QUX="ping pong"
exec do something
`)
}
func (s *UpstartSuite) TestInstallLimit(c *gc.C) {
conf := s.dummyConf(c)
conf.Limit = map[string]string{"nofile": "65000 65000", "nproc": "20000 20000"}
s.assertInstall(c, conf, `
limit nofile 65000 65000
limit nproc 20000 20000
exec do something
`)
}
func (s *UpstartSuite) TestInstallAlreadyRunning(c *gc.C) {
pathTo := func(name string) string {
return filepath.Join(s.testPath, name)
}
s.MakeTool(c, "status-stopped", `echo "some-service stop/waiting"`)
s.MakeTool(c, "status-started", `echo "some-service start/running, process 123"`)
s.MakeTool(c, "stop", fmt.Sprintf(
"rm %s; ln -s %s %s",
pathTo("status"), pathTo("status-stopped"), pathTo("status"),
))
s.MakeTool(c, "start", fmt.Sprintf(
"rm %s; ln -s %s %s",
pathTo("status"), pathTo("status-started"), pathTo("status"),
))
err := utils.Symlink(pathTo("status-started"), pathTo("status"))
c.Assert(err, gc.IsNil)
conf := s.dummyConf(c)
err = conf.Install()
c.Assert(err, gc.IsNil)
c.Assert(&conf.Service, jc.Satisfies, (*upstart.Service).Running)
}<|fim▁end|> |
func (s *UpstartSuite) assertInstall(c *gc.C, conf *upstart.Conf, expectEnd string) {
expectContent := expectStart + expectEnd
expectPath := filepath.Join(conf.InitDir, "some-service.conf") |
<|file_name|>component.js<|end_file_name|><|fim▁begin|>import React from 'react';
import { combineReducers } from 'redux';
import { connect } from 'react-redux';<|fim▁hole|>// Count reducer
// Count reducer: a single number, starting at 10, that the 'increment'
// and 'decrement' actions move up or down by one; any other action
// leaves the state untouched.
const count = (state = 10, action) => {
  if (action.type === 'increment') {
    return state + 1;
  }
  if (action.type === 'decrement') {
    return state - 1;
  }
  return state;
};
// Root reducer for the Counter component's store. The only state slice
// is `count`, managed by the count reducer defined above.
export const reducer = combineReducers({
count,
});
// A simple React component
// Counter view: renders the current count plus "inc"/"dec" buttons that
// dispatch the matching actions. Uses React.createElement directly
// (no JSX), same as the rest of this file.
class Counter extends React.Component {
  render() {
    const { count, dispatch } = this.props;
    // Small helper producing one action-dispatching button element.
    const button = (type, label) =>
      React.createElement(
        'button',
        { onClick: () => dispatch({ type }) },
        label
      );
    return React.createElement(
      'div',
      null,
      count,
      button('increment', 'inc'),
      button('decrement', 'dec')
    );
  }
}
export default connect(state => ({...state}))(Counter);<|fim▁end|> | |
<|file_name|>test_openmp.py<|end_file_name|><|fim▁begin|>import unittest
from distutils.errors import CompileError
from pythran.tests import TestFromDir
import os
import pythran
from pythran.syntax import PythranSyntaxError
from pythran.spec import Spec
class TestOpenMP(TestFromDir):
path = os.path.join(os.path.dirname(__file__), "openmp")
class TestOpenMP4(TestFromDir):
path = os.path.join(os.path.dirname(__file__), "openmp.4")
<|fim▁hole|> @staticmethod
def interface(name, file=None):
return Spec({name: []})
@staticmethod
def extract_runas(name, filepath):
return ['#runas {}()'.format(name)]
class TestOpenMPLegacy(TestFromDir):
'''
Test old style OpenMP constructs, not using comments but strings
and relying on function-scope locals
'''
# Directory of legacy-style OpenMP test cases picked up by TestFromDir.
path = os.path.join(os.path.dirname(__file__), "openmp.legacy")
# Each test file exposes a zero-argument entry point, so the pythran
# spec for `name` is an empty argument list.
@staticmethod
def interface(name, file=None):
return Spec({name: []})
# Build the "#runas" directive TestFromDir uses to invoke the test.
@staticmethod
def extract_runas(name, filepath):
return ['#runas {}()'.format(name)]
# only activate OpenMP tests if the underlying compiler supports OpenMP
try:
pythran.compile_cxxcode("omp", '#include <omp.h>',
extra_compile_args=['-fopenmp'],
extra_link_args=['-fopenmp'])
import omp
if '-fopenmp' in pythran.config.cfg.get('compiler', 'ldflags'):
TestOpenMP4.populate(TestOpenMP4)
TestOpenMP.populate(TestOpenMP)
TestOpenMPLegacy.populate(TestOpenMPLegacy)
except PythranSyntaxError:
raise
except (CompileError, ImportError):
pass
if __name__ == '__main__':
unittest.main()<|fim▁end|> | |
<|file_name|>test_truncnorm.py<|end_file_name|><|fim▁begin|>import matplotlib.pyplot as plt
<|fim▁hole|>def test_truncnorm():
alpha0 = [0, 2]
# alpha1 = [2]
p = TruncNorm(alpha0=alpha0, alpha1=None, number_of_alpha_levels=7)
print(p)
print(p.df)
print(p.df.values.tolist())
ref = [[0.0, 0.0, 2.0], [0.16666666666666666, 0.36898774621220265, 1.6310122537877976],
[0.3333333333333333, 0.505893899432985, 1.4941061005670149], [0.5, 0.6075291624853785, 1.3924708375146215],
[0.6666666666666666, 0.6998279866511387, 1.3001720133488615],
[0.8333333333333333, 0.7987198538648325, 1.2012801461351676], [1.0, 1.0, 1.0]]
assert np.allclose(p.df.values.tolist(), ref)
def plot():
alpha0 = [0, 2]
p = TruncNorm(alpha0=alpha0, alpha1=None, number_of_alpha_levels=17, std=(alpha0[1] - alpha0[0]) / 6.)
p.plot(show=True)
plt.show()<|fim▁end|> | import numpy as np
from phuzzy.mpl import TruncNorm
|
<|file_name|>__init__.py<|end_file_name|><|fim▁begin|># Copyright 2013-2015 ARM Limited
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
# pylint: disable=W0613,no-member,attribute-defined-outside-init
"""
Some "standard" instruments to collect additional info about workload execution.
.. note:: The run() method of a Workload may perform some "boilerplate" as well as
the actual execution of the workload (e.g. it may contain UI automation
needed to start the workload). This "boilerplate" execution will also
be measured by these instruments. As such, they are not suitable for collected
precise data about specific operations.
"""
import os
import re
import logging
import time
import tarfile
from itertools import izip, izip_longest
from subprocess import CalledProcessError
from wlauto import Instrument, Parameter
from wlauto.core import signal
from wlauto.exceptions import DeviceError, ConfigError
from wlauto.utils.misc import diff_tokens, write_table, check_output, as_relative
from wlauto.utils.misc import ensure_file_directory_exists as _f
from wlauto.utils.misc import ensure_directory_exists as _d
from wlauto.utils.android import ApkInfo
from wlauto.utils.types import list_of_strings
logger = logging.getLogger(__name__)
class SysfsExtractor(Instrument):
name = 'sysfs_extractor'
description = """
Collects the contest of a set of directories, before and after workload execution
and diffs the result.
"""
mount_command = 'mount -t tmpfs -o size={} tmpfs {}'
extract_timeout = 30
tarname = 'sysfs.tar'
DEVICE_PATH = 0
BEFORE_PATH = 1
AFTER_PATH = 2
DIFF_PATH = 3
parameters = [
Parameter('paths', kind=list_of_strings, mandatory=True,
description="""A list of paths to be pulled from the device. These could be directories
as well as files.""",
global_alias='sysfs_extract_dirs'),
Parameter('use_tmpfs', kind=bool, default=None,
description="""
Specifies whether tmpfs should be used to cache sysfile trees and then pull them down
as a tarball. This is significantly faster then just copying the directory trees from
the device directly, bur requres root and may not work on all devices. Defaults to
``True`` if the device is rooted and ``False`` if it is not.
"""),
Parameter('tmpfs_mount_point', default=None,
description="""Mount point for tmpfs partition used to store snapshots of paths."""),
Parameter('tmpfs_size', default='32m',
description="""Size of the tempfs partition."""),
]
def initialize(self, context):
if not self.device.is_rooted and self.use_tmpfs: # pylint: disable=access-member-before-definition
raise ConfigError('use_tempfs must be False for an unrooted device.')
elif self.use_tmpfs is None: # pylint: disable=access-member-before-definition
self.use_tmpfs = self.device.is_rooted
if self.use_tmpfs:<|fim▁hole|> self.on_device_before = self.device.path.join(self.tmpfs_mount_point, 'before')
self.on_device_after = self.device.path.join(self.tmpfs_mount_point, 'after')
if not self.device.file_exists(self.tmpfs_mount_point):
self.device.execute('mkdir -p {}'.format(self.tmpfs_mount_point), as_root=True)
self.device.execute(self.mount_command.format(self.tmpfs_size, self.tmpfs_mount_point),
as_root=True)
def setup(self, context):
before_dirs = [
_d(os.path.join(context.output_directory, 'before', self._local_dir(d)))
for d in self.paths
]
after_dirs = [
_d(os.path.join(context.output_directory, 'after', self._local_dir(d)))
for d in self.paths
]
diff_dirs = [
_d(os.path.join(context.output_directory, 'diff', self._local_dir(d)))
for d in self.paths
]
self.device_and_host_paths = zip(self.paths, before_dirs, after_dirs, diff_dirs)
if self.use_tmpfs:
for d in self.paths:
before_dir = self.device.path.join(self.on_device_before,
self.device.path.dirname(as_relative(d)))
after_dir = self.device.path.join(self.on_device_after,
self.device.path.dirname(as_relative(d)))
if self.device.file_exists(before_dir):
self.device.execute('rm -rf {}'.format(before_dir), as_root=True)
self.device.execute('mkdir -p {}'.format(before_dir), as_root=True)
if self.device.file_exists(after_dir):
self.device.execute('rm -rf {}'.format(after_dir), as_root=True)
self.device.execute('mkdir -p {}'.format(after_dir), as_root=True)
def slow_start(self, context):
if self.use_tmpfs:
for d in self.paths:
dest_dir = self.device.path.join(self.on_device_before, as_relative(d))
if '*' in dest_dir:
dest_dir = self.device.path.dirname(dest_dir)
self.device.execute('{} cp -Hr {} {}'.format(self.device.busybox, d, dest_dir),
as_root=True, check_exit_code=False)
else: # not rooted
for dev_dir, before_dir, _, _ in self.device_and_host_paths:
self.device.pull_file(dev_dir, before_dir)
def slow_stop(self, context):
if self.use_tmpfs:
for d in self.paths:
dest_dir = self.device.path.join(self.on_device_after, as_relative(d))
if '*' in dest_dir:
dest_dir = self.device.path.dirname(dest_dir)
self.device.execute('{} cp -Hr {} {}'.format(self.device.busybox, d, dest_dir),
as_root=True, check_exit_code=False)
else: # not using tmpfs
for dev_dir, _, after_dir, _ in self.device_and_host_paths:
self.device.pull_file(dev_dir, after_dir)
def update_result(self, context):
if self.use_tmpfs:
on_device_tarball = self.device.path.join(self.device.working_directory, self.tarname)
on_host_tarball = self.device.path.join(context.output_directory, self.tarname + ".gz")
self.device.execute('{} tar cf {} -C {} .'.format(self.device.busybox,
on_device_tarball,
self.tmpfs_mount_point),
as_root=True)
self.device.execute('chmod 0777 {}'.format(on_device_tarball), as_root=True)
self.device.execute('{} gzip {}'.format(self.device.busybox,
on_device_tarball))
self.device.pull_file(on_device_tarball + ".gz", on_host_tarball)
with tarfile.open(on_host_tarball, 'r:gz') as tf:
tf.extractall(context.output_directory)
self.device.delete_file(on_device_tarball + ".gz")
os.remove(on_host_tarball)
for paths in self.device_and_host_paths:
after_dir = paths[self.AFTER_PATH]
dev_dir = paths[self.DEVICE_PATH].strip('*') # remove potential trailing '*'
if (not os.listdir(after_dir) and
self.device.file_exists(dev_dir) and
self.device.listdir(dev_dir)):
self.logger.error('sysfs files were not pulled from the device.')
self.device_and_host_paths.remove(paths) # Path is removed to skip diffing it
for _, before_dir, after_dir, diff_dir in self.device_and_host_paths:
_diff_sysfs_dirs(before_dir, after_dir, diff_dir)
def teardown(self, context):
self._one_time_setup_done = []
def finalize(self, context):
if self.use_tmpfs:
try:
self.device.execute('umount {}'.format(self.tmpfs_mount_point), as_root=True)
except (DeviceError, CalledProcessError):
# assume a directory but not mount point
pass
self.device.execute('rm -rf {}'.format(self.tmpfs_mount_point),
as_root=True, check_exit_code=False)
def validate(self):
if not self.tmpfs_mount_point: # pylint: disable=access-member-before-definition
self.tmpfs_mount_point = self.device.path.join(self.device.working_directory, 'temp-fs')
def _local_dir(self, directory):
return os.path.dirname(as_relative(directory).replace(self.device.path.sep, os.sep))
class ExecutionTimeInstrument(Instrument):
# Records wall-clock time around workload execution by hooking the
# BEFORE/AFTER_WORKLOAD_EXECUTION signals, then reports it as the
# "execution_time" metric.
name = 'execution_time'
description = """
Measure how long it took to execute the run() methods of a Workload.
"""
# Signal-dispatch priority for the start/stop handlers below.
priority = 15
def __init__(self, device, **kwargs):
super(ExecutionTimeInstrument, self).__init__(device, **kwargs)
self.start_time = None
self.end_time = None
def on_run_start(self, context):
# Register timestamp capture immediately around workload execution.
signal.connect(self.get_start_time, signal.BEFORE_WORKLOAD_EXECUTION, priority=self.priority)
signal.connect(self.get_stop_time, signal.AFTER_WORKLOAD_EXECUTION, priority=self.priority)
def get_start_time(self, context):
self.start_time = time.time()
def get_stop_time(self, context):
self.end_time = time.time()
def update_result(self, context):
# Assumes both signals fired; start/end would otherwise be None.
execution_time = self.end_time - self.start_time
context.result.add_metric('execution_time', execution_time, 'seconds')
class ApkVersion(Instrument):
# Deprecated instrument: records the workload APK's version name as a
# metric. Superseded by automatic "apk_version" result classifiers.
name = 'apk_version'
description = """
(DEPRECATED) Extracts APK versions for workloads that have them.
This instrument is deprecated and should not be used. It will be removed in
future versions of Workload Automation.
Versions of Android packages are now automatically attached to the results as
"apk_version" classfiers.
"""
def __init__(self, device, **kwargs):
super(ApkVersion, self).__init__(device, **kwargs)
self.apk_info = None
def setup(self, context):
# Only APK-based workloads expose apk_file; others are skipped.
if hasattr(context.workload, 'apk_file'):
self.apk_info = ApkInfo(context.workload.apk_file)
else:
self.apk_info = None
def update_result(self, context):
if self.apk_info:
context.result.add_metric(self.name, self.apk_info.version_name)
class InterruptStatsInstrument(Instrument):
# Snapshots /proc/interrupts before and after the workload and writes a
# per-interrupt diff into the output directory.
name = 'interrupts'
description = """
Pulls the ``/proc/interrupts`` file before and after workload execution and diffs them
to show what interrupts occurred during that time.
"""
def __init__(self, device, **kwargs):
super(InterruptStatsInstrument, self).__init__(device, **kwargs)
self.before_file = None
self.after_file = None
self.diff_file = None
def setup(self, context):
# Output paths mirror the device path under before/, after/ and diff/.
self.before_file = os.path.join(context.output_directory, 'before', 'proc', 'interrupts')
self.after_file = os.path.join(context.output_directory, 'after', 'proc', 'interrupts')
self.diff_file = os.path.join(context.output_directory, 'diff', 'proc', 'interrupts')
def start(self, context):
# _f ensures the parent directory exists before writing.
with open(_f(self.before_file), 'w') as wfh:
wfh.write(self.device.execute('cat /proc/interrupts'))
def stop(self, context):
with open(_f(self.after_file), 'w') as wfh:
wfh.write(self.device.execute('cat /proc/interrupts'))
def update_result(self, context):
# If workload execution failed, the after_file may not have been created.
if os.path.isfile(self.after_file):
_diff_interrupt_files(self.before_file, self.after_file, _f(self.diff_file))
class DynamicFrequencyInstrument(SysfsExtractor):
# Specialisation of SysfsExtractor that captures cpufreq/devfreq sysfs
# trees; paths are fixed here rather than user-supplied.
name = 'cpufreq'
description = """
Collects dynamic frequency (DVFS) settings before and after workload execution.
"""
tarname = 'cpufreq.tar'
parameters = [
# paths is inherited but no longer mandatory, since setup() fills it in.
Parameter('paths', mandatory=False, override=True),
]
def setup(self, context):
self.paths = ['/sys/devices/system/cpu']
if self.use_tmpfs:
self.paths.append('/sys/class/devfreq/*') # the '*' would cause problems for adb pull.
super(DynamicFrequencyInstrument, self).setup(context)
def validate(self):
# temp-fs would have been set in super's validate, if not explicitly specified.
if not self.tmpfs_mount_point.endswith('-cpufreq'): # pylint: disable=access-member-before-definition
self.tmpfs_mount_point += '-cpufreq'
def _diff_interrupt_files(before, after, result): # pylint: disable=R0914
"""Write a token-level diff of two /proc/interrupts snapshots to result.

Rows are matched by their first token (the interrupt label); rows that
only appear in the after file are emitted prefixed with '>'. Values in
matched rows are diffed with diff_tokens(). Python 2 code (izip,
file.next()).
"""
output_lines = []
with open(before) as bfh:
with open(after) as ofh:
# Iterate both files in lockstep; the inner while advances only the
# after file until its row label matches the current before row.
for bline, aline in izip(bfh, ofh):
bchunks = bline.strip().split()
while True:
achunks = aline.strip().split()
if achunks[0] == bchunks[0]:
# Matching row: leading '' leaves room for the row label column.
diffchunks = ['']
diffchunks.append(achunks[0])
diffchunks.extend([diff_tokens(b, a) for b, a
in zip(bchunks[1:], achunks[1:])])
output_lines.append(diffchunks)
break
else: # new category appeared in the after file
diffchunks = ['>'] + achunks
output_lines.append(diffchunks)
try:
aline = ofh.next()
except StopIteration:
break
# Offset heading columns by one to allow for row labels on subsequent
# lines.
output_lines[0].insert(0, '')
# Any "columns" that do not have headings in the first row are not actually
# columns -- they are a single column where space-spearated words got
# split. Merge them back together to prevent them from being
# column-aligned by write_table.
table_rows = [output_lines[0]]
num_cols = len(output_lines[0])
for row in output_lines[1:]:
table_row = row[:num_cols]
table_row.append(' '.join(row[num_cols:]))
table_rows.append(table_row)
with open(result, 'w') as wfh:
write_table(table_rows, wfh)
def _diff_sysfs_dirs(before, after, result):  # pylint: disable=R0914
    """Diff two pulled sysfs trees and write per-file token diffs.

    For every file under ``before``, the file at the same relative path
    under ``after`` is diffed line-by-line with diff_tokens(), and the
    output written to the matching path under ``result``. Files missing
    from ``after`` are skipped with a debug message. Python 2 code
    (os.path.walk, izip_longest from itertools as izip_longest).
    """
    before_files = []
    # Python 2 os.path.walk: accumulate every path under `before`.
    os.path.walk(before,
                 lambda arg, dirname, names: arg.extend([os.path.join(dirname, f) for f in names]),
                 before_files
                 )
    before_files = filter(os.path.isfile, before_files)
    files = [os.path.relpath(f, before) for f in before_files]
    after_files = [os.path.join(after, f) for f in files]
    diff_files = [os.path.join(result, f) for f in files]
    for bfile, afile, dfile in zip(before_files, after_files, diff_files):
        if not os.path.isfile(afile):
            logger.debug('sysfs_diff: {} does not exist or is not a file'.format(afile))
            continue
        with open(bfile) as bfh, open(afile) as afh:  # pylint: disable=C0321
            # _f ensures the diff file's parent directory exists.
            with open(_f(dfile), 'w') as dfh:
                for i, (bline, aline) in enumerate(izip_longest(bfh, afh), 1):
                    if aline is None:
                        logger.debug('Lines missing from {}'.format(afile))
                        break
                    # Split on non-word runs, keeping the separators so the
                    # diffed line can be reassembled verbatim.
                    bchunks = re.split(r'(\W+)', bline)
                    achunks = re.split(r'(\W+)', aline)
                    if len(bchunks) != len(achunks):
                        logger.debug('Token length mismatch in {} on line {}'.format(bfile, i))
                        dfh.write('xxx ' + bline)
                        continue
                    if ((len([c for c in bchunks if c.strip()]) == len([c for c in achunks if c.strip()]) == 2) and
                            (bchunks[0] == achunks[0])):
                        # if there are only two columns and the first column is the
                        # same, assume it's a "header" column and do not diff it.
                        dchunks = [bchunks[0]] + [diff_tokens(b, a) for b, a in zip(bchunks[1:], achunks[1:])]
                    else:
                        dchunks = [diff_tokens(b, a) for b, a in zip(bchunks, achunks)]
                    dfh.write(''.join(dchunks))
<|file_name|>__init__.py<|end_file_name|><|fim▁begin|># -*- coding: utf8 -*-
#
# Copyright (C) 2017 NDP Systèmes (<http://www.ndp-systemes.fr>).
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#<|fim▁hole|># GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
from . import stock_procurement_split<|fim▁end|> | # This program is distributed in the hope that it will be useful,
#
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the |
<|file_name|>Abilities.js<|end_file_name|><|fim▁begin|>import React from 'react';
import Wrapper from 'common/Wrapper';
import SPELLS from 'common/SPELLS';
import SpellLink from 'common/SpellLink';
import CoreAbilities from 'Parser/Core/Modules/Abilities';
import ISSUE_IMPORTANCE from 'Parser/Core/ISSUE_IMPORTANCE';
class Abilities extends CoreAbilities {
spellbook() {
const combatant = this.combatants.selected;
return [
// Rotational Spells
{
spell: [SPELLS.NEW_MOON, SPELLS.HALF_MOON, SPELLS.FULL_MOON],
category: Abilities.SPELL_CATEGORIES.ROTATIONAL,
cooldown: 15,
isOnGCD: true,
charges: 3,
castEfficiency: {
suggestion: true,
recommendedEfficiency: 0.95,
averageIssueEfficiency: 0.9,
majorIssueEfficiency: 0.85,
extraSuggestion: (
<Wrapper>
Your <SpellLink id={SPELLS.NEW_MOON.id} />, <SpellLink id={SPELLS.HALF_MOON.id} /> and <SpellLink id={SPELLS.FULL_MOON.id} /> cast efficiency can be improved, try keeping yourself at low Moon charges at all times; you should (almost) never be at max (3) charges.
</Wrapper>
),
},
timelineSortIndex: 1,
},
{
spell: SPELLS.STARSURGE_MOONKIN,
category: Abilities.SPELL_CATEGORIES.ROTATIONAL,
isOnGCD: true,
timelineSortIndex: 2,
},
{
spell: SPELLS.STARFALL_CAST,
category: Abilities.SPELL_CATEGORIES.ROTATIONAL,
isOnGCD: true,
timelineSortIndex: 3,
},
{
spell: SPELLS.SOLAR_WRATH_MOONKIN,
category: Abilities.SPELL_CATEGORIES.ROTATIONAL,
isOnGCD: true,
timelineSortIndex: 4,
},
{
spell: SPELLS.LUNAR_STRIKE,
category: Abilities.SPELL_CATEGORIES.ROTATIONAL,
isOnGCD: true,
timelineSortIndex: 5,
},
{
spell: SPELLS.MOONFIRE,
category: Abilities.SPELL_CATEGORIES.ROTATIONAL,
isOnGCD: true,
timelineSortIndex: 6,
},
{
spell: SPELLS.SUNFIRE_CAST,
category: Abilities.SPELL_CATEGORIES.ROTATIONAL,
isOnGCD: true,
timelineSortIndex: 7,
},
{
spell: SPELLS.STELLAR_FLARE_TALENT,
category: Abilities.SPELL_CATEGORIES.ROTATIONAL,
enabled: combatant.hasTalent(SPELLS.STELLAR_FLARE_TALENT.id),
isOnGCD: true,
timelineSortIndex: 8,
},
// Cooldowns
{
spell: SPELLS.INCARNATION_CHOSEN_OF_ELUNE_TALENT,
category: Abilities.SPELL_CATEGORIES.COOLDOWNS,
cooldown: 180,
enabled: combatant.hasTalent(SPELLS.INCARNATION_CHOSEN_OF_ELUNE_TALENT.id),<|fim▁hole|> castEfficiency: {
suggestion: true,
recommendedEfficiency: 0.9,
},
timelineSortIndex: 9,
},
{
spell: SPELLS.CELESTIAL_ALIGNMENT,
category: Abilities.SPELL_CATEGORIES.COOLDOWNS,
cooldown: 180,
enabled: !combatant.hasTalent(SPELLS.INCARNATION_CHOSEN_OF_ELUNE_TALENT.id),
castEfficiency: {
suggestion: true,
recommendedEfficiency: 0.9,
},
timelineSortIndex: 9,
},
{
spell: SPELLS.WARRIOR_OF_ELUNE_TALENT,
category: Abilities.SPELL_CATEGORIES.COOLDOWNS,
cooldown: 48,
enabled: combatant.hasTalent(SPELLS.WARRIOR_OF_ELUNE_TALENT.id),
castEfficiency: {
suggestion: true,
recommendedEfficiency: 0.9,
},
timelineSortIndex: 10,
},
{
spell: SPELLS.FORCE_OF_NATURE_TALENT,
category: Abilities.SPELL_CATEGORIES.COOLDOWNS,
cooldown: 60,
enabled: combatant.hasTalent(SPELLS.FORCE_OF_NATURE_TALENT.id),
isOnGCD: true,
castEfficiency: {
suggestion: true,
recommendedEfficiency: 0.9,
},
timelineSortIndex: 10,
},
{
spell: SPELLS.ASTRAL_COMMUNION_TALENT,
category: Abilities.SPELL_CATEGORIES.COOLDOWNS,
cooldown: 80,
enabled: combatant.hasTalent(SPELLS.ASTRAL_COMMUNION_TALENT.id),
castEfficiency: {
suggestion: true,
recommendedEfficiency: 0.9,
},
timelineSortIndex: 11,
},
//Utility
{
spell: SPELLS.INNERVATE,
category: Abilities.SPELL_CATEGORIES.UTILITY,
cooldown: 180,
castEfficiency: {
suggestion: true,
recommendedEfficiency: 0.70,
averageIssueEfficiency: 0.50,
majorIssueEfficiency: 0.30,
},
timelineSortIndex: 12,
},
{
spell: SPELLS.BARKSKIN,
category: Abilities.SPELL_CATEGORIES.UTILITY,
cooldown: 60,
castEfficiency: {
suggestion: true,
recommendedEfficiency: 0.6,
importance: ISSUE_IMPORTANCE.MINOR,
},
timelineSortIndex: 13,
},
{
spell: SPELLS.RENEWAL_TALENT,
category: Abilities.SPELL_CATEGORIES.UTILITY,
cooldown: 90,
enabled: combatant.hasTalent(SPELLS.RENEWAL_TALENT.id),
timelineSortIndex: 14,
},
{
spell: SPELLS.DISPLACER_BEAST_TALENT,
category: Abilities.SPELL_CATEGORIES.UTILITY,
cooldown: 30,
enabled: combatant.hasTalent(SPELLS.DISPLACER_BEAST_TALENT.id),
isOnGCD: true,
},
{
spell: [SPELLS.WILD_CHARGE_TALENT, SPELLS.WILD_CHARGE_MOONKIN, SPELLS.WILD_CHARGE_CAT, SPELLS.WILD_CHARGE_BEAR, SPELLS.WILD_CHARGE_TRAVEL],
category: Abilities.SPELL_CATEGORIES.UTILITY,
cooldown: 15,
enabled: combatant.hasTalent(SPELLS.WILD_CHARGE_TALENT.id),
},
{
spell: SPELLS.MIGHTY_BASH_TALENT,
category: Abilities.SPELL_CATEGORIES.UTILITY,
cooldown: 50,
enabled: combatant.hasTalent(SPELLS.MIGHTY_BASH_TALENT.id),
isOnGCD: true,
},
{
spell: SPELLS.MASS_ENTANGLEMENT_TALENT,
category: Abilities.SPELL_CATEGORIES.UTILITY,
cooldown: 30,
enabled: combatant.hasTalent(SPELLS.MASS_ENTANGLEMENT_TALENT.id),
isOnGCD: true,
},
{
spell: SPELLS.TYPHOON,
category: Abilities.SPELL_CATEGORIES.UTILITY,
cooldown: 30,
enabled: combatant.hasTalent(SPELLS.TYPHOON_TALENT.id),
isOnGCD: true,
},
{
spell: SPELLS.ENTANGLING_ROOTS,
category: Abilities.SPELL_CATEGORIES.UTILITY,
isOnGCD: true,
},
{
spell: SPELLS.DASH,
category: Abilities.SPELL_CATEGORIES.UTILITY,
cooldown: 180,
isOnGCD: true, //It is not on the GCD if already in catform. Pretty low prio to fix since you can't cast anything meaning full in catform anyway.
},
{
spell: SPELLS.SOLAR_BEAM,
category: Abilities.SPELL_CATEGORIES.UTILITY,
cooldown: 45,
},
{
spell: SPELLS.REMOVE_CORRUPTION,
category: Abilities.SPELL_CATEGORIES.UTILITY,
cooldown: 8,
isOnGCD: true,
},
{
spell: SPELLS.REBIRTH,
category: Abilities.SPELL_CATEGORIES.UTILITY,
isOnGCD: true,
},
{
spell: SPELLS.GROWL,
category: Abilities.SPELL_CATEGORIES.UTILITY,
cooldown: 8,
},
{
spell: SPELLS.BEAR_FORM,
category: Abilities.SPELL_CATEGORIES.UTILITY,
isOnGCD: true,
},
{
spell: SPELLS.CAT_FORM,
category: Abilities.SPELL_CATEGORIES.UTILITY,
isOnGCD: true,
},
{
spell: SPELLS.MOONKIN_FORM,
category: Abilities.SPELL_CATEGORIES.UTILITY,
isOnGCD: true,
},
{
spell: SPELLS.TRAVEL_FORM,
category: Abilities.SPELL_CATEGORIES.UTILITY,
isOnGCD: true,
},
{
spell: SPELLS.REGROWTH,
category: Abilities.SPELL_CATEGORIES.UTILITY,
isOnGCD: true,
},
{
spell: SPELLS.FRENZIED_REGENERATION,
category: Abilities.SPELL_CATEGORIES.UTILITY,
enabled: combatant.hasTalent(SPELLS.GUARDIAN_AFFINITY_TALENT_SHARED.id),
},
{
spell: SPELLS.SWIFTMEND,
category: Abilities.SPELL_CATEGORIES.UTILITY,
enabled: combatant.hasTalent(SPELLS.RESTORATION_AFFINITY_TALENT.id),
isOnGCD: true,
},
{
spell: SPELLS.REJUVENATION,
category: Abilities.SPELL_CATEGORIES.UTILITY,
enabled: combatant.hasTalent(SPELLS.RESTORATION_AFFINITY_TALENT.id),
isOnGCD: true,
},
{
spell: SPELLS.SWIFTMEND,
category: Abilities.SPELL_CATEGORIES.UTILITY,
enabled: combatant.hasTalent(SPELLS.RESTORATION_AFFINITY_TALENT.id),
isOnGCD: true,
},
];
}
}
export default Abilities;<|fim▁end|> | |
<|file_name|>base.py<|end_file_name|><|fim▁begin|>__author__ = 'Robbert Harms'
__date__ = "2015-04-23"
__maintainer__ = "Robbert Harms"
__email__ = "[email protected]"
class DVS(object):
    """Container for a DVS file: header comments plus direction tables."""

    def __init__(self, comments, dvs_tables):
        """Create a new DVS object.

        Args:
            comments (str): the comment block found on top of the file
            dvs_tables (list of DVSDirectionTable): the direction tables

        Attributes:
            comments (str): the comment block found on top of the file
            dvs_tables (list of DVSDirectionTable): the direction tables
        """
        self.comments = comments
        self.dvs_tables = dvs_tables

    def get_file_string(self, windows_line_endings=True):
        """Render the complete file representation of this DVS.

        Args:
            windows_line_endings (boolean): if True every \n is prefixed
                with \r in the returned string
        """
        tables = "\n".join(tbl.get_file_string(windows_line_endings=False)
                           for tbl in self.dvs_tables)
        rendered = self.comments + "\n" + tables
        if windows_line_endings:
            rendered = rendered.replace("\n", "\r\n")
        return rendered

    def get_overview_representation(self):
        """Return a short human readable summary of the contained tables."""
        lines = ['Nmr tables: {}'.format(len(self.dvs_tables))]
        for idx, tbl in enumerate(self.dvs_tables):
            lines.append('Table {}: {} directions'.format(idx, tbl.table.shape[0]))
        return "\n".join(lines) + "\n"
class DVSDirectionTable(object):
    """A single gradient direction table from a DVS file."""

    def __init__(self, table, comments='', coordinate_system='xyz', normalisation='none'):
        """A representation of a direction table.

        Args:
            table (ndarray): the actual table, one direction per row
            comments (str): the comment lines above this table
            coordinate_system (str): the coordinate system (for example 'xyz')
            normalisation (str): the normalisation definition (normally 'none')
        """
        self.table = table
        self.comments = comments
        self.coordinate_system = coordinate_system
        self.normalisation = normalisation

    def get_file_string(self, windows_line_endings=True):
        """Render the complete file representation of this table.

        Args:
            windows_line_endings (boolean): if True every \n is prefixed
                with \r in the returned string
        """
        parts = [self.comments,
                 '[directions={}]'.format(self.table.shape[0]) + "\n",
                 'CoordinateSystem = {}'.format(self.coordinate_system) + "\n",
                 'Normalisation = {}'.format(self.normalisation) + "\n"]
        for row in range(self.table.shape[0]):
            parts.append('Vector[{0}] = ( {1}, {2}, {3} )'.format(row, *self.table[row, :]) + "\n")
        rendered = ''.join(parts)
        if windows_line_endings:
            rendered = rendered.replace("\n", "\r\n")
        return rendered
<|file_name|>pin.py<|end_file_name|><|fim▁begin|>""" This test need a set of pins which can be set as inputs and have no external
pull up or pull down connected.
"""
from machine import Pin
import os
mch = os.uname().machine
if 'LaunchPad' in mch:
pin_map = ['GP24', 'GP12', 'GP14', 'GP15', 'GP16', 'GP17', 'GP28', 'GP8', 'GP6', 'GP30', 'GP31', 'GP3', 'GP0', 'GP4', 'GP5']
max_af_idx = 15
elif 'WiPy' in mch:
pin_map = ['GP23', 'GP24', 'GP12', 'GP13', 'GP14', 'GP9', 'GP17', 'GP28', 'GP22', 'GP8', 'GP30', 'GP31', 'GP0', 'GP4', 'GP5']
max_af_idx = 15
else:
raise Exception('Board not supported!')
def test_noinit():
    """Reading a pin that was never explicitly configured must not crash."""
    for p in pin_map:
        pin = Pin(p)
        pin.value()
def test_pin_read(pull):
    """Configure every pin as an input with the given pull, then read values."""
    # enable the pull resistor on all pins, then read the value
    for p in pin_map:
        pin = Pin(p, mode=Pin.IN, pull=pull)
    for p in pin_map:
        # NOTE(review): `pin` is the last pin constructed by the loop above,
        # so the same pin is printed repeatedly here — confirm whether
        # Pin(p) was intended inside this loop.
        print(pin())
def test_pin_af():
    """Cycle every pin through all of its supported alternate functions."""
    for p in pin_map:
        for af in Pin(p).alt_list():
            # af is (name, index); only indexes up to the board maximum apply
            if af[1] <= max_af_idx:
                Pin(p, mode=Pin.ALT, alt=af[1])
                Pin(p, mode=Pin.ALT_OPEN_DRAIN, alt=af[1])
# test un-initialized pins
test_noinit()
# test with pull-up and pull-down
test_pin_read(Pin.PULL_UP)
test_pin_read(Pin.PULL_DOWN)
# test all constructor combinations
pin = Pin(pin_map[0])
pin = Pin(pin_map[0], mode=Pin.IN)
pin = Pin(pin_map[0], mode=Pin.OUT)
pin = Pin(pin_map[0], mode=Pin.IN, pull=Pin.PULL_DOWN)
pin = Pin(pin_map[0], mode=Pin.IN, pull=Pin.PULL_UP)
pin = Pin(pin_map[0], mode=Pin.OPEN_DRAIN, pull=Pin.PULL_UP)
pin = Pin(pin_map[0], mode=Pin.OUT, pull=Pin.PULL_DOWN)
pin = Pin(pin_map[0], mode=Pin.OUT, pull=None)
pin = Pin(pin_map[0], mode=Pin.OUT, pull=Pin.PULL_UP)
pin = Pin(pin_map[0], mode=Pin.OUT, pull=Pin.PULL_UP, drive=pin.LOW_POWER)
pin = Pin(pin_map[0], mode=Pin.OUT, pull=Pin.PULL_UP, drive=pin.MED_POWER)
pin = Pin(pin_map[0], mode=Pin.OUT, pull=Pin.PULL_UP, drive=pin.HIGH_POWER)
pin = Pin(pin_map[0], mode=Pin.OUT, drive=pin.LOW_POWER)
pin = Pin(pin_map[0], Pin.OUT, Pin.PULL_DOWN)
pin = Pin(pin_map[0], Pin.ALT, Pin.PULL_UP)
pin = Pin(pin_map[0], Pin.ALT_OPEN_DRAIN, Pin.PULL_UP)
test_pin_af() # try the entire af range on all pins
# test pin init and printing
pin = Pin(pin_map[0])
pin.init(mode=Pin.IN)
print(pin)
pin.init(Pin.IN, Pin.PULL_DOWN)
print(pin)
pin.init(mode=Pin.OUT, pull=Pin.PULL_UP, drive=pin.LOW_POWER)
print(pin)
pin.init(mode=Pin.OUT, pull=Pin.PULL_UP, drive=pin.HIGH_POWER)
print(pin)
# test value in OUT mode
pin = Pin(pin_map[0], mode=Pin.OUT)
pin.value(0)
pin.toggle() # test toggle
print(pin())
pin.toggle() # test toggle again
print(pin())
# test different value settings
pin(1)
print(pin.value())
pin(0)
print(pin.value())
pin.value(1)
print(pin())
pin.value(0)
print(pin())
# test all getters and setters
pin = Pin(pin_map[0], mode=Pin.OUT)
# mode
print(pin.mode() == Pin.OUT)
pin.mode(Pin.IN)
print(pin.mode() == Pin.IN)
# pull
pin.pull(None)
print(pin.pull() == None)
pin.pull(Pin.PULL_DOWN)
print(pin.pull() == Pin.PULL_DOWN)
# drive
pin.drive(Pin.MED_POWER)
print(pin.drive() == Pin.MED_POWER)
pin.drive(Pin.HIGH_POWER)
print(pin.drive() == Pin.HIGH_POWER)
# id
print(pin.id() == pin_map[0])
# all the next ones MUST raise
try:
pin = Pin(pin_map[0], mode=Pin.OUT, pull=Pin.PULL_UP, drive=pin.IN) # incorrect drive value
except Exception:
print('Exception')
try:
pin = Pin(pin_map[0], mode=Pin.LOW_POWER, pull=Pin.PULL_UP) # incorrect mode value
except Exception:
print('Exception')
try:
pin = Pin(pin_map[0], mode=Pin.IN, pull=Pin.HIGH_POWER) # incorrect pull value
except Exception:
print('Exception')
try:
pin = Pin('A0', Pin.OUT, Pin.PULL_DOWN) # incorrect pin id
except Exception:
print('Exception')
try:
pin = Pin(pin_map[0], Pin.IN, Pin.PULL_UP, alt=0) # af specified in GPIO mode<|fim▁hole|> print('Exception')
try:
pin = Pin(pin_map[0], Pin.OUT, Pin.PULL_UP, alt=7) # af specified in GPIO mode
except Exception:
print('Exception')
try:
pin = Pin(pin_map[0], Pin.ALT, Pin.PULL_UP, alt=0) # incorrect af
except Exception:
print('Exception')
try:
pin = Pin(pin_map[0], Pin.ALT_OPEN_DRAIN, Pin.PULL_UP, alt=-1) # incorrect af
except Exception:
print('Exception')
try:
pin = Pin(pin_map[0], Pin.ALT_OPEN_DRAIN, Pin.PULL_UP, alt=16) # incorrect af
except Exception:
print('Exception')
try:
pin.mode(Pin.PULL_UP) # incorrect pin mode
except Exception:
print('Exception')
try:
pin.pull(Pin.OUT) # incorrect pull
except Exception:
print('Exception')
try:
pin.drive(Pin.IN) # incorrect drive strength
except Exception:
print('Exception')
try:
pin.id('ABC') # id cannot be set
except Exception:
print('Exception')<|fim▁end|> | except Exception: |
<|file_name|>base.py<|end_file_name|><|fim▁begin|>"""
Django settings for lwc project.
For more information on this file, see
https://docs.djangoproject.com/en/1.6/topics/settings/
For the full list of settings and their values, see
https://docs.djangoproject.com/en/1.6/ref/settings/
"""
# Build paths inside the project like this: os.path.join(BASE_DIR, ...)
import os
BASE_DIR = os.path.dirname(os.path.dirname(os.path.dirname(__file__)))
# Quick-start development settings - unsuitable for production
# See https://docs.djangoproject.com/en/1.6/howto/deployment/checklist/
# SECURITY WARNING: keep the secret key used in production secret!
SECRET_KEY = '1&pbr@s*=_81p1qsdo&o)c_q-^a&lgaojj!6l^-_1^ne$ffql8'
# SECURITY WARNING: don't run with debug turned on in production!
DEBUG = True
TEMPLATE_DEBUG = True
ALLOWED_HOSTS = []
# Application definition
INSTALLED_APPS = (
'django.contrib.admin',
'django.contrib.auth',
'django.contrib.contenttypes',
'django.contrib.sessions',
'django.contrib.messages',
'django.contrib.staticfiles',
'south',
'joins',
)
MIDDLEWARE_CLASSES = (
'django.contrib.sessions.middleware.SessionMiddleware',
'django.middleware.common.CommonMiddleware',
'django.middleware.csrf.CsrfViewMiddleware',
'django.contrib.auth.middleware.AuthenticationMiddleware',
'django.contrib.messages.middleware.MessageMiddleware',
'django.middleware.clickjacking.XFrameOptionsMiddleware',
'lwc.middleware.ReferMiddleware',
)
ROOT_URLCONF = 'lwc.urls'
WSGI_APPLICATION = 'lwc.wsgi.application'
# Database
# https://docs.djangoproject.com/en/1.6/ref/settings/#databases<|fim▁hole|>DATABASES = {
'default': {
'ENGINE': 'django.db.backends.sqlite3',
'NAME': os.path.join(BASE_DIR, 'db.sqlite3'),
}
}
# Internationalization
# https://docs.djangoproject.com/en/1.6/topics/i18n/
LANGUAGE_CODE = 'en-us'
TIME_ZONE = 'UTC'
USE_I18N = True
USE_L10N = True
USE_TZ = True
#SHARE_URL = "http://launchwithcode.com/?ref="
SHARE_URL = "http://127.0.0.1:8000/?ref="
TEMPLATE_DIRS = (
os.path.join(BASE_DIR, 'templates')
#BASE_DIR + "/templates/",
#'/Users/jmitch/Desktop/lwc/src/templates/',
)
# Static files (CSS, JavaScript, Images)
# https://docs.djangoproject.com/en/1.6/howto/static-files/
STATIC_URL = '/static/'
#STATIC_ROOT = '/Users/jmitch/desktop/lwc/src/static/static_root/'
STATIC_ROOT = os.path.join(BASE_DIR, 'static', 'static_root')
STATICFILES_DIRS = (
os.path.join(BASE_DIR, 'static', 'static_dirs'),
#'/Users/jmitch/desktop/lwc/src/static/static_dirs/',
# '/Users/jmitch/desktop/lwc/src/static/static_dirs/',
# '/Users/jmitch/desktop/lwc/src/static/static_dirs/',
# '/Users/jmitch/desktop/lwc/src/static/static_dirs/',
)
MEDIA_ROOT = os.path.join(BASE_DIR, 'static', 'media')
MEIDA_URL = '/media/'<|fim▁end|> | |
<|file_name|>PlaylistsDialog.js<|end_file_name|><|fim▁begin|>/*
* Copyright (C) 2005-2010 Erik Nilsson, software on versionstudio point com
*
* This program is free software: you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
* the Free Software Foundation, either version 3 of the License, or
* (at your option) any later version.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
* You should have received a copy of the GNU General Public License
* along with this program. If not, see <http://www.gnu.org/licenses/>.
*/
Ext.namespace("vibe.plugin.playlists");
vibe.plugin.playlists.PlaylistsDialog = Ext.extend(Ext.Window,
{
    /**
     * @cfg {Mixed} data
     * The data records for the combo box simple store.
     * Each record is expected to expose "name" and "playlistId" fields.
     */
    data: [],

    /**
     * @cfg {String} emptyText
     * The text to display if the combo box is empty.
     */
    emptyText : "",

    /**
     * @cfg {String} requireSelection
     * Require a selection in the list.
     * NOTE(review): documented as {String} but used as a boolean — confirm.
     */
    requireSelection : false,

    // private - button that dismisses the dialog without submitting
    closeButton : null,

    // private - combo box listing the available playlists
    comboBox : null,

    // private - submit button; disabled until the input is valid
    okButton : null,

    /**
     * @override
     */
    initComponent: function()
    {
        this.addEvents(
            /**
             * @event submit
             * Fired when the dialog is successfully submitted through
             * a click on the OK button.
             * @param {String} name the name of the playlist
             * @param {Number} playlistId the database id of the playlist or null if
             * playlist does not exists in the database.
             */
            "submit"
        );

        var store = new Ext.data.SimpleStore({
            autoLoad: false,
            data: this.data,
            fields: ["name","playlistId"]
        });

        this.comboBox = new Ext.form.ComboBox({
            displayField: "name",
            emptyText: this.emptyText,
            enableKeyEvents: true,
            forceSelection: false,
            mode: "local",
            selectOnFocus: true,
            store: store,
            triggerAction: "all",
            typeAhead: this.requireSelection
        });

        this.okButton = new Ext.Button({
            disabled: true,
            minWidth: 75,
            scope: this,
            text: vibe.Language.app.OK
        });

        this.closeButton = new Ext.Button({
            minWidth: 75,
            scope: this,
            text: vibe.Language.app.CLOSE
        });

        Ext.apply(this,
        {
            bodyStyle: "padding: 10px 10px 10px 10px",
            buttonAlign: "center",
            buttons: [this.okButton,
                this.closeButton],
            height: 110,
            items: [this.comboBox],
            modal: true,
            layout: "fit",
            resizable: false,
            shadowOffset: 6,
            width: 300
        });

        vibe.plugin.playlists.PlaylistsDialog.superclass.initComponent.call(this);

        this.closeButton.on("click",this.onCloseButtonClick,this);
        this.comboBox.on("keyup",this.onComboBoxKeyUp,this);
        this.comboBox.on("select",this.onComboBoxSelect,this);
        this.okButton.on("click",this.onOkButtonClick,this);
    },

    // private - keep the OK button enabled state in sync while typing
    onComboBoxKeyUp : function(comboBox,e)
    {
        // nothing typed yet: never allow a submit
        if ( this.comboBox.getValue().length==0 ) {
            this.okButton.disable();
            return;
        }
        // free text only counts as valid input when requireSelection is off
        if ( this.requireSelection && this.getSelectedRecord()==null ) {
            this.okButton.disable();
        }
        else {
            this.okButton.enable();
        }
    },

    // private - picking an entry from the list is always valid input
    onComboBoxSelect : function(comboBox,record,index)
    {
        this.okButton.enable();
    },

    // private
    onCloseButtonClick : function()
    {
        this.close();
    },

    // private - resolve the entered/selected playlist and fire "submit"
    onOkButtonClick : function()
    {
        var name = null;
        var playlistId = null;

        var record = this.getSelectedRecord();
        if ( record!=null ) {
            // an existing playlist was picked: reuse its database id
            name = record.get("name");
            playlistId = record.get("playlistId");
        }
        else {
            // free text: a new playlist without a database id yet
            name = this.comboBox.getValue();
        }

        this.fireEvent("submit",name,playlistId);
        this.close();
    },

    // private - returns the selected store record, or null when the typed
    // text no longer matches the selection exactly
    getSelectedRecord : function()
    {
        var selectedRecord = null;
        var index = this.comboBox.selectedIndex;
        if ( index!=-1 )
        {
            // make sure selected record matches exactly
            // the combo box value
            var record = this.comboBox.store.getAt(index);
            if ( record.get("name")==this.comboBox.getValue() ) {
                selectedRecord = record;
            }
        }
        return selectedRecord;
    }
});
// private
|
<|file_name|>skype_bot2.py<|end_file_name|><|fim▁begin|>from skypebot import *
class Skype_Bot:
    """
    This class handles communication with Skype via SkypeBot.

    The bot attaches to a running Skype instance, forwards every received
    message to the configured plugins and posts their replies back into
    the originating chat.
    """
    def __init__(self, plugins):
        # plugins: iterable of objects exposing plugin_process_request(msg)
        self.skype = Skypebot.Skype(Events=self)
        self.skype.FriendlyName = "Skype Bot Levitan"
        self.skype.Attach()
        self.plugins = plugins
    def AttachmentStatus(self, status):
        # Skype event callback: re-attach as soon as the API becomes available.
        if status == Skypebot.apiAttachAvailable:
            self.skype.Attach()
    def MessageStatus(self, msg, status):
        # Skype event callback: dispatch newly received messages to plugins.
        print("INCOMING> %s" % msg.Body)
        # msg.MarkAsSeen()
        if status == Skypebot.cmsReceived:
            for plugin in self.plugins:
                # each plugin returns a dict with 'status' and 'message' keys
                r = plugin.plugin_process_request(msg)
                if r['status']:
                    msg.Chat.SendMessage(r['message'])
    def send(self, topic, message):
        """
        Manual send to CONFERENCES to handle command line interface
        :param topic: topic of the conference (it's name)
        :param message: thing to say
        :return:
        """
        # linear scan over all chats; topics are assumed unique per conference
        for chat in self.skype.Chats:
            if chat.Topic == topic:
                chat.SendMessage(message)
<|file_name|>pisa_same_entity.py<|end_file_name|><|fim▁begin|>#!/usr/bin/env python
# -*- coding: utf-8 -*-
"""
check same interface phos_binding patterns
"""
import os
import sys
import urllib
import urllib2
import cPickle as pickle
from multiprocessing import Pool
def get_entityid(p):
    """Resolve the entity ids of both chains of an interface via the RCSB
    custom report REST service.

    Args:
        p: tuple (pdbid, interface_id, chain1, chain2)

    Returns:
        tuple (pdbid, interface_id, chain1_entity_id, chain2_entity_id)
    """
    pdbid,interface_id,chain1,chain2 = p
    url = 'http://www.rcsb.org/pdb/rest/customReport.csv?'
    data = {
        'pdbids':pdbid,
        'customReportColumns':'structureId,entityId',
        'service':'wsfile',
        'format':'csv',
    }
    data = urllib.urlencode(data)
    req = urllib2.Request(url,data)
    response = urllib2.urlopen(req)
    lines = response.readlines()
    # drop the CSV header, blank lines, and the quoting around each cell
    lines = [line.rstrip('\r\n') for line in lines[1:]]
    lines = [line for line in lines if line]
    lines = [line.split(',') for line in lines]
    lines = [[w.strip('"') for w in line] for line in lines]
    chain1_id = [line for line in lines if line[1] == chain1][0][2]
    # BUG FIX: this previously filtered on chain1 as well, so chain2's
    # entity id always duplicated chain1's and interfaces between two
    # different entities could be misclassified as same-entity duplicates.
    chain2_id = [line for line in lines if line[1] == chain2][0][2]
    return pdbid,interface_id,chain1_id,chain2_id
def filter_same_interface(pdb_interfaces):
    """Group interfaces by (pdbid, entity1, entity2), report whether the
    phospho-binding residue patterns inside each group agree, and keep one
    representative interface per entity pair.

    NOTE: Python 2 module (print statements, dict.iteritems).
    """
    # (pdbid, interface_id, chain1, chain2) for every interface
    pdbid_chain = [(p[0],p[1],p[-1][0][0],p[-1][1][0]) for p in pdb_interfaces]
    # resolve chain -> entity ids via the RCSB REST API, 4 workers
    p = Pool(4)
    result = p.map(get_entityid,pdbid_chain)
    p.close()
    # bucket interfaces that share the same (pdbid, entity1, entity2)
    pdb_chain_entity = {}
    for r in result:
        if not (r[0],r[2],r[3]) in pdb_chain_entity.keys():
            pdb_chain_entity[(r[0],r[2],r[3])] = [r]
        else:
            pdb_chain_entity[(r[0],r[2],r[3])].append(r)
    with open('same_interface.txt','w') as w_f:
        same = []
        different = []
        for k,v in pdb_chain_entity.iteritems():
            # only groups with more than one interface can be compared
            if len(v) > 1:
                print >> w_f,k
                cluster = [p for p in pdb_interfaces if (p[0],p[1]) in [(vi[0],vi[1]) for vi in v]]
                cluster_patterns = []
                for c in cluster:
                    bonds = c[6]
                    phos_interacting_residues = {}
                    # phospho-group atom suffixes of TPO/SEP/PTR residues
                    PHOS = ['TPO_ O1P','TPO_ O2P','TPO_ O3P','TPO_ OG1','SEP_ O1P','SEP_ O2P','SEP_ O3P','SEP_ OG ','PTR_ O1P','PTR_ O2P','PTR _O3P','PTR OH ']
                    for bond in bonds:
                        bond_type,bond_info = bond
                        for bondi in bond_info:
                            res1,res2,dist = bondi
                            # collect partners contacting the phospho atoms,
                            # keyed by the phospho residue itself
                            if [p for p in PHOS if res1[-8:] == p]:
                                res1 = '_'.join(res1.split('_')[:3])
                                if not res1 in phos_interacting_residues.keys():
                                    phos_interacting_residues[res1] = [res2]
                                else:
                                    phos_interacting_residues[res1].append(res2)
                            elif [p for p in PHOS if res2[-8:] == p]:
                                res2 = '_'.join(res2.split('_')[:3])
                                if not res2 in phos_interacting_residues.keys():
                                    phos_interacting_residues[res2] = [res1]
                                else:
                                    phos_interacting_residues[res2].append(res1)
                    for phos,interacting_residues in phos_interacting_residues.items():
                        if interacting_residues:
                            # normalize to a sorted, deduplicated residue-name
                            # signature, e.g. "ARG_LYS_SER"
                            interacting_residues = ['_'.join(r.split('_')[:3]) for r in interacting_residues]
                            interacting_residues = list(set(interacting_residues))
                            interacting_residues = [r.split('_')[2] for r in interacting_residues]
                            interacting_residues = sorted(interacting_residues)
                            interacting_residues = '_'.join(interacting_residues)
                            cluster_patterns.append(interacting_residues)
                            print >> w_f,c[0],c[1],interacting_residues
                print cluster_patterns
                # all patterns identical across the cluster -> "same"
                if len(cluster_patterns) > 1 and len(set(cluster_patterns)) == 1:
                    same.append(1)
                else:
                    different.append(1)
        print 'same',len(same)
        print 'different',len(different)
    # keep exactly one representative interface per entity pair
    pdb_unique_interface = [(v[0][0],v[0][1]) for k,v in pdb_chain_entity.iteritems()]
    pdb_interfaces = [p for p in pdb_interfaces if (p[0],p[1]) in pdb_unique_interface]
    print 'after filter same entity',len(pdb_interfaces)
    return pdb_interfaces
def filter_non_one_phos(pdb_interfaces):
    """Keep only interfaces whose bonds involve exactly one distinct
    phosphorylated residue (TPO/SEP/PTR).

    NOTE: Python 2 module (print statements).
    """
    zero_phos_interfaces = []
    one_phos_interfaces = []
    more_phos_interfaces = []
    for interface in pdb_interfaces:
        pdbid,p1,interface_area,p2,p3,p4,bonds = interface[:7]
        phos_res = []
        for bond in bonds:
            bond_type,bond_info = bond
            for bondi in bond_info:
                res1,res2,dist = bondi
                # record any phosphorylated residue seen on either bond side
                if 'TPO' in res1 or 'SEP' in res1 or 'PTR' in res1:
                    phos_res.append('_'.join(res1.split('_')[:3]))
                if 'TPO' in res2 or 'SEP' in res2 or 'PTR' in res2:
                    phos_res.append('_'.join(res2.split('_')[:3]))
        # deduplicate: count distinct phospho residues, not bond occurrences
        phos_res = set(phos_res)
        if len(phos_res) == 1:
            one_phos_interfaces.append(interface)
        elif len(phos_res) > 1:
            more_phos_interfaces.append(interface)
        else:
            zero_phos_interfaces.append(interface)
    print 'after filter non_one_phos_interfaces',len(one_phos_interfaces)
    return one_phos_interfaces
def main():
    """Load pickled interfaces, keep symmetric protein-protein ones and
    run the phospho/entity filters."""
    pdb_interfaces = pickle.load(open(sys.argv[-1]))
    # keep interfaces where both chains use the identity symmetry op (x,y,z)
    pdb_interfaces = [p for p in pdb_interfaces if p[7][0][2].lower() == 'x,y,z' and p[7][1][2].lower() == 'x,y,z']
    # both interface partners must be protein chains
    pdb_interfaces = [p for p in pdb_interfaces if p[7][0][1] == 'Protein' and p[7][1][1] == 'Protein']
    pdb_interfaces = filter_non_one_phos(pdb_interfaces)
    pdb_interfaces = filter_same_interface(pdb_interfaces)
if __name__ == "__main__":
main()<|fim▁end|> | if len(v) > 1: |
<|file_name|>tests.py<|end_file_name|><|fim▁begin|># -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.test import TestCase, RequestFactory
from django.core.urlresolvers import reverse
from django.template import Template, Context
from django.core.cache import cache
from djconfig import config
from ...core.tests import utils
from .models import CommentBookmark
from .forms import BookmarkForm
class CommentBookmarkViewTest(TestCase):
def setUp(self):<|fim▁hole|> self.topic = utils.create_topic(category=self.category, user=self.user)
self.comment = utils.create_comment(topic=self.topic)
def test_bookmark_create(self):
"""
create comment
"""
utils.login(self)
form_data = {'comment_number': 999, }
response = self.client.post(reverse('spirit:comment:bookmark:create', kwargs={'topic_id': self.topic.pk, }),
HTTP_X_REQUESTED_WITH='XMLHttpRequest',
data=form_data)
self.assertEqual(response.status_code, 200)
class CommentBookmarkModelsTest(TestCase):
    def setUp(self):
        # fresh cache so djconfig values are re-read for every test
        cache.clear()
        self.user = utils.create_user()
        self.category = utils.create_category()
        self.topic = utils.create_topic(category=self.category, user=self.user)
        for _ in range(config.comments_per_page * 4):  # 4 pages
            utils.create_comment(user=self.user, topic=self.topic)

    def test_comment_bookmark_update_or_create(self):
        """
        Should update or create the comment number
        """
        page = 2
        CommentBookmark.update_or_create(
            user=self.user,
            topic=self.topic,
            comment_number=CommentBookmark.page_to_comment_number(page)
        )
        comment_bookmark = CommentBookmark.objects.get(user=self.user, topic=self.topic)
        # first comment of page 2 = one full page of comments plus one
        self.assertEqual(comment_bookmark.comment_number, config.comments_per_page * (page - 1) + 1)

    def test_comment_bookmark_update_or_create_invalid_page(self):
        """
        Should do nothing when receiving an invalid page
        """
        page = 'im_a_string'
        CommentBookmark.update_or_create(
            user=self.user,
            topic=self.topic,
            comment_number=CommentBookmark.page_to_comment_number(page)
        )
        self.assertEqual(len(CommentBookmark.objects.all()), 0)
class CommentBookmarkFormTest(TestCase):
    def test_form(self):
        # a plain positive comment number should validate
        form_data = {'comment_number': 999, }
        form = BookmarkForm(data=form_data)
        self.assertEqual(form.is_valid(), True)
class CommentBookmarkTemplateTagsTest(TestCase):
    def setUp(self):
        # fresh cache so djconfig values are re-read for every test
        cache.clear()
        self.user = utils.create_user()
        self.category = utils.create_category()
        self.topic = utils.create_topic(self.category)
        self.comment = utils.create_comment(topic=self.topic)

    # NOTE(review): missing the "test_" prefix, so the unittest runner never
    # executes this method — confirm and rename if it is meant to run.
    def populate_bookmarks(self):
        """
        should populate the topic's bookmark
        """
        bookmark = CommentBookmark.objects.create(user=self.user, topic=self.topic, comment_number=10)
        out = Template(
            "{% load comment_bookmark %}"
            "{% populate_bookmarks topics=topics user=user %}"
            "{{ topics.0.bookmark.get_absolute_url }}"
        ).render(Context({'topics': [self.topic, ], 'user': self.user}))
        self.assertEqual(out, bookmark.get_absolute_url())
self.user = utils.create_user()
self.category = utils.create_category() |
<|file_name|>ofctl_v1_2.py<|end_file_name|><|fim▁begin|># Copyright (C) 2013 Nippon Telegraph and Telephone Corporation.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
# implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import base64
import struct
import socket
import logging
import netaddr
from ryu.ofproto import ether
from ryu.ofproto import inet
from ryu.ofproto import ofproto_v1_2
from ryu.ofproto import ofproto_v1_2_parser
from ryu.lib import hub
from ryu.lib import mac
LOG = logging.getLogger('ryu.lib.ofctl_v1_2')
DEFAULT_TIMEOUT = 1.0
def str_to_int(src):
    """Convert a decimal or "0x"/"0X"-prefixed hexadecimal string to int.

    Non-string inputs are returned unchanged.
    """
    if not isinstance(src, str):
        return src
    if src.startswith(("0x", "0X")):
        return int(src, 16)
    return int(src)
def to_action(dp, dic):
    """Convert one REST-style action description into an OFPAction.

    Args:
        dp: datapath; supplies the ofproto constants and parser classes.
        dic (dict): action description with a 'type' key plus the
            type-specific parameters.

    Returns:
        The parser action object, or None when 'type' is not a plain
        action (instruction types are handled by to_actions()).
    """
    ofp = dp.ofproto
    parser = dp.ofproto_parser
    action_type = dic.get('type')
    if action_type == 'OUTPUT':
        # port/max_len default to "any port" / maximum packet-in length
        out_port = int(dic.get('port', ofp.OFPP_ANY))
        max_len = int(dic.get('max_len', ofp.OFPCML_MAX))
        result = parser.OFPActionOutput(out_port, max_len)
    elif action_type == 'COPY_TTL_OUT':
        result = parser.OFPActionCopyTtlOut()
    elif action_type == 'COPY_TTL_IN':
        result = parser.OFPActionCopyTtlIn()
    elif action_type == 'SET_MPLS_TTL':
        mpls_ttl = int(dic.get('mpls_ttl'))
        result = parser.OFPActionSetMplsTtl(mpls_ttl)
    elif action_type == 'DEC_MPLS_TTL':
        result = parser.OFPActionDecMplsTtl()
    elif action_type == 'PUSH_VLAN':
        ethertype = int(dic.get('ethertype'))
        result = parser.OFPActionPushVlan(ethertype)
    elif action_type == 'POP_VLAN':
        result = parser.OFPActionPopVlan()
    elif action_type == 'PUSH_MPLS':
        ethertype = int(dic.get('ethertype'))
        result = parser.OFPActionPushMpls(ethertype)
    elif action_type == 'POP_MPLS':
        ethertype = int(dic.get('ethertype'))
        result = parser.OFPActionPopMpls(ethertype)
    elif action_type == 'SET_QUEUE':
        queue_id = int(dic.get('queue_id'))
        result = parser.OFPActionSetQueue(queue_id)
    elif action_type == 'GROUP':
        group_id = int(dic.get('group_id'))
        result = parser.OFPActionGroup(group_id)
    elif action_type == 'SET_NW_TTL':
        nw_ttl = int(dic.get('nw_ttl'))
        result = parser.OFPActionSetNwTtl(nw_ttl)
    elif action_type == 'DEC_NW_TTL':
        result = parser.OFPActionDecNwTtl()
    elif action_type == 'SET_FIELD':
        field = dic.get('field')
        value = dic.get('value')
        result = parser.OFPActionSetField(**{field: value})
    else:
        # unknown/instruction type: caller decides what to do with None
        result = None
    return result
def to_actions(dp, acts):
    """Convert a list of REST-style action dicts into OFP instructions.

    Plain actions are bundled into a single APPLY_ACTIONS instruction;
    GOTO_TABLE and WRITE_METADATA entries become their own instructions.

    Returns:
        list of OFPInstruction* objects (APPLY_ACTIONS is always appended
        last, even when the action list is empty).
    """
    inst = []
    actions = []
    ofp = dp.ofproto
    parser = dp.ofproto_parser
    for a in acts:
        action = to_action(dp, a)
        if action is not None:
            actions.append(action)
        else:
            # not a plain action: treat as an instruction type
            action_type = a.get('type')
            if action_type == 'GOTO_TABLE':
                table_id = int(a.get('table_id'))
                inst.append(parser.OFPInstructionGotoTable(table_id))
            elif action_type == 'WRITE_METADATA':
                metadata = str_to_int(a.get('metadata'))
                # mask defaults to all-ones when not supplied
                metadata_mask = (str_to_int(a['metadata_mask'])
                                 if 'metadata_mask' in a
                                 else parser.UINT64_MAX)
                inst.append(
                    parser.OFPInstructionWriteMetadata(
                        metadata, metadata_mask))
            else:
                LOG.debug('Unknown action type: %s' % action_type)
    inst.append(parser.OFPInstructionActions(ofp.OFPIT_APPLY_ACTIONS,
                                             actions))
    return inst
def action_to_str(act):
    """Render a single OFPAction as a human-readable string
    (e.g. "OUTPUT:1", "SET_FIELD: {eth_dst:...}")."""
    action_type = act.cls_action_type
    if action_type == ofproto_v1_2.OFPAT_OUTPUT:
        buf = 'OUTPUT:' + str(act.port)
    elif action_type == ofproto_v1_2.OFPAT_COPY_TTL_OUT:
        buf = 'COPY_TTL_OUT'
    elif action_type == ofproto_v1_2.OFPAT_COPY_TTL_IN:
        buf = 'COPY_TTL_IN'
    elif action_type == ofproto_v1_2.OFPAT_SET_MPLS_TTL:
        buf = 'SET_MPLS_TTL:' + str(act.mpls_ttl)
    elif action_type == ofproto_v1_2.OFPAT_DEC_MPLS_TTL:
        buf = 'DEC_MPLS_TTL'
    elif action_type == ofproto_v1_2.OFPAT_PUSH_VLAN:
        buf = 'PUSH_VLAN:' + str(act.ethertype)
    elif action_type == ofproto_v1_2.OFPAT_POP_VLAN:
        buf = 'POP_VLAN'
    elif action_type == ofproto_v1_2.OFPAT_PUSH_MPLS:
        buf = 'PUSH_MPLS:' + str(act.ethertype)
    elif action_type == ofproto_v1_2.OFPAT_POP_MPLS:
        buf = 'POP_MPLS:' + str(act.ethertype)
    elif action_type == ofproto_v1_2.OFPAT_SET_QUEUE:
        buf = 'SET_QUEUE:' + str(act.queue_id)
    elif action_type == ofproto_v1_2.OFPAT_GROUP:
        buf = 'GROUP:' + str(act.group_id)
    elif action_type == ofproto_v1_2.OFPAT_SET_NW_TTL:
        buf = 'SET_NW_TTL:' + str(act.nw_ttl)
    elif action_type == ofproto_v1_2.OFPAT_DEC_NW_TTL:
        buf = 'DEC_NW_TTL'
    elif action_type == ofproto_v1_2.OFPAT_SET_FIELD:
        buf = 'SET_FIELD: {%s:%s}' % (act.key, act.value)
    else:
        buf = 'UNKNOWN'
    return buf
def actions_to_str(instructions):
    """Flatten a list of OFP instructions into human-readable action strings.

    APPLY/WRITE action instructions contribute one string per contained
    action; GOTO_TABLE and WRITE_METADATA contribute one string each.
    Other instruction types are ignored.
    """
    actions = []
    for instruction in instructions:
        if isinstance(instruction,
                      ofproto_v1_2_parser.OFPInstructionActions):
            for a in instruction.actions:
                actions.append(action_to_str(a))
        elif isinstance(instruction,
                        ofproto_v1_2_parser.OFPInstructionGotoTable):
            buf = 'GOTO_TABLE:' + str(instruction.table_id)
            actions.append(buf)
        elif isinstance(instruction,
                        ofproto_v1_2_parser.OFPInstructionWriteMetadata):
            # mask is only printed when non-zero
            buf = ('WRITE_METADATA:0x%x/0x%x' % (instruction.metadata,
                                                 instruction.metadata_mask)
                   if instruction.metadata_mask
                   else 'WRITE_METADATA:0x%x' % instruction.metadata)
            actions.append(buf)
        else:
            continue
    return actions
def to_match(dp, attrs):
    """Build an OFPMatch from a dict of REST-style match field names.

    Accepts both the legacy names (dl_*, nw_*, tp_*) and the OXM names;
    legacy names are translated before constructing the match.

    Args:
        dp: datapath; supplies the OFPMatch parser class.
        attrs (dict): field name -> value (strings or ints).
    """
    # per-field converters from the REST string representation
    convert = {'in_port': int,
               'in_phy_port': int,
               'metadata': to_match_metadata,
               'dl_dst': to_match_eth,
               'dl_src': to_match_eth,
               'eth_dst': to_match_eth,
               'eth_src': to_match_eth,
               'dl_type': int,
               'eth_type': int,
               'dl_vlan': to_match_vid,
               'vlan_vid': to_match_vid,
               'vlan_pcp': int,
               'ip_dscp': int,
               'ip_ecn': int,
               'nw_proto': int,
               'ip_proto': int,
               'nw_src': to_match_ip,
               'nw_dst': to_match_ip,
               'ipv4_src': to_match_ip,
               'ipv4_dst': to_match_ip,
               'tp_src': int,
               'tp_dst': int,
               'tcp_src': int,
               'tcp_dst': int,
               'udp_src': int,
               'udp_dst': int,
               'sctp_src': int,
               'sctp_dst': int,
               'icmpv4_type': int,
               'icmpv4_code': int,
               'arp_op': int,
               'arp_spa': to_match_ip,
               'arp_tpa': to_match_ip,
               'arp_sha': to_match_eth,
               'arp_tha': to_match_eth,
               'ipv6_src': to_match_ip,
               'ipv6_dst': to_match_ip,
               'ipv6_flabel': int,
               'icmpv6_type': int,
               'icmpv6_code': int,
               'ipv6_nd_target': to_match_ip,
               'ipv6_nd_sll': to_match_eth,
               'ipv6_nd_tll': to_match_eth,
               'mpls_label': int,
               'mpls_tc': int}

    # legacy REST name -> OXM name
    keys = {'dl_dst': 'eth_dst',
            'dl_src': 'eth_src',
            'dl_type': 'eth_type',
            'dl_vlan': 'vlan_vid',
            'nw_src': 'ipv4_src',
            'nw_dst': 'ipv4_dst',
            'nw_proto': 'ip_proto'}

    # for ARP matches, nw_src/nw_dst actually describe the ARP
    # sender/target protocol addresses
    if attrs.get('dl_type') == ether.ETH_TYPE_ARP or \
            attrs.get('eth_type') == ether.ETH_TYPE_ARP:
        if 'nw_src' in attrs and 'arp_spa' not in attrs:
            attrs['arp_spa'] = attrs['nw_src']
            del attrs['nw_src']
        if 'nw_dst' in attrs and 'arp_tpa' not in attrs:
            attrs['arp_tpa'] = attrs['nw_dst']
            del attrs['nw_dst']

    kwargs = {}
    for key, value in attrs.items():
        if key in convert:
            value = convert[key](value)
        if key in keys:
            # For old field name
            key = keys[key]
        if key == 'tp_src' or key == 'tp_dst':
            # TCP/UDP port
            # NOTE(review): if ip_proto/nw_proto is neither TCP nor UDP
            # this lookup raises KeyError — confirm callers always set it.
            conv = {inet.IPPROTO_TCP: {'tp_src': 'tcp_src',
                                       'tp_dst': 'tcp_dst'},
                    inet.IPPROTO_UDP: {'tp_src': 'udp_src',
                                       'tp_dst': 'udp_dst'}}
            ip_proto = attrs.get('nw_proto', attrs.get('ip_proto', 0))
            key = conv[ip_proto][key]
            kwargs[key] = value
        else:
            # others
            kwargs[key] = value

    return dp.ofproto_parser.OFPMatch(**kwargs)
def to_match_eth(value):
    """Parse an Ethernet-address match value.

    A plain address string is returned unchanged; an "addr/mask" string
    is returned as an (addr, mask) pair.
    """
    if '/' not in value:
        return value
    parts = value.split('/')
    return parts[0], parts[1]
def to_match_ip(value):
    """Parse an IP-address match value.

    A plain address string is returned unchanged; a CIDR "addr/prefix"
    string is expanded (via netaddr) into an (address, netmask) pair of
    dotted strings.
    """
    if '/' not in value:
        return value
    network = netaddr.ip.IPNetwork(value)
    return str(network.ip), str(network.netmask)
def to_match_vid(value):
    """Parse a "dl_vlan"/"vlan_vid" match value.

    Decimal values (a Python int, or a decimal string) are taken to be
    the VLAN tag itself, so the OFPVID_PRESENT(0x1000) bit is OR'ed in
    automatically.  Hexadecimal strings are treated as raw oxm_values
    (OFPVID_PRESENT is NOT added), and a "value/mask" string is parsed
    into an (oxm_value, oxm_mask) pair of raw values.
    """
    if isinstance(value, int):
        # Decimal int: a bare VLAN tag.
        return value | ofproto_v1_2.OFPVID_PRESENT
    if '/' in value:
        fields = value.split('/')
        return int(fields[0], 0), int(fields[1], 0)
    if value.isdigit():
        # Decimal string: also a bare VLAN tag.
        return int(value, 10) | ofproto_v1_2.OFPVID_PRESENT
    # Hexadecimal string: a raw oxm_value.
    return int(value, 0)
def to_match_metadata(value):
    """Parse a metadata match value.

    A plain value is converted with ``str_to_int``; a "value/mask"
    string becomes an (int, int) pair.
    """
    if '/' not in value:
        return str_to_int(value)
    fields = value.split('/')
    return str_to_int(fields[0]), str_to_int(fields[1])
def match_to_str(ofmatch):
    """Convert an OFPMatch into a plain dict for display/JSON.

    OXM field names are mapped back to the old OF1.0-style names where
    one exists, masked fields are rendered as "value/mask" strings, and
    vlan/metadata fields use their dedicated renderers.  When two OXM
    fields map to the same old name (tcp_src/udp_src -> tp_src), the
    first one seen wins.
    """
    keys = {'eth_src': 'dl_src',
            'eth_dst': 'dl_dst',
            'eth_type': 'dl_type',
            'vlan_vid': 'dl_vlan',
            'ipv4_src': 'nw_src',
            'ipv4_dst': 'nw_dst',
            'ip_proto': 'nw_proto',
            'tcp_src': 'tp_src',
            'tcp_dst': 'tp_dst',
            'udp_src': 'tp_src',
            'udp_dst': 'tp_dst'
            }
    match = {}
    fields = ofmatch.to_jsondict()['OFPMatch']['oxm_fields']
    for match_field in fields:
        tlv = match_field['OXMTlv']
        key = keys.get(tlv['field'], tlv['field'])
        mask = tlv['mask']
        value = tlv['value']
        if key == 'dl_vlan':
            value = match_vid_to_str(value, mask)
        elif key == 'metadata':
            value = match_metadata_to_str(value, mask)
        elif mask is not None:
            value = value + '/' + mask
        match.setdefault(key, value)
    return match
def match_metadata_to_str(value, mask):
    """Render a metadata match as decimal "value" or "value/mask".

    A falsy mask (None or 0) suppresses the mask part.
    """
    if mask:
        return '%d/%d' % (value, mask)
    return '%d' % value
def match_vid_to_str(value, mask):
    """Render a vlan_vid match for display.

    Masked matches are shown as raw hex "value/mask".  An unmasked
    value with OFPVID_PRESENT set is shown as the decimal tag number;
    any other unmasked value is shown as raw hex.  (Mirrors the parsing
    convention in ``to_match_vid``.)
    """
    if mask is not None:
        return '0x%04x/0x%04x' % (value, mask)
    if value & ofproto_v1_2.OFPVID_PRESENT:
        return str(value & ~ofproto_v1_2.OFPVID_PRESENT)
    return '0x%04x' % value
def send_stats_request(dp, stats, waiters, msgs):
    """Send a stats request on ``dp`` and block until the reply or timeout.

    ``waiters`` is a dict of dicts: dp.id -> xid -> (event, msgs).  The
    reply handler (elsewhere in the application) is expected to look the
    xid up, append reply messages to ``msgs`` (shared in place with the
    caller) and set the event.
    """
    dp.set_xid(stats)
    waiters_per_dp = waiters.setdefault(dp.id, {})
    lock = hub.Event()
    waiters_per_dp[stats.xid] = (lock, msgs)
    dp.send_msg(stats)
    lock.wait(timeout=DEFAULT_TIMEOUT)
    if not lock.is_set():
        # Timed out: drop the waiter entry so a late reply is ignored.
        # NOTE(review): presumably the reply handler removes the entry
        # on success -- confirm, otherwise entries leak.
        del waiters_per_dp[stats.xid]
def get_desc_stats(dp, waiters):
    """Query the switch description and return {dpid: description dict}."""
    request = dp.ofproto_parser.OFPDescStatsRequest(dp)
    msgs = []
    send_stats_request(dp, request, waiters, msgs)

    s = {}
    for msg in msgs:
        body = msg.body
        s = {'mfr_desc': body.mfr_desc,
             'hw_desc': body.hw_desc,
             'sw_desc': body.sw_desc,
             'serial_num': body.serial_num,
             'dp_desc': body.dp_desc}
    return {str(dp.id): s}
def get_queue_stats(dp, waiters):
    """Query per-queue statistics and return {dpid: [queue stat dicts]}.

    The request wildcards both the port (OFPP_ANY) and the queue
    (OFPQ_ALL), so every queue on every port is reported.
    """
    ofp = dp.ofproto
    # The OF1.2 OFPQueueStatsRequest signature is
    # (datapath, flags, port_no, queue_id).
    stats = dp.ofproto_parser.OFPQueueStatsRequest(dp, 0, ofp.OFPP_ANY,
                                                   ofp.OFPQ_ALL)
    msgs = []
    send_stats_request(dp, stats, waiters, msgs)

    s = []
    for msg in msgs:
        stats = msg.body
        for stat in stats:
            s.append({'port_no': stat.port_no,
                      'queue_id': stat.queue_id,
                      'tx_bytes': stat.tx_bytes,
                      'tx_errors': stat.tx_errors,
                      'tx_packets': stat.tx_packets})
    desc = {str(dp.id): s}
    return desc
def get_flow_stats(dp, waiters, flow=None):
    """Query flow stats on ``dp``, optionally filtered by ``flow``.

    ``flow`` may contain 'table_id', 'out_port', 'out_group', 'cookie',
    'cookie_mask' and a 'match' dict (see ``to_match``); missing keys
    default to the OF1.2 "match everything" wildcards.  Returns
    {dpid: [flow dicts]}.
    """
    # None sentinel instead of a mutable-default argument.
    flow = flow if flow is not None else {}
    table_id = int(flow.get('table_id', dp.ofproto.OFPTT_ALL))
    out_port = int(flow.get('out_port', dp.ofproto.OFPP_ANY))
    out_group = int(flow.get('out_group', dp.ofproto.OFPG_ANY))
    cookie = int(flow.get('cookie', 0))
    cookie_mask = int(flow.get('cookie_mask', 0))
    match = to_match(dp, flow.get('match', {}))

    stats = dp.ofproto_parser.OFPFlowStatsRequest(
        dp, table_id, out_port, out_group, cookie, cookie_mask, match)
    msgs = []
    send_stats_request(dp, stats, waiters, msgs)

    flows = []
    for msg in msgs:
        for stats in msg.body:
            # Instructions/match are rendered back into plain
            # dict/string form for display.
            actions = actions_to_str(stats.instructions)
            match = match_to_str(stats.match)
            s = {'priority': stats.priority,
                 'cookie': stats.cookie,
                 'idle_timeout': stats.idle_timeout,
                 'hard_timeout': stats.hard_timeout,
                 'actions': actions,
                 'match': match,
                 'byte_count': stats.byte_count,
                 'duration_sec': stats.duration_sec,
                 'duration_nsec': stats.duration_nsec,
                 'packet_count': stats.packet_count,
                 'table_id': stats.table_id,
                 'length': stats.length}
            flows.append(s)
    flows = {str(dp.id): flows}
    return flows
def get_port_stats(dp, waiters):
    """Query per-port counters and return {dpid: [port stat dicts]}."""
    request = dp.ofproto_parser.OFPPortStatsRequest(
        dp, dp.ofproto.OFPP_ANY, 0)
    msgs = []
    send_stats_request(dp, request, waiters, msgs)

    ports = []
    for msg in msgs:
        for stat in msg.body:
            ports.append({'port_no': stat.port_no,
                          'rx_packets': stat.rx_packets,
                          'tx_packets': stat.tx_packets,
                          'rx_bytes': stat.rx_bytes,
                          'tx_bytes': stat.tx_bytes,
                          'rx_dropped': stat.rx_dropped,
                          'tx_dropped': stat.tx_dropped,
                          'rx_errors': stat.rx_errors,
                          'tx_errors': stat.tx_errors,
                          'rx_frame_err': stat.rx_frame_err,
                          'rx_over_err': stat.rx_over_err,
                          'rx_crc_err': stat.rx_crc_err,
                          'collisions': stat.collisions})
    return {str(dp.id): ports}
def get_group_stats(dp, waiters):
    """Query counters for all groups and return {dpid: [group dicts]}."""
    stats = dp.ofproto_parser.OFPGroupStatsRequest(
        dp, dp.ofproto.OFPG_ALL, 0)
    msgs = []
    send_stats_request(dp, stats, waiters, msgs)
    groups = []
    for msg in msgs:
        for stats in msg.body:
            # Per-bucket packet/byte counters for this group.
            bucket_counters = []
            for bucket_counter in stats.bucket_counters:
                c = {'packet_count': bucket_counter.packet_count,
                     'byte_count': bucket_counter.byte_count}
                bucket_counters.append(c)
            g = {'length': stats.length,
                 'group_id': stats.group_id,
                 'ref_count': stats.ref_count,
                 'packet_count': stats.packet_count,
                 'byte_count': stats.byte_count,
                 'bucket_stats': bucket_counters}
            groups.append(g)
    groups = {str(dp.id): groups}
    return groups
def get_group_features(dp, waiters):
    """Query group-feature capabilities and return {dpid: [feature dicts]}.

    Numeric bitmaps in the reply are decoded into lists of symbolic
    names using the OFPGT_*/OFPGFC_*/OFPAT_* constant tables below.
    """
    ofp = dp.ofproto
    # Group type bit position -> name.
    type_convert = {ofp.OFPGT_ALL: 'ALL',
                    ofp.OFPGT_SELECT: 'SELECT',
                    ofp.OFPGT_INDIRECT: 'INDIRECT',
                    ofp.OFPGT_FF: 'FF'}
    # Capability flag -> name (these are direct bitmask values).
    cap_convert = {ofp.OFPGFC_SELECT_WEIGHT: 'SELECT_WEIGHT',
                   ofp.OFPGFC_SELECT_LIVENESS: 'SELECT_LIVENESS',
                   ofp.OFPGFC_CHAINING: 'CHAINING',
                   ofp.OFPGFC_CHAINING_CHECKS: 'CHAINING_CHECKS'}
    # Action type bit position -> name.
    act_convert = {ofp.OFPAT_OUTPUT: 'OUTPUT',
                   ofp.OFPAT_COPY_TTL_OUT: 'COPY_TTL_OUT',
                   ofp.OFPAT_COPY_TTL_IN: 'COPY_TTL_IN',
                   ofp.OFPAT_SET_MPLS_TTL: 'SET_MPLS_TTL',
                   ofp.OFPAT_DEC_MPLS_TTL: 'DEC_MPLS_TTL',
                   ofp.OFPAT_PUSH_VLAN: 'PUSH_VLAN',
                   ofp.OFPAT_POP_VLAN: 'POP_VLAN',
                   ofp.OFPAT_PUSH_MPLS: 'PUSH_MPLS',
                   ofp.OFPAT_POP_MPLS: 'POP_MPLS',
                   ofp.OFPAT_SET_QUEUE: 'SET_QUEUE',
                   ofp.OFPAT_GROUP: 'GROUP',
                   ofp.OFPAT_SET_NW_TTL: 'SET_NW_TTL',
                   ofp.OFPAT_DEC_NW_TTL: 'DEC_NW_TTL',
                   ofp.OFPAT_SET_FIELD: 'SET_FIELD'}
    stats = dp.ofproto_parser.OFPGroupFeaturesStatsRequest(dp, 0)
    msgs = []
    send_stats_request(dp, stats, waiters, msgs)
    features = []
    for msg in msgs:
        feature = msg.body
        # ``types`` is a bitmap indexed by group-type number.
        types = []
        for k, v in type_convert.items():
            if (1 << k) & feature.types:
                types.append(v)
        # ``capabilities`` is a plain OR of OFPGFC_* flags.
        capabilities = []
        for k, v in cap_convert.items():
            if k & feature.capabilities:
                capabilities.append(v)
        # Maximum number of groups, per group type.
        max_groups = []
        for k, v in type_convert.items():
            max_groups.append({v: feature.max_groups[k]})
        # Supported actions: one bitmap per group type.
        actions = []
        for k1, v1 in type_convert.items():
            acts = []
            for k2, v2 in act_convert.items():
                if (1 << k2) & feature.actions[k1]:
                    acts.append(v2)
            actions.append({v1: acts})
        f = {'types': types,
             'capabilities': capabilities,
             'max_groups': max_groups,
             'actions': actions}
        features.append(f)
    features = {str(dp.id): features}
    return features
def get_group_desc(dp, waiters):
    """Query group descriptions and return {dpid: [group desc dicts]}.

    Bucket actions are rendered with ``action_to_str`` (defined
    elsewhere in this module).
    """
    # Group type constant -> display name.
    type_convert = {dp.ofproto.OFPGT_ALL: 'ALL',
                    dp.ofproto.OFPGT_SELECT: 'SELECT',
                    dp.ofproto.OFPGT_INDIRECT: 'INDIRECT',
                    dp.ofproto.OFPGT_FF: 'FF'}
    stats = dp.ofproto_parser.OFPGroupDescStatsRequest(dp, 0)
    msgs = []
    send_stats_request(dp, stats, waiters, msgs)
    descs = []
    for msg in msgs:
        for stats in msg.body:
            buckets = []
            for bucket in stats.buckets:
                actions = []
                for action in bucket.actions:
                    actions.append(action_to_str(action))
                b = {'weight': bucket.weight,
                     'watch_port': bucket.watch_port,
                     'watch_group': bucket.watch_group,
                     'actions': actions}
                buckets.append(b)
            d = {'type': type_convert.get(stats.type),
                 'group_id': stats.group_id,
                 'buckets': buckets}
            descs.append(d)
    descs = {str(dp.id): descs}
    return descs
def get_port_desc(dp, waiters):
    """Return {dpid: [port description dicts]}.

    The port list is read from ``msg.ports`` of the switch-features
    reply (OFPFeaturesRequest) rather than a dedicated stats request.
    """
    stats = dp.ofproto_parser.OFPFeaturesRequest(dp)
    msgs = []
    send_stats_request(dp, stats, waiters, msgs)
    descs = []
    for msg in msgs:
        stats = msg.ports
        # ``ports`` maps port number -> port structure; only the
        # structures are needed here.
        for stat in stats.values():
            d = {'port_no': stat.port_no,
                 'hw_addr': stat.hw_addr,
                 'name': stat.name,
                 'config': stat.config,
                 'state': stat.state,
                 'curr': stat.curr,
                 'advertised': stat.advertised,
                 'supported': stat.supported,
                 'peer': stat.peer,
                 'curr_speed': stat.curr_speed,
                 'max_speed': stat.max_speed}
            descs.append(d)
    descs = {str(dp.id): descs}
    return descs
def mod_flow_entry(dp, flow, cmd):
    """Build and send an OFPFlowMod from the ``flow`` dict.

    ``cmd`` is one of the OFPFC_* flow-mod commands.  Missing keys in
    ``flow`` fall back to 0 or the OF1.2 "any" wildcards; 'match' and
    'actions' are translated with ``to_match`` and ``to_actions``.
    """
    cookie = int(flow.get('cookie', 0))
    cookie_mask = int(flow.get('cookie_mask', 0))
    table_id = int(flow.get('table_id', 0))
    idle_timeout = int(flow.get('idle_timeout', 0))
    hard_timeout = int(flow.get('hard_timeout', 0))
    priority = int(flow.get('priority', 0))
    buffer_id = int(flow.get('buffer_id', dp.ofproto.OFP_NO_BUFFER))
    out_port = int(flow.get('out_port', dp.ofproto.OFPP_ANY))
    out_group = int(flow.get('out_group', dp.ofproto.OFPG_ANY))
    flags = int(flow.get('flags', 0))
    match = to_match(dp, flow.get('match', {}))
    inst = to_actions(dp, flow.get('actions', []))
    flow_mod = dp.ofproto_parser.OFPFlowMod(
        dp, cookie, cookie_mask, table_id, cmd, idle_timeout,
        hard_timeout, priority, buffer_id, out_port, out_group,
        flags, match, inst)
    dp.send_msg(flow_mod)
def mod_group_entry(dp, group, cmd):
    """Build and send an OFPGroupMod from the ``group`` dict.

    ``cmd`` is one of the OFPGC_* group-mod commands.  ``group`` may
    contain 'type' ('ALL', 'SELECT', 'INDIRECT' or 'FF'), 'group_id'
    and a list of 'buckets', each with 'weight', 'watch_port',
    'watch_group' and an 'actions' list.
    """
    type_convert = {'ALL': dp.ofproto.OFPGT_ALL,
                    'SELECT': dp.ofproto.OFPGT_SELECT,
                    'INDIRECT': dp.ofproto.OFPGT_INDIRECT,
                    'FF': dp.ofproto.OFPGT_FF}
    type_ = type_convert.get(group.get('type', 'ALL'))
    if type_ is None:
        # NOTE(review): an unknown type is only logged; ``type_`` stays
        # None and is still passed to OFPGroupMod below -- confirm this
        # fall-through is intended rather than an early return.
        LOG.debug('Unknown type: %s', group.get('type'))
    group_id = int(group.get('group_id', 0))
    buckets = []
    for bucket in group.get('buckets', []):
        weight = int(bucket.get('weight', 0))
        watch_port = int(bucket.get('watch_port', dp.ofproto.OFPP_ANY))
        watch_group = int(bucket.get('watch_group', dp.ofproto.OFPG_ANY))
        actions = []
        for dic in bucket.get('actions', []):
            # ``to_action`` may return None (the action dict is skipped).
            action = to_action(dp, dic)
            if action is not None:
                actions.append(action)
        buckets.append(dp.ofproto_parser.OFPBucket(
            weight, watch_port, watch_group, actions))
    group_mod = dp.ofproto_parser.OFPGroupMod(
        dp, cmd, type_, group_id, buckets)
    dp.send_msg(group_mod)
def mod_port_behavior(dp, port_config):
    """Build and send an OFPPortMod from the ``port_config`` dict.

    Numeric fields default to 0 when absent; 'hw_addr' defaults to
    None.
    """
    port_no = int(port_config.get('port_no', 0))
    hw_addr = port_config.get('hw_addr')
    config = int(port_config.get('config', 0))
    mask = int(port_config.get('mask', 0))
    # Default to 0 like every other field; previously a missing
    # 'advertise' key crashed with TypeError (int(None)).
    advertise = int(port_config.get('advertise', 0))

    port_mod = dp.ofproto_parser.OFPPortMod(
        dp, port_no, hw_addr, config, mask, advertise)
    dp.send_msg(port_mod)
def send_experimenter(dp, exp):
    """Build and send an OFPExperimenter message from the ``exp`` dict.

    ``exp`` keys: 'experimenter' (vendor id), 'exp_type', 'data' and
    'data_type' ('ascii', the default, or 'base64').  An unknown
    data_type is logged and the data is sent as-is.
    """
    experimenter = exp.get('experimenter', 0)
    exp_type = exp.get('exp_type', 0)

    data_type = exp.get('data_type', 'ascii')
    if data_type != 'ascii' and data_type != 'base64':
        LOG.debug('Unknown data type: %s', data_type)
    data = exp.get('data', '')
    if data_type == 'base64':
        data = base64.b64decode(data)

    expmsg = dp.ofproto_parser.OFPExperimenter(
        dp, experimenter, exp_type, data)
    dp.send_msg(expmsg)
<|file_name|>gproc.e.py<|end_file_name|><|fim▁begin|>'''
This script demonstrates how to create a periodic Gaussian process
using the *gpiso* function.
'''
import numpy as np
import matplotlib.pyplot as plt
from sympy import sin, exp, pi
from rbf.basis import get_r, get_eps, RBF
from rbf.gproc import gpiso

np.random.seed(1)

period = 5.0
cls = 0.5  # characteristic length scale
var = 1.0  # variance

r = get_r()  # get symbolic variables
eps = get_eps()
# create a symbolic expression of the periodic covariance function
expr = exp(-sin(r*pi/period)**2/eps**2)
# define a periodic RBF using the symbolic expression
basis = RBF(expr)
# define a Gaussian process using the periodic RBF
gp = gpiso(basis, eps=cls, var=var)

t = np.linspace(-10, 10, 1000)[:,None]
sample = gp.sample(t)  # draw a sample
mu,sigma = gp(t)  # evaluate mean and std. dev.

# plot the results
fig,ax = plt.subplots(figsize=(6,4))
ax.grid(True)
ax.plot(t[:,0], mu, 'b-', label='mean')
ax.fill_between(
    t[:,0], mu - sigma, mu + sigma,
    color='b', alpha=0.2, edgecolor='none', label='std. dev.')
ax.plot(t, sample, 'k', label='sample')
ax.set_xlim((-10.0, 10.0))
ax.set_ylim((-2.5*var, 2.5*var))
ax.legend(loc=4, fontsize=10)
ax.tick_params(labelsize=10)
ax.set_xlabel('time', fontsize=10)
ax.set_title('periodic Gaussian process', fontsize=10)
fig.tight_layout()
plt.savefig('../figures/gproc.e.png')
plt.show()
<|file_name|>console.rs<|end_file_name|><|fim▁begin|>// Copyright © 2018 Cormac O'Brien
//
// Permission is hereby granted, free of charge, to any person obtaining a copy of this software
// and associated documentation files (the "Software"), to deal in the Software without
// restriction, including without limitation the rights to use, copy, modify, merge, publish,
// distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the
// Software is furnished to do so, subject to the following conditions:
//
// The above copyright notice and this permission notice shall be included in all copies or
// substantial portions of the Software.
//
// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING
// BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
// NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM,
// DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
// OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
use std::{cell::RefCell, rc::Rc};
use crate::common::console::Console;
use failure::Error;
use winit::event::{ElementState, Event, KeyboardInput, VirtualKeyCode as Key, WindowEvent};
/// Routes winit window events to the shared in-game console.
pub struct ConsoleInput {
    console: Rc<RefCell<Console>>,
}
<|fim▁hole|>impl ConsoleInput {
pub fn new(console: Rc<RefCell<Console>>) -> ConsoleInput {
ConsoleInput { console }
}
pub fn handle_event<T>(&self, event: Event<T>) -> Result<(), Error> {
match event {
Event::WindowEvent { event, .. } => match event {
WindowEvent::ReceivedCharacter(c) => self.console.borrow_mut().send_char(c),
WindowEvent::KeyboardInput {
input:
KeyboardInput {
virtual_keycode: Some(key),
state: ElementState::Pressed,
..
},
..
} => match key {
Key::Up => self.console.borrow_mut().history_up(),
Key::Down => self.console.borrow_mut().history_down(),
Key::Left => self.console.borrow_mut().cursor_left(),
Key::Right => self.console.borrow_mut().cursor_right(),
Key::Grave => self.console.borrow_mut().stuff_text("toggleconsole\n"),
_ => (),
},
_ => (),
},
_ => (),
}
Ok(())
}
}<|fim▁end|> | |
<|file_name|>lifetime_tok.rs<|end_file_name|><|fim▁begin|>use lifetime_tok_lib::LtTok;
extern crate lalrpop_util as __lalrpop_util;
mod __parse__Expr {
#![allow(non_snake_case, non_camel_case_types, unused_mut, unused_variables, unused_imports)]
use lifetime_tok_lib::LtTok;
extern crate lalrpop_util as __lalrpop_util;
use super::__ToTriple;
pub fn parse_Expr<
'input,
__TOKEN: __ToTriple<'input, Error=()>,
__TOKENS: IntoIterator<Item=__TOKEN>,
>(
__tokens0: __TOKENS,
) -> Result<Vec<&'input str>, __lalrpop_util::ParseError<(),LtTok<'input>,()>> where
__TOKENS: Clone,
{
let __ascent = __ascent::parse_Expr(
__tokens0.clone(),
);
let __parse_table = __parse_table::parse_Expr(
__tokens0.clone(),
);
assert_eq!(__ascent, __parse_table);
return __ascent;
}
mod __ascent {
mod __parse__Expr {
#![allow(non_snake_case, non_camel_case_types, unused_mut, unused_variables, unused_imports)]
use lifetime_tok_lib::LtTok;
extern crate lalrpop_util as __lalrpop_util;
use super::super::super::__ToTriple;
pub fn parse_Expr<
'input,
__TOKEN: __ToTriple<'input, Error=()>,
__TOKENS: IntoIterator<Item=__TOKEN>,
>(
__tokens0: __TOKENS,
) -> Result<Vec<&'input str>, __lalrpop_util::ParseError<(),LtTok<'input>,()>>
{
let __tokens = __tokens0.into_iter();
let mut __tokens = __tokens.map(|t| __ToTriple::to_triple(t));
let __lookahead = match __tokens.next() {
Some(Ok(v)) => Some(v),
None => None,
Some(Err(e)) => return Err(__lalrpop_util::ParseError::User { error: e }),
};
match try!(__state0(&mut __tokens, __lookahead, ::std::marker::PhantomData::<()>)) {
(Some(__lookahead), _) => {
Err(__lalrpop_util::ParseError::ExtraToken { token: __lookahead })
}
(None, __Nonterminal::____Expr((_, __nt, _))) => {
Ok(__nt)
}
_ => unreachable!(),
}
}
#[allow(dead_code)]
pub enum __Nonterminal<'input> {
Expr(((), Vec<&'input str>, ())),
Other_2a(((), ::std::vec::Vec<&'input str>, ())),
Other_2b(((), ::std::vec::Vec<&'input str>, ())),
____Expr(((), Vec<&'input str>, ())),
}
// State 0
// AllInputs = []
// OptionalInputs = []
// FixedInputs = []
// WillPushLen = 0
// WillPush = []
// WillProduce = None
//
// Expr = (*) [EOF]
// Expr = (*) Other+ [EOF]
// Other+ = (*) Other+ Other [Other]
// Other+ = (*) Other+ Other [EOF]
// Other+ = (*) Other [Other]
// Other+ = (*) Other [EOF]
// __Expr = (*) Expr [EOF]
//
// Other -> S3
// [EOF] -> Expr = => ActionFn(6);
//
// Expr -> S1
// Other+ -> S2
pub fn __state0<
'input,
__TOKENS: Iterator<Item=Result<((), LtTok<'input>, ()),()>>,
>(
__tokens: &mut __TOKENS,
__lookahead: Option<((), LtTok<'input>, ())>,
_: ::std::marker::PhantomData<()>,
) -> Result<(Option<((), LtTok<'input>, ())>, __Nonterminal<'input>), __lalrpop_util::ParseError<(),LtTok<'input>,()>>
{
let mut __result: (Option<((), LtTok<'input>, ())>, __Nonterminal<'input>);
match __lookahead {
Some((__loc1, LtTok::Other(__tok0), __loc2)) => {
let __sym0 = (__loc1, (__tok0), __loc2);
__result = try!(__state3(__tokens, __sym0, ::std::marker::PhantomData::<()>));
}
None => {
let __start: () = ::std::default::Default::default();
let __end = __lookahead.as_ref().map(|o| o.0.clone()).unwrap_or_else(|| __start.clone());
let __nt = super::super::super::__action6::<>(&__start, &__end);
let __nt = __Nonterminal::Expr((
__start,
__nt,
__end,
));
__result = (__lookahead, __nt);
}
_ => {
return Err(__lalrpop_util::ParseError::UnrecognizedToken {
token: __lookahead,
expected: vec![],
});
}
}
loop {
let (__lookahead, __nt) = __result;
match __nt {
__Nonterminal::Expr(__sym0) => {
__result = try!(__state1(__tokens, __lookahead, __sym0, ::std::marker::PhantomData::<()>));
}
__Nonterminal::Other_2b(__sym0) => {
__result = try!(__state2(__tokens, __lookahead, __sym0, ::std::marker::PhantomData::<()>));
}
_ => {
return Ok((__lookahead, __nt));
}
}
}
}
// State 1
// AllInputs = [Expr]
// OptionalInputs = []
// FixedInputs = [Expr]
// WillPushLen = 0
// WillPush = []
// WillProduce = Some(__Expr)
//
// __Expr = Expr (*) [EOF]
//
// [EOF] -> __Expr = Expr => ActionFn(0);
//
pub fn __state1<
'input,
__TOKENS: Iterator<Item=Result<((), LtTok<'input>, ()),()>>,
>(
__tokens: &mut __TOKENS,
__lookahead: Option<((), LtTok<'input>, ())>,
__sym0: ((), Vec<&'input str>, ()),
_: ::std::marker::PhantomData<()>,
) -> Result<(Option<((), LtTok<'input>, ())>, __Nonterminal<'input>), __lalrpop_util::ParseError<(),LtTok<'input>,()>>
{
let mut __result: (Option<((), LtTok<'input>, ())>, __Nonterminal<'input>);
match __lookahead {
None => {
let __start = __sym0.0.clone();
let __end = __sym0.2.clone();
let __nt = super::super::super::__action0::<>(__sym0);
let __nt = __Nonterminal::____Expr((
__start,
__nt,
__end,
));
__result = (__lookahead, __nt);
return Ok(__result);
}
_ => {
return Err(__lalrpop_util::ParseError::UnrecognizedToken {
token: __lookahead,
expected: vec![],
});
}
}
}
// State 2
// AllInputs = [Other+]
// OptionalInputs = []
// FixedInputs = [Other+]
// WillPushLen = 0
// WillPush = []
// WillProduce = None
//
// Expr = Other+ (*) [EOF]
// Other+ = Other+ (*) Other [Other, EOF]
//
// Other -> S4
// [EOF] -> Expr = Other+ => ActionFn(7);
//
pub fn __state2<
'input,
__TOKENS: Iterator<Item=Result<((), LtTok<'input>, ()),()>>,
>(
__tokens: &mut __TOKENS,
__lookahead: Option<((), LtTok<'input>, ())>,
__sym0: ((), ::std::vec::Vec<&'input str>, ()),
_: ::std::marker::PhantomData<()>,
) -> Result<(Option<((), LtTok<'input>, ())>, __Nonterminal<'input>), __lalrpop_util::ParseError<(),LtTok<'input>,()>>
{
let mut __result: (Option<((), LtTok<'input>, ())>, __Nonterminal<'input>);
match __lookahead {
Some((__loc1, LtTok::Other(__tok0), __loc2)) => {
let __sym1 = (__loc1, (__tok0), __loc2);
__result = try!(__state4(__tokens, __sym0, __sym1, ::std::marker::PhantomData::<()>));
return Ok(__result);
}
None => {
let __start = __sym0.0.clone();
let __end = __sym0.2.clone();
let __nt = super::super::super::__action7::<>(__sym0);
let __nt = __Nonterminal::Expr((
__start,
__nt,
__end,
));
__result = (__lookahead, __nt);
return Ok(__result);
}
_ => {
return Err(__lalrpop_util::ParseError::UnrecognizedToken {
token: __lookahead,
expected: vec![],
});
}
}
}
// State 3
// AllInputs = [Other]
// OptionalInputs = []
// FixedInputs = [Other]
// WillPushLen = 0
// WillPush = []
// WillProduce = Some(Other+)
//
// Other+ = Other (*) [Other, EOF]
//
// [Other, EOF] -> Other+ = Other => ActionFn(4);
//
pub fn __state3<
'input,
__TOKENS: Iterator<Item=Result<((), LtTok<'input>, ()),()>>,
>(
__tokens: &mut __TOKENS,
__sym0: ((), &'input str, ()),
_: ::std::marker::PhantomData<()>,
) -> Result<(Option<((), LtTok<'input>, ())>, __Nonterminal<'input>), __lalrpop_util::ParseError<(),LtTok<'input>,()>>
{
let mut __result: (Option<((), LtTok<'input>, ())>, __Nonterminal<'input>);
let __lookahead = match __tokens.next() {
Some(Ok(v)) => Some(v),
None => None,
Some(Err(e)) => return Err(__lalrpop_util::ParseError::User { error: e }),
};
match __lookahead {
Some((_, LtTok::Other(_), _)) |
None => {
let __start = __sym0.0.clone();
let __end = __sym0.2.clone();
let __nt = super::super::super::__action4::<>(__sym0);
let __nt = __Nonterminal::Other_2b((
__start,
__nt,
__end,
));
__result = (__lookahead, __nt);
return Ok(__result);
}
_ => {
return Err(__lalrpop_util::ParseError::UnrecognizedToken {
token: __lookahead,
expected: vec![],
});
}
}
}
// State 4
// AllInputs = [Other+, Other]
// OptionalInputs = []
// FixedInputs = [Other+, Other]
// WillPushLen = 0
// WillPush = []
// WillProduce = Some(Other+)
//
// Other+ = Other+ Other (*) [Other, EOF]
//
// [Other, EOF] -> Other+ = Other+, Other => ActionFn(5);
//
pub fn __state4<
'input,
__TOKENS: Iterator<Item=Result<((), LtTok<'input>, ()),()>>,
>(
__tokens: &mut __TOKENS,
__sym0: ((), ::std::vec::Vec<&'input str>, ()),
__sym1: ((), &'input str, ()),
_: ::std::marker::PhantomData<()>,
) -> Result<(Option<((), LtTok<'input>, ())>, __Nonterminal<'input>), __lalrpop_util::ParseError<(),LtTok<'input>,()>>
{
let mut __result: (Option<((), LtTok<'input>, ())>, __Nonterminal<'input>);
let __lookahead = match __tokens.next() {
Some(Ok(v)) => Some(v),
None => None,
Some(Err(e)) => return Err(__lalrpop_util::ParseError::User { error: e }),
};
match __lookahead {
Some((_, LtTok::Other(_), _)) |
None => {
let __start = __sym0.0.clone();
let __end = __sym1.2.clone();
let __nt = super::super::super::__action5::<>(__sym0, __sym1);
let __nt = __Nonterminal::Other_2b((
__start,
__nt,
__end,
));
__result = (__lookahead, __nt);
return Ok(__result);
}
_ => {
return Err(__lalrpop_util::ParseError::UnrecognizedToken {
token: __lookahead,
expected: vec![],
});
}
}
}
}
pub use self::__parse__Expr::parse_Expr;
}
mod __parse_table {
mod __parse__Expr {
#![allow(non_snake_case, non_camel_case_types, unused_mut, unused_variables, unused_imports)]
use lifetime_tok_lib::LtTok;
extern crate lalrpop_util as __lalrpop_util;
use super::super::super::__ToTriple;
#[allow(dead_code)]
pub enum __Symbol<'input> {
TermOther(&'input str),
NtExpr(Vec<&'input str>),
NtOther_2a(::std::vec::Vec<&'input str>),
NtOther_2b(::std::vec::Vec<&'input str>),
Nt____Expr(Vec<&'input str>),
}
const __ACTION: &'static [i32] = &[
// State 0
// Expr = (*) [EOF]
// Expr = (*) Other+ [EOF]
// Other+ = (*) Other+ Other [Other]
// Other+ = (*) Other+ Other [EOF]
// Other+ = (*) Other [Other]
// Other+ = (*) Other [EOF]
// __Expr = (*) Expr [EOF]
4, // on Other, goto 3
// State 1
// __Expr = Expr (*) [EOF]
0, // on Other, error
// State 2
// Expr = Other+ (*) [EOF]
// Other+ = Other+ (*) Other [Other, EOF]
5, // on Other, goto 4
// State 3
// Other+ = Other (*) [Other, EOF]
-5, // on Other, reduce `Other+ = Other => ActionFn(4);`
// State 4
// Other+ = Other+ Other (*) [Other, EOF]
-6, // on Other, reduce `Other+ = Other+, Other => ActionFn(5);`
];
const __EOF_ACTION: &'static [i32] = &[
-1, // on EOF, reduce `Expr = => ActionFn(6);`
-7, // on EOF, reduce `__Expr = Expr => ActionFn(0);`
-2, // on EOF, reduce `Expr = Other+ => ActionFn(7);`
-5, // on EOF, reduce `Other+ = Other => ActionFn(4);`
-6, // on EOF, reduce `Other+ = Other+, Other => ActionFn(5);`
];
const __GOTO: &'static [i32] = &[
// State 0
2, // on Expr, goto 1
0, // on Other*, error
3, // on Other+, goto 2
0, // on __Expr, error
// State 1
0, // on Expr, error
0, // on Other*, error
0, // on Other+, error
0, // on __Expr, error
// State 2
0, // on Expr, error
0, // on Other*, error
0, // on Other+, error
0, // on __Expr, error
// State 3
0, // on Expr, error
0, // on Other*, error
0, // on Other+, error
0, // on __Expr, error
// State 4
0, // on Expr, error
0, // on Other*, error
0, // on Other+, error
0, // on __Expr, error
];
pub fn parse_Expr<
'input,
__TOKEN: __ToTriple<'input, Error=()>,
__TOKENS: IntoIterator<Item=__TOKEN>,
>(
__tokens0: __TOKENS,
) -> Result<Vec<&'input str>, __lalrpop_util::ParseError<(),LtTok<'input>,()>>
{
let __tokens = __tokens0.into_iter();
let mut __tokens = __tokens.map(|t| __ToTriple::to_triple(t));
let mut __states = vec![0_i32];
let mut __symbols = vec![];
'__shift: loop {
let __lookahead = match __tokens.next() {
Some(Ok(v)) => v,
None => break '__shift,
Some(Err(e)) => return Err(__lalrpop_util::ParseError::User { error: e }),
};
let __integer = match __lookahead {
(_, LtTok::Other(_), _) if true => 0,
_ => {
return Err(__lalrpop_util::ParseError::UnrecognizedToken {
token: Some(__lookahead),
expected: vec![],
});
}
};
loop {
let __state = *__states.last().unwrap() as usize;
let __action = __ACTION[__state * 1 + __integer];
if __action > 0 {
let __symbol = match __integer {
0 => match __lookahead.1 {
LtTok::Other(__tok0) => __Symbol::TermOther(__tok0),
_ => unreachable!(),
},
_ => unreachable!(),
};
__states.push(__action - 1);
__symbols.push((__lookahead.0, __symbol, __lookahead.2));
continue '__shift;
} else if __action < 0 {
if let Some(r) = __reduce(__action, Some(&__lookahead.0), &mut __states, &mut __symbols, ::std::marker::PhantomData::<()>) {
return r;
}
} else {
return Err(__lalrpop_util::ParseError::UnrecognizedToken {
token: Some(__lookahead),
expected: vec![],
});
}
}
}
loop {
let __state = *__states.last().unwrap() as usize;
let __action = __EOF_ACTION[__state];
if __action < 0 {
if let Some(r) = __reduce(__action, None, &mut __states, &mut __symbols, ::std::marker::PhantomData::<()>) {
return r;
}
} else {
return Err(__lalrpop_util::ParseError::UnrecognizedToken {
token: None,
expected: vec![],
});
}
}
}
pub fn __reduce<
'input,
>(
__action: i32,
__lookahead_start: Option<&()>,
__states: &mut ::std::vec::Vec<i32>,
__symbols: &mut ::std::vec::Vec<((),__Symbol<'input>,())>,
_: ::std::marker::PhantomData<()>,
) -> Option<Result<Vec<&'input str>,__lalrpop_util::ParseError<(),LtTok<'input>,()>>>
{
let __nonterminal = match -__action {
1 => {
// Expr = => ActionFn(6);
let __start = __symbols.last().map(|s| s.2.clone()).unwrap_or_default();
let __end = __lookahead_start.cloned().unwrap_or_else(|| __start.clone());
let __nt = super::super::super::__action6::<>(&__start, &__end);
let __states_len = __states.len();
__states.truncate(__states_len - 0);
__symbols.push((__start, __Symbol::NtExpr(__nt), __end));
0
}
2 => {
// Expr = Other+ => ActionFn(7);
let __sym0 = __pop_NtOther_2b(__symbols);
let __start = __sym0.0.clone();
let __end = __sym0.2.clone();
let __nt = super::super::super::__action7::<>(__sym0);
let __states_len = __states.len();
__states.truncate(__states_len - 1);
__symbols.push((__start, __Symbol::NtExpr(__nt), __end));
0
}
3 => {
// Other* = => ActionFn(2);
let __start = __symbols.last().map(|s| s.2.clone()).unwrap_or_default();
let __end = __lookahead_start.cloned().unwrap_or_else(|| __start.clone());
let __nt = super::super::super::__action2::<>(&__start, &__end);
let __states_len = __states.len();
__states.truncate(__states_len - 0);
__symbols.push((__start, __Symbol::NtOther_2a(__nt), __end));
1
}
4 => {
// Other* = Other+ => ActionFn(3);
let __sym0 = __pop_NtOther_2b(__symbols);
let __start = __sym0.0.clone();
let __end = __sym0.2.clone();
let __nt = super::super::super::__action3::<>(__sym0);
let __states_len = __states.len();
__states.truncate(__states_len - 1);
__symbols.push((__start, __Symbol::NtOther_2a(__nt), __end));
1
}
5 => {
// Other+ = Other => ActionFn(4);
let __sym0 = __pop_TermOther(__symbols);
let __start = __sym0.0.clone();
let __end = __sym0.2.clone();
let __nt = super::super::super::__action4::<>(__sym0);
let __states_len = __states.len();
__states.truncate(__states_len - 1);
__symbols.push((__start, __Symbol::NtOther_2b(__nt), __end));
2
}
6 => {
// Other+ = Other+, Other => ActionFn(5);
let __sym1 = __pop_TermOther(__symbols);
let __sym0 = __pop_NtOther_2b(__symbols);
let __start = __sym0.0.clone();
let __end = __sym1.2.clone();
let __nt = super::super::super::__action5::<>(__sym0, __sym1);
let __states_len = __states.len();
__states.truncate(__states_len - 2);
__symbols.push((__start, __Symbol::NtOther_2b(__nt), __end));
2
}
7 => {
// __Expr = Expr => ActionFn(0);
let __sym0 = __pop_NtExpr(__symbols);
let __start = __sym0.0.clone();
let __end = __sym0.2.clone();
let __nt = super::super::super::__action0::<>(__sym0);
return Some(Ok(__nt));
}
_ => panic!("invalid action code {}", __action)
};
let __state = *__states.last().unwrap() as usize;
let __next_state = __GOTO[__state * 4 + __nonterminal] - 1;
__states.push(__next_state);
None
}
fn __pop_TermOther<
'input,
>(
__symbols: &mut ::std::vec::Vec<((),__Symbol<'input>,())>
) -> ((), &'input str, ()) {
match __symbols.pop().unwrap() {
(__l, __Symbol::TermOther(__v), __r) => (__l, __v, __r),
_ => panic!("symbol type mismatch")
}
}
fn __pop_NtExpr<
'input,
>(
__symbols: &mut ::std::vec::Vec<((),__Symbol<'input>,())>
) -> ((), Vec<&'input str>, ()) {
match __symbols.pop().unwrap() {
(__l, __Symbol::NtExpr(__v), __r) => (__l, __v, __r),
_ => panic!("symbol type mismatch")
}
}
fn __pop_NtOther_2a<
'input,
>(
__symbols: &mut ::std::vec::Vec<((),__Symbol<'input>,())>
) -> ((), ::std::vec::Vec<&'input str>, ()) {
match __symbols.pop().unwrap() {
(__l, __Symbol::NtOther_2a(__v), __r) => (__l, __v, __r),
_ => panic!("symbol type mismatch")
}
}
fn __pop_NtOther_2b<
'input,
>(
__symbols: &mut ::std::vec::Vec<((),__Symbol<'input>,())>
) -> ((), ::std::vec::Vec<&'input str>, ()) {
match __symbols.pop().unwrap() {
(__l, __Symbol::NtOther_2b(__v), __r) => (__l, __v, __r),
_ => panic!("symbol type mismatch")
}
}
fn __pop_Nt____Expr<
'input,
>(
__symbols: &mut ::std::vec::Vec<((),__Symbol<'input>,())>
) -> ((), Vec<&'input str>, ()) {
match __symbols.pop().unwrap() {
(__l, __Symbol::Nt____Expr(__v), __r) => (__l, __v, __r),
_ => panic!("symbol type mismatch")
}
}
}
pub use self::__parse__Expr::parse_Expr;
}
}
pub use self::__parse__Expr::parse_Expr;
pub fn __action0<
'input,
>(
(_, __0, _): ((), Vec<&'input str>, ()),
) -> Vec<&'input str>
{
(__0)
}
pub fn __action1<
'input,
>(
(_, __0, _): ((), ::std::vec::Vec<&'input str>, ()),
) -> Vec<&'input str>
{
(__0)
}
pub fn __action2<
'input,
>(
__lookbehind: &(),
__lookahead: &(),
) -> ::std::vec::Vec<&'input str>
{
vec![]
}
pub fn __action3<
'input,
>(
(_, v, _): ((), ::std::vec::Vec<&'input str>, ()),
) -> ::std::vec::Vec<&'input str>
{
v
}<|fim▁hole|>
pub fn __action4<
'input,
>(
(_, __0, _): ((), &'input str, ()),
) -> ::std::vec::Vec<&'input str>
{
vec![__0]
}
// Semantic action for `Other+ = Other+ Other`: append the new element
// to the accumulated vector.  (Generated by LALRPOP -- do not edit by
// hand; change the grammar instead.)
pub fn __action5<
'input,
>(
    (_, v, _): ((), ::std::vec::Vec<&'input str>, ()),
    (_, e, _): ((), &'input str, ()),
) -> ::std::vec::Vec<&'input str>
{
    { let mut v = v; v.push(e); v }
}
pub fn __action6<
'input,
>(
__lookbehind: &(),
__lookahead: &(),
) -> Vec<&'input str>
{
let __start0 = __lookbehind.clone();
let __end0 = __lookahead.clone();
let __temp0 = __action2(
&__start0,
&__end0,
);
let __temp0 = (__start0, __temp0, __end0);
__action1(
__temp0,
)
}
pub fn __action7<
'input,
>(
__0: ((), ::std::vec::Vec<&'input str>, ()),
) -> Vec<&'input str>
{
let __start0 = __0.0.clone();
let __end0 = __0.2.clone();
let __temp0 = __action3(
__0,
);
let __temp0 = (__start0, __temp0, __end0);
__action1(
__temp0,
)
}
pub trait __ToTriple<'input, > {
type Error;
fn to_triple(value: Self) -> Result<((),LtTok<'input>,()),Self::Error>;
}
impl<'input, > __ToTriple<'input, > for LtTok<'input> {
type Error = ();
fn to_triple(value: Self) -> Result<((),LtTok<'input>,()),()> {
Ok(((), value, ()))
}
}
impl<'input, > __ToTriple<'input, > for Result<(LtTok<'input>),()> {
type Error = ();
fn to_triple(value: Self) -> Result<((),LtTok<'input>,()),()> {
value.map(|v| ((), v, ()))
}
}<|fim▁end|> | |
<|file_name|>__init__.py<|end_file_name|><|fim▁begin|># -*- coding: utf-8 -*-
# Part of Odoo. See LICENSE file for full copyright and licensing details.
from . import event_event
from . import event_registration<|fim▁hole|>from . import event_type
from . import website
from . import website_event_menu
from . import website_menu
from . import website_visitor<|fim▁end|> | |
<|file_name|>test_web.py<|end_file_name|><|fim▁begin|>import csv
import json
import sys
import traceback
from six.moves import StringIO
import requests
import mock
import gevent
from gevent import wsgi
from locust import web, runners, stats
from locust.runners import LocustRunner
from locust.main import parse_options
from .testcases import LocustTestCase
class TestWebUI(LocustTestCase):
def setUp(self):
super(TestWebUI, self).setUp()
<|fim▁hole|> runners.locust_runner = LocustRunner([], options)
self._web_ui_server = wsgi.WSGIServer(('127.0.0.1', 0), web.app, log=None)
gevent.spawn(lambda: self._web_ui_server.serve_forever())
gevent.sleep(0.01)
self.web_port = self._web_ui_server.server_port
def tearDown(self):
super(TestWebUI, self).tearDown()
self._web_ui_server.stop()
def test_index(self):
self.assertEqual(200, requests.get("http://127.0.0.1:%i/" % self.web_port).status_code)
def test_stats_no_data(self):
self.assertEqual(200, requests.get("http://127.0.0.1:%i/stats/requests" % self.web_port).status_code)
def test_stats(self):
stats.global_stats.get("/test", "GET").log(120, 5612)
response = requests.get("http://127.0.0.1:%i/stats/requests" % self.web_port)
self.assertEqual(200, response.status_code)
data = json.loads(response.text)
self.assertEqual(2, len(data["stats"])) # one entry plus Total
self.assertEqual("/test", data["stats"][0]["name"])
self.assertEqual("GET", data["stats"][0]["method"])
self.assertEqual(120, data["stats"][0]["avg_response_time"])
def test_stats_cache(self):
stats.global_stats.get("/test", "GET").log(120, 5612)
response = requests.get("http://127.0.0.1:%i/stats/requests" % self.web_port)
self.assertEqual(200, response.status_code)
data = json.loads(response.text)
self.assertEqual(2, len(data["stats"])) # one entry plus Total
# add another entry
stats.global_stats.get("/test2", "GET").log(120, 5612)
data = json.loads(requests.get("http://127.0.0.1:%i/stats/requests" % self.web_port).text)
self.assertEqual(2, len(data["stats"])) # old value should be cached now
web.request_stats.clear_cache()
data = json.loads(requests.get("http://127.0.0.1:%i/stats/requests" % self.web_port).text)
self.assertEqual(3, len(data["stats"])) # this should no longer be cached
def test_request_stats_csv(self):
stats.global_stats.get("/test", "GET").log(120, 5612)
response = requests.get("http://127.0.0.1:%i/stats/requests/csv" % self.web_port)
self.assertEqual(200, response.status_code)
def test_distribution_stats_csv(self):
stats.global_stats.get("/test", "GET").log(120, 5612)
response = requests.get("http://127.0.0.1:%i/stats/distribution/csv" % self.web_port)
self.assertEqual(200, response.status_code)
def test_exceptions_csv(self):
try:
raise Exception("Test exception")
except Exception as e:
tb = sys.exc_info()[2]
runners.locust_runner.log_exception("local", str(e), "".join(traceback.format_tb(tb)))
runners.locust_runner.log_exception("local", str(e), "".join(traceback.format_tb(tb)))
response = requests.get("http://127.0.0.1:%i/exceptions/csv" % self.web_port)
self.assertEqual(200, response.status_code)
reader = csv.reader(StringIO(response.text))
rows = []
for row in reader:
rows.append(row)
self.assertEqual(2, len(rows))
self.assertEqual("Test exception", rows[1][1])
self.assertEqual(2, int(rows[1][0]), "Exception count should be 2")<|fim▁end|> | stats.global_stats.clear_all()
parser = parse_options()[0]
options = parser.parse_args([])[0] |
<|file_name|>setup.py<|end_file_name|><|fim▁begin|>#!/usr/bin/env python
# -*- coding: utf-8 -*-
import unittest
from setuptools import setup, find_packages
from setuptools.command.test import test as TestCommand
import sys
class MyTest(TestCommand):
def run_tests(self):
tests = unittest.TestLoader().discover('tests', pattern='test_*.py')
unittest.TextTestRunner(verbosity=1).run(tests)
setup(
name='flask_restapi',
version='0.2.0',
license='MIT',
description=u'A simple rest query framework by flask, peewee, rest_query',
author='dracarysX',
author_email='[email protected]',
url='https://github.com/dracarysX/flask_restapi',
packages=find_packages(include=['flask_restapi']),
install_requires=[
'peewee',
'flask',
'wtforms',
'flask_bcrypt',
'flask-script',
'peewee-rest-query'
],
test_suite='nose.collector',
tests_require=['nose'],
classifiers=[
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3',
'Programming Language :: Python :: 3.4',
'Programming Language :: Python :: 3.5',
'Programming Language :: Python :: 3.6',
'License :: MIT',
],
keywords='Python, Flask, APIMethodView, Filtering Query API, Mysql, Peewee, RestAPI',
long_description='A simple rest query framework. Web framework use flask, '<|fim▁hole|>)<|fim▁end|> | 'orm by peewee, form by wtform and query by rest_query.'
'The framework implements custom query api(like this: /?select=id,name&id=gte.20), '
'save form data, model object serializer, APIMethodView(get, post, put,delete) and errorhandler.' |
<|file_name|>controller.js<|end_file_name|><|fim▁begin|>import Ember from 'ember';
import Sortable from 'ui/mixins/sortable';
export default Ember.Controller.extend(Sortable, {<|fim▁hole|> ip: ['displayIp','name','id'],
image: ['imageUuid','id'],
},
});<|fim▁end|> | sortBy: 'name',
sorts: {
state: ['stateSort','name','id'],
name: ['name','id'], |
<|file_name|>listing_8_21.py<|end_file_name|><|fim▁begin|>import sys
import os
import Image
def simpleQuant():
im = Image.open('bubbles.jpg')
w,h = im.size
for row in range(h):
for col in range(w):
r,g,b = im.getpixel((col,row))<|fim▁hole|> im.putpixel((col,row),(r,g,b))
im.show()<|fim▁end|> | r = r // 36 * 36
g = g // 42 * 42
b = b // 42 * 42 |
<|file_name|>scheduler.cpp<|end_file_name|><|fim▁begin|>/* This file is part of the KDE libraries
Copyright (C) 2000 Stephan Kulow <[email protected]>
Waldo Bastian <[email protected]>
This library is free software; you can redistribute it and/or
modify it under the terms of the GNU Library General Public
License version 2 as published by the Free Software Foundation.
This library is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
Library General Public License for more details.
You should have received a copy of the GNU Library General Public License
along with this library; see the file COPYING.LIB. If not, write to
the Free Software Foundation, Inc., 51 Franklin Street, Fifth Floor,
Boston, MA 02110-1301, USA.
*/
#include "kio/sessiondata.h"
#include "kio/slaveconfig.h"
#include "kio/scheduler.h"
#include "kio/authinfo.h"
#include "kio/slave.h"
#include <qptrlist.h>
#include <qdict.h>
#include <dcopclient.h>
#include <kdebug.h>
#include <kglobal.h>
#include <kprotocolmanager.h>
#include <kprotocolinfo.h>
#include <assert.h>
#include <kstaticdeleter.h>
#include <kdesu/client.h>
// Slaves may be idle for MAX_SLAVE_IDLE time before they are being returned
// to the system wide slave pool. (3 minutes)
#define MAX_SLAVE_IDLE (3*60)
using namespace KIO;
template class QDict<KIO::Scheduler::ProtocolInfo>;
Scheduler *Scheduler::instance = 0;
class KIO::SlaveList: public QPtrList<Slave>
{
public:
SlaveList() { }
};
//
// There are two kinds of protocol:
// (1) The protocol of the url
// (2) The actual protocol that the io-slave uses.
//
// These two often match, but not necasserily. Most notably, they don't
// match when doing ftp via a proxy.
// In that case (1) is ftp, but (2) is http.
//
// JobData::protocol stores (2) while Job::url().protocol() returns (1).
// The ProtocolInfoDict is indexed with (2).
//
// We schedule slaves based on (2) but tell the slave about (1) via
// Slave::setProtocol().
class KIO::Scheduler::JobData
{
public:
JobData() : checkOnHold(false) { }
public:
QString protocol;
QString proxy;
bool checkOnHold;
};
class KIO::Scheduler::ExtraJobData: public QPtrDict<KIO::Scheduler::JobData>
{
public:
ExtraJobData() { setAutoDelete(true); }
};
class KIO::Scheduler::ProtocolInfo
{
public:
ProtocolInfo() : maxSlaves(1), skipCount(0)
{
joblist.setAutoDelete(false);
}
QPtrList<SimpleJob> joblist;
SlaveList activeSlaves;
int maxSlaves;
int skipCount;
QString protocol;
};
class KIO::Scheduler::ProtocolInfoDict : public QDict<KIO::Scheduler::ProtocolInfo>
{
public:
ProtocolInfoDict() { }
KIO::Scheduler::ProtocolInfo *get( const QString &protocol);
};
KIO::Scheduler::ProtocolInfo *
KIO::Scheduler::ProtocolInfoDict::get(const QString &protocol)
{
ProtocolInfo *info = find(protocol);
if (!info)
{
info = new ProtocolInfo;
info->protocol = protocol;
info->maxSlaves = KProtocolInfo::maxSlaves( protocol );
insert(protocol, info);
}
return info;
}
Scheduler::Scheduler()
: DCOPObject( "KIO::Scheduler" ),
QObject(kapp, "scheduler"),
slaveTimer(0, "Scheduler::slaveTimer"),
coSlaveTimer(0, "Scheduler::coSlaveTimer"),
cleanupTimer(0, "Scheduler::cleanupTimer")
{
checkOnHold = true; // !! Always check with KLauncher for the first request.
slaveOnHold = 0;
protInfoDict = new ProtocolInfoDict;
slaveList = new SlaveList;
idleSlaves = new SlaveList;
coIdleSlaves = new SlaveList;
extraJobData = new ExtraJobData;
sessionData = new SessionData;
slaveConfig = SlaveConfig::self();
connect(&slaveTimer, SIGNAL(timeout()), SLOT(startStep()));
connect(&coSlaveTimer, SIGNAL(timeout()), SLOT(slotScheduleCoSlave()));
connect(&cleanupTimer, SIGNAL(timeout()), SLOT(slotCleanIdleSlaves()));
busy = false;
}
Scheduler::~Scheduler()
{
protInfoDict->setAutoDelete(true);
delete protInfoDict; protInfoDict = 0;
delete idleSlaves; idleSlaves = 0;
delete coIdleSlaves; coIdleSlaves = 0;
slaveList->setAutoDelete(true);
delete slaveList; slaveList = 0;
delete extraJobData; extraJobData = 0;
delete sessionData; sessionData = 0;
instance = 0;
}
void
Scheduler::debug_info()
{
}
bool Scheduler::process(const QCString &fun, const QByteArray &data, QCString &replyType, QByteArray &replyData )
{
if ( fun != "reparseSlaveConfiguration(QString)" )
return DCOPObject::process( fun, data, replyType, replyData );
slaveConfig = SlaveConfig::self();
replyType = "void";
QDataStream stream( data, IO_ReadOnly );
QString proto;
stream >> proto;
kdDebug( 7006 ) << "reparseConfiguration( " << proto << " )" << endl;
KProtocolManager::reparseConfiguration();
slaveConfig->reset();
sessionData->reset();
NetRC::self()->reload();
Slave *slave = slaveList->first();
for (; slave; slave = slaveList->next() )
if ( slave->slaveProtocol() == proto || proto.isEmpty() )
{
slave->send( CMD_REPARSECONFIGURATION );
slave->resetHost();
}
return true;
}
QCStringList Scheduler::functions()
{
QCStringList funcs = DCOPObject::functions();
funcs << "void reparseSlaveConfiguration(QString)";
return funcs;
}
void Scheduler::_doJob(SimpleJob *job) {
JobData *jobData = new JobData;
jobData->protocol = KProtocolManager::slaveProtocol(job->url(), jobData->proxy);
// kdDebug(7006) << "Scheduler::_doJob protocol=" << jobData->protocol << endl;
if (job->command() == CMD_GET)
{
jobData->checkOnHold = checkOnHold;
checkOnHold = false;
}
extraJobData->replace(job, jobData);
newJobs.append(job);
slaveTimer.start(0, true);
#ifndef NDEBUG
if (newJobs.count() > 150)
kdDebug() << "WARNING - KIO::Scheduler got more than 150 jobs! This shows a misuse in your app (yes, a job is a QObject)." << endl;
#endif
}
void Scheduler::_scheduleJob(SimpleJob *job) {
newJobs.removeRef(job);
JobData *jobData = extraJobData->find(job);
if (!jobData)
{
kdFatal(7006) << "BUG! _ScheduleJob(): No extraJobData for job!" << endl;
return;
}
QString protocol = jobData->protocol;
// kdDebug(7006) << "Scheduler::_scheduleJob protocol=" << protocol << endl;
ProtocolInfo *protInfo = protInfoDict->get(protocol);
protInfo->joblist.append(job);
slaveTimer.start(0, true);
}
void Scheduler::_cancelJob(SimpleJob *job) {
// kdDebug(7006) << "Scheduler: canceling job " << job << endl;
Slave *slave = job->slave();
if ( !slave )
{
// was not yet running (don't call this on a finished job!)
JobData *jobData = extraJobData->find(job);
if (!jobData)
return; // I said: "Don't call this on a finished job!"
newJobs.removeRef(job);
ProtocolInfo *protInfo = protInfoDict->get(jobData->protocol);
protInfo->joblist.removeRef(job);
// Search all slaves to see if job is in the queue of a coSlave
slave = slaveList->first();
for(; slave; slave = slaveList->next())
{
JobList *list = coSlaves.find(slave);
if (list && list->removeRef(job))
break; // Job was found and removed.
// Fall through to kill the slave as well!
}
if (!slave)
{
extraJobData->remove(job);
return; // Job was not yet running and not in a coSlave queue.
}
}
kdDebug(7006) << "Scheduler: killing slave " << slave->slave_pid() << endl;
slave->kill();
_jobFinished( job, slave );
slotSlaveDied( slave);
}
void Scheduler::startStep()
{
while(newJobs.count())
{
(void) startJobDirect();
}
QDictIterator<KIO::Scheduler::ProtocolInfo> it(*protInfoDict);
while(it.current())
{
if (startJobScheduled(it.current())) return;
++it;
}
}
void Scheduler::setupSlave(KIO::Slave *slave, const KURL &url, const QString &protocol, const QString &proxy , bool newSlave, const KIO::MetaData *config)
{
QString host = url.host();
int port = url.port();
QString user = url.user();
QString passwd = url.pass();
if ((newSlave) ||
(slave->host() != host) ||
(slave->port() != port) ||
(slave->user() != user) ||
(slave->passwd() != passwd))
{
slaveConfig = SlaveConfig::self();
MetaData configData = slaveConfig->configData(protocol, host);
sessionData->configDataFor( configData, protocol, host );
configData["UseProxy"] = proxy;
QString autoLogin = configData["EnableAutoLogin"].lower();
if ( autoLogin == "true" )
{
NetRC::AutoLogin l;
l.login = user;
bool usern = (protocol == "ftp");
if ( NetRC::self()->lookup( url, l, usern) )
{
configData["autoLoginUser"] = l.login;
configData["autoLoginPass"] = l.password;
if ( usern )
{
QString macdef;
QMap<QString, QStringList>::ConstIterator it = l.macdef.begin();
for ( ; it != l.macdef.end(); ++it )
macdef += it.key() + '\\' + it.data().join( "\\" ) + '\n';
configData["autoLoginMacro"] = macdef;
}
}
}
if (config)
configData += *config;
slave->setConfig(configData);
slave->setProtocol(url.protocol());
slave->setHost(host, port, user, passwd);
}
}
bool Scheduler::startJobScheduled(ProtocolInfo *protInfo)
{
if (protInfo->joblist.isEmpty())
return false;
// kdDebug(7006) << "Scheduling job" << endl;
debug_info();
bool newSlave = false;
SimpleJob *job = 0;
Slave *slave = 0;
if (protInfo->skipCount > 2)
{
bool dummy;
// Prevent starvation. We skip the first entry in the queue at most
// 2 times in a row. The
protInfo->skipCount = 0;
job = protInfo->joblist.at(0);
slave = findIdleSlave(protInfo, job, dummy );
}
else
{
bool exact=false;
SimpleJob *firstJob = 0;
Slave *firstSlave = 0;
for(uint i = 0; (i < protInfo->joblist.count()) && (i < 10); i++)
{
job = protInfo->joblist.at(i);
slave = findIdleSlave(protInfo, job, exact);
if (!firstSlave)
{
firstJob = job;
firstSlave = slave;
}
if (!slave) break;
if (exact) break;
}
if (!exact)
{
slave = firstSlave;
job = firstJob;
}
if (job == firstJob)
protInfo->skipCount = 0;
else
protInfo->skipCount++;
}
if (!slave)
{
if ( protInfo->maxSlaves > static_cast<int>(protInfo->activeSlaves.count()) )
{
newSlave = true;
slave = createSlave(protInfo, job, job->url());
if (!slave)
slaveTimer.start(0, true);
}
}
if (!slave)
{
// kdDebug(7006) << "No slaves available" << endl;
// kdDebug(7006) << " -- active: " << protInfo->activeSlaves.count() << endl;
return false;
}
protInfo->activeSlaves.append(slave);
idleSlaves->removeRef(slave);
protInfo->joblist.removeRef(job);
// kdDebug(7006) << "scheduler: job started " << job << endl;
JobData *jobData = extraJobData->find(job);
setupSlave(slave, job->url(), jobData->protocol, jobData->proxy, newSlave);
job->start(slave);
slaveTimer.start(0, true);
return true;
}
bool Scheduler::startJobDirect()
{
debug_info();
SimpleJob *job = newJobs.take(0);
JobData *jobData = extraJobData->find(job);
if (!jobData)
{
kdFatal(7006) << "BUG! startjobDirect(): No extraJobData for job!"
<< endl;
return false;
}
QString protocol = jobData->protocol;
ProtocolInfo *protInfo = protInfoDict->get(protocol);
bool newSlave = false;
bool dummy;
// Look for matching slave
Slave *slave = findIdleSlave(protInfo, job, dummy);
if (!slave)
{
newSlave = true;
slave = createSlave(protInfo, job, job->url());
}
if (!slave)
return false;
idleSlaves->removeRef(slave);
// kdDebug(7006) << "scheduler: job started " << job << endl;
setupSlave(slave, job->url(), protocol, jobData->proxy, newSlave);
job->start(slave);
return true;
}
static Slave *searchIdleList(SlaveList *idleSlaves, const KURL &url, const QString &protocol, bool &exact)
{
QString host = url.host();
int port = url.port();
QString user = url.user();
exact = true;
for( Slave *slave = idleSlaves->first();
slave;
slave = idleSlaves->next())
{
if ((protocol == slave->slaveProtocol()) &&
(host == slave->host()) &&
(port == slave->port()) &&
(user == slave->user()))
return slave;
}
exact = false;
// Look for slightly matching slave
for( Slave *slave = idleSlaves->first();
slave;
slave = idleSlaves->next())
{
if (protocol == slave->slaveProtocol())
return slave;
}
return 0;
}
Slave *Scheduler::findIdleSlave(ProtocolInfo *, SimpleJob *job, bool &exact)
{
Slave *slave = 0;
JobData *jobData = extraJobData->find(job);
if (!jobData)
{
kdFatal(7006) << "BUG! findIdleSlave(): No extraJobData for job!" << endl;
return 0;
}
if (jobData->checkOnHold)
{
slave = Slave::holdSlave(jobData->protocol, job->url());
if (slave)
return slave;
}
if (slaveOnHold)
{
// Make sure that the job wants to do a GET or a POST, and with no offset
bool bCanReuse = (job->command() == CMD_GET);
KIO::TransferJob * tJob = dynamic_cast<KIO::TransferJob *>(job);
if ( tJob )
{
bCanReuse = (job->command() == CMD_GET || job->command() == CMD_SPECIAL);
if ( bCanReuse )
{
KIO::MetaData outgoing = tJob->outgoingMetaData();
QString resume = (!outgoing.contains("resume")) ? QString::null : outgoing["resume"];
kdDebug(7006) << "Resume metadata is '" << resume << "'" << endl;
bCanReuse = (resume.isEmpty() || resume == "0");
}
}
// kdDebug(7006) << "bCanReuse = " << bCanReuse << endl;
if (bCanReuse)
{
if (job->url() == urlOnHold)
{
kdDebug(7006) << "HOLD: Reusing held slave for " << urlOnHold.prettyURL() << endl;
slave = slaveOnHold;
}
else
{
kdDebug(7006) << "HOLD: Discarding held slave (" << urlOnHold.prettyURL() << ")" << endl;
slaveOnHold->kill();
}
slaveOnHold = 0;
urlOnHold = KURL();
}
if (slave)
return slave;
}
return searchIdleList(idleSlaves, job->url(), jobData->protocol, exact);
}
Slave *Scheduler::createSlave(ProtocolInfo *protInfo, SimpleJob *job, const KURL &url)
{
int error;
QString errortext;
Slave *slave = Slave::createSlave(protInfo->protocol, url, error, errortext);
if (slave)
{
slaveList->append(slave);
idleSlaves->append(slave);
connect(slave, SIGNAL(slaveDied(KIO::Slave *)),
SLOT(slotSlaveDied(KIO::Slave *)));
connect(slave, SIGNAL(slaveStatus(pid_t,const QCString &,const QString &, bool)),
SLOT(slotSlaveStatus(pid_t,const QCString &, const QString &, bool)));
connect(slave,SIGNAL(authorizationKey(const QCString&, const QCString&, bool)),
sessionData,SLOT(slotAuthData(const QCString&, const QCString&, bool)));
connect(slave,SIGNAL(delAuthorization(const QCString&)), sessionData,
SLOT(slotDelAuthData(const QCString&)));
}
else
{
kdError() <<": couldn't create slave : " << errortext << endl;
if (job)
{
protInfo->joblist.removeRef(job);
extraJobData->remove(job);
job->slotError( error, errortext );
}
}
return slave;
}
void Scheduler::slotSlaveStatus(pid_t, const QCString &, const QString &, bool)
{
}
void Scheduler::_jobFinished(SimpleJob *job, Slave *slave)
{
JobData *jobData = extraJobData->take(job);
if (!jobData)
{
kdFatal(7006) << "BUG! _jobFinished(): No extraJobData for job!" << endl;
return;
}
ProtocolInfo *protInfo = protInfoDict->get(jobData->protocol);
delete jobData;
slave->disconnect(job);
protInfo->activeSlaves.removeRef(slave);
if (slave->isAlive())
{
JobList *list = coSlaves.find(slave);
if (list)
{
assert(slave->isConnected());
assert(!coIdleSlaves->contains(slave));
coIdleSlaves->append(slave);
if (!list->isEmpty())
coSlaveTimer.start(0, true);
return;
}
else
{
assert(!slave->isConnected());
idleSlaves->append(slave);
slave->setIdle();
_scheduleCleanup();
// slave->send( CMD_SLAVE_STATUS );
}
}
if (protInfo->joblist.count())
{
slaveTimer.start(0, true);
}
}
void Scheduler::slotSlaveDied(KIO::Slave *slave)
{
assert(!slave->isAlive());
ProtocolInfo *protInfo = protInfoDict->get(slave->slaveProtocol());
protInfo->activeSlaves.removeRef(slave);
if (slave == slaveOnHold)
{
slaveOnHold = 0;
urlOnHold = KURL();
}
idleSlaves->removeRef(slave);
JobList *list = coSlaves.find(slave);
if (list)
{
// coSlave dies, kill jobs waiting in queue
disconnectSlave(slave);
}
if (!slaveList->removeRef(slave))
kdDebug(7006) << "Scheduler: BUG!! Slave " << slave << "/" << slave->slave_pid() << " died, but is NOT in slaveList!!!\n" << endl;
else
slave->deref(); // Delete slave
}
void Scheduler::slotCleanIdleSlaves()
{
for(Slave *slave = idleSlaves->first();slave;)
{
if (slave->idleTime() >= MAX_SLAVE_IDLE)
{
// kdDebug(7006) << "Removing idle slave: " << slave->slaveProtocol() << " " << slave->host() << endl;
Slave *removeSlave = slave;
slave = idleSlaves->next();
idleSlaves->removeRef(removeSlave);
slaveList->removeRef(removeSlave);
removeSlave->connection()->close();
removeSlave->deref();
}
else
{
slave = idleSlaves->next();
}
}
_scheduleCleanup();
}
void Scheduler::_scheduleCleanup()
{
if (idleSlaves->count())
{
if (!cleanupTimer.isActive())
cleanupTimer.start( MAX_SLAVE_IDLE*1000, true );
}
}
void Scheduler::_putSlaveOnHold(KIO::SimpleJob *job, const KURL &url)
{
Slave *slave = job->slave();
slave->disconnect(job);
if (slaveOnHold)
{
slaveOnHold->kill();
}
slaveOnHold = slave;
urlOnHold = url;
slaveOnHold->suspend();
}
void Scheduler::_publishSlaveOnHold()
{
if (!slaveOnHold)
return;
slaveOnHold->hold(urlOnHold);
}
void Scheduler::_removeSlaveOnHold()
{
if (slaveOnHold)
{
slaveOnHold->kill();
}
slaveOnHold = 0;
urlOnHold = KURL();
}
Slave *
Scheduler::_getConnectedSlave(const KURL &url, const KIO::MetaData &config )
{
QString proxy;
QString protocol = KProtocolManager::slaveProtocol(url, proxy);
bool dummy;
Slave *slave = searchIdleList(idleSlaves, url, protocol, dummy);
if (!slave)
{
ProtocolInfo *protInfo = protInfoDict->get(protocol);
slave = createSlave(protInfo, 0, url);
}
if (!slave)
return 0; // Error
idleSlaves->removeRef(slave);
setupSlave(slave, url, protocol, proxy, true, &config);
slave->send( CMD_CONNECT );
connect(slave, SIGNAL(connected()),
SLOT(slotSlaveConnected()));
connect(slave, SIGNAL(error(int, const QString &)),
SLOT(slotSlaveError(int, const QString &)));
coSlaves.insert(slave, new QPtrList<SimpleJob>());
// kdDebug(7006) << "_getConnectedSlave( " << slave << ")" << endl;
return slave;
}
void
Scheduler::slotScheduleCoSlave()
{
Slave *nextSlave;
slaveConfig = SlaveConfig::self();
for(Slave *slave = coIdleSlaves->first();
slave;
slave = nextSlave)
{
nextSlave = coIdleSlaves->next();
JobList *list = coSlaves.find(slave);
assert(list);
if (list && !list->isEmpty())
{
SimpleJob *job = list->take(0);
coIdleSlaves->removeRef(slave);
// kdDebug(7006) << "scheduler: job started " << job << endl;
assert(!coIdleSlaves->contains(slave));
KURL url =job->url();
QString host = url.host();
int port = url.port();
if (slave->host() == "<reset>")
{
QString user = url.user();
QString passwd = url.pass();
MetaData configData = slaveConfig->configData(url.protocol(), url.host());
slave->setConfig(configData);
slave->setProtocol(url.protocol());
slave->setHost(host, port, user, passwd);
}
assert(slave->protocol() == url.protocol());
assert(slave->host() == host);
assert(slave->port() == port);
job->start(slave);
}
}
}
void
Scheduler::slotSlaveConnected()
{
Slave *slave = (Slave *)sender();
// kdDebug(7006) << "slotSlaveConnected( " << slave << ")" << endl;
slave->setConnected(true);
disconnect(slave, SIGNAL(connected()),
this, SLOT(slotSlaveConnected()));
emit slaveConnected(slave);
assert(!coIdleSlaves->contains(slave));
coIdleSlaves->append(slave);
coSlaveTimer.start(0, true);
}
void
Scheduler::slotSlaveError(int errorNr, const QString &errorMsg)
{
Slave *slave = (Slave *)sender();
if (!slave->isConnected() || (coIdleSlaves->find(slave) != -1))
{
// Only forward to application if slave is idle or still connecting.
emit slaveError(slave, errorNr, errorMsg);
}
}
bool
Scheduler::_assignJobToSlave(KIO::Slave *slave, SimpleJob *job)
{
// kdDebug(7006) << "_assignJobToSlave( " << job << ", " << slave << ")" << endl;
QString dummy;
if ((slave->slaveProtocol() != KProtocolManager::slaveProtocol( job->url(), dummy ))
||
(!newJobs.removeRef(job)))
{
kdDebug(7006) << "_assignJobToSlave(): ERROR, nonmatching or unknown job." << endl;
job->kill();
return false;
}
JobList *list = coSlaves.find(slave);
assert(list);
if (!list)
{
kdDebug(7006) << "_assignJobToSlave(): ERROR, unknown slave." << endl;
job->kill();
return false;
}
assert(list->contains(job) == 0);
list->append(job);
coSlaveTimer.start(0, true); // Start job on timer event
return true;
}
bool
Scheduler::_disconnectSlave(KIO::Slave *slave)
{
// kdDebug(7006) << "_disconnectSlave( " << slave << ")" << endl;
JobList *list = coSlaves.take(slave);
assert(list);
if (!list)
return false;
// Kill jobs still in queue.
while(!list->isEmpty())
{
Job *job = list->take(0);
job->kill();
}
delete list;
coIdleSlaves->removeRef(slave);
assert(!coIdleSlaves->contains(slave));
disconnect(slave, SIGNAL(connected()),
this, SLOT(slotSlaveConnected()));
disconnect(slave, SIGNAL(error(int, const QString &)),
this, SLOT(slotSlaveError(int, const QString &)));
if (slave->isAlive())
{
idleSlaves->append(slave);
slave->send( CMD_DISCONNECT );
slave->setIdle();
slave->setConnected(false);
_scheduleCleanup();
}
return true;
}
void
Scheduler::_checkSlaveOnHold(bool b)
{
checkOnHold = b;
}
void
Scheduler::_registerWindow(QWidget *wid)<|fim▁hole|>{
if (!wid)
return;
QObject *obj = static_cast<QObject *>(wid);
if (!m_windowList.contains(obj))
{
// We must store the window Id because by the time
// the destroyed signal is emitted we can no longer
// access QWidget::winId() (already destructed)
WId windowId = wid->winId();
m_windowList.insert(obj, windowId);
connect(wid, SIGNAL(destroyed(QObject *)),
this, SLOT(slotUnregisterWindow(QObject*)));
QByteArray params;
QDataStream stream(params, IO_WriteOnly);
stream << windowId;
if( !kapp->dcopClient()->send( "kded", "kded",
"registerWindowId(long int)", params ) )
kdDebug(7006) << "Could not register window with kded!" << endl;
}
}
void
Scheduler::slotUnregisterWindow(QObject *obj)
{
if (!obj)
return;
QMap<QObject *, WId>::Iterator it = m_windowList.find(obj);
if (it == m_windowList.end())
return;
WId windowId = it.data();
disconnect( it.key(), SIGNAL(destroyed(QObject *)),
this, SLOT(slotUnregisterWindow(QObject*)));
m_windowList.remove( it );
if (kapp)
{
QByteArray params;
QDataStream stream(params, IO_WriteOnly);
stream << windowId;
kapp->dcopClient()->send( "kded", "kded",
"unregisterWindowId(long int)", params );
}
}
Scheduler* Scheduler::self() {
if ( !instance ) {
instance = new Scheduler;
}
return instance;
}
void Scheduler::virtual_hook( int id, void* data )
{ DCOPObject::virtual_hook( id, data ); }
#include "scheduler.moc"<|fim▁end|> | |
<|file_name|>Group.js<|end_file_name|><|fim▁begin|>import React, { PropTypes } from 'react';
import { connect } from 'react-redux';
import GroupPage from './GroupPage.js';
import GroupNotFoundPage from './GroupNotFoundPage.js';
const Group = ({ isValid, groupId }) => (isValid ? <GroupPage groupId={groupId} /> : <GroupNotFoundPage groupId={groupId} />);
Group.propTypes = {
groupId: PropTypes.string,
isValid: PropTypes.bool.isRequired
};
export default connect(state => ({
groupId: state.router.params.groupId,
isValid: !!state.groups.data[state.router.params.groupId]<|fim▁hole|>}))(Group);<|fim▁end|> | |
<|file_name|>server_manager.js<|end_file_name|><|fim▁begin|>var debug = require('util').debug,
inspect = require('util').inspect,
path = require('path'),
fs = require('fs'),
exec = require('child_process').exec,
spawn = require('child_process').spawn,
Connection = require('../../lib/mongodb').Connection,
Db = require('../../lib/mongodb').Db,
Server = require('../../lib/mongodb').Server;
var ServerManager = exports.ServerManager = function(options) {
options = options == null ? {} : options;
// Basic unpack values
this.path = path.resolve("data");
this.port = options["start_port"] != null ? options["start_port"] : 27017;
this.db_path = getPath(this, "data-" + this.port);
this.log_path = getPath(this, "log-" + this.port);
this.journal = options["journal"] != null ? options["journal"] : false;
this.auth = options['auth'] != null ? options['auth'] : false;
this.purgedirectories = options['purgedirectories'] != null ? options['purgedirectories'] : true;
// Server status values
this.up = false;
this.pid = null;
}
// Start up the server instance
ServerManager.prototype.start = function(killall, callback) {
var self = this;
// Unpack callback and variables
var args = Array.prototype.slice.call(arguments, 0);
callback = args.pop();
killall = args.length ? args.shift() : true;
// Create start command
var startCmd = generateStartCmd({log_path: self.log_path,
db_path: self.db_path, port: self.port, journal: self.journal, auth:self.auth});
// console.log("----------------------------------------------------------------------- start")
// console.log(startCmd)
exec(killall ? 'killall mongod' : '', function(err, stdout, stderr) {
if(self.purgedirectories) {
// Remove directory
exec("rm -rf " + self.db_path, function(err, stdout, stderr) {
if(err != null) return callback(err, null);
// Create directory
exec("mkdir -p " + self.db_path, function(err, stdout, stderr) {
if(err != null) return callback(err, null);
// Start up mongod process
var mongodb = exec(startCmd,
function (error, stdout, stderr) {
// console.log('stdout: ' + stdout);
// console.log('stderr: ' + stderr);
if (error !== null) {
console.log('exec error: ' + error);
}
});
// Wait for a half a second then save the pids
setTimeout(function() {
// Mark server as running
self.up = true;
self.pid = fs.readFileSync(path.join(self.db_path, "mongod.lock"), 'ascii').trim();
// Callback
callback();
}, 500);
});
});
} else {
// Ensure we remove the lock file as we are not purging the directory
fs.unlinkSync(path.join(self.db_path, "mongod.lock"));
// Start up mongod process<|fim▁hole|> function (error, stdout, stderr) {
if (error !== null) {
console.log('exec error: ' + error);
}
});
// Wait for a half a second then save the pids
setTimeout(function() {
// Mark server as running
self.up = true;
self.pid = fs.readFileSync(path.join(self.db_path, "mongod.lock"), 'ascii').trim();
// Callback
callback();
}, 5000);
}
});
}
// Stop the managed mongod instance by signalling its recorded pid.
// `signal` is an optional kill signal number (defaults to 2 / SIGINT);
// `callback` is invoked roughly one second after the signal is sent.
ServerManager.prototype.stop = function(signal, callback) {
  var self = this;
  // Normalize arguments: the callback is always last, signal is optional.
  var args = Array.prototype.slice.call(arguments, 0);
  callback = args.pop();
  signal = args.length ? args.shift() : 2;
  // Build the kill command targeting the pid saved at startup.
  var killCommand = "kill -" + signal + " " + self.pid;
  // Fire the command; failures are only reported to the console.
  exec(killCommand,
    function (error, stdout, stderr) {
      if (error !== null) {
        console.log('exec error: ' + error);
      }
      // Flag the server as down and give the process a second to exit.
      self.up = false;
      setTimeout(callback, 1000);
  });
}
// Kill every mongod process on the machine via `killall`.
// The exec error is intentionally ignored: the callback always reports
// success as (null, null), even when no mongod process was running.
ServerManager.prototype.killAll = function(callback) {
  exec('killall mongod', function(err, stdout, stderr) {
    callback(null, null);
  });
}
// Get absolute path
// Resolve `name` against the manager's base path (`self.path`).
var getPath = function(self, name) {
  return path.join(self.path, name);
}
// Generate start command
// Build the mongod boot command line from an options hash.
// Options used: log_path, db_path, port, journal (bool), auth (bool).
// Both the log path and the db path are single-quoted so that paths
// containing spaces survive the shell `exec` (previously only the log
// path was quoted, which broke `--dbpath` for such paths).
var generateStartCmd = function(options) {
  // Base command: no preallocation, fork into the background.
  var startCmd = "mongod --noprealloc --logpath '" + options['log_path'] + "'" +
    " --dbpath '" + options['db_path'] + "' --port " + options['port'] + " --fork";
  // Optional feature flags.
  startCmd = options['journal'] ? startCmd + " --journal" : startCmd;
  startCmd = options['auth'] ? startCmd + " --auth" : startCmd;
  return startCmd;
}
<|file_name|>lib.rs<|end_file_name|><|fim▁begin|>/* This Source Code Form is subject to the terms of the Mozilla Public
* License, v. 2.0. If a copy of the MPL was not distributed with this
* file, You can obtain one at https://mozilla.org/MPL/2.0/. */
#[macro_use]
extern crate lazy_static;
#[macro_use]
extern crate log;
#[macro_use]
extern crate num_derive;
#[macro_use]
extern crate serde;
pub mod resources;
use crossbeam_channel::{Receiver, Sender};
use ipc_channel::ipc::IpcSender;<|fim▁hole|>use std::fmt::{Debug, Error, Formatter};
use webrender_api::units::{DeviceIntPoint, DeviceIntSize};
pub use webxr_api::MainThreadWaker as EventLoopWaker;
/// A cursor for the window. This is different from a CSS cursor (see
/// `CursorKind`) in that it has no `Auto` value.
///
/// `None` hides the cursor entirely; the remaining variants mirror the
/// CSS cursor keywords. `#[repr(u8)]` together with `FromPrimitive`
/// allows a raw `u8` discriminant to be converted back into a `Cursor`.
#[repr(u8)]
#[derive(Clone, Copy, Deserialize, Eq, FromPrimitive, PartialEq, Serialize)]
pub enum Cursor {
    None,
    Default,
    Pointer,
    ContextMenu,
    Help,
    Progress,
    Wait,
    Cell,
    Crosshair,
    Text,
    VerticalText,
    Alias,
    Copy,
    Move,
    NoDrop,
    NotAllowed,
    Grab,
    Grabbing,
    EResize,
    NResize,
    NeResize,
    NwResize,
    SResize,
    SeResize,
    SwResize,
    WResize,
    EwResize,
    NsResize,
    NeswResize,
    NwseResize,
    ColResize,
    RowResize,
    AllScroll,
    ZoomIn,
    ZoomOut,
}
/// Sends messages to the embedder.
pub struct EmbedderProxy {
    /// Channel into the embedder; each message is tagged with the
    /// top-level browsing context it concerns (`None` presumably means
    /// a browser-global message — confirm against senders).
    pub sender: Sender<(Option<TopLevelBrowsingContextId>, EmbedderMsg)>,
    /// Used to wake the embedder's OS event loop after a send.
    pub event_loop_waker: Box<dyn EventLoopWaker>,
}

impl EmbedderProxy {
    /// Queue a message for the embedder and wake its event loop so the
    /// message is noticed promptly. Send failures are logged, not fatal.
    pub fn send(&self, msg: (Option<TopLevelBrowsingContextId>, EmbedderMsg)) {
        // Send a message and kick the OS event loop awake.
        if let Err(err) = self.sender.send(msg) {
            warn!("Failed to send response ({:?}).", err);
        }
        self.event_loop_waker.wake();
    }
}

// Manual Clone impl: relies on a `Clone` implementation for
// `Box<dyn EventLoopWaker>` being provided elsewhere (a plain
// `#[derive(Clone)]` cannot be used on the trait-object field).
impl Clone for EmbedderProxy {
    fn clone(&self) -> EmbedderProxy {
        EmbedderProxy {
            sender: self.sender.clone(),
            event_loop_waker: self.event_loop_waker.clone(),
        }
    }
}
/// The port that the embedder receives messages on.
pub struct EmbedderReceiver {
    pub receiver: Receiver<(Option<TopLevelBrowsingContextId>, EmbedderMsg)>,
}

impl EmbedderReceiver {
    /// Non-blocking receive. Returns `None` when no message is queued
    /// (any `try_recv` error, including disconnection, maps to `None`).
    pub fn try_recv_embedder_msg(
        &mut self,
    ) -> Option<(Option<TopLevelBrowsingContextId>, EmbedderMsg)> {
        self.receiver.try_recv().ok()
    }

    /// Blocking receive. Panics (via `unwrap`) if the sending side has
    /// been dropped.
    pub fn recv_embedder_msg(&mut self) -> (Option<TopLevelBrowsingContextId>, EmbedderMsg) {
        self.receiver.recv().unwrap()
    }
}
/// Messages delivered to the embedder (see `EmbedderProxy` /
/// `EmbedderReceiver`).
#[derive(Deserialize, Serialize)]
pub enum EmbedderMsg {
    /// A status message to be displayed by the browser chrome.
    Status(Option<String>),
    /// Alerts the embedder that the current page has changed its title.
    ChangePageTitle(Option<String>),
    /// Move the window to a point
    MoveTo(DeviceIntPoint),
    /// Resize the window to size
    ResizeTo(DeviceIntSize),
    /// Show an alert message; the sender is notified when it is dismissed.
    Alert(String, IpcSender<()>),
    /// Whether or not to allow a pipeline to load a url.
    AllowNavigationRequest(PipelineId, ServoUrl),
    /// Whether or not to allow script to open a new tab/browser
    AllowOpeningBrowser(IpcSender<bool>),
    /// A new browser was created by script
    BrowserCreated(TopLevelBrowsingContextId),
    /// Whether or not to unload a document
    AllowUnload(IpcSender<bool>),
    /// Sends an unconsumed key event back to the embedder.
    Keyboard(KeyboardEvent),
    /// Gets system clipboard contents
    GetClipboardContents(IpcSender<String>),
    /// Sets system clipboard contents
    SetClipboardContents(String),
    /// Changes the cursor.
    SetCursor(Cursor),
    /// A favicon was detected
    NewFavicon(ServoUrl),
    /// <head> tag finished parsing
    HeadParsed,
    /// The history state has changed.
    HistoryChanged(Vec<ServoUrl>, usize),
    /// Enter or exit fullscreen
    SetFullscreenState(bool),
    /// The load of a page has begun
    LoadStart,
    /// The load of a page has completed
    LoadComplete,
    /// A browser is to be closed
    CloseBrowser,
    /// A pipeline panicked. First string is the reason, second one is the backtrace.
    Panic(String, Option<String>),
    /// Open dialog to select bluetooth device.
    GetSelectedBluetoothDevice(Vec<String>, IpcSender<Option<String>>),
    /// Open file dialog to select files. Set boolean flag to true allows to select multiple files.
    SelectFiles(Vec<FilterPattern>, bool, IpcSender<Option<Vec<String>>>),
    /// Request to present an IME to the user when an editable element is focused.
    ShowIME(InputMethodType),
    /// Request to hide the IME when the editable element is blurred.
    HideIME,
    /// Servo has shut down
    Shutdown,
    /// Report a complete sampled profile
    ReportProfile(Vec<u8>),
}
impl Debug for EmbedderMsg {
    /// Formats only the variant name; payloads are intentionally omitted.
    fn fmt(&self, f: &mut Formatter) -> Result<(), Error> {
        let variant = match *self {
            EmbedderMsg::Status(..) => "Status",
            EmbedderMsg::ChangePageTitle(..) => "ChangePageTitle",
            EmbedderMsg::MoveTo(..) => "MoveTo",
            EmbedderMsg::ResizeTo(..) => "ResizeTo",
            EmbedderMsg::Alert(..) => "Alert",
            EmbedderMsg::AllowUnload(..) => "AllowUnload",
            EmbedderMsg::AllowNavigationRequest(..) => "AllowNavigationRequest",
            EmbedderMsg::Keyboard(..) => "Keyboard",
            EmbedderMsg::GetClipboardContents(..) => "GetClipboardContents",
            EmbedderMsg::SetClipboardContents(..) => "SetClipboardContents",
            EmbedderMsg::SetCursor(..) => "SetCursor",
            EmbedderMsg::NewFavicon(..) => "NewFavicon",
            EmbedderMsg::HeadParsed => "HeadParsed",
            EmbedderMsg::CloseBrowser => "CloseBrowser",
            EmbedderMsg::HistoryChanged(..) => "HistoryChanged",
            EmbedderMsg::SetFullscreenState(..) => "SetFullscreenState",
            EmbedderMsg::LoadStart => "LoadStart",
            EmbedderMsg::LoadComplete => "LoadComplete",
            EmbedderMsg::Panic(..) => "Panic",
            EmbedderMsg::GetSelectedBluetoothDevice(..) => "GetSelectedBluetoothDevice",
            EmbedderMsg::SelectFiles(..) => "SelectFiles",
            EmbedderMsg::ShowIME(..) => "ShowIME",
            EmbedderMsg::HideIME => "HideIME",
            EmbedderMsg::Shutdown => "Shutdown",
            EmbedderMsg::AllowOpeningBrowser(..) => "AllowOpeningBrowser",
            EmbedderMsg::BrowserCreated(..) => "BrowserCreated",
            EmbedderMsg::ReportProfile(..) => "ReportProfile",
        };
        write!(f, "{}", variant)
    }
}
/// Filter for file selection;
/// the `String` content is expected to be extension (e.g, "doc", without the prefixing ".")
#[derive(Clone, Debug, Deserialize, Serialize)]
pub struct FilterPattern(pub String);<|fim▁end|> | use keyboard_types::KeyboardEvent;
use msg::constellation_msg::{InputMethodType, PipelineId, TopLevelBrowsingContextId};
use servo_url::ServoUrl; |
<|file_name|>display.py<|end_file_name|><|fim▁begin|>#!/usr/bin/env python
import cv2
import cv2.cv as cv
class Display:
    def setup(self, fullscreen):
        # Create an OpenGL-backed window named 'proj_0' for the output and,
        # if requested, switch it to fullscreen mode.
        cv2.namedWindow('proj_0', cv2.WINDOW_OPENGL)
        if fullscreen:
            cv2.setWindowProperty('proj_0', cv2.WND_PROP_FULLSCREEN, cv.CV_WINDOW_FULLSCREEN)
def draw(self, image):
cv2.imshow('proj_0', image)<|fim▁hole|> cv2.waitKey(1)<|fim▁end|> | |
<|file_name|>test_abiobjects.py<|end_file_name|><|fim▁begin|><|fim▁hole|># Distributed under the terms of the MIT License.
import os
import warnings
from pymatgen.core.structure import Structure
from pymatgen.core.units import Ha_to_eV, bohr_to_ang
from pymatgen.io.abinit.abiobjects import *
from pymatgen.util.testing import PymatgenTest
class LatticeFromAbivarsTest(PymatgenTest):
    def test_rprim_acell(self):
        """Lattices built from (acell, rprim) and (acell, angdeg) must agree."""
        l1 = lattice_from_abivars(acell=3 * [10], rprim=np.eye(3))
        # acell is expressed in Bohr, hence the bohr_to_ang**3 volume factor.
        self.assertAlmostEqual(l1.volume, bohr_to_ang ** 3 * 1000)
        assert l1.angles == (90, 90, 90)
        l2 = lattice_from_abivars(acell=3 * [10], angdeg=(90, 90, 90))
        assert l1 == l2
        l2 = lattice_from_abivars(acell=3 * [8], angdeg=(60, 60, 60))
        # Reference primitive vectors in Bohr (converted to Angstrom below).
        abi_rprimd = (
            np.reshape(
                [
                    4.6188022,
                    0.0000000,
                    6.5319726,
                    -2.3094011,
                    4.0000000,
                    6.5319726,
                    -2.3094011,
                    -4.0000000,
                    6.5319726,
                ],
                (3, 3),
            )
            * bohr_to_ang
        )
        self.assertArrayAlmostEqual(l2.matrix, abi_rprimd)
        l3 = lattice_from_abivars(acell=[3, 6, 9], angdeg=(30, 40, 50))
        abi_rprimd = (
            np.reshape(
                [
                    3.0000000,
                    0.0000000,
                    0.0000000,
                    3.8567257,
                    4.5962667,
                    0.0000000,
                    6.8944000,
                    4.3895544,
                    3.7681642,
                ],
                (3, 3),
            )
            * bohr_to_ang
        )
        self.assertArrayAlmostEqual(l3.matrix, abi_rprimd)
        # Conflicting or invalid inputs must raise ValueError.
        with self.assertRaises(ValueError):
            lattice_from_abivars(acell=[1, 1, 1], angdeg=(90, 90, 90), rprim=np.eye(3))
        with self.assertRaises(ValueError):
            lattice_from_abivars(acell=[1, 1, 1], angdeg=(-90, 90, 90))

    def test_znucl_typat(self):
        """Test the order of typat and znucl in the Abinit input and enforce_typat, enforce_znucl."""
        # Ga Ga1 1 0.33333333333333 0.666666666666667 0.500880 1.0
        # Ga Ga2 1 0.66666666666667 0.333333333333333 0.000880 1.0
        # N N3 1 0.333333333333333 0.666666666666667 0.124120 1.0
        # N N4 1 0.666666666666667 0.333333333333333 0.624120 1.0
        gan = Structure.from_file(os.path.join(PymatgenTest.TEST_FILES_DIR, "abinit", "gan.cif"))
        # By default, znucl is filled using the first new type found in sites.
        def_vars = structure_to_abivars(gan)
        def_znucl = def_vars["znucl"]
        self.assertArrayEqual(def_znucl, [31, 7])
        def_typat = def_vars["typat"]
        self.assertArrayEqual(def_typat, [1, 1, 2, 2])
        # But it's possible to enforce a particular value of typat and znucl.
        enforce_znucl = [7, 31]
        enforce_typat = [2, 2, 1, 1]
        enf_vars = structure_to_abivars(gan, enforce_znucl=enforce_znucl, enforce_typat=enforce_typat)
        self.assertArrayEqual(enf_vars["znucl"], enforce_znucl)
        self.assertArrayEqual(enf_vars["typat"], enforce_typat)
        self.assertArrayEqual(def_vars["xred"], enf_vars["xred"])
        assert [s.symbol for s in species_by_znucl(gan)] == ["Ga", "N"]
        # Default and enforced orderings must map to the same atomic numbers.
        for itype1, itype2 in zip(def_typat, enforce_typat):
            assert def_znucl[itype1 - 1] == enforce_znucl[itype2 - 1]
        # Passing enforce_znucl without enforce_typat is an error.
        with self.assertRaises(Exception):
            structure_to_abivars(gan, enforce_znucl=enforce_znucl, enforce_typat=None)
class SpinModeTest(PymatgenTest):
    def test_base(self):
        """SpinMode parsing, identity/equality and (de)serialization."""
        polarized = SpinMode.as_spinmode("polarized")
        other_polarized = SpinMode.as_spinmode("polarized")
        unpolarized = SpinMode.as_spinmode("unpolarized")
        polarized.to_abivars()
        # Equal inputs return the *same* object (identity, not just equality).
        self.assertTrue(polarized is other_polarized)
        self.assertTrue(polarized == other_polarized)
        self.assertTrue(polarized != unpolarized)
        # Test pickle
        self.serialize_with_pickle(polarized)
        # Test dict methods
        self.assertMSONable(polarized)
        self.assertMSONable(unpolarized)
class SmearingTest(PymatgenTest):
    def test_base(self):
        """Smearing parsing, equality, truthiness and (de)serialization."""
        fd1ev = Smearing.as_smearing("fermi_dirac:1 eV")
        fd1ev.to_abivars()
        self.assertTrue(fd1ev)
        # The bare numeric form is interpreted in Hartree: 1 eV == 1/Ha_to_eV Ha.
        same_fd = Smearing.as_smearing("fermi_dirac:" + str(1.0 / Ha_to_eV))
        self.assertTrue(same_fd == fd1ev)
        nosmear = Smearing.nosmearing()
        assert nosmear == Smearing.as_smearing("nosmearing")
        # "nosmearing" is falsy; any real smearing is truthy.
        self.assertFalse(nosmear)
        self.assertTrue(nosmear != fd1ev)
        self.assertMSONable(nosmear)
        new_fd1ev = Smearing.from_dict(fd1ev.as_dict())
        self.assertTrue(new_fd1ev == fd1ev)
        # Test pickle
        self.serialize_with_pickle(fd1ev)
        # Test dict methods
        self.assertMSONable(fd1ev)
class ElectronsAlgorithmTest(PymatgenTest):
    def test_base(self):
        """ElectronsAlgorithm export, pickling and MSON round-trip."""
        algo = ElectronsAlgorithm(nstep=70)
        # NOTE(review): abivars is unused below — the call apparently only
        # checks that to_abivars() runs without raising.
        abivars = algo.to_abivars()
        # Test pickle
        self.serialize_with_pickle(algo)
        # Test dict methods
        self.assertMSONable(algo)
class ElectronsTest(PymatgenTest):
    def test_base(self):
        """Default and fully-customized construction of Electrons."""
        default_electrons = Electrons()
        # Defaults: two spin channels (nsppol=2), scalar spinors, nspden=2.
        self.assertTrue(default_electrons.nsppol == 2)
        self.assertTrue(default_electrons.nspinor == 1)
        self.assertTrue(default_electrons.nspden == 2)
        # NOTE(review): abivars is unused — the call only checks it runs.
        abivars = default_electrons.to_abivars()
        # new = Electron.from_dict(default_electrons.as_dict())
        # Test pickle
        self.serialize_with_pickle(default_electrons, test_eq=False)
        custom_electrons = Electrons(
            spin_mode="unpolarized",
            smearing="marzari4:0.2 eV",
            algorithm=ElectronsAlgorithm(nstep=70),
            nband=10,
            charge=1.0,
            comment="Test comment",
        )
        # Test dict methods
        self.assertMSONable(custom_electrons)
class KSamplingTest(PymatgenTest):
    def test_base(self):
        """Monkhorst-Pack vs Gamma-centered k-point sampling construction."""
        # 3x3x3 grid with a (0.5, 0.5, 0.5) shift.
        monkhorst = KSampling.monkhorst((3, 3, 3), (0.5, 0.5, 0.5), 0, False, False)
        gamma_centered = KSampling.gamma_centered((3, 3, 3), False, False)
        monkhorst.to_abivars()
        # Test dict methods
        self.assertMSONable(monkhorst)
        self.assertMSONable(gamma_centered)
class RelaxationTest(PymatgenTest):
    def test_base(self):
        """Both relaxation flavors: atoms+cell and atomic positions only."""
        atoms_and_cell = RelaxationMethod.atoms_and_cell()
        atoms_only = RelaxationMethod.atoms_only()
        atoms_and_cell.to_abivars()
        # Test dict methods
        self.assertMSONable(atoms_and_cell)
        self.assertMSONable(atoms_only)
class PPModelTest(PymatgenTest):
    def test_base(self):
        """PPModel parsing, equality, truthiness and (de)serialization."""
        godby = PPModel.as_ppmodel("godby:12 eV")
        # print(godby)
        # print(repr(godby))
        godby.to_abivars()
        self.assertTrue(godby)
        # The bare numeric form is interpreted in Hartree: 12 eV == 12/Ha_to_eV Ha.
        same_godby = PPModel.as_ppmodel("godby:" + str(12.0 / Ha_to_eV))
        self.assertTrue(same_godby == godby)
        # The "no ppmodel" sentinel is falsy.
        noppm = PPModel.get_noppmodel()
        self.assertFalse(noppm)
        self.assertTrue(noppm != godby)
        new_godby = PPModel.from_dict(godby.as_dict())
        self.assertTrue(new_godby == godby)
        # Test pickle
        self.serialize_with_pickle(godby)
        # Test dict methods
        self.assertMSONable(godby)
<|file_name|>readium_sdk.js<|end_file_name|><|fim▁begin|>// LauncherOSX
//
// Created by Boris Schneiderman.
// Copyright (c) 2012-2013 The Readium Foundation.
//
// The Readium SDK is free software: you can redistribute it and/or modify
// it under the terms of the GNU General Public License as published by
// the Free Software Foundation, either version 3 of the License, or
// (at your option) any later version.
//
// This program is distributed in the hope that it will be useful,
// but WITHOUT ANY WARRANTY; without even the implied warranty of
// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
// GNU General Public License for more details.
//
// You should have received a copy of the GNU General Public License
// along with this program. If not, see <http://www.gnu.org/licenses/>.
/**
* Top level ReadiumSDK namespace
* @class ReadiumSDK
* @static
*/
ReadiumSDK = {
/**
Current version of the JS SDK
@method version<|fim▁hole|> return "0.8.0";
},
Models : {
Smil: {}
},
Views : {},
Collections: {},
Routers: {},
Helpers: {}
};
_.extend(ReadiumSDK, Backbone.Events);<|fim▁end|> | @static
@return {string} version
*/
version: function() { |
<|file_name|>expand.rs<|end_file_name|><|fim▁begin|>// Copyright 2012-2014 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
use self::Either::*;
use ast::{Block, Crate, DeclLocal, ExprMac, PatMac};
use ast::{Local, Ident, MacInvocTT};
use ast::{ItemMac, Mrk, Stmt, StmtDecl, StmtMac, StmtExpr, StmtSemi};
use ast::TokenTree;
use ast;
use ext::mtwt;
use ext::build::AstBuilder;
use attr;
use attr::AttrMetaMethods;
use codemap;
use codemap::{Span, Spanned, ExpnInfo, NameAndSpan, MacroBang, MacroAttribute};
use ext::base::*;
use fold;
use fold::*;
use parse;
use parse::token::{fresh_mark, fresh_name, intern};
use parse::token;
use ptr::P;
use util::small_vector::SmallVector;
use visit;
use visit::Visitor;
enum Either<L,R> {
Left(L),
Right(R)
}
pub fn expand_expr(e: P<ast::Expr>, fld: &mut MacroExpander) -> P<ast::Expr> {
e.and_then(|ast::Expr {id, node, span}| match node {
// expr_mac should really be expr_ext or something; it's the
// entry-point for all syntax extensions.
ast::ExprMac(mac) => {
let expanded_expr = match expand_mac_invoc(mac, span,
|r| r.make_expr(),
mark_expr, fld) {
Some(expr) => expr,
None => {
return DummyResult::raw_expr(span);
}
};
// Keep going, outside-in.
//
let fully_expanded = fld.fold_expr(expanded_expr);
fld.cx.bt_pop();
fully_expanded.map(|e| ast::Expr {
id: ast::DUMMY_NODE_ID,
node: e.node,
span: span,
})
}
ast::ExprWhile(cond, body, opt_ident) => {
let cond = fld.fold_expr(cond);
let (body, opt_ident) = expand_loop_block(body, opt_ident, fld);
fld.cx.expr(span, ast::ExprWhile(cond, body, opt_ident))
}
// Desugar ExprWhileLet
// From: `[opt_ident]: while let <pat> = <expr> <body>`
ast::ExprWhileLet(pat, expr, body, opt_ident) => {
// to:
//
// [opt_ident]: loop {
// match <expr> {
// <pat> => <body>,
// _ => break
// }
// }
// `<pat> => <body>`
let pat_arm = {
let body_expr = fld.cx.expr_block(body);
fld.cx.arm(pat.span, vec![pat], body_expr)
};
// `_ => break`
let break_arm = {
let pat_under = fld.cx.pat_wild(span);
let break_expr = fld.cx.expr_break(span);
fld.cx.arm(span, vec![pat_under], break_expr)
};
// `match <expr> { ... }`
let arms = vec![pat_arm, break_arm];
let match_expr = fld.cx.expr(span,
ast::ExprMatch(expr, arms, ast::MatchWhileLetDesugar));
// `[opt_ident]: loop { ... }`
let loop_block = fld.cx.block_expr(match_expr);
let (loop_block, opt_ident) = expand_loop_block(loop_block, opt_ident, fld);
fld.cx.expr(span, ast::ExprLoop(loop_block, opt_ident))
}
// Desugar ExprIfLet
// From: `if let <pat> = <expr> <body> [<elseopt>]`
ast::ExprIfLet(pat, expr, body, mut elseopt) => {
// to:
//
// match <expr> {
// <pat> => <body>,
// [_ if <elseopt_if_cond> => <elseopt_if_body>,]
// _ => [<elseopt> | ()]
// }
// `<pat> => <body>`
let pat_arm = {
let body_expr = fld.cx.expr_block(body);
fld.cx.arm(pat.span, vec![pat], body_expr)
};
// `[_ if <elseopt_if_cond> => <elseopt_if_body>,]`
let else_if_arms = {
let mut arms = vec![];
loop {
let elseopt_continue = elseopt
.and_then(|els| els.and_then(|els| match els.node {
// else if
ast::ExprIf(cond, then, elseopt) => {
let pat_under = fld.cx.pat_wild(span);
arms.push(ast::Arm {
attrs: vec![],
pats: vec![pat_under],
guard: Some(cond),
body: fld.cx.expr_block(then)
});
elseopt.map(|elseopt| (elseopt, true))
}
_ => Some((P(els), false))
}));
match elseopt_continue {
Some((e, true)) => {
elseopt = Some(e);
}
Some((e, false)) => {
elseopt = Some(e);
break;
}
None => {
elseopt = None;
break;
}
}
}
arms
};
// `_ => [<elseopt> | ()]`
let else_arm = {
let pat_under = fld.cx.pat_wild(span);
let else_expr = elseopt.unwrap_or_else(|| fld.cx.expr_tuple(span, vec![]));
fld.cx.arm(span, vec![pat_under], else_expr)
};
let mut arms = Vec::with_capacity(else_if_arms.len() + 2);
arms.push(pat_arm);
arms.extend(else_if_arms.into_iter());
arms.push(else_arm);
let match_expr = fld.cx.expr(span, ast::ExprMatch(expr, arms, ast::MatchIfLetDesugar));
fld.fold_expr(match_expr)
}
// Desugar support for ExprIfLet in the ExprIf else position
ast::ExprIf(cond, blk, elseopt) => {
let elseopt = elseopt.map(|els| els.and_then(|els| match els.node {
ast::ExprIfLet(..) => {
// wrap the if-let expr in a block
let span = els.span;
let blk = P(ast::Block {
view_items: vec![],
stmts: vec![],
expr: Some(P(els)),
id: ast::DUMMY_NODE_ID,
rules: ast::DefaultBlock,
span: span
});
fld.cx.expr_block(blk)
}
_ => P(els)
}));
let if_expr = fld.cx.expr(span, ast::ExprIf(cond, blk, elseopt));
if_expr.map(|e| noop_fold_expr(e, fld))
}
ast::ExprLoop(loop_block, opt_ident) => {
let (loop_block, opt_ident) = expand_loop_block(loop_block, opt_ident, fld);
fld.cx.expr(span, ast::ExprLoop(loop_block, opt_ident))
}
ast::ExprForLoop(pat, head, body, opt_ident) => {
let pat = fld.fold_pat(pat);
let head = fld.fold_expr(head);
let (body, opt_ident) = expand_loop_block(body, opt_ident, fld);
fld.cx.expr(span, ast::ExprForLoop(pat, head, body, opt_ident))
}
ast::ExprClosure(capture_clause, opt_kind, fn_decl, block) => {
let (rewritten_fn_decl, rewritten_block)
= expand_and_rename_fn_decl_and_block(fn_decl, block, fld);
let new_node = ast::ExprClosure(capture_clause,
opt_kind,
rewritten_fn_decl,
rewritten_block);
P(ast::Expr{id:id, node: new_node, span: fld.new_span(span)})
}
ast::ExprProc(fn_decl, block) => {
let (rewritten_fn_decl, rewritten_block)
= expand_and_rename_fn_decl_and_block(fn_decl, block, fld);
let new_node = ast::ExprProc(rewritten_fn_decl, rewritten_block);
P(ast::Expr{id:id, node: new_node, span: fld.new_span(span)})
}
_ => {
P(noop_fold_expr(ast::Expr {
id: id,
node: node,
span: span
}, fld))
}
})
}
/// Expand a (not-ident-style) macro invocation. Returns the result
/// of expansion and the mark which must be applied to the result.
/// Our current interface doesn't allow us to apply the mark to the
/// result until after calling make_expr, make_items, etc.
fn expand_mac_invoc<T>(mac: ast::Mac, span: codemap::Span,
parse_thunk: |Box<MacResult>|->Option<T>,
mark_thunk: |T,Mrk|->T,
fld: &mut MacroExpander)
-> Option<T>
{
match mac.node {
// it would almost certainly be cleaner to pass the whole
// macro invocation in, rather than pulling it apart and
// marking the tts and the ctxt separately. This also goes
// for the other three macro invocation chunks of code
// in this file.
// Token-tree macros:
MacInvocTT(pth, tts, _) => {
if pth.segments.len() > 1u {
fld.cx.span_err(pth.span,
"expected macro name without module \
separators");
// let compilation continue
return None;
}
let extname = pth.segments[0].identifier;
let extnamestr = token::get_ident(extname);
match fld.cx.syntax_env.find(&extname.name) {
None => {
fld.cx.span_err(
pth.span,
format!("macro undefined: '{}!'",
extnamestr.get()).as_slice());
// let compilation continue
None
}
Some(rc) => match *rc {
NormalTT(ref expandfun, exp_span) => {
fld.cx.bt_push(ExpnInfo {
call_site: span,
callee: NameAndSpan {
name: extnamestr.get().to_string(),
format: MacroBang,
span: exp_span,
},
});
let fm = fresh_mark();
let marked_before = mark_tts(tts.as_slice(), fm);
// The span that we pass to the expanders we want to
// be the root of the call stack. That's the most
// relevant span and it's the actual invocation of
// the macro.
let mac_span = fld.cx.original_span();
let opt_parsed = {
let expanded = expandfun.expand(fld.cx,
mac_span,
marked_before.as_slice());
parse_thunk(expanded)
};
let parsed = match opt_parsed {
Some(e) => e,
None => {
fld.cx.span_err(
pth.span,
format!("non-expression macro in expression position: {}",
extnamestr.get().as_slice()
).as_slice());
return None;
}
};
Some(mark_thunk(parsed,fm))
}
_ => {
fld.cx.span_err(
pth.span,
format!("'{}' is not a tt-style macro",
extnamestr.get()).as_slice());
None
}
}
}
}
}
}
/// Rename loop label and expand its loop body
///
/// The renaming procedure for loop is different in the sense that the loop
/// body is in a block enclosed by loop head so the renaming of loop label
/// must be propagated to the enclosed context.
fn expand_loop_block(loop_block: P<Block>,
                     opt_ident: Option<Ident>,
                     fld: &mut MacroExpander) -> (P<Block>, Option<Ident>) {
    match opt_ident {
        Some(label) => {
            // Generate a fresh (hygienic) name for the label.
            let new_label = fresh_name(&label);
            let rename = (label, new_label);

            // The rename *must not* be added to the pending list of current
            // syntax context otherwise an unrelated `break` or `continue` in
            // the same context will pick that up in the deferred renaming pass
            // and be renamed incorrectly.
            let mut rename_list = vec!(rename);
            let mut rename_fld = IdentRenamer{renames: &mut rename_list};
            let renamed_ident = rename_fld.fold_ident(label);

            // The rename *must* be added to the enclosed syntax context for
            // `break` or `continue` to pick up because by definition they are
            // in a block enclosed by loop head.
            fld.cx.syntax_env.push_frame();
            fld.cx.syntax_env.info().pending_renames.push(rename);
            let expanded_block = expand_block_elts(loop_block, fld);
            fld.cx.syntax_env.pop_frame();

            (expanded_block, Some(renamed_ident))
        }
        // Unlabeled loop: nothing to rename, just expand the body.
        None => (fld.fold_block(loop_block), opt_ident)
    }
}
// eval $e with a new exts frame.
// must be a macro so that $e isn't evaluated too early.
// $macros_escape is stored in the new frame's info; presumably it controls
// whether macros defined inside the frame remain visible after the frame
// is popped (see contains_macro_escape / its use in expand_item).
macro_rules! with_exts_frame (
    ($extsboxexpr:expr,$macros_escape:expr,$e:expr) =>
    ({$extsboxexpr.push_frame();
      $extsboxexpr.info().macros_escape = $macros_escape;
      let result = $e;
      $extsboxexpr.pop_frame();
      result
     })
)
// When we enter a module, record it, for the sake of `module!`
pub fn expand_item(it: P<ast::Item>, fld: &mut MacroExpander)
-> SmallVector<P<ast::Item>> {
let it = expand_item_modifiers(it, fld);
let mut decorator_items = SmallVector::zero();
let mut new_attrs = Vec::new();
for attr in it.attrs.iter() {
let mname = attr.name();
match fld.cx.syntax_env.find(&intern(mname.get())) {
Some(rc) => match *rc {
Decorator(ref dec) => {
attr::mark_used(attr);
fld.cx.bt_push(ExpnInfo {
call_site: attr.span,
callee: NameAndSpan {
name: mname.get().to_string(),
format: MacroAttribute,
span: None
}
});
// we'd ideally decorator_items.push_all(expand_item(item, fld)),
// but that double-mut-borrows fld
let mut items: SmallVector<P<ast::Item>> = SmallVector::zero();
dec.expand(fld.cx, attr.span, &*attr.node.value, &*it,
|item| items.push(item));
decorator_items.extend(items.into_iter()
.flat_map(|item| expand_item(item, fld).into_iter()));
fld.cx.bt_pop();
}
_ => new_attrs.push((*attr).clone()),
},
_ => new_attrs.push((*attr).clone()),
}
}
let mut new_items = match it.node {
ast::ItemMac(..) => expand_item_mac(it, fld),
ast::ItemMod(_) | ast::ItemForeignMod(_) => {
let valid_ident =
it.ident.name != parse::token::special_idents::invalid.name;
if valid_ident {
fld.cx.mod_push(it.ident);
}
let macro_escape = contains_macro_escape(new_attrs.as_slice());
let result = with_exts_frame!(fld.cx.syntax_env,
macro_escape,
noop_fold_item(it, fld));
if valid_ident {
fld.cx.mod_pop();
}
result
},
_ => {
let it = P(ast::Item {
attrs: new_attrs,
..(*it).clone()
});
noop_fold_item(it, fld)
}
};
new_items.push_all(decorator_items);
new_items
}
fn expand_item_modifiers(mut it: P<ast::Item>, fld: &mut MacroExpander)
-> P<ast::Item> {
// partition the attributes into ItemModifiers and others
let (modifiers, other_attrs) = it.attrs.partitioned(|attr| {
match fld.cx.syntax_env.find(&intern(attr.name().get())) {
Some(rc) => match *rc { Modifier(_) => true, _ => false },
_ => false
}
});
// update the attrs, leave everything else alone. Is this mutation really a good idea?
it = P(ast::Item {
attrs: other_attrs,
..(*it).clone()
});
if modifiers.is_empty() {
return it;
}
for attr in modifiers.iter() {
let mname = attr.name();
match fld.cx.syntax_env.find(&intern(mname.get())) {
Some(rc) => match *rc {
Modifier(ref mac) => {
attr::mark_used(attr);<|fim▁hole|> callee: NameAndSpan {
name: mname.get().to_string(),
format: MacroAttribute,
span: None,
}
});
it = mac.expand(fld.cx, attr.span, &*attr.node.value, it);
fld.cx.bt_pop();
}
_ => unreachable!()
},
_ => unreachable!()
}
}
// expansion may have added new ItemModifiers
expand_item_modifiers(it, fld)
}
/// Expand item_underscore
///
/// Function items get special treatment: their argument bindings are
/// renamed via `expand_and_rename_fn_decl_and_block` before the body is
/// expanded, and generics are folded explicitly. Every other item kind
/// falls through to the default fold.
fn expand_item_underscore(item: ast::Item_, fld: &mut MacroExpander) -> ast::Item_ {
    match item {
        ast::ItemFn(decl, fn_style, abi, generics, body) => {
            let (rewritten_fn_decl, rewritten_body)
                = expand_and_rename_fn_decl_and_block(decl, body, fld);
            let expanded_generics = fold::noop_fold_generics(generics,fld);
            ast::ItemFn(rewritten_fn_decl, fn_style, abi, expanded_generics, rewritten_body)
        }
        _ => noop_fold_item_underscore(item, fld)
    }
}
// does this attribute list contain "macro_escape" ?
// Used when entering a module (see expand_item) to decide the
// macros_escape flag passed to with_exts_frame!.
fn contains_macro_escape(attrs: &[ast::Attribute]) -> bool {
    attr::contains_name(attrs, "macro_escape")
}
// Support for item-position macro invocations, exactly the same
// logic as for expression-position macro invocations.
pub fn expand_item_mac(it: P<ast::Item>, fld: &mut MacroExpander)
-> SmallVector<P<ast::Item>> {
let (extname, path_span, tts) = match it.node {
ItemMac(codemap::Spanned {
node: MacInvocTT(ref pth, ref tts, _),
..
}) => {
(pth.segments[0].identifier, pth.span, (*tts).clone())
}
_ => fld.cx.span_bug(it.span, "invalid item macro invocation")
};
let extnamestr = token::get_ident(extname);
let fm = fresh_mark();
let def_or_items = {
let mut expanded = match fld.cx.syntax_env.find(&extname.name) {
None => {
fld.cx.span_err(path_span,
format!("macro undefined: '{}!'",
extnamestr).as_slice());
// let compilation continue
return SmallVector::zero();
}
Some(rc) => match *rc {
NormalTT(ref expander, span) => {
if it.ident.name != parse::token::special_idents::invalid.name {
fld.cx
.span_err(path_span,
format!("macro {}! expects no ident argument, \
given '{}'",
extnamestr,
token::get_ident(it.ident)).as_slice());
return SmallVector::zero();
}
fld.cx.bt_push(ExpnInfo {
call_site: it.span,
callee: NameAndSpan {
name: extnamestr.get().to_string(),
format: MacroBang,
span: span
}
});
// mark before expansion:
let marked_before = mark_tts(tts.as_slice(), fm);
expander.expand(fld.cx, it.span, marked_before.as_slice())
}
IdentTT(ref expander, span) => {
if it.ident.name == parse::token::special_idents::invalid.name {
fld.cx.span_err(path_span,
format!("macro {}! expects an ident argument",
extnamestr.get()).as_slice());
return SmallVector::zero();
}
fld.cx.bt_push(ExpnInfo {
call_site: it.span,
callee: NameAndSpan {
name: extnamestr.get().to_string(),
format: MacroBang,
span: span
}
});
// mark before expansion:
let marked_tts = mark_tts(tts.as_slice(), fm);
expander.expand(fld.cx, it.span, it.ident, marked_tts)
}
LetSyntaxTT(ref expander, span) => {
if it.ident.name == parse::token::special_idents::invalid.name {
fld.cx.span_err(path_span,
format!("macro {}! expects an ident argument",
extnamestr.get()).as_slice());
return SmallVector::zero();
}
fld.cx.bt_push(ExpnInfo {
call_site: it.span,
callee: NameAndSpan {
name: extnamestr.get().to_string(),
format: MacroBang,
span: span
}
});
// DON'T mark before expansion:
expander.expand(fld.cx, it.span, it.ident, tts)
}
_ => {
fld.cx.span_err(it.span,
format!("{}! is not legal in item position",
extnamestr.get()).as_slice());
return SmallVector::zero();
}
}
};
match expanded.make_def() {
Some(def) => Left(def),
None => Right(expanded.make_items())
}
};
let items = match def_or_items {
Left(MacroDef { name, ext }) => {
// hidden invariant: this should only be possible as the
// result of expanding a LetSyntaxTT, and thus doesn't
// need to be marked. Not that it could be marked anyway.
// create issue to recommend refactoring here?
fld.cx.syntax_env.insert(intern(name.as_slice()), ext);
if attr::contains_name(it.attrs.as_slice(), "macro_export") {
fld.cx.exported_macros.push(it);
}
SmallVector::zero()
}
Right(Some(items)) => {
items.into_iter()
.map(|i| mark_item(i, fm))
.flat_map(|i| fld.fold_item(i).into_iter())
.collect()
}
Right(None) => {
fld.cx.span_err(path_span,
format!("non-item macro in item position: {}",
extnamestr.get()).as_slice());
return SmallVector::zero();
}
};
fld.cx.bt_pop();
items
}
/// Expand a stmt
//
// I don't understand why this returns a vector... it looks like we're
// half done adding machinery to allow macros to expand into multiple statements.
fn expand_stmt(s: Stmt, fld: &mut MacroExpander) -> SmallVector<P<Stmt>> {
    // Only macro statements are handled here; everything else is delegated
    // to expand_non_macro_stmt.
    let (mac, semi) = match s.node {
        StmtMac(mac, semi) => (mac, semi),
        _ => return expand_non_macro_stmt(s, fld)
    };
    let expanded_stmt = match expand_mac_invoc(mac, s.span,
                                               |r| r.make_stmt(),
                                               mark_stmt, fld) {
        Some(stmt) => stmt,
        None => {
            // Expansion failed (the error was already reported inside
            // expand_mac_invoc); drop the statement.
            return SmallVector::zero();
        }
    };

    // Keep going, outside-in.
    let fully_expanded = fld.fold_stmt(expanded_stmt);
    fld.cx.bt_pop();

    if semi {
        // The invocation was written with a trailing `;`: convert any
        // resulting expression statement into a semi statement so the
        // semicolon is preserved after expansion.
        fully_expanded.into_iter().map(|s| s.map(|Spanned {node, span}| {
            Spanned {
                node: match node {
                    StmtExpr(e, stmt_id) => StmtSemi(e, stmt_id),
                    _ => node /* might already have a semi */
                },
                span: span
            }
        })).collect()
    } else {
        fully_expanded
    }
}
// expand a non-macro stmt. this is essentially the fallthrough for
// expand_stmt, above.
//
// `let` statements get the hygiene treatment: every ident bound by the
// pattern is given a fresh (gensym'd) name, the pattern is rewritten to
// use the fresh names, and the renames are queued in `pending_renames`
// so that the rest of the enclosing block picks them up (see
// expand_block_elts).
fn expand_non_macro_stmt(Spanned {node, span: stmt_span}: Stmt, fld: &mut MacroExpander)
                         -> SmallVector<P<Stmt>> {
    // is it a let?
    match node {
        StmtDecl(decl, node_id) => decl.and_then(|Spanned {node: decl, span}| match decl {
            DeclLocal(local) => {
                // take it apart:
                let rewritten_local = local.map(|Local {id, pat, ty, init, source, span}| {
                    // expand the ty since TyFixedLengthVec contains an Expr
                    // and thus may have a macro use
                    let expanded_ty = fld.fold_ty(ty);
                    // expand the pat (it might contain macro uses):
                    let expanded_pat = fld.fold_pat(pat);
                    // find the PatIdents in the pattern:
                    // oh dear heaven... this is going to include the enum
                    // names, as well... but that should be okay, as long as
                    // the new names are gensyms for the old ones.
                    // generate fresh names, push them to a new pending list
                    let idents = pattern_bindings(&*expanded_pat);
                    let mut new_pending_renames =
                        idents.iter().map(|ident| (*ident, fresh_name(ident))).collect();
                    // rewrite the pattern using the new names (the old
                    // ones have already been applied):
                    let rewritten_pat = {
                        // nested binding to allow borrow to expire:
                        let mut rename_fld = IdentRenamer{renames: &mut new_pending_renames};
                        rename_fld.fold_pat(expanded_pat)
                    };
                    // add them to the existing pending renames:
                    fld.cx.syntax_env.info().pending_renames
                          .extend(new_pending_renames.into_iter());
                    Local {
                        id: id,
                        ty: expanded_ty,
                        pat: rewritten_pat,
                        // also, don't forget to expand the init:
                        init: init.map(|e| fld.fold_expr(e)),
                        source: source,
                        span: span
                    }
                });
                SmallVector::one(P(Spanned {
                    node: StmtDecl(P(Spanned {
                            node: DeclLocal(rewritten_local),
                            span: span
                        }),
                        node_id),
                    span: stmt_span
                }))
            }
            // non-local declarations (e.g. items) need no renaming; just
            // recurse with the default fold.
            _ => {
                noop_fold_stmt(Spanned {
                    node: StmtDecl(P(Spanned {
                            node: decl,
                            span: span
                        }),
                        node_id),
                    span: stmt_span
                }, fld)
            }
        }),
        // expression/semi statements: default traversal.
        _ => {
            noop_fold_stmt(Spanned {
                node: node,
                span: stmt_span
            }, fld)
        }
    }
}
// expand the arm of a 'match', renaming for macro hygiene
//
// All of the arm's patterns must bind the same set of idents, so the
// bindings of the first expanded pattern are used to generate one set of
// fresh names, which is then applied to every pattern (PatIdents only)
// and to the guard and body (all idents).
fn expand_arm(arm: ast::Arm, fld: &mut MacroExpander) -> ast::Arm {
    // expand pats... they might contain macro uses:
    let expanded_pats = arm.pats.move_map(|pat| fld.fold_pat(pat));
    if expanded_pats.len() == 0 {
        panic!("encountered match arm with 0 patterns");
    }
    // all of the pats must have the same set of bindings, so use the
    // first one to extract them and generate new names:
    let idents = pattern_bindings(&*expanded_pats[0]);
    let new_renames = idents.into_iter().map(|id| (id, fresh_name(&id))).collect();
    // apply the renaming, but only to the PatIdents:
    let mut rename_pats_fld = PatIdentRenamer{renames:&new_renames};
    let rewritten_pats = expanded_pats.move_map(|pat| rename_pats_fld.fold_pat(pat));
    // apply renaming and then expansion to the guard and the body:
    let mut rename_fld = IdentRenamer{renames:&new_renames};
    let rewritten_guard =
        arm.guard.map(|g| fld.fold_expr(rename_fld.fold_expr(g)));
    let rewritten_body = fld.fold_expr(rename_fld.fold_expr(arm.body));
    ast::Arm {
        attrs: arm.attrs.move_map(|x| fld.fold_attribute(x)),
        pats: rewritten_pats,
        guard: rewritten_guard,
        body: rewritten_body,
    }
}
/// A visitor that extracts the PatIdent (binding) paths
/// from a given pattern (or anything else visitable) and
/// accumulates them in a mutable array.
#[deriving(Clone)]
struct PatIdentFinder {
    /// Idents bound by PatIdent nodes, in visitation order.
    ident_accumulator: Vec<ast::Ident>
}
impl<'v> Visitor<'v> for PatIdentFinder {
    fn visit_pat(&mut self, pattern: &ast::Pat) {
        match *pattern {
            // Record the bound ident, then manually descend into the
            // optional subpattern (e.g. `x @ subpat`).
            ast::Pat { id: _, node: ast::PatIdent(_, ref path1, ref inner), span: _ } => {
                self.ident_accumulator.push(path1.node);
                // visit optional subpattern of PatIdent:
                for subpat in inner.iter() {
                    self.visit_pat(&**subpat)
                }
            }
            // use the default traversal for non-PatIdents
            _ => visit::walk_pat(self, pattern)
        }
    }
}
/// Collect the idents bound by `PatIdent` nodes of the given pattern,
/// in visitation order.
fn pattern_bindings(pat: &ast::Pat) -> Vec<ast::Ident> {
    let mut finder = PatIdentFinder { ident_accumulator: Vec::new() };
    finder.visit_pat(pat);
    finder.ident_accumulator
}
/// find the PatIdent binding paths in the argument patterns of a fn declaration
fn fn_decl_arg_bindings(fn_decl: &ast::FnDecl) -> Vec<ast::Ident> {
    let mut pat_idents = PatIdentFinder{ident_accumulator:Vec::new()};
    for arg in fn_decl.inputs.iter() {
        pat_idents.visit_pat(&*arg.pat);
    }
    pat_idents.ident_accumulator
}
// expand a block. pushes a new exts_frame, then calls expand_block_elts
// (the frame scopes macro definitions to the block; the `false` means the
// frame is not a macro_escape frame).
pub fn expand_block(blk: P<Block>, fld: &mut MacroExpander) -> P<Block> {
    // see note below about treatment of exts table
    with_exts_frame!(fld.cx.syntax_env,false,
                     expand_block_elts(blk, fld))
}
// expand the elements of a block.
//
// Each statement (and the optional tail expression) first has all pending
// renames applied — renames queued by earlier `let` statements in this
// block (see expand_non_macro_stmt) — and is then macro-expanded.
pub fn expand_block_elts(b: P<Block>, fld: &mut MacroExpander) -> P<Block> {
    b.map(|Block {id, view_items, stmts, expr, rules, span}| {
        let new_view_items = view_items.into_iter().map(|x| fld.fold_view_item(x)).collect();
        let new_stmts = stmts.into_iter().flat_map(|x| {
            // perform all pending renames
            let renamed_stmt = {
                let pending_renames = &mut fld.cx.syntax_env.info().pending_renames;
                let mut rename_fld = IdentRenamer{renames:pending_renames};
                rename_fld.fold_stmt(x).expect_one("rename_fold didn't return one value")
            };
            // expand macros in the statement
            fld.fold_stmt(renamed_stmt).into_iter()
        }).collect();
        let new_expr = expr.map(|x| {
            // same two-step treatment for the tail expression:
            let expr = {
                let pending_renames = &mut fld.cx.syntax_env.info().pending_renames;
                let mut rename_fld = IdentRenamer{renames:pending_renames};
                rename_fld.fold_expr(x)
            };
            fld.fold_expr(expr)
        });
        Block {
            id: fld.new_id(id),
            view_items: new_view_items,
            stmts: new_stmts,
            expr: new_expr,
            rules: rules,
            span: span
        }
    })
}
/// Expand a pattern. Non-macro patterns take the default traversal;
/// a `PatMac` is looked up in the syntax environment and expanded with
/// the usual hygiene protocol: mark the token trees before expansion,
/// mark the resulting pattern after, then keep expanding outside-in.
fn expand_pat(p: P<ast::Pat>, fld: &mut MacroExpander) -> P<ast::Pat> {
    match p.node {
        PatMac(_) => {}
        _ => return noop_fold_pat(p, fld)
    }
    p.map(|ast::Pat {node, span, ..}| {
        // pull the macro path and token trees out of the invocation:
        let (pth, tts) = match node {
            PatMac(mac) => match mac.node {
                MacInvocTT(pth, tts, _) => {
                    (pth, tts)
                }
            },
            _ => unreachable!()
        };
        // macro names must be single segments (no `a::b!`):
        if pth.segments.len() > 1u {
            fld.cx.span_err(pth.span, "expected macro name without module separators");
            return DummyResult::raw_pat(span);
        }
        let extname = pth.segments[0].identifier;
        let extnamestr = token::get_ident(extname);
        let marked_after = match fld.cx.syntax_env.find(&extname.name) {
            None => {
                fld.cx.span_err(pth.span,
                                format!("macro undefined: '{}!'",
                                        extnamestr).as_slice());
                // let compilation continue
                return DummyResult::raw_pat(span);
            }
            Some(rc) => match *rc {
                NormalTT(ref expander, tt_span) => {
                    // push expansion info so diagnostics can show the
                    // macro backtrace:
                    fld.cx.bt_push(ExpnInfo {
                        call_site: span,
                        callee: NameAndSpan {
                            name: extnamestr.get().to_string(),
                            format: MacroBang,
                            span: tt_span
                        }
                    });
                    // mark the input tokens with a fresh mark before
                    // handing them to the expander:
                    let fm = fresh_mark();
                    let marked_before = mark_tts(tts.as_slice(), fm);
                    let mac_span = fld.cx.original_span();
                    let expanded = match expander.expand(fld.cx,
                                                         mac_span,
                                                         marked_before.as_slice()).make_pat() {
                        Some(e) => e,
                        None => {
                            fld.cx.span_err(
                                pth.span,
                                format!(
                                    "non-pattern macro in pattern position: {}",
                                    extnamestr.get()
                                ).as_slice()
                            );
                            return DummyResult::raw_pat(span);
                        }
                    };
                    // mark after:
                    mark_pat(expanded,fm)
                }
                // e.g. macro_rules! definitions are not legal here:
                _ => {
                    fld.cx.span_err(span,
                                    format!("{}! is not legal in pattern position",
                                            extnamestr.get()).as_slice());
                    return DummyResult::raw_pat(span);
                }
            }
        };
        // keep expanding, outside-in, then pop the backtrace frame:
        let fully_expanded =
            fld.fold_pat(marked_after).node.clone();
        fld.cx.bt_pop();
        ast::Pat {
            id: ast::DUMMY_NODE_ID,
            node: fully_expanded,
            span: span
        }
    })
}
/// A tree-folder that applies every rename in its (mutable) list
/// to every identifier, including both bindings and varrefs
/// (and lots of things that will turn out to be neither)
pub struct IdentRenamer<'a> {
    /// (old ident, new name) pairs to apply to every syntax context.
    renames: &'a mtwt::RenameList,
}
impl<'a> Folder for IdentRenamer<'a> {
    fn fold_ident(&mut self, id: Ident) -> Ident {
        // The name is left alone; renaming is recorded in the ident's
        // syntax context, to be resolved later by mtwt.
        Ident {
            name: id.name,
            ctxt: mtwt::apply_renames(self.renames, id.ctxt),
        }
    }
    // Descend into macro invocations too, so idents inside unexpanded
    // token trees also get renamed.
    fn fold_mac(&mut self, macro: ast::Mac) -> ast::Mac {
        fold::noop_fold_mac(macro, self)
    }
}
/// A tree-folder that applies every rename in its list to
/// the idents that are in PatIdent patterns. This is more narrowly
/// focused than IdentRenamer, and is needed for FnDecl,
/// where we want to rename the args but not the fn name or the generics etc.
pub struct PatIdentRenamer<'a> {
    /// (old ident, new name) pairs to apply, PatIdent bindings only.
    renames: &'a mtwt::RenameList,
}
impl<'a> Folder for PatIdentRenamer<'a> {
    fn fold_pat(&mut self, pat: P<ast::Pat>) -> P<ast::Pat> {
        // Only PatIdent nodes are rewritten; everything else takes the
        // default traversal (which will still reach nested PatIdents).
        match pat.node {
            ast::PatIdent(..) => {},
            _ => return noop_fold_pat(pat, self)
        }
        pat.map(|ast::Pat {id, node, span}| match node {
            ast::PatIdent(binding_mode, Spanned{span: sp, node: ident}, sub) => {
                // As in IdentRenamer, the rename lives in the syntax context.
                let new_ident = Ident{name: ident.name,
                                      ctxt: mtwt::apply_renames(self.renames, ident.ctxt)};
                let new_node =
                    ast::PatIdent(binding_mode,
                                  Spanned{span: self.new_span(sp), node: new_ident},
                                  sub.map(|p| self.fold_pat(p)));
                ast::Pat {
                    id: id,
                    node: new_node,
                    span: self.new_span(span)
                }
            },
            _ => unreachable!()
        })
    }
    // Descend into macro invocations as well.
    fn fold_mac(&mut self, macro: ast::Mac) -> ast::Mac {
        fold::noop_fold_mac(macro, self)
    }
}
// expand a method.
//
// Ordinary method declarations get their fn_decl and body expanded and
// hygienically renamed; a macro in method position (MethMac) is expanded
// into zero or more methods, which are then expanded again.
fn expand_method(m: P<ast::Method>, fld: &mut MacroExpander) -> SmallVector<P<ast::Method>> {
    m.and_then(|m| match m.node {
        ast::MethDecl(ident,
                      generics,
                      abi,
                      explicit_self,
                      fn_style,
                      decl,
                      body,
                      vis) => {
            let id = fld.new_id(m.id);
            // rename the args (and uses thereof in the body) for hygiene:
            let (rewritten_fn_decl, rewritten_body)
                = expand_and_rename_fn_decl_and_block(decl,body,fld);
            SmallVector::one(P(ast::Method {
                    attrs: m.attrs.move_map(|a| fld.fold_attribute(a)),
                    id: id,
                    span: fld.new_span(m.span),
                    node: ast::MethDecl(fld.fold_ident(ident),
                                        noop_fold_generics(generics, fld),
                                        abi,
                                        fld.fold_explicit_self(explicit_self),
                                        fn_style,
                                        rewritten_fn_decl,
                                        rewritten_body,
                                        vis)
                }))
        },
        ast::MethMac(mac) => {
            // expand the invocation; mark each produced method afterwards.
            let maybe_new_methods =
                expand_mac_invoc(mac, m.span,
                                 |r| r.make_methods(),
                                 |meths, mark| meths.move_map(|m| mark_method(m, mark)),
                                 fld);
            let new_methods = match maybe_new_methods {
                Some(methods) => methods,
                None => SmallVector::zero()
            };
            // expand again if necessary
            let new_methods = new_methods.into_iter()
                                         .flat_map(|m| fld.fold_method(m).into_iter()).collect();
            fld.cx.bt_pop();
            new_methods
        }
    })
}
/// Given a fn_decl and a block and a MacroExpander, expand the fn_decl, then use the
/// PatIdents in its arguments to perform renaming in the FnDecl and
/// the block, returning both the new FnDecl and the new Block.
///
/// Note the asymmetry: the decl is renamed with PatIdentRenamer (so the
/// fn name, generics etc. are untouched), while the body is renamed with
/// the broader IdentRenamer before being expanded.
fn expand_and_rename_fn_decl_and_block(fn_decl: P<ast::FnDecl>, block: P<ast::Block>,
                                       fld: &mut MacroExpander)
                                       -> (P<ast::FnDecl>, P<ast::Block>) {
    let expanded_decl = fld.fold_fn_decl(fn_decl);
    // fresh names for every ident bound by the argument patterns:
    let idents = fn_decl_arg_bindings(&*expanded_decl);
    let renames =
        idents.iter().map(|id : &ast::Ident| (*id,fresh_name(id))).collect();
    // first, a renamer for the PatIdents, for the fn_decl:
    let mut rename_pat_fld = PatIdentRenamer{renames: &renames};
    let rewritten_fn_decl = rename_pat_fld.fold_fn_decl(expanded_decl);
    // now, a renamer for *all* idents, for the body:
    let mut rename_fld = IdentRenamer{renames: &renames};
    let rewritten_body = fld.fold_block(rename_fld.fold_block(block));
    (rewritten_fn_decl,rewritten_body)
}
/// A tree-folder that performs macro expansion
pub struct MacroExpander<'a, 'b:'a> {
    /// Extension context: syntax environment, diagnostics, backtrace, etc.
    pub cx: &'a mut ExtCtxt<'b>,
}
impl<'a, 'b> Folder for MacroExpander<'a, 'b> {
    // Each override simply delegates to the corresponding expand_*
    // function defined in this module.
    fn fold_expr(&mut self, expr: P<ast::Expr>) -> P<ast::Expr> {
        expand_expr(expr, self)
    }
    fn fold_pat(&mut self, pat: P<ast::Pat>) -> P<ast::Pat> {
        expand_pat(pat, self)
    }
    fn fold_item(&mut self, item: P<ast::Item>) -> SmallVector<P<ast::Item>> {
        expand_item(item, self)
    }
    fn fold_item_underscore(&mut self, item: ast::Item_) -> ast::Item_ {
        expand_item_underscore(item, self)
    }
    fn fold_stmt(&mut self, stmt: P<ast::Stmt>) -> SmallVector<P<ast::Stmt>> {
        stmt.and_then(|stmt| expand_stmt(stmt, self))
    }
    fn fold_block(&mut self, block: P<Block>) -> P<Block> {
        expand_block(block, self)
    }
    fn fold_arm(&mut self, arm: ast::Arm) -> ast::Arm {
        expand_arm(arm, self)
    }
    fn fold_method(&mut self, method: P<ast::Method>) -> SmallVector<P<ast::Method>> {
        expand_method(method, self)
    }
    fn new_span(&mut self, span: Span) -> Span {
        new_span(self.cx, span)
    }
}
/// Rewrite a span so its expansion id points at the current macro
/// backtrace.
fn new_span(cx: &ExtCtxt, sp: Span) -> Span {
    /* this discards information in the case of macro-defining macros */
    Span {
        lo: sp.lo,
        hi: sp.hi,
        expn_id: cx.backtrace(),
    }
}
/// Configuration for a macro-expansion pass over a crate.
pub struct ExpansionConfig {
    /// Name of the crate being expanded.
    pub crate_name: String,
    // NOTE(review): the exact semantics of the next two flags are not
    // visible here — presumably they gate deriving(Hash) behavior and the
    // quote_*! macros respectively; confirm against ExtCtxt usage.
    pub deriving_hash_type_parameter: bool,
    pub enable_quotes: bool,
    /// Maximum macro expansion depth (default 64, see `default`).
    pub recursion_limit: uint,
}
impl ExpansionConfig {
    /// Conservative default configuration: quoting and the deriving(Hash)
    /// type-parameter flag are off, recursion limit is 64.
    pub fn default(crate_name: String) -> ExpansionConfig {
        ExpansionConfig {
            crate_name: crate_name,
            deriving_hash_type_parameter: false,
            enable_quotes: false,
            recursion_limit: 64,
        }
    }
}
/// Macros exported by another crate, carried as serialized source
/// strings to be re-parsed and registered during expansion of this
/// crate (see `expand_crate`).
pub struct ExportedMacros {
    /// The crate the macros came from.
    pub crate_name: Ident,
    /// One source string per exported macro definition.
    pub macros: Vec<String>,
}
/// Entry point: expand all macros in a crate.
///
/// Imported macros are re-parsed from their serialized source and fed
/// through `expand_item_mac` (registering them in the syntax
/// environment); user-supplied extensions are registered directly; then
/// the whole crate is folded. Aborts if any errors were reported.
pub fn expand_crate(parse_sess: &parse::ParseSess,
                    cfg: ExpansionConfig,
                    // these are the macros being imported to this crate:
                    imported_macros: Vec<ExportedMacros>,
                    user_exts: Vec<NamedSyntaxExtension>,
                    c: Crate) -> Crate {
    let mut cx = ExtCtxt::new(parse_sess, c.config.clone(), cfg);
    let mut expander = MacroExpander {
        cx: &mut cx,
    };
    for ExportedMacros { crate_name, macros } in imported_macros.into_iter() {
        // synthetic "filename" used for diagnostics on re-parsed macros:
        let name = format!("<{} macros>", token::get_ident(crate_name))
            .into_string();
        for source in macros.into_iter() {
            let item = parse::parse_item_from_source_str(name.clone(),
                                                         source,
                                                         expander.cx.cfg(),
                                                         expander.cx.parse_sess())
                    .expect("expected a serialized item");
            expand_item_mac(item, &mut expander);
        }
    }
    for (name, extension) in user_exts.into_iter() {
        expander.cx.syntax_env.insert(name, extension);
    }
    let mut ret = expander.fold_crate(c);
    ret.exported_macros = expander.cx.exported_macros.clone();
    parse_sess.span_diagnostic.handler().abort_if_errors();
    return ret;
}
// HYGIENIC CONTEXT EXTENSION:
// all of these functions are for walking over
// ASTs and making some change to the context of every
// element that has one. a CtxtFn is a trait-ified
// version of a closure in (SyntaxContext -> SyntaxContext).
// the ones defined here include:
// Marker - add a mark to a context

/// A Marker adds the given mark to the syntax context of every ident it
/// folds over.
struct Marker { mark: Mrk }
impl Folder for Marker {
    fn fold_ident(&mut self, id: Ident) -> Ident {
        // Only the syntax context changes; the name is untouched.
        ast::Ident {
            name: id.name,
            ctxt: mtwt::apply_mark(self.mark, id.ctxt)
        }
    }
    fn fold_mac(&mut self, Spanned {node, span}: ast::Mac) -> ast::Mac {
        // Mark the invocation's path, token trees, and its own context.
        Spanned {
            node: match node {
                MacInvocTT(path, tts, ctxt) => {
                    MacInvocTT(self.fold_path(path),
                               self.fold_tts(tts.as_slice()),
                               mtwt::apply_mark(self.mark, ctxt))
                }
            },
            span: span,
        }
    }
}
// apply a given mark to the given token trees. Used prior to expansion of a macro.
fn mark_tts(tts: &[TokenTree], m: Mrk) -> Vec<TokenTree> {
    noop_fold_tts(tts, &mut Marker{mark:m})
}
// apply a given mark to the given expr. Used following the expansion of a macro.
fn mark_expr(expr: P<ast::Expr>, m: Mrk) -> P<ast::Expr> {
    let mut marker = Marker { mark: m };
    marker.fold_expr(expr)
}
// apply a given mark to the given pattern. Used following the expansion of a macro.
fn mark_pat(pat: P<ast::Pat>, m: Mrk) -> P<ast::Pat> {
    let mut marker = Marker { mark: m };
    marker.fold_pat(pat)
}
// apply a given mark to the given stmt. Used following the expansion of a macro.
// (Parameter renamed from `expr` to `stmt`: it is a statement, not an expr.)
fn mark_stmt(stmt: P<ast::Stmt>, m: Mrk) -> P<ast::Stmt> {
    Marker{mark:m}.fold_stmt(stmt)
        .expect_one("marking a stmt didn't return exactly one stmt")
}
// apply a given mark to the given item. Used following the expansion of a macro.
// (Parameter renamed from `expr` to `item`: it is an item, not an expr.)
fn mark_item(item: P<ast::Item>, m: Mrk) -> P<ast::Item> {
    Marker{mark:m}.fold_item(item)
        .expect_one("marking an item didn't return exactly one item")
}
// apply a given mark to the given method. Used following the expansion of a macro.
// (Fixed copy-paste from mark_item: the comment said "item", the parameter
// was named `expr`, and the panic message mixed "item" and "method".)
fn mark_method(method: P<ast::Method>, m: Mrk) -> P<ast::Method> {
    Marker{mark:m}.fold_method(method)
        .expect_one("marking a method didn't return exactly one method")
}
/// Check that there are no macro invocations left in the AST:
/// walks the crate with MacroExterminator, which reports an internal
/// compiler error on any remaining invocation.
pub fn check_for_macros(sess: &parse::ParseSess, krate: &ast::Crate) {
    visit::walk_crate(&mut MacroExterminator{sess:sess}, krate);
}
/// A visitor that ensures that no macro invocations remain in an AST.
struct MacroExterminator<'a>{
    sess: &'a parse::ParseSess
}

impl<'a, 'v> Visitor<'v> for MacroExterminator<'a> {
    // Any surviving invocation is a compiler bug, not a user error,
    // hence span_bug rather than span_err.
    fn visit_mac(&mut self, macro: &ast::Mac) {
        self.sess.span_diagnostic.span_bug(macro.span,
                                           "macro exterminator: expected AST \
                                           with no macro invocations");
    }
}
#[cfg(test)]
mod test {
use super::{pattern_bindings, expand_crate, contains_macro_escape};
use super::{PatIdentFinder, IdentRenamer, PatIdentRenamer, ExpansionConfig};
use ast;
use ast::{Attribute_, AttrOuter, MetaWord, Name};
use attr;
use codemap;
use codemap::Spanned;
use ext::mtwt;
use fold::Folder;
use parse;
use parse::token;
use ptr::P;
use util::parser_testing::{string_to_parser};
use util::parser_testing::{string_to_pat, string_to_crate, strs_to_idents};
use visit;
use visit::Visitor;
    // a visitor that extracts the paths
    // from a given thingy and puts them in a mutable
    // array (passed in to the traversal)
    #[deriving(Clone)]
    struct PathExprFinderContext {
        // paths of every ExprPath encountered, in traversal order
        path_accumulator: Vec<ast::Path> ,
    }

    impl<'v> Visitor<'v> for PathExprFinderContext {
        fn visit_expr(&mut self, expr: &ast::Expr) {
            match expr.node {
                ast::ExprPath(ref p) => {
                    self.path_accumulator.push(p.clone());
                    // not calling visit_path, but it should be fine.
                }
                _ => visit::walk_expr(self, expr)
            }
        }
    }
    // find the variable references (path expressions) in a crate,
    // in traversal order
    fn crate_varrefs(the_crate : &ast::Crate) -> Vec<ast::Path> {
        let mut path_finder = PathExprFinderContext{path_accumulator:Vec::new()};
        visit::walk_crate(&mut path_finder, the_crate);
        path_finder.path_accumulator
    }
    /// A Visitor that extracts every identifier (of any kind) from a
    /// visitable AST fragment.
    // as a side note, I'm starting to want to abstract over these....
    struct IdentFinder {
        ident_accumulator: Vec<ast::Ident>
    }

    impl<'v> Visitor<'v> for IdentFinder {
        fn visit_ident(&mut self, _: codemap::Span, id: ast::Ident){
            self.ident_accumulator.push(id);
        }
    }
    /// Find all the idents in a crate, in traversal order.
    fn crate_idents(the_crate: &ast::Crate) -> Vec<ast::Ident> {
        let mut ident_finder = IdentFinder{ident_accumulator: Vec::new()};
        visit::walk_crate(&mut ident_finder, the_crate);
        ident_finder.ident_accumulator
    }
    // these following tests are quite fragile, in that they don't test what
    // *kind* of failure occurs.

    /// Expansion config shared by the tests below.
    fn test_ecfg() -> ExpansionConfig {
        ExpansionConfig::default("test".to_string())
    }
// make sure that macros can't escape fns
#[should_fail]
#[test] fn macros_cant_escape_fns_test () {
let src = "fn bogus() {macro_rules! z (() => (3+4))}\
fn inty() -> int { z!() }".to_string();
let sess = parse::new_parse_sess();
let crate_ast = parse::parse_crate_from_source_str(
"<test>".to_string(),
src,
Vec::new(), &sess);
// should fail:
expand_crate(&sess,test_ecfg(),vec!(),vec!(),crate_ast);
}
// make sure that macros can't escape modules
#[should_fail]
#[test] fn macros_cant_escape_mods_test () {
let src = "mod foo {macro_rules! z (() => (3+4))}\
fn inty() -> int { z!() }".to_string();
let sess = parse::new_parse_sess();
let crate_ast = parse::parse_crate_from_source_str(
"<test>".to_string(),
src,
Vec::new(), &sess);
expand_crate(&sess,test_ecfg(),vec!(),vec!(),crate_ast);
}
// macro_escape modules should allow macros to escape
#[test] fn macros_can_escape_flattened_mods_test () {
let src = "#[macro_escape] mod foo {macro_rules! z (() => (3+4))}\
fn inty() -> int { z!() }".to_string();
let sess = parse::new_parse_sess();
let crate_ast = parse::parse_crate_from_source_str(
"<test>".to_string(),
src,
Vec::new(), &sess);
expand_crate(&sess, test_ecfg(), vec!(), vec!(), crate_ast);
}
#[test] fn test_contains_flatten (){
let attr1 = make_dummy_attr ("foo");
let attr2 = make_dummy_attr ("bar");
let escape_attr = make_dummy_attr ("macro_escape");
let attrs1 = vec!(attr1.clone(), escape_attr, attr2.clone());
assert_eq!(contains_macro_escape(attrs1.as_slice()),true);
let attrs2 = vec!(attr1,attr2);
assert_eq!(contains_macro_escape(attrs2.as_slice()),false);
}
    // make a MetaWord outer attribute with the given name
    fn make_dummy_attr(s: &str) -> ast::Attribute {
        Spanned {
            span:codemap::DUMMY_SP,
            node: Attribute_ {
                id: attr::mk_attr_id(),
                style: AttrOuter,
                value: P(Spanned {
                    node: MetaWord(token::intern_and_get_ident(s)),
                    span: codemap::DUMMY_SP,
                }),
                // a plain attribute, not sugared doc-comment syntax
                is_sugared_doc: false,
            }
        }
    }
    /// Parse the given source string as a crate and run full macro
    /// expansion on it.
    fn expand_crate_str(crate_str: String) -> ast::Crate {
        let ps = parse::new_parse_sess();
        let crate_ast = string_to_parser(&ps, crate_str).parse_crate_mod();
        // the cfg argument actually does matter, here...
        expand_crate(&ps,test_ecfg(),vec!(),vec!(),crate_ast)
    }
    // find the pat_ident paths (binding idents) in a crate, in
    // traversal order
    fn crate_bindings(the_crate : &ast::Crate) -> Vec<ast::Ident> {
        let mut name_finder = PatIdentFinder{ident_accumulator:Vec::new()};
        visit::walk_crate(&mut name_finder, the_crate);
        name_finder.ident_accumulator
    }
#[test] fn macro_tokens_should_match(){
expand_crate_str(
"macro_rules! m((a)=>(13)) fn main(){m!(a);}".to_string());
}
// should be able to use a bound identifier as a literal in a macro definition:
#[test] fn self_macro_parsing(){
expand_crate_str(
"macro_rules! foo ((zz) => (287u;))
fn f(zz : int) {foo!(zz);}".to_string()
);
}
// renaming tests expand a crate and then check that the bindings match
// the right varrefs. The specification of the test case includes the
// text of the crate, and also an array of arrays. Each element in the
// outer array corresponds to a binding in the traversal of the AST
// induced by visit. Each of these arrays contains a list of indexes,
// interpreted as the varrefs in the varref traversal that this binding
// should match. So, for instance, in a program with two bindings and
// three varrefs, the array ~[~[1,2],~[0]] would indicate that the first
// binding should match the second two varrefs, and the second binding
// should match the first varref.
//
// Put differently; this is a sparse representation of a boolean matrix
// indicating which bindings capture which identifiers.
//
// Note also that this matrix is dependent on the implicit ordering of
// the bindings and the varrefs discovered by the name-finder and the path-finder.
//
// The comparisons are done post-mtwt-resolve, so we're comparing renamed
// names; differences in marks don't matter any more.
//
// oog... I also want tests that check "bound-identifier-=?". That is,
// not just "do these have the same name", but "do they have the same
// name *and* the same marks"? Understanding this is really pretty painful.
// in principle, you might want to control this boolean on a per-varref basis,
// but that would make things even harder to understand, and might not be
// necessary for thorough testing.
type RenamingTest = (&'static str, Vec<Vec<uint>>, bool);
#[test]
fn automatic_renaming () {
let tests: Vec<RenamingTest> =
vec!(// b & c should get new names throughout, in the expr too:
("fn a() -> int { let b = 13; let c = b; b+c }",
vec!(vec!(0,1),vec!(2)), false),
// both x's should be renamed (how is this causing a bug?)
("fn main () {let x: int = 13;x;}",
vec!(vec!(0)), false),
// the use of b after the + should be renamed, the other one not:
("macro_rules! f (($x:ident) => (b + $x)) fn a() -> int { let b = 13; f!(b)}",
vec!(vec!(1)), false),
// the b before the plus should not be renamed (requires marks)
("macro_rules! f (($x:ident) => ({let b=9; ($x + b)})) fn a() -> int { f!(b)}",
vec!(vec!(1)), false),
// the marks going in and out of letty should cancel, allowing that $x to
// capture the one following the semicolon.
// this was an awesome test case, and caught a *lot* of bugs.
("macro_rules! letty(($x:ident) => (let $x = 15;))
macro_rules! user(($x:ident) => ({letty!($x); $x}))
fn main() -> int {user!(z)}",
vec!(vec!(0)), false)
);
for (idx,s) in tests.iter().enumerate() {
run_renaming_test(s,idx);
}
}
// no longer a fixme #8062: this test exposes a *potential* bug; our system does
// not behave exactly like MTWT, but a conversation with Matthew Flatt
// suggests that this can only occur in the presence of local-expand, which
// we have no plans to support. ... unless it's needed for item hygiene....
#[ignore]
#[test] fn issue_8062(){
run_renaming_test(
&("fn main() {let hrcoo = 19; macro_rules! getx(()=>(hrcoo)); getx!();}",
vec!(vec!(0)), true), 0)
}
// FIXME #6994:
// the z flows into and out of two macros (g & f) along one path, and one
// (just g) along the other, so the result of the whole thing should
// be "let z_123 = 3; z_123"
#[ignore]
#[test] fn issue_6994(){
run_renaming_test(
&("macro_rules! g (($x:ident) =>
({macro_rules! f(($y:ident)=>({let $y=3;$x}));f!($x)}))
fn a(){g!(z)}",
vec!(vec!(0)),false),
0)
}
// match variable hygiene. Should expand into
// fn z() {match 8 {x_1 => {match 9 {x_2 | x_2 if x_2 == x_1 => x_2 + x_1}}}}
#[test] fn issue_9384(){
run_renaming_test(
&("macro_rules! bad_macro (($ex:expr) => ({match 9 {x | x if x == $ex => x + $ex}}))
fn z() {match 8 {x => bad_macro!(x)}}",
// NB: the third "binding" is the repeat of the second one.
vec!(vec!(1,3),vec!(0,2),vec!(0,2)),
true),
0)
}
// interpolated nodes weren't getting labeled.
// should expand into
// fn main(){let g1_1 = 13; g1_1}}
#[test] fn pat_expand_issue_15221(){
run_renaming_test(
&("macro_rules! inner ( ($e:pat ) => ($e))
macro_rules! outer ( ($e:pat ) => (inner!($e)))
fn main() { let outer!(g) = 13; g;}",
vec!(vec!(0)),
true),
0)
}
// create a really evil test case where a $x appears inside a binding of $x
// but *shouldn't* bind because it was inserted by a different macro....
// can't write this test case until we have macro-generating macros.
// method arg hygiene
// method expands to fn get_x(&self_0, x_1:int) {self_0 + self_2 + x_3 + x_1}
#[test] fn method_arg_hygiene(){
run_renaming_test(
&("macro_rules! inject_x (()=>(x))
macro_rules! inject_self (()=>(self))
struct A;
impl A{fn get_x(&self, x: int) {self + inject_self!() + inject_x!() + x;} }",
vec!(vec!(0),vec!(3)),
true),
0)
}
// ooh, got another bite?
// expands to struct A; impl A {fn thingy(&self_1) {self_1;}}
#[test] fn method_arg_hygiene_2(){
run_renaming_test(
&("struct A;
macro_rules! add_method (($T:ty) =>
(impl $T { fn thingy(&self) {self;} }))
add_method!(A)",
vec!(vec!(0)),
true),
0)
}
// item fn hygiene
// expands to fn q(x_1:int){fn g(x_2:int){x_2 + x_1};}
#[test] fn issue_9383(){
run_renaming_test(
&("macro_rules! bad_macro (($ex:expr) => (fn g(x:int){ x + $ex }))
fn q(x:int) { bad_macro!(x); }",
vec!(vec!(1),vec!(0)),true),
0)
}
// closure arg hygiene (ExprClosure)
// expands to fn f(){(|x_1 : int| {(x_2 + x_1)})(3);}
#[test] fn closure_arg_hygiene(){
run_renaming_test(
&("macro_rules! inject_x (()=>(x))
fn f(){(|x : int| {(inject_x!() + x)})(3);}",
vec!(vec!(1)),
true),
0)
}
// closure arg hygiene (ExprProc)
// expands to fn f(){(proc(x_1 : int) {(x_2 + x_1)})(3);}
#[test] fn closure_arg_hygiene_2(){
run_renaming_test(
&("macro_rules! inject_x (()=>(x))
fn f(){ (proc(x : int){(inject_x!() + x)})(3); }",
vec!(vec!(1)),
true),
0)
}
// macro_rules in method position. Sadly, unimplemented.
#[test] fn macro_in_method_posn(){
expand_crate_str(
"macro_rules! my_method (() => (fn thirteen(&self) -> int {13}))
struct A;
impl A{ my_method!()}
fn f(){A.thirteen;}".to_string());
}
// another nested macro
// expands to impl Entries {fn size_hint(&self_1) {self_1;}
#[test] fn item_macro_workaround(){
run_renaming_test(
&("macro_rules! item { ($i:item) => {$i}}
struct Entries;
macro_rules! iterator_impl {
() => { item!( impl Entries { fn size_hint(&self) { self;}})}}
iterator_impl! { }",
vec!(vec!(0)), true),
0)
}
    // run one of the renaming tests.
    //
    // Expands the test's source string, then checks that binding #i
    // captures exactly the varrefs listed at index i of the test's
    // connection table (comparing mtwt-resolved names); if the test's
    // third field is set, marks are compared too (bound-identifier=?).
    fn run_renaming_test(t: &RenamingTest, test_idx: uint) {
        let invalid_name = token::special_idents::invalid.name;
        let (teststr, bound_connections, bound_ident_check) = match *t {
            (ref str,ref conns, bic) => (str.to_string(), conns.clone(), bic)
        };
        let cr = expand_crate_str(teststr.to_string());
        let bindings = crate_bindings(&cr);
        let varrefs = crate_varrefs(&cr);
        // must be one check clause for each binding:
        assert_eq!(bindings.len(),bound_connections.len());
        for (binding_idx,shouldmatch) in bound_connections.iter().enumerate() {
            let binding_name = mtwt::resolve(bindings[binding_idx]);
            let binding_marks = mtwt::marksof(bindings[binding_idx].ctxt, invalid_name);
            // shouldmatch can't name varrefs that don't exist:
            assert!((shouldmatch.len() == 0) ||
                    (varrefs.len() > *shouldmatch.iter().max().unwrap()));
            for (idx,varref) in varrefs.iter().enumerate() {
                // diagnostic dump used when an assertion is about to fail:
                let print_hygiene_debug_info = || {
                    // good lord, you can't make a path with 0 segments, can you?
                    let final_varref_ident = match varref.segments.last() {
                        Some(pathsegment) => pathsegment.identifier,
                        None => panic!("varref with 0 path segments?")
                    };
                    let varref_name = mtwt::resolve(final_varref_ident);
                    let varref_idents : Vec<ast::Ident>
                        = varref.segments.iter().map(|s| s.identifier)
                        .collect();
                    println!("varref #{}: {}, resolves to {}",idx, varref_idents, varref_name);
                    let string = token::get_ident(final_varref_ident);
                    println!("varref's first segment's string: \"{}\"", string.get());
                    println!("binding #{}: {}, resolves to {}",
                             binding_idx, bindings[binding_idx], binding_name);
                    mtwt::with_sctable(|x| mtwt::display_sctable(x));
                };
                if shouldmatch.contains(&idx) {
                    // it should be a path of length 1, and it should
                    // be free-identifier=? or bound-identifier=? to the given binding
                    assert_eq!(varref.segments.len(),1);
                    let varref_name = mtwt::resolve(varref.segments[0].identifier);
                    let varref_marks = mtwt::marksof(varref.segments[0]
                                                        .identifier
                                                        .ctxt,
                                                    invalid_name);
                    if !(varref_name==binding_name) {
                        println!("uh oh, should match but doesn't:");
                        print_hygiene_debug_info();
                    }
                    assert_eq!(varref_name,binding_name);
                    if bound_ident_check {
                        // we're checking bound-identifier=?, and the marks
                        // should be the same, too:
                        assert_eq!(varref_marks,binding_marks.clone());
                    }
                } else {
                    // this varref must NOT resolve to the binding:
                    let varref_name = mtwt::resolve(varref.segments[0].identifier);
                    let fail = (varref.segments.len() == 1)
                        && (varref_name == binding_name);
                    // temp debugging:
                    if fail {
                        println!("failure on test {}",test_idx);
                        println!("text of test case: \"{}\"", teststr);
                        println!("");
                        println!("uh oh, matches but shouldn't:");
                        print_hygiene_debug_info();
                    }
                    assert!(!fail);
                }
            }
        }
    }
#[test] fn fmt_in_macro_used_inside_module_macro() {
let crate_str = "macro_rules! fmt_wrap(($b:expr)=>($b.to_string()))
macro_rules! foo_module (() => (mod generated { fn a() { let xx = 147; fmt_wrap!(xx);}}))
foo_module!()
".to_string();
let cr = expand_crate_str(crate_str);
// find the xx binding
let bindings = crate_bindings(&cr);
let cxbinds: Vec<&ast::Ident> =
bindings.iter().filter(|b| {
let ident = token::get_ident(**b);
let string = ident.get();
"xx" == string
}).collect();
let cxbinds: &[&ast::Ident] = cxbinds.as_slice();
let cxbind = match cxbinds {
[b] => b,
_ => panic!("expected just one binding for ext_cx")
};
let resolved_binding = mtwt::resolve(*cxbind);
let varrefs = crate_varrefs(&cr);
// the xx binding should bind all of the xx varrefs:
for (idx,v) in varrefs.iter().filter(|p| {
p.segments.len() == 1
&& "xx" == token::get_ident(p.segments[0].identifier).get()
}).enumerate() {
if mtwt::resolve(v.segments[0].identifier) != resolved_binding {
println!("uh oh, xx binding didn't match xx varref:");
println!("this is xx varref \\# {}", idx);
println!("binding: {}", cxbind);
println!("resolves to: {}", resolved_binding);
println!("varref: {}", v.segments[0].identifier);
println!("resolves to: {}",
mtwt::resolve(v.segments[0].identifier));
mtwt::with_sctable(|x| mtwt::display_sctable(x));
}
assert_eq!(mtwt::resolve(v.segments[0].identifier),
resolved_binding);
};
}
#[test]
fn pat_idents(){
let pat = string_to_pat(
"(a,Foo{x:c @ (b,9),y:Bar(4,d)})".to_string());
let idents = pattern_bindings(&*pat);
assert_eq!(idents, strs_to_idents(vec!("a","c","b","d")));
}
// test the list of identifier patterns gathered by the visitor. Note that
// 'None' is listed as an identifier pattern because we don't yet know that
// it's the name of a 0-ary variant, and that 'i' appears twice in succession.
#[test]
fn crate_bindings_test(){
    // `i` is collected once per `|`-alternative of the match arm, and
    // `None` counts as a binding because resolution has not yet run.
    let the_crate = string_to_crate("fn main (a : int) -> int {|b| {
        match 34 {None => 3, Some(i) | i => j, Foo{k:z,l:y} => \"banana\"}} }".to_string());
    let idents = crate_bindings(&the_crate);
    assert_eq!(idents, strs_to_idents(vec!("a","b","None","i","i","z","y")));
}
// test the IdentRenamer directly
#[test]
fn ident_renamer_test () {
    // IdentRenamer renames EVERY occurrence of `x` (binding sites and uses
    // alike) to the fresh Name(16); `f` and `int` are left untouched.
    let the_crate = string_to_crate("fn f(x : int){let x = x; x}".to_string());
    let f_ident = token::str_to_ident("f");
    let x_ident = token::str_to_ident("x");
    let int_ident = token::str_to_ident("int");
    let renames = vec!((x_ident,Name(16)));
    let mut renamer = IdentRenamer{renames: &renames};
    let renamed_crate = renamer.fold_crate(the_crate);
    let idents = crate_idents(&renamed_crate);
    let resolved : Vec<ast::Name> = idents.iter().map(|id| mtwt::resolve(*id)).collect();
    // Order: f, x (param), int, x (let pattern), x (initializer), x (tail expr).
    assert_eq!(resolved,vec!(f_ident.name,Name(16),int_ident.name,Name(16),Name(16),Name(16)));
}
// test the PatIdentRenamer; only PatIdents get renamed
#[test]
fn pat_ident_renamer_test () {
    // Unlike IdentRenamer above, PatIdentRenamer touches only binding
    // positions: the parameter `x` and the `let x` pattern receive
    // Name(16), while the two non-pattern uses of `x` keep their name.
    let the_crate = string_to_crate("fn f(x : int){let x = x; x}".to_string());
    let f_ident = token::str_to_ident("f");
    let x_ident = token::str_to_ident("x");
    let int_ident = token::str_to_ident("int");
    let renames = vec!((x_ident,Name(16)));
    let mut renamer = PatIdentRenamer{renames: &renames};
    let renamed_crate = renamer.fold_crate(the_crate);
    let idents = crate_idents(&renamed_crate);
    let resolved : Vec<ast::Name> = idents.iter().map(|id| mtwt::resolve(*id)).collect();
    let x_name = x_ident.name;
    // Order: f, x (param binding), int, x (let binding), x (use), x (use).
    assert_eq!(resolved,vec!(f_ident.name,Name(16),int_ident.name,Name(16),x_name,x_name));
}
}<|fim▁end|> | fld.cx.bt_push(ExpnInfo {
call_site: attr.span, |
<|file_name|>hubitat.go<|end_file_name|><|fim▁begin|>package insteon
// HubitatConfiguration contains the Hubitat configuration.
type HubitatConfiguration struct {
	// HubURL is the base URL of the Hubitat hub, read from the
	// `hub_url` key of the YAML configuration.
	HubURL string `yaml:"hub_url"`
}
// HubitatEvent represents a Hubitat event.
type HubitatEvent struct {
Alias string `json:"id"`
State LightState `json:"state"`<|fim▁hole|><|fim▁end|> | } |
function normalizeUrl(url){
    // parseUri 1.2.2
    // (c) Steven Levithan <stevenlevithan.com>
    // MIT License
    function parseUri (str) {
        var o = parseUri.options,
            m = o.parser[o.strictMode ? "strict" : "loose"].exec(str),
            uri = {},
            i = 14;
        while (i--) uri[o.key[i]] = m[i] || "";
        uri[o.q.name] = {};
        uri[o.key[12]].replace(o.q.parser, function ($0, $1, $2) {
            if ($1) uri[o.q.name][$1] = $2;
        });
        return uri;
    }
    parseUri.options = {
        strictMode: false,
        key: ["source","protocol","authority","userInfo","user","password","host","port","relative","path","directory","file","query","anchor"],
        q: {
            name: "queryKey",
            parser: /(?:^|&)([^&=]*)=?([^&]*)/g
        },
        parser: {
            strict: /^(?:([^:\/?#]+):)?(?:\/\/((?:(([^:@]*)(?::([^:@]*))?)?@)?([^:\/?#]*)(?::(\d*))?))?((((?:[^?#\/]*\/)*)([^?#]*))(?:\?([^#]*))?(?:#(.*))?)/,
            loose: /^(?:(?![^:@]+:[^:@\/]*@)([^:\/?#.]+):)?(?:\/\/)?((?:(([^:@]*)(?::([^:@]*))?)?@)?([^:\/?#]*)(?::(\d*))?)(((\/(?:[^?#](?![^?#\/]*\.[^?#\/.]+(?:[?#]|$)))*\/?)?([^?#\/]*))(?:\?([^#]*))?(?:#(.*))?)/
        }
    };

    // Split the parsed path into its slash-separated segments.
    var parsed = parseUri(url);
    var segments = parsed.path.split('/');

    // Drop empty segments ("//") and current-directory segments ("/./"),
    // but always keep the leading segment so absolute paths stay absolute.
    segments = segments.filter(function (segment, idx) {
        return idx === 0 || (segment !== '' && segment !== '.');
    });

    // Collapse parent-directory segments ("..") into the segment that
    // precedes them; a ".." with nothing left to consume is simply dropped.
    var i = 0;
    while (i < segments.length) {
        if (i >= 1 && segments[i + 1] === '..') {
            segments.splice(i, 2);
            i -= 2;
        } else if (segments[i] === '..') {
            segments.splice(i, 1);
            i -= 1;
        }
        i++;
    }

    // Reassemble the URL, re-attaching query string and fragment if present.
    parsed.path = segments.join('/');
    var normalized = parsed.protocol + "://" + parsed.authority + parsed.path;
    if (parsed.query) normalized += '?' + parsed.query;
    if (parsed.anchor) normalized += '#' + parsed.anchor;
    return normalized;
}
// export in common js
if( typeof module !== "undefined" && ('exports' in module)){
module.exports = normalizeUrl
}<|fim▁end|> | |
<|file_name|>MeshListener.hpp<|end_file_name|><|fim▁begin|>/* Sirikata Object Host
* MeshListener.hpp
*
* Copyright (c) 2009, Daniel Reiter Horn
* All rights reserved.
*
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions are
* met:
* * Redistributions of source code must retain the above copyright
* notice, this list of conditions and the following disclaimer.
* * Redistributions in binary form must reproduce the above copyright
* notice, this list of conditions and the following disclaimer in
* the documentation and/or other materials provided with the
* distribution.
* * Neither the name of Sirikata nor the names of its contributors may
* be used to endorse or promote products derived from this software
* without specific prior written permission.
*
* THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS
* IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED
* TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A
* PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER
* OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
* EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
* PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
* PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF
* LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING<|fim▁hole|> */
#ifndef _SIRIKATA_MESH_LISTENER_HPP_
#define _SIRIKATA_MESH_LISTENER_HPP_
#include <sirikata/core/transfer/URI.hpp>
#include <sirikata/mesh/Meshdata.hpp>
#include <sirikata/proxyobject/ProxyObject.hpp>
namespace Sirikata {
class SIRIKATA_PROXYOBJECT_EXPORT MeshListener
{
public:
virtual ~MeshListener() {}
virtual void onSetMesh (ProxyObjectPtr proxy, Transfer::URI const& newMesh, const SpaceObjectReference& sporef) = 0;
virtual void onSetScale (ProxyObjectPtr proxy, float32 newScale,const SpaceObjectReference& sporef ) = 0;
virtual void onSetPhysics (ProxyObjectPtr proxy, const String& phy,const SpaceObjectReference& sporef ) {};
virtual void onSetIsAggregate (ProxyObjectPtr proxy, bool isAggregate, const SpaceObjectReference& sporef ) = 0;
};
} // namespace Sirikata
#endif<|fim▁end|> | * NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
* SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. |
<|file_name|>description.py<|end_file_name|><|fim▁begin|>"""
Helper classes for creating frontend metadata
"""
class ContactPersonDesc(object):
    """
    Describes one metadata contact person.

    Attributes may be assigned directly; email addresses are collected
    through :meth:`add_email_address`.
    """

    def __init__(self):
        self.contact_type = None
        self._email_address = []
        self.given_name = None
        self.sur_name = None

    def add_email_address(self, address):
        """
        Adds an email address to the person description

        :type address: str
        :param address: Address to be added
        """
        self._email_address.append(address)

    def to_dict(self):
        """
        Returns a dictionary representation of the ContactPersonDesc.
        The format is the same as a pysaml2 configuration for a contact person.

        :rtype: dict[str, str]
        :return: A dictionary representation
        """
        # Only attributes that were actually set end up in the output.
        fields = [
            ("contact_type", self.contact_type),
            ("email_address", self._email_address),
            ("given_name", self.given_name),
            ("sur_name", self.sur_name),
        ]
        return {key: value for key, value in fields if value}
class UIInfoDesc(object):
"""
Description class for UI info
"""
def __init__(self):
self._description = []
self._display_name = []
self._logos = []
def add_description(self, text, lang):
"""
Binds a description to the given language
:type text: str
:type lang: str
:param text: Description
:param lang: description language
"""
self._description.append({"text": text, "lang": lang})
def add_display_name(self, text, lang):<|fim▁hole|> :type text: str
:type lang: str
:param text: Display name
:param lang: Language
"""
self._display_name.append({"text": text, "lang": lang})
def add_logo(self, text, width, height, lang=None):
"""
Binds a logo to the given language
:type text: str
:type width: str
:type height: str
:type lang: Optional[str]
:param text: Path to logo
:param width: width of logo
:param height: height of logo
:param lang: language
"""
logo_entry ={"text": text, "width": width, "height": height}
if lang:
logo_entry["lang"] = lang
self._logos.append(logo_entry)
def to_dict(self):
"""
Returns a dictionary representation of the UIInfoDesc object.
The format is the same as a pysaml2 configuration for ui info.
:rtype: dict[str, str]
:return: A dictionary representation
"""
ui_info = {}
if self._description:
ui_info["description"] = self._description
if self._display_name:
ui_info["display_name"] = self._display_name
if self._logos:
ui_info["logo"] = self._logos
return {"service": {"idp": {"ui_info": ui_info}}} if ui_info else {}
class OrganizationDesc(object):
    """
    Describes the organization behind a metadata entity.

    Each piece of information is stored as a list of ``(value, lang)``
    tuples, matching the pysaml2 'organization' configuration format.
    """

    def __init__(self):
        self._display_name = []
        self._name = []
        self._url = []

    def add_display_name(self, name, lang):
        """
        Binds a display name to the given language

        :type name: str
        :type lang: str
        :param name: display name
        :param lang: language
        """
        self._display_name.append((name, lang))

    def add_name(self, name, lang):
        """
        Binds a name to the given language

        :type name: str
        :type lang: str
        :param name: Name of the organization
        :param lang: language
        """
        self._name.append((name, lang))

    def add_url(self, url, lang):
        """
        Binds an url to the given language

        :type url: str
        :type lang: str
        :param url: url to bind
        :param lang: language
        """
        self._url.append((url, lang))

    def to_dict(self):
        """
        Returns a dictionary representation of the OrganizationDesc object.
        The format is the same as a pysaml2 configuration for organization.

        :rtype: dict[str, str]
        :return: A dictionary representation
        """
        org = {
            key: values
            for key, values in (
                ("display_name", self._display_name),
                ("name", self._name),
                ("url", self._url),
            )
            if values
        }
        return {"organization": org} if org else {}
class MetadataDescription(object):
    """
    Aggregates the metadata description of one backend module: entity id,
    organization, contact persons and UI info.
    """

    def __init__(self, entity_id):
        self.entity_id = entity_id
        self._organization = None
        self._contact_person = []
        self._ui_info = None

    def _set_organization(self, organization):
        """
        Set an organization to the description

        :type organization: satosa.metadata_creation.description.OrganizationDesc
        :param organization: Organization description
        """
        if not isinstance(organization, OrganizationDesc):
            raise TypeError("organization must be of type OrganizationDesc")
        self._organization = organization

    # Write-only property: reading raises AttributeError, as in the
    # original API (no getter was ever exposed).
    organization = property(None, _set_organization)

    def add_contact_person(self, person):
        """
        Adds a contact person to the description

        :type person: satosa.metadata_creation.description.ContactPersonDesc
        :param person: The contact person to be added
        """
        if not isinstance(person, ContactPersonDesc):
            raise TypeError("person must be of type ContactPersonDesc")
        self._contact_person.append(person)

    def _set_ui_info(self, ui_info):
        """
        Set an ui info to the description

        :type ui_info: satosa.metadata_creation.description.UIInfoDesc
        :param ui_info: The ui info to be set
        """
        if not isinstance(ui_info, UIInfoDesc):
            raise TypeError("ui_info must be of type UIInfoDesc")
        self._ui_info = ui_info

    ui_info = property(None, _set_ui_info)

    def to_dict(self):
        """
        Returns a dictionary representation of the MetadataDescription object.
        The format is the same as a pysaml2 configuration

        :rtype: dict[str, Any]
        :return: A dictionary representation
        """
        description = {"entityid": self.entity_id}
        if self._organization:
            description.update(self._organization.to_dict())
        if self._contact_person:
            description["contact_person"] = [p.to_dict() for p in self._contact_person]
        if self._ui_info:
            description.update(self._ui_info.to_dict())
        return description
Binds a display name to the given language
|
<|file_name|>compat.py<|end_file_name|><|fim▁begin|># coding: utf-8
"""
Compatibility functions for unified behavior between Python 2.x and 3.x.
:author: Alex Grönholm
"""
from __future__ import unicode_literals, absolute_import
import inspect
import sys
from threading import Thread
if sys.version_info[0] < 3:
def items(d):
return d.items()
def iteritems(d):
return d.iteritems()
def next(x):
return x.next()
range = xrange # noqa
long = long # noqa
basestring = basestring # noqa
unicode = unicode # noqa
bytearray2 = bytearray
unichr = unichr # noqa
bytestr = str
tobytestr = str
def isbytestr(s):
return isinstance(s, str)
def ispython3bytestr(s):
return False
def isbytearray(s):
return isinstance(s, bytearray)
def bytetoint(b):
return ord(b)
def bytetostr(b):
return b
def strtobyte(b):
return b
import Queue
Queue = Queue.Queue
else:<|fim▁hole|>
def iteritems(d):
return d.items()
next = next
range = range
long = int
basestring = str
unicode = str
bytearray2 = bytes
unichr = chr
bytestr = bytes
def tobytestr(s):
return bytes(s, "ascii")
def isbytestr(s):
return isinstance(s, bytes)
def ispython3bytestr(s):
return isinstance(s, bytes)
def isbytearray(s):
return isinstance(s, bytearray)
def bytetoint(b):
return b
def bytetostr(b):
return str(b, encoding="ascii")
def strtobyte(s):
return bytes(s, encoding="ascii")
import queue
Queue = queue.Queue
# hasattr() executes properties/__getattr__ as a side effect.  When
# available (Python 3.2+), prefer inspect.getattr_static, which looks the
# attribute up without triggering descriptor-protocol code.
if hasattr(inspect, "getattr_static"):
    def hasattr2(obj, attr):
        # NOTE(review): an attribute that exists but whose statically
        # resolved value is falsy (0, "", None) is reported as absent
        # here -- confirm that this is the intended contract.
        return bool(inspect.getattr_static(obj, attr, False))
else:
    # Older interpreters: fall back to the plain built-in.
    hasattr2 = hasattr
class CompatThread(Thread):
    """threading.Thread drop-in that accepts ``daemon`` as an init kwarg.

    Python 2's Thread.__init__ rejects the ``daemon`` keyword (it only
    exists on Python 3.3+), so it is stripped here and applied through the
    ``daemon`` attribute after construction.
    """
    def __init__(self, *args, **kwargs):
        # Remove 'daemon' before delegating; Python 2 would raise TypeError.
        daemon = kwargs.pop("daemon", None)
        super(CompatThread, self).__init__(*args, **kwargs)
        if daemon:
            # Only a truthy value is applied; daemon=False keeps the
            # default inherited from the creating thread.
            self.daemon = daemon
return list(d.items()) |
<|file_name|>point.rs<|end_file_name|><|fim▁begin|>use std::f64::consts::PI;
use data::FloatPoint;
fn point_f64_f64_(a: &f64, b: &f64) -> FloatPoint {
FloatPoint::new(*a, *b)
}
wrap_2_arg!(point_f64_f64, point_f64_f64_);
fn add_fpt_fpt_(a: &FloatPoint, b: &FloatPoint) -> FloatPoint {
FloatPoint::new(a.x + b.x, a.y + b.y)
}
wrap_2_arg!(add_fpt_fpt, add_fpt_fpt_);
fn add_fpt_f64_(a: &FloatPoint, b: &f64) -> FloatPoint {
FloatPoint::new(a.x + *b, a.y + *b)
}
wrap_2_arg!(add_fpt_f64, add_fpt_f64_);
fn add_f64_fpt_(a: &f64, b: &FloatPoint) -> FloatPoint {
FloatPoint::new(*a + b.x, *a + b.y)
}
wrap_2_arg!(add_f64_fpt, add_f64_fpt_);
fn multiply_fpt_fpt_(a: &FloatPoint, b: &FloatPoint) -> FloatPoint {
FloatPoint::new(a.x * b.x, a.y * b.y)
}
wrap_2_arg!(multiply_fpt_fpt, multiply_fpt_fpt_);
fn multiply_fpt_f64_(a: &FloatPoint, b: &f64) -> FloatPoint {
FloatPoint::new(a.x * *b, a.y * *b)
}
wrap_2_arg!(multiply_fpt_f64, multiply_fpt_f64_);
fn multiply_f64_fpt_(a: &f64, b: &FloatPoint) -> FloatPoint {
FloatPoint::new(*a * b.x, *a * b.y)
}
wrap_2_arg!(multiply_f64_fpt, multiply_f64_fpt_);
fn subtract_fpt_fpt_(a: &FloatPoint, b: &FloatPoint) -> FloatPoint {
FloatPoint::new(a.x - b.x, a.y - b.y)
}
wrap_2_arg!(subtract_fpt_fpt, subtract_fpt_fpt_);
fn subtract_fpt_f64_(a: &FloatPoint, b: &f64) -> FloatPoint {
FloatPoint::new(a.x - *b, a.y - *b)
}
wrap_2_arg!(subtract_fpt_f64, subtract_fpt_f64_);
fn subtract_f64_fpt_(a: &f64, b: &FloatPoint) -> FloatPoint {
FloatPoint::new(*a - b.x, *a - b.y)
}
wrap_2_arg!(subtract_f64_fpt, subtract_f64_fpt_);
fn divide_fpt_fpt_(a: &FloatPoint, b: &FloatPoint) -> FloatPoint {
FloatPoint::new(a.x / b.x, a.y / b.y)
}
wrap_2_arg!(divide_fpt_fpt, divide_fpt_fpt_);
fn divide_fpt_f64_(a: &FloatPoint, b: &f64) -> FloatPoint {
FloatPoint::new(a.x / *b, a.y / *b)
}
wrap_2_arg!(divide_fpt_f64, divide_fpt_f64_);
fn divide_f64_fpt_(a: &f64, b: &FloatPoint) -> FloatPoint {
FloatPoint::new(*a / b.x, *a / b.y)
}
wrap_2_arg!(divide_f64_fpt, divide_f64_fpt_);
fn equal_fpt_fpt_(a: &FloatPoint, b: &FloatPoint) -> bool {
a.x == b.x && a.y == b.y
}
wrap_2_arg!(equal_fpt_fpt, equal_fpt_fpt_);
fn unequal_fpt_fpt_(a: &FloatPoint, b: &FloatPoint) -> bool {
a.x != b.x || a.y != b.y
}
wrap_2_arg!(unequal_fpt_fpt, unequal_fpt_fpt_);
fn polar_f64_f64_(radius: &f64, angle: &f64) -> FloatPoint {
let radians = angle.to_radians();
<|fim▁hole|> let y = *radius * radians.sin();
FloatPoint::new(x, y)
}
wrap_2_arg!(polar_f64_f64, polar_f64_f64_);
// Rotate `target` around `origin` by `angle` degrees, applying the
// standard 2D rotation matrix in origin-relative coordinates.
fn rotate_fpt_fpt_f64_(target: &FloatPoint, origin: &FloatPoint, angle: &f64) -> FloatPoint {
    let radians = angle.to_radians();
    let s = radians.sin();
    let c = radians.cos();
    // Translate into origin-relative coordinates before rotating,
    // then translate back when building the result.
    let x = target.x - origin.x;
    let y = target.y - origin.y;
    FloatPoint::new(
        x * c - y * s + origin.x,
        x * s + y * c + origin.y,
    )
}
wrap_3_arg!(rotate_fpt_fpt_f64, rotate_fpt_fpt_f64_);
// Horizontally "flip" `target` about the vertical line at `x`.
// `amount` drives the flip: cos(amount * PI) runs 1 -> -1 as amount goes
// 0 -> 1, so 0 leaves the point unchanged, 0.5 collapses it onto the
// axis, and 1 fully mirrors it.
fn flip_x_fpt_f64_f64_(target: &FloatPoint, x: &f64, amount: &f64) -> FloatPoint {
    let stretch = (amount * PI).cos();
    FloatPoint::new(
        (target.x - x) * stretch + x,
        target.y,
    )
}
wrap_3_arg!(flip_x_fpt_f64_f64, flip_x_fpt_f64_f64_);
// Vertical counterpart of flip_x_fpt_f64_f64_: mirror/interpolate
// `target` about the horizontal line at `y`, with `amount` in [0, 1]
// driving the flip progress.
fn flip_y_fpt_f64_f64_(target: &FloatPoint, y: &f64, amount: &f64) -> FloatPoint {
    let stretch = (amount * PI).cos();
    FloatPoint::new(
        target.x,
        (target.y - y) * stretch + y,
    )
}
wrap_3_arg!(flip_y_fpt_f64_f64, flip_y_fpt_f64_f64_);<|fim▁end|> | let x = *radius * radians.cos(); |
<|file_name|>instance.d.ts<|end_file_name|><|fim▁begin|>/// <reference types="node" />
import * as fs from 'fs';
import * as ts from 'typescript';
import { Checker } from './checker';
import { CompilerInfo, LoaderConfig, TsConfig } from './interfaces';
/** State shared by all loaders that use one TypeScript compiler instance. */
export interface Instance {
    /** Identifier of this instance within the webpack compiler. */
    id: number;
    /** Babel implementation, when babel integration is active; otherwise unset. */
    babelImpl?: any;
    /** Files already compiled, keyed by file path. */
    compiledFiles: {
        [key: string]: boolean;
    };
    /** Path of the tsconfig file in use. */
    configFilePath: string;
    /** Parsed TypeScript compiler configuration. */
    compilerConfig: TsConfig;
    /** Loader-level options (merged from the query / webpack config). */
    loaderConfig: LoaderConfig;
    /** Type checker attached to this instance. */
    checker: Checker;
    /** Cache key material -- presumably used to invalidate the loader cache; confirm at call sites. */
    cacheIdentifier: any;
    /** Webpack compilation context directory. */
    context: string;
}
export interface Compiler {
inputFileSystem: typeof fs;
_tsInstances: {<|fim▁hole|> };
}
/** Subset of the webpack loader context that this loader relies on. */
export interface Loader {
    /** Owning webpack compiler (underscore-prefixed webpack internal). */
    _compiler: Compiler;
    /** Marks the loader result as cacheable. */
    cacheable: () => void;
    /** Raw loader query string from the webpack configuration. */
    query: string;
    /** Switches the loader into async mode; returns the completion callback. */
    async: () => (err: Error, source?: string, map?: string) => void;
    /** Path of the file currently being loaded. */
    resourcePath: string;
    resolve: () => void;
    /** Registers an extra file dependency for watch-mode invalidation. */
    addDependency: (dep: string) => void;
    clearDependencies: () => void;
    /** Emits an additional output file into the compilation. */
    emitFile: (fileName: string, text: string) => void;
    emitWarning: (msg: string) => void;
    emitError: (msg: string) => void;
    options: {
        /** Optional `ts` section of the webpack options object. */
        ts?: LoaderConfig;
    };
}
export declare type QueryOptions = LoaderConfig & ts.CompilerOptions;
export declare function getRootCompiler(compiler: any): any;
export declare function ensureInstance(webpack: Loader, query: QueryOptions, options: LoaderConfig, instanceName: string, rootCompiler: any): Instance;
export declare function setupTs(compiler: string): CompilerInfo;
export interface Configs {
configFilePath: string;
compilerConfig: TsConfig;
loaderConfig: LoaderConfig;
}
export declare function readConfigFile(context: string, query: QueryOptions, options: LoaderConfig, tsImpl: typeof ts): Configs;<|fim▁end|> | [key: string]: Instance;
};
options: {
watch: boolean; |
<|file_name|>packet5.py<|end_file_name|><|fim▁begin|>from . import packet
class Packet5(packet.Packet):
def __init__(self, player, slot):
super(Packet5, self).__init__(0x5)
self.add_data(player.playerID)
self.add_data(slot)<|fim▁hole|> self.add_data(0) # Prefix
self.add_structured_data("<h", 0) # ItemID<|fim▁end|> | self.add_structured_data("<h", 0) # Stack |
<|file_name|>dstr-dflt-obj-ptrn-id-get-value-err.js<|end_file_name|><|fim▁begin|><|fim▁hole|>// - src/dstr-binding/obj-ptrn-id-get-value-err.case
// - src/dstr-binding/error/gen-func-decl-dflt.template
/*---
description: Error thrown when accessing the corresponding property of the value object (generator function declaration (default parameter))
esid: sec-generator-function-definitions-runtime-semantics-instantiatefunctionobject
es6id: 14.4.12
features: [generators, destructuring-binding, default-parameters]
flags: [generated]
info: |
GeneratorDeclaration : function * ( FormalParameters ) { GeneratorBody }
[...]
2. Let F be GeneratorFunctionCreate(Normal, FormalParameters,
GeneratorBody, scope, strict).
[...]
9.2.1 [[Call]] ( thisArgument, argumentsList)
[...]
7. Let result be OrdinaryCallEvaluateBody(F, argumentsList).
[...]
9.2.1.3 OrdinaryCallEvaluateBody ( F, argumentsList )
1. Let status be FunctionDeclarationInstantiation(F, argumentsList).
[...]
9.2.12 FunctionDeclarationInstantiation(func, argumentsList)
[...]
23. Let iteratorRecord be Record {[[iterator]]:
CreateListIterator(argumentsList), [[done]]: false}.
24. If hasDuplicates is true, then
[...]
25. Else,
b. Let formalStatus be IteratorBindingInitialization for formals with
iteratorRecord and env as arguments.
[...]
13.3.3.7 Runtime Semantics: KeyedBindingInitialization
SingleNameBinding : BindingIdentifier Initializeropt
[...]
4. Let v be GetV(value, propertyName).
5. ReturnIfAbrupt(v).
---*/
var poisonedProperty = Object.defineProperty({}, 'poisoned', {
get: function() {
throw new Test262Error();
}
});
function* f({ poisoned } = poisonedProperty) {}
assert.throws(Test262Error, function() {
f();
});<|fim▁end|> | // This file was procedurally generated from the following sources: |
<|file_name|>test.py<|end_file_name|><|fim▁begin|>#!/usr/bin/env python3
'''Test on server shutdown when a zone transaction is open.'''
import psutil
from dnstest.libknot import libknot
from dnstest.test import Test
from dnstest.utils import *
t = Test()
knot = t.server("knot")
zone = t.zone("example.com.")
t.link(zone, knot)
ctl = libknot.control.KnotCtl()
t.start()
ctl.connect(os.path.join(knot.dir, "knot.sock"))<|fim▁hole|>ctl.send_block(cmd="zone-begin", zone=zone[0].name)
ctl.receive_block()
ctl.send(libknot.control.KnotCtlType.END)
ctl.close()
knot.stop()
t.sleep(1)
if psutil.pid_exists(knot.proc.pid):
set_err("Server still running")
t.end()<|fim▁end|> | |
<|file_name|>basic_octdiff_cons.hpp<|end_file_name|><|fim▁begin|>// $flavio.lisboa @ 2017-09-04.
//
/*
* @file basic_octdiff_cons.hpp
*/
#ifndef adl__oct__cons__basic_octdiff_cons__hpp__
#define adl__oct__cons__basic_octdiff_cons__hpp__
#include <type_traits>
#include <string>
#include <iosfwd>
#include <stdexcept>
#include "adl.cfg.hpp"
#include "adl/oct.fwd.hpp"
#include "adl/oct/constant.hpp"
#include "adl/oct/traits.hpp"
#include "adl/oct/var.hpp"
#include "adl/oct/vexpr.hpp"
#include "adl/oct/cons/cons_base_.hpp"
//
// [[ API ]]
//
adl_BEGIN_ROOT_MODULE
namespace oct {
<|fim▁hole|>
public:
using typename superclass_::var_type;
using typename superclass_::constant_type;
using typename superclass_::vexpr_type;
using superclass_::cons_base_;
using superclass_::operator=;
using superclass_::space;
using superclass_::vexpr_;
using superclass_::c_;
static_assert(space == domain_space::octdiff, "Wrong variable type.");
constexpr basic_octdiff_cons() noexcept = default;
constexpr basic_octdiff_cons(basic_octdiff_cons const&) noexcept = default;
constexpr basic_octdiff_cons(basic_octdiff_cons &&) noexcept = default;
constexpr basic_octdiff_cons& operator=(basic_octdiff_cons const&) noexcept = default;
constexpr basic_octdiff_cons& operator=(basic_octdiff_cons &&) noexcept = default;
constexpr basic_octdiff_cons(vexpr_type vexpr, constant_type c);
constexpr static basic_octdiff_cons make_upper_limit(vexpr_type vexpr, constant_type c) noexcept; // +-xi [+- xj] <= c
template <
typename ConstantType_,
typename VarType_,
typename = std::enable_if_t<
common_var<VarType_>::is_octdiff_space
&& (!std::is_same<VarType_, var_type>::value || !std::is_same<ConstantType_, constant_type>::value)
&& std::is_convertible<ConstantType_, constant_type>::value>>
constexpr basic_octdiff_cons(basic_octdiff_cons<ConstantType_, VarType_> cons) noexcept;
template <
typename VarType_,
typename = std::enable_if_t<
common_var<VarType_>::is_octdiff_space
&& !std::is_same<VarType_, var_type>::value>>
explicit constexpr basic_octdiff_cons(basic_octdiff_vexpr<VarType_> vexpr) noexcept;
template <
typename VarType_ = var_type,
typename = std::enable_if_t<common_var<VarType_>::is_octdiff_space>>
constexpr basic_octdiff_vexpr<VarType_> to_vexpr() const noexcept;
template <
typename VarType_ = var_type,
typename = std::enable_if_t<common_var<VarType_>::is_octdiff_space>>
constexpr operator basic_octdiff_vexpr<VarType_>() const noexcept;
private:
friend class basic_octdiff_conjunction<ConstantType, VarType>;
constexpr basic_octdiff_cons& commute() noexcept;
constexpr basic_octdiff_cons to_commuted() const noexcept;
};
template <typename ConstantType> constexpr octdiff_cons<ConstantType> to_identity(octdiff_cons<ConstantType> cons) { return cons; }
template <typename ConstantType> constexpr octdiff_cons<ConstantType> to_identity(octdiff_lcons<ConstantType> cons) { return cons.to_identity(); }
template <
typename ConstantType,
typename VarType,
typename = std::enable_if_t<common_var<VarType>::is_octdiff_space>>
constexpr basic_octdiff_cons<ConstantType, VarType>
make_upper_limit(basic_octdiff_vexpr<VarType> vexpr, ConstantType c) noexcept {
return basic_octdiff_cons<ConstantType, VarType>::make_upper_limit(vexpr, c);
};
template <
typename ConstantType,
typename VarType,
typename = std::enable_if_t<common_var<VarType>::is_octdiff_space>>
constexpr basic_octdiff_cons<ConstantType, VarType> make_cons(basic_octdiff_vexpr<VarType> vexpr, ConstantType c) noexcept {
return basic_octdiff_cons<ConstantType, VarType>(vexpr, c);
};
} // namespace oct
namespace dsl {
inline namespace oct {
inline namespace cons {
template <
typename ConstantType,
typename VarType,
typename = std::enable_if_t<
adl::oct::common_var<VarType>::is_octdiff_space
&& std::is_arithmetic<ConstantType>::value>>
constexpr adl::oct::basic_octdiff_cons<ConstantType, VarType> operator<=(
adl::oct::basic_octdiff_vexpr<VarType> vexpr,
ConstantType rhs
) noexcept {
return adl::oct::make_upper_limit(vexpr, rhs);
};
}
}
}
namespace operators {
inline namespace oct {
inline namespace cons {
inline namespace comparison {
template <typename ConstantType,
typename VarType,
typename = std::enable_if_t<adl::oct::common_var<VarType>::is_octdiff_space>>
constexpr bool operator<(adl::oct::basic_octdiff_cons<ConstantType, VarType> lhs, adl::oct::basic_octdiff_cons<ConstantType, VarType> const& rhs) noexcept { return lhs.compare(rhs) < 0; }
template <typename ConstantType,
typename VarType,
typename = std::enable_if_t<adl::oct::common_var<VarType>::is_octdiff_space>>
constexpr bool operator<=(adl::oct::basic_octdiff_cons<ConstantType, VarType> lhs, adl::oct::basic_octdiff_cons<ConstantType, VarType> const& rhs) noexcept { return lhs.compare(rhs) <= 0; }
template <typename ConstantType,
typename VarType,
typename = std::enable_if_t<adl::oct::common_var<VarType>::is_octdiff_space>>
constexpr bool operator==(adl::oct::basic_octdiff_cons<ConstantType, VarType> lhs, adl::oct::basic_octdiff_cons<ConstantType, VarType> const& rhs) noexcept { return lhs.equals(rhs); }
template <typename ConstantType,
typename VarType,
typename = std::enable_if_t<adl::oct::common_var<VarType>::is_octdiff_space>>
constexpr bool operator!=(adl::oct::basic_octdiff_cons<ConstantType, VarType> lhs, adl::oct::basic_octdiff_cons<ConstantType, VarType> const& rhs) noexcept { return !lhs.equals(rhs); }
template <typename ConstantType,
typename VarType,
typename = std::enable_if_t<adl::oct::common_var<VarType>::is_octdiff_space>>
constexpr bool operator>=(adl::oct::basic_octdiff_cons<ConstantType, VarType> lhs, adl::oct::basic_octdiff_cons<ConstantType, VarType> const& rhs) noexcept { return lhs.compare(rhs) >= 0; }
template <typename ConstantType,
typename VarType,
typename = std::enable_if_t<adl::oct::common_var<VarType>::is_octdiff_space>>
constexpr bool operator>(adl::oct::basic_octdiff_cons<ConstantType, VarType> lhs, adl::oct::basic_octdiff_cons<ConstantType, VarType> const& rhs) noexcept { return lhs.compare(rhs) > 0; }
}
}
}
}
adl_END_ROOT_MODULE
// Stream-insertion for octdiff constraints: delegates to the member
// print().  Participates in overload resolution only for octdiff-space
// variable types (SFINAE via common_var<VarType>::is_octdiff_space).
template <typename ConstantType,
    typename VarType,
    typename Traits,
    typename = std::enable_if_t<adl::oct::common_var<VarType>::is_octdiff_space>>
std::basic_ostream<char, Traits>& operator<<(
    std::basic_ostream<char, Traits>& os,
    adl::oct::basic_octdiff_cons<ConstantType, VarType> const& cons
) {
    cons.print(os);
    return os;
};
//
// [[ TEMPLATE IMPLEMENTATION ]]
//
#include "adl/oct/cons/basic_oct_cons.hpp"
#include "adl/oct/cons/basic_octdiff_conjunction.hpp"
adl_BEGIN_ROOT_MODULE
namespace oct {
//
// adl::oct::basic_octdiff_cons
//
template <typename ConstantType, typename VarType>
constexpr basic_octdiff_cons<ConstantType, VarType>::basic_octdiff_cons(vexpr_type vexpr, constant_type c) : superclass_(vexpr, c) {};
template <typename ConstantType, typename VarType>
constexpr basic_octdiff_cons<ConstantType, VarType>
basic_octdiff_cons<ConstantType, VarType>::make_upper_limit(vexpr_type vexpr, constant_type c) noexcept {
return basic_octdiff_cons<ConstantType, VarType>(vexpr, c);
};
template <typename ConstantType, typename VarType>
template <typename ConstantType_, typename VarType_, typename>
constexpr basic_octdiff_cons<ConstantType, VarType>::basic_octdiff_cons(
basic_octdiff_cons<ConstantType_, VarType_> cons
) noexcept : basic_octdiff_cons(vexpr_type(cons.xi(), cons.xj()), cons.c()) {};
template <typename ConstantType, typename VarType>
template <typename VarType_, typename>
constexpr basic_octdiff_cons<ConstantType, VarType>::basic_octdiff_cons(
basic_octdiff_vexpr<VarType_> vexpr
) noexcept : basic_octdiff_cons(vexpr, constant_type()) {};
template <typename ConstantType, typename VarType>
template <typename VarType_, typename>
constexpr basic_octdiff_vexpr<VarType_> basic_octdiff_cons<ConstantType, VarType>::to_vexpr() const noexcept {
return basic_octdiff_vexpr<VarType_>(this->xi(), this->xj());
};
template <typename ConstantType, typename VarType>
template <typename VarType_, typename>
constexpr basic_octdiff_cons<ConstantType, VarType>::operator basic_octdiff_vexpr<VarType_>() const noexcept {
return to_vexpr<VarType_>();
};
// In-place commutation: swaps the two variables of the stored expression
// (xJ/xI).  Unit (single-variable) expressions are their own commutation
// and are left untouched.
template <typename ConstantType, typename VarType>
constexpr basic_octdiff_cons<ConstantType, VarType>& basic_octdiff_cons<ConstantType, VarType>::commute() noexcept {
    if (!vexpr_.unit()) vexpr_ = vexpr_type(vexpr_.xJ(), vexpr_.xI());
    return *this;
}
// Value-returning variant of commute(): copies *this, commutes the copy,
// and returns it (the reference yielded by commute() is copied out before
// the temporary is destroyed, so this is well-defined).
template <typename ConstantType, typename VarType>
constexpr basic_octdiff_cons<ConstantType, VarType> basic_octdiff_cons<ConstantType, VarType>::to_commuted() const noexcept {
    return basic_octdiff_cons<ConstantType, VarType>(*this).commute();
}
} // namespace oct
adl_END_ROOT_MODULE
#endif // adl__oct__cons__basic_octdiff_cons__hpp__<|fim▁end|> | template <typename ConstantType, typename VarType>
class basic_octdiff_cons : public cons_base_<ConstantType, VarType> {
private:
using superclass_ = cons_base_<ConstantType, VarType>; |
<|file_name|>number_arabic.py<|end_file_name|><|fim▁begin|>#
# This file is part of Dragonfly.
# (c) Copyright 2007, 2008 by Christo Butcher
# Licensed under the LGPL.
#
# Dragonfly is free software: you can redistribute it and/or modify it
# under the terms of the GNU Lesser General Public License as published
# by the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Dragonfly is distributed in the hope that it will be useful, but
# WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public
# License along with Dragonfly. If not, see
# <http://www.gnu.org/licenses/>.
#
"""
Arabic language implementations of Integer and Digits classes
============================================================================
"""
from ..base.integer_internal import (MapIntBuilder, CollectionIntBuilder,
MagnitudeIntBuilder, IntegerContentBase)
from ..base.digits_internal import DigitsContentBase
#---------------------------------------------------------------------------
int_0 = MapIntBuilder({
"صفر": 0,
})
int_1_9 = MapIntBuilder({
"واحد": 1,
"اثنان": 2,
"ثلاثة": 3,
"اربعة": 4,
"خمسة": 5,
"ستة": 6,
"سبعة": 7,
"ثمانية": 8,
"تسعة": 9,
})
int_10_19 = MapIntBuilder({
"عشرة": 10,
"احدى عشر": 11,
"اثنا عشر": 12,
"ثلاثة عشر": 13,
"اربعة عشر": 14,
"خمسة عشر": 15,
"ستة عشر": 16,
"سبعة عشر": 17,
"ثمانية عشر": 18,
"تسعة عشر": 19,
})
int_20_90_10 = MapIntBuilder({
"عشرون": 2,
"ثلاثون": 3,
"اربعون": 4,
"خمسون": 5,
"ستون": 6,
"سبعون": 7,
"ثمانون": 8,
"تسعون": 9,
})
int_20_99 = MagnitudeIntBuilder(
factor = 10,
spec = "<multiplier> [<remainder>]",
multipliers = [int_20_90_10],
remainders = [int_1_9],
)
int_and_1_99 = CollectionIntBuilder(
spec = "[و] <element>",
<|fim▁hole|> factor = 100,
spec = "[<multiplier>] hundred [<remainder>]",
multipliers = [int_1_9],
remainders = [int_and_1_99],
)
int_100big = MagnitudeIntBuilder(
factor = 100,
spec = "[<multiplier>] hundred [<remainder>]",
multipliers = [int_10_19, int_20_99],
remainders = [int_و_1_99]
)
int_1000s = MagnitudeIntBuilder(
factor = 1000,
spec = "[<multiplier>] thousand [<remainder>]",
multipliers = [int_1_9, int_10_19, int_20_99, int_100s],
remainders = [int_و_1_99, int_100s]
)
int_1000000s = MagnitudeIntBuilder(
factor = 1000000,
spec = "[<multiplier>] million [<remainder>]",
multipliers = [int_1_9, int_10_19, int_20_99, int_100s, int_1000s],
remainders = [int_و_1_99, int_100s, int_1000s],
)
#---------------------------------------------------------------------------
class IntegerContent(IntegerContentBase):
builders = [int_0, int_1_9, int_10_19, int_20_99,
int_100s, int_100big, int_1000s, int_1000000s]
class DigitsContent(DigitsContentBase):
digits = [("صفر", "اووه"), "واحد", "اثنان", "ثلاثة", "اربعة",
"خمسة", "ستة", "سبعة", "ثمانية", "تسعة"]<|fim▁end|> | set = [int_1_9, int_10_19, int_20_99],
)
int_100s = MagnitudeIntBuilder(
|
<|file_name|>M5.py<|end_file_name|><|fim▁begin|>props.bf_Shank_Dia = 5.0
#props.bf_Pitch = 0.8 # Coarse
props.bf_Pitch = 0.5 # Fine
props.bf_Crest_Percent = 10<|fim▁hole|>props.bf_Major_Dia = 5.0
props.bf_Minor_Dia = props.bf_Major_Dia - (1.082532 * props.bf_Pitch)
props.bf_Hex_Head_Flat_Distance = 8.0
props.bf_Hex_Head_Height = 3.5
props.bf_Cap_Head_Dia = 8.5
props.bf_Cap_Head_Height = 5.0
props.bf_CounterSink_Head_Dia = 10.4
props.bf_Allen_Bit_Flat_Distance = 4.0
props.bf_Allen_Bit_Depth = 2.5
props.bf_Pan_Head_Dia = 9.5
props.bf_Dome_Head_Dia = 9.5
props.bf_Philips_Bit_Dia = props.bf_Pan_Head_Dia * (1.82 / 5.6)
#props.bf_Phillips_Bit_Depth = Get_Phillips_Bit_Height(props.bf_Philips_Bit_Dia)
props.bf_Hex_Nut_Height = 4.0
props.bf_Hex_Nut_Flat_Distance = 8.0
props.bf_Thread_Length = 10
props.bf_Shank_Length = 0.0<|fim▁end|> | props.bf_Root_Percent = 10 |
<|file_name|>lib.rs<|end_file_name|><|fim▁begin|>#![allow(unused_imports)]
#![allow(dead_code)]
#![allow(non_snake_case)]
extern crate crypto as rust_crypto;
extern crate openssl as ssl;
extern crate rand;
extern crate rustc_serialize as serialize;
extern crate hyper;
extern crate num;
pub mod crypto;
pub mod freq;
pub mod math;
pub mod md4;
pub mod sha1;<|fim▁hole|>mod set1;
mod set2;
mod set3;
mod set4;
mod set5;<|fim▁end|> | pub mod util;
pub mod vector;
|
<|file_name|>with-self-in-projection-output-good.rs<|end_file_name|><|fim▁begin|>// build-pass (FIXME(62277): could be check-pass?)
// Regression test related to #56288. Check that a supertrait projection (of
// `Output`) that references `Self` can be ok if it is referencing a projection (of
// `Self::Target`, in this case). Note that we still require the user to manually<|fim▁hole|>trait Base {
type Output;
}
trait Helper: Base<Output=<Self as Helper>::Target> {
type Target;
}
impl Base for u32
{
type Output = i32;
}
impl Helper for u32
{
type Target = i32;
}
fn main() {
let _x: Box<dyn Helper<Target=i32, Output=i32>> = Box::new(2u32);
}<|fim▁end|> | // specify both `Target` and `Output` for now.
|
<|file_name|>065-setselection.py<|end_file_name|><|fim▁begin|>#!/usr/bin/env python
# example setselection.py
import pygtk
pygtk.require('2.0')
import gtk<|fim▁hole|>import time
class SetSelectionExample:
# Callback when the user toggles the selection
def selection_toggled(self, widget, window):
if widget.get_active():
self.have_selection = window.selection_owner_set("PRIMARY")
# if claiming the selection failed, we return the button to
# the out state
if not self.have_selection:
widget.set_active(False)
else:
if self.have_selection:
# Not possible to release the selection in PyGTK
# just mark that we don't have it
self.have_selection = False
return
# Called when another application claims the selection
def selection_clear(self, widget, event):
self.have_selection = False
widget.set_active(False)
return True
# Supplies the current time as the selection.
def selection_handle(self, widget, selection_data, info, time_stamp):
current_time = time.time()
timestr = time.asctime(time.localtime(current_time))
# When we return a single string, it should not be null terminated.
# That will be done for us
selection_data.set_text(timestr, len(timestr))
return
def __init__(self):
self.have_selection = False
# Create the toplevel window
window = gtk.Window(gtk.WINDOW_TOPLEVEL)
window.set_title("Set Selection")
window.set_border_width(10)
window.connect("destroy", lambda w: gtk.main_quit())
self.window = window
# Create an eventbox to hold the button since it no longer has
# a GdkWindow
eventbox = gtk.EventBox()
eventbox.show()
window.add(eventbox)
# Create a toggle button to act as the selection
selection_button = gtk.ToggleButton("Claim Selection")
eventbox.add(selection_button)
selection_button.connect("toggled", self.selection_toggled, eventbox)
eventbox.connect_object("selection_clear_event", self.selection_clear,
selection_button)
eventbox.selection_add_target("PRIMARY", "STRING", 1)
eventbox.selection_add_target("PRIMARY", "COMPOUND_TEXT", 1)
eventbox.connect("selection_get", self.selection_handle)
selection_button.show()
window.show()
def main():
gtk.main()
return 0
if __name__ == "__main__":
SetSelectionExample()
main()<|fim▁end|> | |
<|file_name|>urlopen.py<|end_file_name|><|fim▁begin|>''' Example dangerous usage of urllib[2] opener functions
The urllib and urllib2 opener functions and object can open http, ftp,
and file urls. Often, the ability to open file urls is overlooked leading
to code that can unexpectedly open files on the local server. This
could be used by an attacker to leak information about the server.
'''
import urllib
import urllib2
# Python 3
import urllib.request
# Six
import six
def test_urlopen():
# urllib
url = urllib.quote('file:///bin/ls')
urllib.urlopen(url, 'blah', 32)
urllib.urlretrieve('file:///bin/ls', '/bin/ls2')
opener = urllib.URLopener()
opener.open('file:///bin/ls')
opener.retrieve('file:///bin/ls')
opener = urllib.FancyURLopener()
opener.open('file:///bin/ls')
opener.retrieve('file:///bin/ls')
# urllib2
handler = urllib2.HTTPBasicAuthHandler()
handler.add_password(realm='test',
uri='http://mysite.com',
user='bob')
opener = urllib2.build_opener(handler)
urllib2.install_opener(opener)
urllib2.urlopen('file:///bin/ls')
urllib2.Request('file:///bin/ls')
# Python 3
urllib.request.urlopen('file:///bin/ls')
urllib.request.urlretrieve('file:///bin/ls', '/bin/ls2')
opener = urllib.request.URLopener()
opener.open('file:///bin/ls')
opener.retrieve('file:///bin/ls')
opener = urllib.request.FancyURLopener()
opener.open('file:///bin/ls')
opener.retrieve('file:///bin/ls')
# Six
six.moves.urllib.request.urlopen('file:///bin/ls')<|fim▁hole|> opener = six.moves.urllib.request.URLopener()
opener.open('file:///bin/ls')
opener.retrieve('file:///bin/ls')
opener = six.moves.urllib.request.FancyURLopener()
opener.open('file:///bin/ls')
opener.retrieve('file:///bin/ls')<|fim▁end|> | six.moves.urllib.request.urlretrieve('file:///bin/ls', '/bin/ls2') |
<|file_name|>test.rs<|end_file_name|><|fim▁begin|>// Copyright 2013 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
/*! Various utility functions useful for writing I/O tests */
#![macro_escape]
use libc;
use os;
use prelude::*;
use std::io::net::ip::*;
use sync::atomic::{AtomicUint, INIT_ATOMIC_UINT, Relaxed};
macro_rules! iotest (
{ fn $name:ident() $b:block $(#[$a:meta])* } => (
mod $name {
#![allow(unused_imports)]
use super::super::*;
use super::*;
use io;
use prelude::*;
use io::*;
use io::fs::*;
use io::test::*;
use io::net::tcp::*;
use io::net::ip::*;
use io::net::udp::*;
#[cfg(unix)]
use io::net::pipe::*;
use io::timer::*;
use io::process::*;
use rt::running_on_valgrind;
use str;
use time::Duration;
fn f() $b
$(#[$a])* #[test] fn green() { f() }
$(#[$a])* #[test] fn native() {
use native;
let (tx, rx) = channel();
native::task::spawn(proc() { tx.send(f()) });
rx.recv();
}
}
)
)
/// Get a port number, starting at 9600, for use in tests
pub fn next_test_port() -> u16 {
static mut next_offset: AtomicUint = INIT_ATOMIC_UINT;
unsafe {
base_port() + next_offset.fetch_add(1, Relaxed) as u16
}
}
/// Get a temporary path which could be the location of a unix socket
pub fn next_test_unix() -> Path {
static mut COUNT: AtomicUint = INIT_ATOMIC_UINT;
// base port and pid are an attempt to be unique between multiple
// test-runners of different configurations running on one
// buildbot, the count is to be unique within this executable.
let string = format!("rust-test-unix-path-{}-{}-{}",
base_port(),
unsafe {libc::getpid()},
unsafe {COUNT.fetch_add(1, Relaxed)});
if cfg!(unix) {
os::tmpdir().join(string)
} else {
Path::new(format!("{}{}", r"\\.\pipe\", string))
}
}
/// Get a unique IPv4 localhost:port pair starting at 9600
pub fn next_test_ip4() -> SocketAddr {
SocketAddr { ip: Ipv4Addr(127, 0, 0, 1), port: next_test_port() }
}
/// Get a unique IPv6 localhost:port pair starting at 9600
pub fn next_test_ip6() -> SocketAddr {
SocketAddr { ip: Ipv6Addr(0, 0, 0, 0, 0, 0, 0, 1), port: next_test_port() }
}
/*
XXX: Welcome to MegaHack City.
The bots run multiple builds at the same time, and these builds
all want to use ports. This function figures out which workspace
it is running in and assigns a port range based on it.
*/
fn base_port() -> u16 {
let base = 9600u16;
let range = 1000u16;
let bases = [
("32-opt", base + range * 1),
("32-nopt", base + range * 2),
("64-opt", base + range * 3),
("64-nopt", base + range * 4),
("64-opt-vg", base + range * 5),
("all-opt", base + range * 6),
("snap3", base + range * 7),
("dist", base + range * 8)
];
// FIXME (#9639): This needs to handle non-utf8 paths
let path = os::getcwd();
let path_s = path.as_str().unwrap();
let mut final_base = base;
for &(dir, base) in bases.iter() {
if path_s.contains(dir) {
final_base = base;
break;
}
}
return final_base;
}
/// Raises the file descriptor limit when running tests if necessary
pub fn raise_fd_limit() {
unsafe { darwin_fd_limit::raise_fd_limit() }
}
#[cfg(target_os="macos")]
#[allow(non_camel_case_types)]
mod darwin_fd_limit {
/*!
* darwin_fd_limit exists to work around an issue where launchctl on Mac OS X defaults the
* rlimit maxfiles to 256/unlimited. The default soft limit of 256 ends up being far too low
* for our multithreaded scheduler testing, depending on the number of cores available.
*
* This fixes issue #7772.
*/
use libc;
type rlim_t = libc::uint64_t;
#[repr(C)]
struct rlimit {
rlim_cur: rlim_t,
rlim_max: rlim_t<|fim▁hole|> oldp: *mut libc::c_void, oldlenp: *mut libc::size_t,
newp: *mut libc::c_void, newlen: libc::size_t) -> libc::c_int;
fn getrlimit(resource: libc::c_int, rlp: *mut rlimit) -> libc::c_int;
fn setrlimit(resource: libc::c_int, rlp: *const rlimit) -> libc::c_int;
}
static CTL_KERN: libc::c_int = 1;
static KERN_MAXFILESPERPROC: libc::c_int = 29;
static RLIMIT_NOFILE: libc::c_int = 8;
pub unsafe fn raise_fd_limit() {
// The strategy here is to fetch the current resource limits, read the kern.maxfilesperproc
// sysctl value, and bump the soft resource limit for maxfiles up to the sysctl value.
use ptr::null_mut;
use mem::size_of_val;
use os::last_os_error;
// Fetch the kern.maxfilesperproc value
let mut mib: [libc::c_int, ..2] = [CTL_KERN, KERN_MAXFILESPERPROC];
let mut maxfiles: libc::c_int = 0;
let mut size: libc::size_t = size_of_val(&maxfiles) as libc::size_t;
if sysctl(&mut mib[0], 2, &mut maxfiles as *mut libc::c_int as *mut libc::c_void, &mut size,
null_mut(), 0) != 0 {
let err = last_os_error();
fail!("raise_fd_limit: error calling sysctl: {}", err);
}
// Fetch the current resource limits
let mut rlim = rlimit{rlim_cur: 0, rlim_max: 0};
if getrlimit(RLIMIT_NOFILE, &mut rlim) != 0 {
let err = last_os_error();
fail!("raise_fd_limit: error calling getrlimit: {}", err);
}
// Bump the soft limit to the smaller of kern.maxfilesperproc and the hard limit
rlim.rlim_cur = ::cmp::min(maxfiles as rlim_t, rlim.rlim_max);
// Set our newly-increased resource limit
if setrlimit(RLIMIT_NOFILE, &rlim) != 0 {
let err = last_os_error();
fail!("raise_fd_limit: error calling setrlimit: {}", err);
}
}
}
#[cfg(not(target_os="macos"))]
mod darwin_fd_limit {
pub unsafe fn raise_fd_limit() {}
}<|fim▁end|> | }
extern {
// name probably doesn't need to be mut, but the C function doesn't specify const
fn sysctl(name: *mut libc::c_int, namelen: libc::c_uint, |
<|file_name|>truncate_fasta_qual_files.py<|end_file_name|><|fim▁begin|>#!/usr/bin/env python
# File created Sept 29, 2010
from __future__ import division
__author__ = "William Walters"
__copyright__ = "Copyright 2011, The QIIME Project"
__credits__ = ["William Walters", "Greg Caporaso"]
__license__ = "GPL"
__version__ = "1.9.1-dev"
__maintainer__ = "William Walters"
__email__ = "[email protected]"
from os.path import basename
from skbio.parse.sequences import parse_fasta
from qiime.parse import parse_qual_score
def parse_fasta_file(fasta_lines):
""" Parses fasta file, generates dict of label:seq, list of seqs order
fasta_lines: list of lines from fasta file.
"""
fasta_seqs = {}
seq_order = []
for label, seq in parse_fasta(fasta_lines):
fasta_seqs[label.split()[0].strip()] = seq
seq_order.append(label)
return fasta_seqs, seq_order
def verify_equivalency(fasta_seqs,
qual_scores):
""" Tests for equivalent labels, base positions between fasta and qual file
fasta_seqs: dict of label:seq from fasta file
qual_scores: dict of label: qual scores
"""
if len(fasta_seqs) != len(qual_scores):
raise ValueError('Number of sequences not equal in input fasta ' +
'and qual file.')
qual_scores_labels = set(qual_scores.keys())
for label in fasta_seqs.keys():
# Should have equivalent labels
if label not in qual_scores_labels:
raise ValueError('Fasta label %s not found in quality score ' %
label + 'file.')
# should have equivalent lengths
if len(fasta_seqs[label]) != len(qual_scores[label]):
raise ValueError('Sequence %s does not have equivalent ' %
label + 'base positions between fasta and quality score file.')
def truncate_seqs(fasta_seqs,
qual_scores,
base_pos):
""" Truncates sequences to base position specified with base_pos
fasta_seqs: dict of seq label: seq string
qual_scores: dict of seq label: numpy array of int scores
base_pos: index in sequence to truncate at
"""
trunc_fasta_seqs = {}
trunc_qual_scores = {}
for seq in fasta_seqs:
trunc_fasta_seqs[seq] = fasta_seqs[seq][:base_pos]
trunc_qual_scores[seq] = qual_scores[seq][:base_pos]
return trunc_fasta_seqs, trunc_qual_scores
def get_output_filepaths(output_dir,
fasta_fp,
qual_fp):
""" Returns output filepaths for filtered fasta and quality files
output_dir: output directory
fasta_fp: input fasta filepath
qual_fp: input quality scores filepath
"""
if not output_dir.endswith('/'):
output_dir += '/'
fasta_out_fp = output_dir + basename(fasta_fp).split('.')[0] +\
"_filtered.fasta"
qual_out_fp = output_dir + basename(qual_fp).split('.')[0] +\
"_filtered.qual"
return fasta_out_fp, qual_out_fp
def write_trunc_fasta(trunc_fasta_seqs,
fasta_out_fp,
seq_order):
""" Writes truncated fasta seqs in order specified with seq_order
trunc_fasta_seqs: dict of fasta label: truncated sequence string
fasta_out_fp: output filepath to write to
seq_order: list of fasta labels in the order of the original input fasta
"""
fasta_out = open(fasta_out_fp, "w")
for label in seq_order:
trunc_label = label.split()[0].strip()
fasta_out.write(">%s\n%s\n" % (label, trunc_fasta_seqs[trunc_label]))
def write_trunc_qual(trunc_qual_scores,
qual_out_fp,
seq_order):
""" Writes truncated quality score files out in proper format
trunc_qual_scores: dict of seq label: numpy array of scores as ints
qual_out_fp: output filepath to write truncated quality scores to
seq_order: List of full fasta labels to write to output filepath and
maintain the same order as input quality file.
"""
qual_line_size = 60
qual_out = open(qual_out_fp, "w")
for label in seq_order:
trunc_label = label.split()[0].strip()
current_trunc_qual_scores = trunc_qual_scores[trunc_label]
qual_out.write(">%s\n" % label)
current_qual_scores_lines = []
# Quality score format is a string of 60 base calls, followed by a
# newline, until the last N bases are written
for slice in range(0, len(trunc_qual_scores[trunc_label]),
qual_line_size):
# current_segment = map(str,
# current_trunc_qual_scores[slice:slice + qual_line_size])
current_segment = current_trunc_qual_scores[
slice:slice +
qual_line_size]
current_qual_scores_lines.append(" ".join(current_segment))
qual_out.write('\n'.join(current_qual_scores_lines))
qual_out.write('\n')
def truncate_fasta_qual(fasta_fp,
qual_fp,
output_dir,
base_pos):
""" Main program function for generating quality score histogram
fasta_fp: fasta filepath
qual_fp: quality score filepath
output_dir: output directory
base_pos: Nucleotide position to truncate the fasta and quality score at.
"""
qual_lines = open(qual_fp, "U")
fasta_lines = open(fasta_fp, "U")
qual_scores = parse_qual_score(qual_lines, value_cast_f=str)
# Get dict of fasta label:seq, and the sequence order (so output can
# be in the same order as the input sequences.
fasta_seqs, seq_order = parse_fasta_file(fasta_lines)
# Make sure the quality scores and fasta sequences have corresponding
# labels and base numbers
verify_equivalency(fasta_seqs, qual_scores)
# Truncate seqs to base_pos index
trunc_fasta_seqs, trunc_qual_scores = truncate_seqs(fasta_seqs,
qual_scores, base_pos)
<|fim▁hole|> # Write truncated sequences out
write_trunc_fasta(trunc_fasta_seqs, fasta_out_fp, seq_order)
write_trunc_qual(trunc_qual_scores, qual_out_fp, seq_order)<|fim▁end|> | # Get output filepaths
fasta_out_fp, qual_out_fp = get_output_filepaths(output_dir, fasta_fp,
qual_fp)
|
<|file_name|>test_integration.py<|end_file_name|><|fim▁begin|>from __future__ import absolute_import
from sentry.identity.vsts import VSTSIdentityProvider
from sentry.integrations.exceptions import IntegrationError
from sentry.integrations.vsts import VstsIntegration, VstsIntegrationProvider
from sentry.models import (
Integration, IntegrationExternalProject, OrganizationIntegration, Repository,
Project
)
from sentry.plugins import plugins
from tests.sentry.plugins.testutils import VstsPlugin # NOQA
from .testutils import VstsIntegrationTestCase, CREATE_SUBSCRIPTION
class VstsIntegrationProviderTest(VstsIntegrationTestCase):
# Test data setup in ``VstsIntegrationTestCase``
def test_basic_flow(self):
self.assert_installation()
integration = Integration.objects.get(provider='vsts')
assert integration.external_id == self.vsts_account_id
assert integration.name == self.vsts_account_name
metadata = integration.metadata
assert metadata['scopes'] == list(VSTSIdentityProvider.oauth_scopes)
assert metadata['subscription']['id'] == \
CREATE_SUBSCRIPTION['publisherInputs']['tfsSubscriptionId']
assert metadata['domain_name'] == '{}.visualstudio.com'.format(
self.vsts_account_name
)
def test_migrate_repositories(self):
accessible_repo = Repository.objects.create(
organization_id=self.organization.id,
name=self.project_a['name'],
url='https://{}.visualstudio.com/DefaultCollection/_git/{}'.format(
self.vsts_account_name,
self.repo_name,
),
provider='visualstudio',
external_id=self.repo_id,
)
inaccessible_repo = Repository.objects.create(
organization_id=self.organization.id,
name='NotReachable',
url='https://randoaccount.visualstudio.com/Product/_git/NotReachable',
provider='visualstudio',
external_id='123456789',
)
self.assert_installation()
integration = Integration.objects.get(provider='vsts')
assert Repository.objects.get(
id=accessible_repo.id,
).integration_id == integration.id
assert Repository.objects.get(
id=inaccessible_repo.id,
).integration_id is None
def setupPluginTest(self):
self.project = Project.objects.create(
organization_id=self.organization.id,
)
self.plugin = plugins.get('vsts')
self.plugin.enable(self.project)
def test_disabled_plugin_when_fully_migrated(self):
self.setupPluginTest()
Repository.objects.create(
organization_id=self.organization.id,
name=self.project_a['name'],
url='https://{}.visualstudio.com/DefaultCollection/_git/{}'.format(
self.vsts_account_name,
self.repo_name,
),
provider='visualstudio',
external_id=self.repo_id,
)
# Enabled before Integration installation
assert 'vsts' in [p.slug for p in plugins.for_project(self.project)]
self.assert_installation()
# Disabled
assert 'vsts' not in [p.slug for p in plugins.for_project(self.project)]
def test_doesnt_disable_plugin_when_partially_migrated(self):
self.setupPluginTest()
# Repo accessible by new Integration
Repository.objects.create(
organization_id=self.organization.id,
name=self.project_a['name'],
url='https://{}.visualstudio.com/DefaultCollection/_git/{}'.format(
self.vsts_account_name,
self.repo_name,
),
provider='visualstudio',
external_id=self.repo_id,
)
# Inaccessible Repo - causes plugin to stay enabled
Repository.objects.create(
organization_id=self.organization.id,
name='NotReachable',
url='https://randoaccount.visualstudio.com/Product/_git/NotReachable',
provider='visualstudio',
external_id='123456789',
)
self.assert_installation()
# Still enabled
assert 'vsts' in [p.slug for p in plugins.for_project(self.project)]
def test_build_integration(self):
state = {
'account': {
'AccountName': self.vsts_account_name,
'AccountId': self.vsts_account_id,
},
'instance': '{}.visualstudio.com'.format(self.vsts_account_name),
'identity': {
'data': {
'access_token': self.access_token,
'expires_in': '3600',
'refresh_token': self.refresh_token,
'token_type': 'jwt-bearer',
},
},
}
integration = VstsIntegrationProvider()
integration_dict = integration.build_integration(state)
assert integration_dict['name'] == self.vsts_account_name
assert integration_dict['external_id'] == self.vsts_account_id
assert integration_dict['metadata']['domain_name'] == \
'{}.visualstudio.com'.format(self.vsts_account_name)
assert integration_dict['user_identity']['type'] == 'vsts'
assert integration_dict['user_identity']['external_id'] == \
self.vsts_account_id
assert integration_dict['user_identity']['scopes'] == sorted(
VSTSIdentityProvider.oauth_scopes)
def test_webhook_subscription_created_once(self):
self.assert_installation()
state = {
'account': {
'AccountName': self.vsts_account_name,
'AccountId': self.vsts_account_id,
},
'instance': '{}.visualstudio.com'.format(self.vsts_account_name),
'identity': {
'data': {
'access_token': self.access_token,
'expires_in': '3600',
'refresh_token': self.refresh_token,
'token_type': 'jwt-bearer',
},
},
}
# The above already created the Webhook, so subsequent calls to
# ``build_integration`` should omit that data.
data = VstsIntegrationProvider().build_integration(state)
assert 'subscription' not in data['metadata']
def test_fix_subscription(self):
external_id = '1234567890'
Integration.objects.create(
metadata={},
provider='vsts',
external_id=external_id,
)
data = VstsIntegrationProvider().build_integration({
'account': {
'AccountName': self.vsts_account_name,<|fim▁hole|> 'data': {
'access_token': self.access_token,
'expires_in': '3600',
'refresh_token': self.refresh_token,
'token_type': 'jwt-bearer',
},
},
})
assert external_id == data['external_id']
subscription = data['metadata']['subscription']
assert subscription['id'] is not None and subscription['secret'] is not None
class VstsIntegrationTest(VstsIntegrationTestCase):
def test_get_organization_config(self):
self.assert_installation()
integration = Integration.objects.get(provider='vsts')
fields = integration.get_installation(
integration.organizations.first().id
).get_organization_config()
assert [field['name'] for field in fields] == [
'sync_status_forward',
'sync_forward_assignment',
'sync_comments',
'sync_status_reverse',
'sync_reverse_assignment',
]
def test_update_organization_config_remove_all(self):
self.assert_installation()
model = Integration.objects.get(provider='vsts')
integration = VstsIntegration(model, self.organization.id)
org_integration = OrganizationIntegration.objects.get(
organization_id=self.organization.id,
)
data = {
'sync_status_forward': {},
'other_option': 'hello',
}
IntegrationExternalProject.objects.create(
organization_integration_id=org_integration.id,
external_id=1,
resolved_status='ResolvedStatus1',
unresolved_status='UnresolvedStatus1',
)
IntegrationExternalProject.objects.create(
organization_integration_id=org_integration.id,
external_id=2,
resolved_status='ResolvedStatus2',
unresolved_status='UnresolvedStatus2',
)
IntegrationExternalProject.objects.create(
organization_integration_id=org_integration.id,
external_id=3,
resolved_status='ResolvedStatus3',
unresolved_status='UnresolvedStatus3',
)
integration.update_organization_config(data)
external_projects = IntegrationExternalProject.objects \
.all() \
.values_list('external_id', flat=True)
assert list(external_projects) == []
config = OrganizationIntegration.objects.get(
organization_id=org_integration.organization_id,
integration_id=org_integration.integration_id
).config
assert config == {
'sync_status_forward': False,
'other_option': 'hello',
}
def test_update_organization_config(self):
self.assert_installation()
org_integration = OrganizationIntegration.objects.get(
organization_id=self.organization.id,
)
model = Integration.objects.get(provider='vsts')
integration = VstsIntegration(model, self.organization.id)
# test validation
data = {
'sync_status_forward': {
1: {
'on_resolve': '',
'on_unresolve': 'UnresolvedStatus1',
},
},
}
with self.assertRaises(IntegrationError):
integration.update_organization_config(data)
data = {
'sync_status_forward': {
1: {
'on_resolve': 'ResolvedStatus1',
'on_unresolve': 'UnresolvedStatus1',
},
2: {
'on_resolve': 'ResolvedStatus2',
'on_unresolve': 'UnresolvedStatus2',
},
4: {
'on_resolve': 'ResolvedStatus4',
'on_unresolve': 'UnresolvedStatus4',
},
},
'other_option': 'hello',
}
IntegrationExternalProject.objects.create(
organization_integration_id=org_integration.id,
external_id=1,
resolved_status='UpdateMe',
unresolved_status='UpdateMe',
)
IntegrationExternalProject.objects.create(
organization_integration_id=org_integration.id,
external_id=2,
resolved_status='ResolvedStatus2',
unresolved_status='UnresolvedStatus2',
)
IntegrationExternalProject.objects.create(
organization_integration_id=org_integration.id,
external_id=3,
resolved_status='ResolvedStatus3',
unresolved_status='UnresolvedStatus3',
)
integration.update_organization_config(data)
external_projects = IntegrationExternalProject.objects \
.all() \
.order_by('external_id')
assert external_projects[0].external_id == '1'
assert external_projects[0].resolved_status == 'ResolvedStatus1'
assert external_projects[0].unresolved_status == 'UnresolvedStatus1'
assert external_projects[1].external_id == '2'
assert external_projects[1].resolved_status == 'ResolvedStatus2'
assert external_projects[1].unresolved_status == 'UnresolvedStatus2'
assert external_projects[2].external_id == '4'
assert external_projects[2].resolved_status == 'ResolvedStatus4'
assert external_projects[2].unresolved_status == 'UnresolvedStatus4'
config = OrganizationIntegration.objects.get(
organization_id=org_integration.organization_id,
integration_id=org_integration.integration_id
).config
assert config == {
'sync_status_forward': True,
'other_option': 'hello',
}<|fim▁end|> | 'AccountId': external_id,
},
'instance': '{}.visualstudio.com'.format(self.vsts_account_name),
'identity': { |
<|file_name|>script.js<|end_file_name|><|fim▁begin|>var leaderboard2 = function(data) {
return data.data.sort(function(a,b){return b.points-a.points;});
};
function liCreate(name,points) {
var li = $('<li>'+name+'</li>');
li.append('<span>'+points+'</span>');
}
$(document).ready(function() {
// var sorted = leaderboard2(data);
// for (var i=0; i<sorted.length; i++) {
// $('body').append('<div><p>'+sorted[i].name+' '+sorted[i].points+'</p></div>')
// }
// //problem here is with repeated DOM manipulation
var studentArray = [];
$.getJSON('http://192.168.1.35:8000/data.json').success(function(data){
//using '$.getJSON' as opposed to $.ajax specifies
//also, include both success and error handler to account for asynchrony.<|fim▁hole|> var student = ('<div><p>'+sorted[i].name+' '+sorted[i].points+'</p></div>');
studentArray.push(student);
}
//^^ FIXED!!! Append entire array ^^
console.log(studentArray);
$('body').append(studentArray);
})
.error(function(error){
console.log(error);
});
});<|fim▁end|> | //i.e., if/when SUCCESS, execute some code block; if ERROR, execute another.
console.log(data);
var sorted = leaderboard2(data);
for (var i=0; i<sorted.length; i++) { |
<|file_name|>rezkonvimporter.cpp<|end_file_name|><|fim▁begin|>/***************************************************************************
* Copyright © 2003 Jason Kivlighn <[email protected]> *
* *
* This program is free software; you can redistribute it and/or modify *
* it under the terms of the GNU General Public License as published by *
* the Free Software Foundation; either version 2 of the License, or *
* (at your option) any later version. *
***************************************************************************/
#include "rezkonvimporter.h"<|fim▁hole|>#include <kdebug.h>
#include <QFile>
#include <QRegExp>
#include <QTextStream>
#include "datablocks/mixednumber.h"
RezkonvImporter::RezkonvImporter() : BaseImporter()
{}
RezkonvImporter::~RezkonvImporter()
{}
void RezkonvImporter::parseFile( const QString &filename )
{
QFile input( filename );
if ( input.open( QIODevice::ReadOnly ) ) {
QTextStream stream( &input );
stream.skipWhiteSpace();
QString line;
while ( !stream.atEnd() ) {
line = stream.readLine();
if ( line.contains( QRegExp( "^=====.*REZKONV.*", Qt::CaseInsensitive ) ) ) {
QStringList raw_recipe;
while ( !( line = stream.readLine() ).contains( QRegExp( "^=====\\s*$" ) ) && !stream.atEnd() )
raw_recipe << line;
readRecipe( raw_recipe );
}
}
if ( fileRecipeCount() == 0 )
setErrorMsg( i18n( "No recipes found in this file." ) );
}
else
setErrorMsg( i18n( "Unable to open file." ) );
}
// Parse one raw recipe block (the lines between the RezKonv delimiters).
// Expected layout: title line ("Titel: ..."), categories line
// ("Kategorien: ..."), yield line ("Menge: ..."), then ingredient lines /
// "=...=" section headers, a "Quelle" (source) header with references, and
// finally the free-text instructions. The parsed Recipe is handed to add().
void RezkonvImporter::readRecipe( const QStringList &raw_recipe )
{
    kapp->processEvents(); //don't want the user to think its frozen... especially for files with thousands of recipes

    Recipe recipe;

    QStringList::const_iterator text_it = raw_recipe.begin();
    m_end_it = raw_recipe.end();

    //title (Titel)
    text_it++;
    // Everything after the first ':' is the title.
    recipe.title = ( *text_it ).mid( ( *text_it ).indexOf( ":" ) + 1, ( *text_it ).length() ).trimmed();
    kDebug() << "Found title: " << recipe.title ;

    //categories (Kategorien):
    text_it++;
    QStringList categories;
    if ( ( *text_it ).mid( ( *text_it ).indexOf( ":" ) + 1, ( *text_it ).length() ).isEmpty() )
        categories = QStringList();
    else
        // Categories are comma-separated after the ':'.
        categories = ( *text_it ).mid( ( *text_it ).indexOf( ":" ) + 1, ( *text_it ).length() ).split( ',', QString::SkipEmptyParts );

    for ( QStringList::const_iterator it = categories.constBegin(); it != categories.constEnd(); ++it ) {
        Element new_cat;
        new_cat.name = QString( *it ).trimmed();
        kDebug() << "Found category: " << new_cat.name ;
        recipe.categoryList.append( new_cat );
    }

    //yield (Menge)
    text_it++;
    //get the number between the ":" and the next space after it
    QString yield_str = ( *text_it ).trimmed();
    yield_str.remove( QRegExp( "^Menge:\\s*" ) );
    int sep_index = yield_str.indexOf( ' ' );
    if ( sep_index != -1 )
        // Text after the amount is the yield unit/type (e.g. "Portionen").
        recipe.yield.setType(yield_str.mid( sep_index+1 ));

    double amount = 0.0, amountOffset = 0.0;
    readRange( yield_str.mid( 0, sep_index ), amount, amountOffset );
    recipe.yield.setAmount(amount);
    recipe.yield.setAmountOffset(amountOffset);
    kDebug() << "Found yield: " << recipe.yield.amount();

    bool is_sub = false;
    bool last_line_empty = false;
    text_it++;
    // Ingredient / header section: runs until the source ("Quelle") header or
    // until a line that looks like the start of the instructions.
    while ( text_it != raw_recipe.end() ) {
        if ( ( *text_it ).isEmpty() ) {
            last_line_empty = true;
            text_it++;
            continue;
        }

        if ( ( *text_it ).contains( QRegExp( "^=====.*=$" ) ) ) //is a header
        {
            if ( ( *text_it ).contains( "quelle", Qt::CaseInsensitive ) )
            {
                loadReferences( text_it, recipe );
                break; //reference lines are the last before the instructions
            }
            else
                loadIngredientHeader( *text_it, recipe );
        }
        //if it has no more than two spaces followed by a non-digit
        //then we'll assume it is a direction line
        else if ( last_line_empty && ( *text_it ).contains( QRegExp( "^\\s{0,2}[^\\d\\s=]" ) ) )
            break;
        else
            loadIngredient( *text_it, recipe, is_sub );

        last_line_empty = false;
        text_it++;
    }

    loadInstructions( text_it, recipe );

    add( recipe );

    // Reset the ingredient-group header for the next recipe.
    current_header.clear();
}
// Parse a single fixed-column ingredient line into recipe.ingList.
// Column layout: [0,7) amount (possibly a range), [8,17) unit, [18,...) name
// with an optional ", preparation" suffix. Lines starting with "-"/"--" are
// continuations of the previous ingredient's name. A trailing prep method
// "or" marks the NEXT ingredient as a substitute of this one (via is_sub).
void RezkonvImporter::loadIngredient( const QString &string, Recipe &recipe, bool &is_sub )
{
    Ingredient new_ingredient;
    new_ingredient.amount = 0; //amount not required, so give default of 0

    QRegExp cont_test( "^-{1,2}" );
    if ( string.trimmed().contains( cont_test ) ) {
        QString name = string.trimmed();
        name.remove( cont_test );
        kDebug() << "Appending to last ingredient: " << name ;
        if ( !recipe.ingList.isEmpty() ) //so it doesn't crash when the first ingredient appears to be a continuation of another
            recipe.ingList.last().name += ' ' + name;
        return ;
    }

    //amount
    if ( !string.mid( 0, 7 ).trimmed().isEmpty() )
        readRange( string.mid( 0, 7 ), new_ingredient.amount, new_ingredient.amount_offset );

    //unit
    QString unit_str = string.mid( 8, 9 ).trimmed();
    new_ingredient.units = Unit( unit_str, new_ingredient.amount );

    //name and preparation method
    new_ingredient.name = string.mid( 18, string.length() - 18 ).trimmed();

    //separate out the preparation method
    QString name_and_prep = new_ingredient.name;
    int separator_index = name_and_prep.indexOf( "," );
    if ( separator_index != -1 ) {
        new_ingredient.name = name_and_prep.mid( 0, separator_index ).trimmed();
        new_ingredient.prepMethodList = ElementList::split(",",name_and_prep.mid( separator_index + 1, name_and_prep.length() ).trimmed() );
    }

    //header (if present)
    new_ingredient.group = current_header;

    // Remember whether the PREVIOUS line flagged this one as a substitute,
    // then update is_sub for the line that follows.
    bool last_is_sub = is_sub;
    if ( !new_ingredient.prepMethodList.isEmpty() &&
            new_ingredient.prepMethodList.last().name == "or" ) {
        new_ingredient.prepMethodList.pop_back();
        is_sub = true;
    }
    else
        is_sub = false;

    if ( last_is_sub )
        recipe.ingList.last().substitutes.append(new_ingredient);
    else
        recipe.ingList.append( new_ingredient );
}
// Record the current ingredient-group header.
// Strips the runs of '=' that frame a RezKonv header line and stores the
// trimmed remainder in current_header; subsequent ingredients are tagged
// with this group name until the next header appears.
void RezkonvImporter::loadIngredientHeader( const QString &string, Recipe &/*recipe*/ )
{
    QString header( string );
    header.remove( QRegExp( "^=*" ) );
    header.remove( QRegExp( "=*$" ) );
    current_header = header.trimmed();
    kDebug() << "found ingredient header: " << current_header ;
}
// Read the remaining recipe lines as the free-text instructions.
// An "O-Titel:" line (original title, rewrapped by the exporter) replaces the
// recipe title; its ":...:" continuation lines are re-joined. All other lines
// are concatenated into recipe.instructions, with blank lines becoming
// paragraph breaks.
void RezkonvImporter::loadInstructions( QStringList::const_iterator &text_it, Recipe &recipe )
{
    QString instr;
    QRegExp rx_title( "^:{0,1}\\s*O-Titel\\s*:" );
    QString line;
    text_it++;
    while ( text_it != m_end_it ) {
        line = *text_it;

        //titles longer than the line width are rewritten here
        if ( line.contains( rx_title ) ) {
            line.remove( rx_title );
            recipe.title = line.trimmed();

            QRegExp rx_line_cont( ":\\s*>{0,1}\\s*:" );
            // NOTE(review): this inner loop does not test text_it against
            // m_end_it; a malformed file whose last lines all match the
            // continuation pattern could advance past the end — confirm.
            while ( ( line = *text_it ).contains( rx_line_cont ) ) {
                line.remove( rx_line_cont );
                recipe.title += line;
                text_it++;
            }
            kDebug() << "Found long title: " << recipe.title ;
        }
        else {
            // Blank line => paragraph break (unless it is the final line).
            if ( line.trimmed().isEmpty() && ( (text_it+1) != m_end_it ) )
                instr += "\n\n";

            instr += line.trimmed();
        }
        text_it++;
    }

    recipe.instructions = instr;
}
// Parse the "Quelle" (source) section: author lines plus an optional
// creation-date line of the form "Erfasst *RK* DD.MM.YY von <typist>".
// The two-digit year is windowed: 00-69 => 2000-2069, 70-99 => 1970-1999.
// Stops at the first blank line or at a line that is not a reference.
void RezkonvImporter::loadReferences( QStringList::const_iterator &text_it, Recipe &recipe )
{
    kDebug() << "Found source header" ;
    text_it++;
    while ( text_it != m_end_it && !text_it->trimmed().isEmpty() ) {
        QRegExp rx_line_begin( "^\\s*-{0,2}\\s*" );
        QRegExp rx_creation_date = QRegExp( "^\\s*-{0,2}\\s*Erfasst \\*RK\\*", Qt::CaseInsensitive );
        if ( ( *text_it ).contains( rx_creation_date ) ) // date followed by typist
        {
            QString date = *text_it;
            date.remove( rx_creation_date ).remove( QRegExp( " von\\s*$" ) );

            // Date is given as DD.MM.YY
            QString s = date.section( '.', 0, 0 );
            int day = s.toInt();

            s = date.section( '.', 1, 1 );
            int month = s.toInt();

            s = date.section( '.', 2, 2 );
            int year = s.toInt();
            year += 1900;
            if ( year < 1970 )
                year += 100; //we'll assume nothing has been created before 1970 (y2k issues :p)

            recipe.ctime = QDateTime(QDate(year,month,day));
#if 0
            //typist (currently unused)
            // BUGFIX: was "QString typist = = *text_it;" — a stray '=' that
            // would not compile if this block were ever enabled.
            text_it++;
            QString typist = *text_it;
            typist.remove( rx_line_begin );
#endif
        }
        else //everything else is an author
        {
            if ( ( *text_it ).contains( rx_line_begin ) ) {
                QString author = *text_it;
                author.remove( rx_line_begin );

                recipe.authorList.append( Element( author ) );
            }
            else
                break;
        }
        text_it++;
    }
}
// Parse a numeric range such as "2-3" (or a single value such as "2").
// On return, `amount` holds the lower bound and `amount_offset` holds the
// size of the range (upper - lower); the offset is left untouched when no
// upper bound is present. Mixed numbers like "1 1/2" are supported via
// MixedNumber::fromString().
void RezkonvImporter::readRange( const QString &range_str, double &amount, double &amount_offset )
{
    QString from = range_str.section( '-', 0, 0 );
    QString to = range_str.section( '-', 1, 1 );

    MixedNumber number;
    MixedNumber::fromString( from, number, false);
    amount = number.toDouble();

    if ( !to.trimmed().isEmpty() ) {
        MixedNumber::fromString( to, number, false );
        amount_offset = number.toDouble() - amount;
    }
}
#include <kapplication.h>
#include <klocale.h> |
<|file_name|>TestRe.py<|end_file_name|><|fim▁begin|># -*- coding: utf-8 -*-
import re
test = '用户输入的字符串'
if re.match(r'用户', test):
print('ok')
else:
print('failed')
print('a b c'.split(' '))
print(re.split(r'\s*', 'a b c'))
print(re.split(r'[\s\,\;]+', 'a,b;; c d'))
m = re.match(r'^(\d{3})-(\d{3,8})$', '010-12345')
print(m.group(1))
m = re.match(r'^(\S+)@(\S+.com)$', '[email protected]')<|fim▁hole|># <Tom Paris> tom@voyager .org
re_mail = re.compile(r'<(\S+)\s+(\S+)>\s+(\S+)@(\S+.org)')
print(re_mail.match('<Tom Paris> [email protected]').groups())
str = 'abcbacba'
# non-greed match
re = re.compile(r'a.*?a', re.S)
print(re.match(str).group())<|fim▁end|> | print(m.group(2))
print(m.groups()) |
<|file_name|>regions-close-over-type-parameter-2.rs<|end_file_name|><|fim▁begin|>// Copyright 2014 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
#![feature(box_syntax)]

// Test for what happens when a type parameter `A` is closed over into
// an object. This should yield errors unless `A` (and the object)
// both have suitable bounds.

// Every type gets a trivial `Foo` impl via the blanket impl below.
trait Foo { fn get(&self); }

impl<A> Foo for A {
    fn get(&self) { }
}

// Boxes `v` as a `Foo` trait object bounded by lifetime 'a; requires A: 'a.
fn repeater3<'a,A:'a>(v: A) -> Box<Foo+'a> {
    box v as Box<Foo+'a>
}
fn main() {<|fim▁hole|> // ~Repeat<&'blk int> where blk is the lifetime of the block below.
let _ = {
let tmp0 = 3i;
let tmp1 = &tmp0; //~ ERROR `tmp0` does not live long enough
repeater3(tmp1)
};
}<|fim▁end|> | // Error results because the type of is inferred to be |
<|file_name|>t2-random-walks.py<|end_file_name|><|fim▁begin|>"""
Trabalho T2 da disciplina Teoria dos Grafos, ministrada em 2014/02
'All Hail Gabe Newell'
Alunos:
Daniel Nobusada 344443
Thales Eduardo Adair Menato 407976
Jorge Augusto Bernardo 407844
"""
import networkx as nx
import numpy as np
import plotly.plotly as py
from plotly.graph_objs import *
py.sign_in("thamenato", "aq0t3czzut")
# Load the Zachary's Karate Club graph (GML format).
graphG = nx.read_gml('karate.gml')

"""
1) Computacao da distribuicao estacionaria teorica (steady state) do grafo
w(i) = d(vi) / 2|E|
"""
# Theoretical stationary distribution of a random walk on the graph:
# w(i) = degree(v_i) / (2 * |E|).
w_real = []
for i in graphG.nodes_iter():  # nodes_iter: NetworkX 1.x API
    aux = float(graphG.degree(i)) / float((2 * graphG.number_of_edges()))
    w_real.append(aux)
"""
2) Calcular The Power Method
http://college.cengage.com/mathematics/larson/elementary_linear/4e/shared/
downloads/c10s3.pdf
"""
# P starts as the adjacency matrix of graphG.
matrixP = nx.adjacency_matrix(graphG)

# Compute the sum of each row (i.e. each node's degree).
sum_linha = []
for i in matrixP:
    sum_linha.append(i.sum())

# Row-normalize: p(i,j) = p(i,j) / sum_linha(i), turning P into the
# transition (stochastic) matrix of the random walk.
for i in range(0, matrixP.shape[0]):
    for j in range(0, matrixP.shape[1]):
        matrixP[i, j] = float(matrixP[i, j]) / float(sum_linha[i])

# Initial vector: uniform distribution 1/G.order(), summing to 1.
# For the graph used here, G.order() = 34.
w_inicial = np.array([1.0/float(graphG.order())
                      for i in range(0, graphG.order())])

# Power method, 5 iterations: w_power5 = w_inicial * P^5.
w_power5 = np.dot(w_inicial, matrixP)
for i in range(0, 4):
    w_power5 = np.dot(w_power5, matrixP)

# Power method, 100 iterations: w_power100 = w_inicial * P^100.
w_power100 = np.dot(w_inicial, matrixP)
for i in range(0, 99):
    w_power100 = np.dot(w_power100, matrixP)
# The elements of each of these vectors sum to 1.
"""
3) Escolha de 2 vertices distintos e realizar a caminhada aleatoria de ambos
"""
# Random-walk simulation over the module-level graph `graphG`.
def random_walk(node, numPassos):
    """Simulate a random walk of ``numPassos`` steps starting at ``node``.

    Returns a list with one entry per graph node holding the fraction of
    steps that landed on that node (nodes are assumed numbered 1..N).
    """
    visits = [0.0] * graphG.number_of_nodes()
    for _ in range(numPassos):
        # Pick a uniformly random neighbor of the current node and move there.
        neighbors = graphG.neighbors(node)
        node = neighbors[np.random.randint(0, len(neighbors))]
        visits[node - 1] += 1
    # Normalize visit counts by the number of steps taken.
    return [count / numPassos for count in visits]
# Pick two distinct random vertices (nodes are numbered 1..N).
nodeA = np.random.random_integers(1, graphG.number_of_nodes())
nodeB = np.random.random_integers(1, graphG.number_of_nodes())
# Re-draw until the two vertices differ.
# BUGFIX: the original used `while nodeB is nodeA`, an identity test that is
# unreliable for numpy integers — equal values are usually distinct objects,
# so the loop almost never ran and equal nodes could slip through.
while nodeB == nodeA:
    nodeB = np.random.random_integers(1, graphG.number_of_nodes())

# Two random walks of length N = 100, one from each start node.
w_random100a = random_walk(nodeA, 100)
w_random100b = random_walk(nodeB, 100)

# Two random walks of length N = 10000.
w_random10000a = random_walk(nodeA, 10000)
w_random10000b = random_walk(nodeB, 10000)
# Print all collected results to the console (Python 2 print statements).
print "w_power5: "
w_power5_lista = []
for i in range(0, w_power5.size):
    # Format each probability with 4 decimal places for readable output.
    w_power5_lista.append('%.4f'%w_power5[0, i])
print w_power5_lista

print "w_power100: "
w_power100_lista = []
for i in range(0, w_power100.size):
    w_power100_lista.append('%.4f'%w_power100[0, i])
print w_power100_lista

print "w_random100a:"
print w_random100a
print "w_random100b:"
print w_random100b
print "w_random10000a:"
print w_random10000a
print "w_random10000b:"
print w_random10000b

# To plot the results at https://plot.ly/~thamenato/2/t2-random-walk/
# just uncomment the block below and run the script again.
# The plotly library must be installed (https://plot.ly/python/getting-started/);
# on Windows open the Python(x,y) menu, choose interactive consoles: IPython(sh)
# and run: pip install plotly
"""
trace_power5 = Bar(
x = graphG.nodes(),
y = np.squeeze(np.asarray(w_power5)),
name = 'w_power5',
marker = Marker(
color='rgb(51,102,255)'
)
)
trace_power100 = Bar(
x = graphG.nodes(),
y = np.squeeze(np.asarray(w_power100)),
name = 'w_power100',
marker = Marker(
color='rgb(0,184,245)'
)
)
trace_random100a = Bar(
x = graphG.nodes(),
y = np.squeeze(np.asarray(w_random100a)),
name = 'w_random100a',
marker = Marker(
color='rgb(138,184,0)'
)<|fim▁hole|> name = 'w_random100b',
marker = Marker(
color='rgb(184,245,0)'
)
)
trace_random10000a = Bar(
x = graphG.nodes(),
y = np.squeeze(np.asarray(w_random10000a)),
name = 'w_random10000a',
marker = Marker(
color='rgb(245,184,0)'
)
)
trace_random10000b = Bar(
x = graphG.nodes(),
y = np.squeeze(np.asarray(w_random10000b)),
name = 'w_random10000b',
marker = Marker(
color='rgb(255,102,51)'
)
)
data = Data([trace_power5, trace_power100, trace_random100a,
trace_random100b, trace_random10000a, trace_random10000b])
layout = Layout(
title = 'T2: Random Walk',
xaxis = XAxis(
title = 'Nodes',
titlefont = Font(
size = 16,
color = 'rgb(107, 107, 107)'
),
tickfont = Font(
size = 14,
color = 'rgb(107, 107, 107)'
)
),
yaxis = YAxis(
title = 'Probability',
titlefont = Font(
size = 16,
color = 'rgb(107, 107, 107)'
),
tickfont = Font(
size = 14,
color = 'rgb(107, 107, 107)'
)
),
legend = Legend(
x = 0.25,
y = 1.0,
bgcolor = 'rgba(255, 255, 255, 0)',
bordercolor = 'rgba(255, 255, 255, 0)'
),
barmode = 'group',
bargap = 0.15,
bargroupgap = 0.1
)
fig = Figure(data = data, layout = layout)
plot_url = py.plot(fig, filename='T2_Random_Walks')
"""<|fim▁end|> | )
trace_random100b = Bar(
x = graphG.nodes(),
y = np.squeeze(np.asarray(w_random100b)), |
<|file_name|>mock_peernet.go<|end_file_name|><|fim▁begin|>package mocknet
import (
"fmt"
"math/rand"
"sync"
ic "github.com/jbenet/go-ipfs/p2p/crypto"
inet "github.com/jbenet/go-ipfs/p2p/net"
peer "github.com/jbenet/go-ipfs/p2p/peer"
context "github.com/jbenet/go-ipfs/Godeps/_workspace/src/code.google.com/p/go.net/context"
ctxgroup "github.com/jbenet/go-ipfs/Godeps/_workspace/src/github.com/jbenet/go-ctxgroup"
ma "github.com/jbenet/go-ipfs/Godeps/_workspace/src/github.com/jbenet/go-multiaddr"
)
// peernet implements inet.Network
type peernet struct {
	mocknet *mocknet // parent

	peer peer.ID        // identity of this network's local peer
	ps   peer.Peerstore // private peerstore so knowledge isn't shared between peers

	// conns are actual live connections between peers.
	// many conns could run over each link.
	// **conns are NOT shared between peers**
	connsByPeer map[peer.ID]map[*conn]struct{}
	connsByLink map[*link]map[*conn]struct{}

	// implement inet.Network
	streamHandler inet.StreamHandler
	connHandler   inet.ConnHandler

	cg ctxgroup.ContextGroup // lifecycle/teardown management
	sync.RWMutex             // guards the conns maps and the handlers
}
// newPeernet constructs a new peernet
func newPeernet(ctx context.Context, m *mocknet, k ic.PrivKey,
a ma.Multiaddr) (*peernet, error) {
p, err := peer.IDFromPublicKey(k.GetPublic())
if err != nil {
return nil, err
}
// create our own entirely, so that peers knowledge doesn't get shared
ps := peer.NewPeerstore()
ps.AddAddress(p, a)
ps.AddPrivKey(p, k)
ps.AddPubKey(p, k.GetPublic())
n := &peernet{
mocknet: m,
peer: p,
ps: ps,
cg: ctxgroup.WithContext(ctx),
connsByPeer: map[peer.ID]map[*conn]struct{}{},
connsByLink: map[*link]map[*conn]struct{}{},
}
n.cg.SetTeardown(n.teardown)
return n, nil<|fim▁hole|>
// teardown shuts this peernet down by closing every live connection.
// It is registered with the ContextGroup in newPeernet.
func (pn *peernet) teardown() error {
	// close the connections
	for _, c := range pn.allConns() {
		c.Close()
	}
	return nil
}

// allConns returns all the connections between this peer and others
func (pn *peernet) allConns() []*conn {
	pn.RLock()
	var cs []*conn
	for _, csl := range pn.connsByPeer {
		for c := range csl {
			cs = append(cs, c)
		}
	}
	pn.RUnlock()
	return cs
}
// Close calls the ContextCloser func
func (pn *peernet) Close() error {
	return pn.cg.Close()
}

// Peerstore returns the peerstore backing this network's address book.
func (pn *peernet) Peerstore() peer.Peerstore {
	return pn.ps
}

// String renders a short debug description of this peernet.
func (pn *peernet) String() string {
	return fmt.Sprintf("<mock.peernet %s - %d conns>", pn.peer, len(pn.allConns()))
}

// handleNewStream is an internal function to trigger the client's handler
func (pn *peernet) handleNewStream(s inet.Stream) {
	pn.RLock()
	handler := pn.streamHandler
	pn.RUnlock()
	if handler != nil {
		go handler(s) // run asynchronously so the caller never blocks
	}
}

// handleNewConn is an internal function to trigger the client's handler
func (pn *peernet) handleNewConn(c inet.Conn) {
	pn.RLock()
	handler := pn.connHandler
	pn.RUnlock()
	if handler != nil {
		go handler(c)
	}
}
// DialPeer attempts to establish a connection to a given peer.
// Respects the context.
func (pn *peernet) DialPeer(ctx context.Context, p peer.ID) (inet.Conn, error) {
	return pn.connect(p)
}

// connect returns an existing live connection to p if there is one, and
// otherwise opens a new connection over a randomly chosen link.
func (pn *peernet) connect(p peer.ID) (*conn, error) {
	// first, check if we already have live connections
	pn.RLock()
	cs, found := pn.connsByPeer[p]
	pn.RUnlock()
	if found && len(cs) > 0 {
		// map iteration order is random; any existing conn will do
		for c := range cs {
			return c, nil
		}
	}

	log.Debugf("%s (newly) dialing %s", pn.peer, p)

	// ok, must create a new connection. we need a link
	links := pn.mocknet.LinksBetweenPeers(pn.peer, p)
	if len(links) < 1 {
		return nil, fmt.Errorf("%s cannot connect to %s", pn.peer, p)
	}

	// if many links found, how do we select? for now, randomly...
	// this would be an interesting place to test logic that can measure
	// links (network interfaces) and select properly
	l := links[rand.Intn(len(links))]

	log.Debugf("%s dialing %s openingConn", pn.peer, p)

	// create a new connection with link
	c := pn.openConn(p, l.(*link))
	return c, nil
}

// openConn creates a local/remote conn pair over link l, registers the local
// half, and notifies the remote peernet of the incoming connection.
func (pn *peernet) openConn(r peer.ID, l *link) *conn {
	lc, rc := l.newConnPair(pn)
	log.Debugf("%s opening connection to %s", pn.LocalPeer(), lc.RemotePeer())
	pn.addConn(lc)
	rc.net.remoteOpenedConn(rc)
	return lc
}
// remoteOpenedConn registers a connection initiated by the remote side and
// fires the user's connection handler.
func (pn *peernet) remoteOpenedConn(c *conn) {
	log.Debugf("%s accepting connection from %s", pn.LocalPeer(), c.RemotePeer())
	pn.addConn(c)
	pn.handleNewConn(c)
}

// addConn constructs and adds a connection
// to given remote peer over given link
func (pn *peernet) addConn(c *conn) {
	pn.Lock()
	defer pn.Unlock()

	cs, found := pn.connsByPeer[c.RemotePeer()]
	if !found {
		cs = map[*conn]struct{}{}
		pn.connsByPeer[c.RemotePeer()] = cs
	}
	pn.connsByPeer[c.RemotePeer()][c] = struct{}{}

	cs, found = pn.connsByLink[c.link]
	if !found {
		cs = map[*conn]struct{}{}
		pn.connsByLink[c.link] = cs
	}
	pn.connsByLink[c.link][c] = struct{}{}
}

// removeConn removes a given conn
// NOTE(review): empty inner maps are left behind after the last conn for a
// peer/link is removed — harmless for a mock, but they accumulate; confirm.
func (pn *peernet) removeConn(c *conn) {
	pn.Lock()
	defer pn.Unlock()

	cs, found := pn.connsByLink[c.link]
	if !found || len(cs) < 1 {
		panic("attempting to remove a conn that doesnt exist")
	}
	delete(cs, c)

	cs, found = pn.connsByPeer[c.remote]
	if !found {
		panic("attempting to remove a conn that doesnt exist")
	}
	delete(cs, c)
}
// CtxGroup returns the network's ContextGroup
func (pn *peernet) CtxGroup() ctxgroup.ContextGroup {
	return pn.cg
}

// LocalPeer returns the network's LocalPeer
func (pn *peernet) LocalPeer() peer.ID {
	return pn.peer
}

// Peers returns the connected peers
func (pn *peernet) Peers() []peer.ID {
	pn.RLock()
	defer pn.RUnlock()

	peers := make([]peer.ID, 0, len(pn.connsByPeer))
	for _, cs := range pn.connsByPeer {
		for c := range cs {
			peers = append(peers, c.remote)
			break // one entry per remote peer is enough
		}
	}
	return peers
}

// Conns returns all the connections of this peer
func (pn *peernet) Conns() []inet.Conn {
	pn.RLock()
	defer pn.RUnlock()

	out := make([]inet.Conn, 0, len(pn.connsByPeer))
	for _, cs := range pn.connsByPeer {
		for c := range cs {
			out = append(out, c)
		}
	}
	return out
}

// ConnsToPeer returns the live connections to p, or nil if there are none.
func (pn *peernet) ConnsToPeer(p peer.ID) []inet.Conn {
	pn.RLock()
	defer pn.RUnlock()

	cs, found := pn.connsByPeer[p]
	if !found || len(cs) == 0 {
		return nil
	}

	var cs2 []inet.Conn
	for c := range cs {
		cs2 = append(cs2, c)
	}
	return cs2
}
// ClosePeer closes all connections to peer
func (pn *peernet) ClosePeer(p peer.ID) error {
	pn.RLock()
	cs, found := pn.connsByPeer[p]
	pn.RUnlock()
	if !found {
		return nil
	}

	for c := range cs {
		c.Close()
	}
	return nil
}

// BandwidthTotals returns the total amount of bandwidth transferred
func (pn *peernet) BandwidthTotals() (in uint64, out uint64) {
	// need to implement this. probably best to do it in swarm this time.
	// need a "metrics" object
	return 0, 0
}

// ListenAddresses returns a list of addresses at which this network listens.
func (pn *peernet) ListenAddresses() []ma.Multiaddr {
	return pn.Peerstore().Addresses(pn.LocalPeer())
}

// InterfaceListenAddresses returns a list of addresses at which this network
// listens. It expands "any interface" addresses (/ip4/0.0.0.0, /ip6/::) to
// use the known local interfaces.
func (pn *peernet) InterfaceListenAddresses() ([]ma.Multiaddr, error) {
	return pn.ListenAddresses(), nil
}

// Connectedness returns a state signaling connection capabilities
// For now only returns Connected || NotConnected. Expand into more later.
func (pn *peernet) Connectedness(p peer.ID) inet.Connectedness {
	pn.Lock() // NOTE(review): a read lock (RLock) would likely suffice here — confirm
	defer pn.Unlock()

	cs, found := pn.connsByPeer[p]
	if found && len(cs) > 0 {
		return inet.Connected
	}
	return inet.NotConnected
}
// NewStream returns a new stream to given peer p.
// If there is no connection to p, attempts to create one.
func (pn *peernet) NewStream(p peer.ID) (inet.Stream, error) {
	pn.Lock()
	cs, found := pn.connsByPeer[p]
	if !found || len(cs) < 1 {
		pn.Unlock()
		return nil, fmt.Errorf("no connection to peer")
	}
	pn.Unlock()

	// if many conns are found, how do we select? for now, randomly...
	// this would be an interesting place to test logic that can measure
	// links (network interfaces) and select properly
	// NOTE(review): cs is iterated after the lock is released; a concurrent
	// addConn/removeConn could mutate the map meanwhile — confirm intended.
	n := rand.Intn(len(cs))
	var c *conn
	for c = range cs {
		if n == 0 {
			break
		}
		n--
	}

	return c.NewStream()
}

// SetStreamHandler sets the new stream handler on the Network.
// This operation is threadsafe.
func (pn *peernet) SetStreamHandler(h inet.StreamHandler) {
	pn.Lock()
	pn.streamHandler = h
	pn.Unlock()
}

// SetConnHandler sets the new conn handler on the Network.
// This operation is threadsafe.
func (pn *peernet) SetConnHandler(h inet.ConnHandler) {
	pn.Lock()
	pn.connHandler = h
	pn.Unlock()
}
<|file_name|>ModuleAudioTest.java<|end_file_name|><|fim▁begin|>package tranquvis.simplesmsremote.CommandManagement.Modules;
/**<|fim▁hole|>}<|fim▁end|> | * Created by Andreas Kaltenleitner on 31.10.2016.
*/
public class ModuleAudioTest extends ModuleTest { |
<|file_name|>azure_rm_servicebusqueue.py<|end_file_name|><|fim▁begin|>#!/usr/bin/python
#
# Copyright (c) 2018 Yuwei Zhou, <[email protected]>
#
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
from __future__ import absolute_import, division, print_function
__metaclass__ = type
ANSIBLE_METADATA = {'metadata_version': '1.1',
'status': ['preview'],
'supported_by': 'community'}
# Ansible module documentation block (YAML). Fix: "he maximum delivery count"
# was missing its leading 'T'.
DOCUMENTATION = '''
---
module: azure_rm_servicebusqueue
version_added: "2.8"
short_description: Manage Azure Service Bus queue.
description:
    - Create, update or delete an Azure Service Bus queue.
options:
    resource_group:
        description:
            - name of resource group.
        required: true
    name:
        description:
            - name of the queue.
        required: true
    namespace:
        description:
            - Servicebus namespace name.
            - A namespace is a scoping container for all messaging components.
            - Multiple queues and topics can reside within a single namespace, and namespaces often serve as application containers.
        required: true
    state:
        description:
            - Assert the state of the queue. Use 'present' to create or update and
              'absent' to delete.
        default: present
        choices:
            - absent
            - present
    auto_delete_on_idle_in_seconds:
        description:
            - Time idle interval after which a queue is automatically deleted.
            - The minimum duration is 5 minutes.
        type: int
    dead_lettering_on_message_expiration:
        description:
            - A value that indicates whether a queue has dead letter support when a message expires.
        type: bool
    default_message_time_to_live_seconds:
        description:
            - Default message timespan to live value.
            - This is the duration after which the message expires, starting from when the message is sent to Service Bus.
            - This is the default value used when TimeToLive is not set on a message itself.
        type: int
    enable_batched_operations:
        description:
            - Value that indicates whether server-side batched operations are enabled.
        type: bool
    enable_express:
        description:
            - Value that indicates whether Express Entities are enabled.
            - An express topic or queue holds a message in memory temporarily before writing it to persistent storage.
        type: bool
    enable_partitioning:
        description:
            - A value that indicates whether the topic or queue is to be partitioned across multiple message brokers.
        type: bool
    forward_dead_lettered_messages_to:
        description:
            - Queue or topic name to forward the Dead Letter message for a queue.
    forward_to:
        description:
            - Queue or topic name to forward the messages for a queue.
    lock_duration_in_seconds:
        description:
            - Timespan duration of a peek-lock.
            - The amount of time that the message is locked for other receivers.
            - The maximum value for LockDuration is 5 minutes.
        type: int
    max_delivery_count:
        description:
            - The maximum delivery count.
            - A message is automatically deadlettered after this number of deliveries.
        type: int
    max_size_in_mb:
        description:
            - The maximum size of the queue in megabytes, which is the size of memory allocated for the queue.
        type: int
    requires_duplicate_detection:
        description:
            - A value indicating if this queue or topic requires duplicate detection.
        type: bool
    duplicate_detection_time_in_seconds:
        description:
            - TimeSpan structure that defines the duration of the duplicate detection history.
        type: int
    requires_session:
        description:
            - A value that indicates whether the queue supports the concept of sessions.
        type: bool
    status:
        description:
            - Status of the entity.
        choices:
            - active
            - disabled
            - send_disabled
            - receive_disabled

extends_documentation_fragment:
    - azure
    - azure_tags

author:
    - "Yuwei Zhou (@yuwzho)"
'''
EXAMPLES = '''
- name: Create a queue
azure_rm_servicebusqueue:
name: subqueue
resource_group: myResourceGroup
namespace: bar
duplicate_detection_time_in_seconds: 600
'''
RETURN = '''
id:
description: Current state of the queue.
returned: success
type: str
'''
try:
from msrestazure.azure_exceptions import CloudError
except ImportError:
# This is handled in azure_rm_common
pass
from ansible.module_utils.azure_rm_common import AzureRMModuleBase
from ansible.module_utils.common.dict_transformations import _snake_to_camel, _camel_to_snake
from ansible.module_utils._text import to_native
from datetime import datetime, timedelta
# Maps timedelta-valued SBQueue model attribute names to the corresponding
# module option names (which are expressed in whole seconds).
duration_spec_map = dict(
    default_message_time_to_live='default_message_time_to_live_seconds',
    duplicate_detection_history_time_window='duplicate_detection_time_in_seconds',
    auto_delete_on_idle='auto_delete_on_idle_in_seconds',
    lock_duration='lock_duration_in_seconds'
)

# Argument-spec fragment for SAS policy sub-options.
# NOTE(review): this spec is not referenced anywhere in the visible module —
# possibly shared boilerplate or dead code; confirm before removing.
sas_policy_spec = dict(
    state=dict(type='str', default='present', choices=['present', 'absent']),
    name=dict(type='str', required=True),
    regenerate_key=dict(type='bool'),
    rights=dict(type='str', choices=['manage', 'listen', 'send', 'listen_send'])
)
class AzureRMServiceBusQueue(AzureRMModuleBase):
    def __init__(self):
        """Declare the module's argument spec and default attribute values.

        Attributes matching arg-spec keys are populated from user input by
        ``setattr`` in exec_module; the explicit ``None`` defaults below are
        therefore partly redundant (note that e.g. ``requires_session`` and
        ``duplicate_detection_time_in_seconds`` are not pre-declared here but
        are still set dynamically).
        """
        self.module_arg_spec = dict(
            resource_group=dict(type='str', required=True),
            name=dict(type='str', required=True),
            state=dict(type='str', default='present', choices=['present', 'absent']),
            namespace=dict(type='str', required=True),
            auto_delete_on_idle_in_seconds=dict(type='int'),
            dead_lettering_on_message_expiration=dict(type='bool'),
            default_message_time_to_live_seconds=dict(type='int'),
            duplicate_detection_time_in_seconds=dict(type='int'),
            enable_batched_operations=dict(type='bool'),
            enable_express=dict(type='bool'),
            enable_partitioning=dict(type='bool'),
            forward_dead_lettered_messages_to=dict(type='str'),
            forward_to=dict(type='str'),
            lock_duration_in_seconds=dict(type='int'),
            max_delivery_count=dict(type='int'),
            max_size_in_mb=dict(type='int'),
            requires_duplicate_detection=dict(type='bool'),
            requires_session=dict(type='bool'),
            status=dict(type='str',
                        choices=['active', 'disabled', 'send_disabled', 'receive_disabled'])
        )

        self.resource_group = None
        self.name = None
        self.state = None
        self.namespace = None
        self.location = None
        self.type = None
        self.subscription_topic_name = None
        self.auto_delete_on_idle_in_seconds = None
        self.dead_lettering_on_message_expiration = None
        self.default_message_time_to_live_seconds = None
        self.enable_batched_operations = None
        self.enable_express = None
        self.enable_partitioning = None
        self.forward_dead_lettered_messages_to = None
        self.forward_to = None
        self.lock_duration_in_seconds = None
        self.max_delivery_count = None
        self.max_size_in_mb = None
        self.requires_duplicate_detection = None
        self.status = None

        # Standard ansible result structure; filled in by exec_module.
        self.results = dict(
            changed=False,
            id=None
        )

        super(AzureRMServiceBusQueue, self).__init__(self.module_arg_spec,
                                                     supports_check_mode=True)
    def exec_module(self, **kwargs):
        """Entry point: converge the queue to the requested state.

        Builds an SBQueue model from the module options, compares it with the
        existing resource, and creates/updates or deletes as needed. Honors
        check mode. Returns the standard ansible results dict.
        """
        # Copy every declared option onto self.
        for key in list(self.module_arg_spec.keys()):
            setattr(self, key, kwargs[key])

        changed = False

        original = self.get()
        if self.state == 'present':
            # Create the resource instance
            # NOTE(review): requires_duplicate_detection and requires_session
            # are accepted as options but never added to `params`, so they are
            # not sent to the service — confirm whether this is intentional
            # (they may be create-only properties).
            params = dict(
                dead_lettering_on_message_expiration=self.dead_lettering_on_message_expiration,
                enable_batched_operations=self.enable_batched_operations,
                enable_express=self.enable_express,
                enable_partitioning=self.enable_partitioning,
                forward_dead_lettered_messages_to=self.forward_dead_lettered_messages_to,
                forward_to=self.forward_to,
                max_delivery_count=self.max_delivery_count,
                max_size_in_megabytes=self.max_size_in_mb
            )
            if self.status:
                # Service expects CamelCase status values (e.g. 'SendDisabled').
                params['status'] = self.servicebus_models.EntityStatus(str.capitalize(_snake_to_camel(self.status)))
            # Convert the *_seconds integer options into timedelta model fields.
            for k, v in duration_spec_map.items():
                seconds = getattr(self, v)
                if seconds:
                    params[k] = timedelta(seconds=seconds)

            instance = self.servicebus_models.SBQueue(**params)
            result = original
            if not original:
                changed = True
                result = instance
            else:
                result = original
                # Compare only writable attributes (skip read-only/validated ones).
                attribute_map = set(self.servicebus_models.SBQueue._attribute_map.keys()) - set(self.servicebus_models.SBQueue._validation.keys())
                for attribute in attribute_map:
                    value = getattr(instance, attribute)
                    if value and value != getattr(original, attribute):
                        changed = True
            if changed and not self.check_mode:
                result = self.create_or_update(instance)
            self.results = self.to_dict(result)
        elif original:
            changed = True
            if not self.check_mode:
                self.delete()
            self.results['deleted'] = True

        self.results['changed'] = changed
        return self.results
    def create_or_update(self, param):
        """Create the queue or apply updates; returns the resulting SBQueue."""
        try:
            client = self._get_client()
            return client.create_or_update(self.resource_group, self.namespace, self.name, param)
        except Exception as exc:
            # inner_exception usually carries the service-side error detail.
            self.fail('Error creating or updating queue {0} - {1}'.format(self.name, str(exc.inner_exception) or str(exc)))
    def delete(self):
        """Delete the queue; fails the module on any service error."""
        try:
            client = self._get_client()
            client.delete(self.resource_group, self.namespace, self.name)
            return True
        except Exception as exc:
            self.fail("Error deleting queue {0} - {1}".format(self.name, str(exc)))
    def _get_client(self):
        # Queues operation group of the Service Bus management client.
        return self.servicebus_client.queues
def get(self):<|fim▁hole|> return client.get(self.resource_group, self.namespace, self.name)
except Exception:
return None
    def to_dict(self, instance):
        """Serialize an SBQueue model into the module's result dict.

        Falsy attributes are skipped. Durations become whole seconds under
        their *_seconds option name, statuses become snake_case, nested SDK
        models are flattened, and max_size_in_megabytes is renamed to
        max_size_in_mb to mirror the module option.
        """
        result = dict()
        attribute_map = self.servicebus_models.SBQueue._attribute_map
        for attribute in attribute_map.keys():
            value = getattr(instance, attribute)
            if not value:
                continue
            if attribute_map[attribute]['type'] == 'duration':
                # is_valid_timedelta filters out the "never" sentinel value.
                if is_valid_timedelta(value):
                    key = duration_spec_map.get(attribute) or attribute
                    result[key] = int(value.total_seconds())
            elif attribute == 'status':
                result['status'] = _camel_to_snake(value)
            elif isinstance(value, self.servicebus_models.MessageCountDetails):
                result[attribute] = value.as_dict()
            elif isinstance(value, self.servicebus_models.SBSku):
                result[attribute] = value.name.lower()
            elif isinstance(value, datetime):
                result[attribute] = str(value)
            elif isinstance(value, str):
                result[attribute] = to_native(value)
            elif attribute == 'max_size_in_megabytes':
                # NOTE(review): on Python 2 this branch is unreachable for str
                # values only; the rename still works since the value is an int.
                result['max_size_in_mb'] = value
            else:
                result[attribute] = value
        return result
def is_valid_timedelta(value):
    """Return ``value`` unless it equals the Azure "never" sentinel.

    The service reports unset durations as timedelta(10675199, 10085, 477581)
    (.NET TimeSpan.MaxValue); that sentinel is mapped to None so callers can
    simply truth-test the result.
    """
    sentinel = timedelta(10675199, 10085, 477581)
    return None if value == sentinel else value
def main():
    """Module entry point: instantiating the class runs the module."""
    AzureRMServiceBusQueue()


if __name__ == '__main__':
    main()
client = self._get_client() |
<|file_name|>__init__.py<|end_file_name|><|fim▁begin|>from __future__ import absolute_import
from hamcrest.library import *<|fim▁hole|><|fim▁end|> | from hamcrest.core import * |
<|file_name|>environment_scoop_test.py<|end_file_name|><|fim▁begin|>__author__ = 'Robert Meyer'
try:
import scoop
except ImportError:
scoop = None
def scoop_not_functional_check():
    """Return True when SCOOP is unavailable or not running.

    Used as the condition for ``unittest.skipIf`` on the SCOOP test classes;
    also prints a short status line as a side effect.
    """
    running = scoop is not None and scoop.IS_RUNNING
    if running:
        print('SCOOP mode functional!')
        return False
    print('SCOOP NOT running!')
    return True
from pypet.tests.integration.environment_test import EnvironmentTest, ResultSortTest
from pypet.tests.integration.environment_multiproc_test import check_nice
import pypet.pypetconstants as pypetconstants
from pypet.tests.testutils.ioutils import parse_args, run_suite
from pypet.tests.testutils.data import unittest
@unittest.skipIf(scoop_not_functional_check(), 'Only makes sense if scoop is installed and running')
class MultiprocSCOOPNetqueueTest(EnvironmentTest):
    """Environment integration test run under SCOOP with NETQUEUE wrapping."""

    tags = 'integration', 'hdf5', 'environment', 'multiproc', 'netqueue', 'scoop'

    def set_mode(self):
        # Configure the base test for SCOOP multiprocessing (no pool, 4 workers).
        super(MultiprocSCOOPNetqueueTest, self).set_mode()
        self.mode = pypetconstants.WRAP_MODE_NETQUEUE
        self.multiproc = True
        self.freeze_input = False
        self.ncores = 4
        self.gc_interval = 3
        self.niceness = check_nice(1)
        self.use_pool = False
        self.use_scoop = True
        self.graceful_exit = False

    @unittest.skip('Does not work with scoop (fully), because scoop uses main frame.')
    def test_niceness(self):
        pass

    # def test_run(self):
    #     return super(MultiprocSCOOPLocalTest, self).test_run()
@unittest.skipIf(scoop_not_functional_check(), 'Only makes sense if scoop is installed')
class MultiprocSCOOPSortLocalTest(ResultSortTest):
    """Result-sorting integration test run under SCOOP with LOCAL wrapping."""

    tags = 'integration', 'hdf5', 'environment', 'multiproc', 'local', 'scoop'

    def set_mode(self):
        super(MultiprocSCOOPSortLocalTest, self).set_mode()
        self.mode = pypetconstants.WRAP_MODE_LOCAL
        self.freeze_input = False
        self.multiproc = True
        self.ncores = 4
        self.use_pool = False
        self.use_scoop = True
        self.graceful_exit = False

    @unittest.skip('Does not work with SCOOP')
    def test_graceful_exit(self):
        pass
@unittest.skipIf(scoop_not_functional_check(), 'Only makes sense if scoop is installed')
class MultiprocFrozenSCOOPLocalTest(EnvironmentTest):
    """Environment test under SCOOP, LOCAL wrapping, with frozen input enabled."""

    tags = 'integration', 'hdf5', 'environment', 'multiproc', 'local', 'scoop', 'freeze_input'

    def set_mode(self):
        super(MultiprocFrozenSCOOPLocalTest, self).set_mode()
        self.mode = pypetconstants.WRAP_MODE_LOCAL
        self.multiproc = True
        self.freeze_input = True
        self.ncores = 4
        self.gc_interval = 3
        self.niceness = check_nice(1)
        self.use_pool = False
        self.use_scoop = True
        self.graceful_exit = False

    @unittest.skip('Does not work with scoop (fully), because scoop uses main frame.')
    def test_niceness(self):
        pass

    # def test_run(self):
    #     return super(MultiprocSCOOPLocalTest, self).test_run()
# @unittest.skipIf(scoop is None, 'Only makes sense if scoop is installed')
# class MultiprocFrozenSCOOPSortLocalTest(ResultSortTest):
#
# tags = 'integration', 'hdf5', 'environment', 'multiproc', 'local', 'scoop', 'freeze_input'
#
# def set_mode(self):
# super(MultiprocFrozenSCOOPSortLocalTest, self).set_mode()
# self.mode = pypetconstants.WRAP_MODE_LOCAL
# self.freeze_input = True
# self.multiproc = True
# self.ncores = 4
# self.use_pool = False
# self.use_scoop = True
@unittest.skipIf(scoop_not_functional_check(), 'Only makes sense if scoop is installed')
class MultiprocFrozenSCOOPSortNetlockTest(ResultSortTest):
    """Result-sorting test under SCOOP, NETLOCK wrapping, frozen input."""

    tags = 'integration', 'hdf5', 'environment', 'multiproc', 'netlock', 'scoop', 'freeze_input'

    def set_mode(self):
        super(MultiprocFrozenSCOOPSortNetlockTest, self).set_mode()
        self.mode = pypetconstants.WRAP_MODE_NETLOCK
        self.freeze_input = True
        self.multiproc = True
        self.ncores = 4
        self.use_pool = False
        self.use_scoop = True
        # Pick the lock server port from this range.
        self.port = (10000, 60000)
        self.graceful_exit = False

    @unittest.skip('Does not work with SCOOP')
    def test_graceful_exit(self):
        pass
@unittest.skipIf(scoop_not_functional_check(), 'Only makes sense if scoop is installed')
class MultiprocFrozenSCOOPSortNetqueueTest(ResultSortTest):
tags = 'integration', 'hdf5', 'environment', 'multiproc', 'netqueue', 'scoop', 'freeze_input', 'mehmet'
def set_mode(self):
super(MultiprocFrozenSCOOPSortNetqueueTest, self).set_mode()
self.mode = pypetconstants.WRAP_MODE_NETQUEUE
self.freeze_input = True
self.multiproc = True
self.ncores = 4<|fim▁hole|> self.use_scoop = True
self.graceful_exit = False
#self.port = 'tcp://127.0.0.1:22334'
@unittest.skip('Does not work with SCOOP')
def test_graceful_exit(self):
pass
# @unittest.skipIf(scoop is None, 'Only makes sense if scoop is installed')
# class MultiprocSCOOPNetqueueTest(EnvironmentTest):
#
# tags = 'integration', 'hdf5', 'environment', 'multiproc', 'netqueue', 'scoop'
#
# def set_mode(self):
# super(MultiprocSCOOPNetqueueTest, self).set_mode()
# self.mode = pypetconstants.WRAP_MODE_NETQUEUE
# self.multiproc = True
# self.freeze_input = False
# self.ncores = 4
# self.gc_interval = 3
# self.niceness = check_nice(1)
# self.use_pool = False
# self.use_scoop = True
# self.port = None
# self.timeout = 9999.99
@unittest.skipIf(scoop_not_functional_check(), 'Only makes sense if scoop is installed')
class MultiprocSCOOPNetlockTest(EnvironmentTest):
    """Environment test under SCOOP with the NETLOCK wrapping mode."""

    tags = 'integration', 'hdf5', 'environment', 'multiproc', 'netlock', 'scoop'

    def set_mode(self):
        super(MultiprocSCOOPNetlockTest, self).set_mode()
        self.mode = pypetconstants.WRAP_MODE_NETLOCK
        self.multiproc = True
        self.freeze_input = False
        self.ncores = 4
        self.gc_interval = 3
        self.niceness = check_nice(1)
        self.use_pool = False
        self.use_scoop = True
        # None lets the lock server choose its own port.
        self.port = None
        self.timeout = 1099.99
        self.graceful_exit = False
        # self.port = 'tcp://127.0.0.1:22334'

    @unittest.skip('Does not work with scoop (fully), because scoop uses main frame.')
    def test_niceness(self):
        pass
if __name__ == '__main__':
    # Parse command-line options (tag filters etc.) and run the suite.
    opt_args = parse_args()
    run_suite(**opt_args)
<|file_name|>register-device.js<|end_file_name|><|fim▁begin|>/**
* Copyright 2019, Google LLC
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
const functions = require('firebase-functions');
const { google } = require('googleapis');
const { firestore } = require('../admin');
/**
* Return a Promise to obtain the device from Cloud IoT Core
*/
/**
 * Look up a single device registration in Cloud IoT Core.
 *
 * Builds the fully-qualified device resource name from the function's
 * project/region/registry configuration and resolves with the device
 * record (`resp.data`), or rejects with the API error.
 *
 * @param {object} client   authenticated googleapis cloudiot client
 * @param {string} deviceId Cloud IoT device id
 * @return {Promise<object>} the device resource
 */
function getDevice(client, deviceId) {
  return new Promise((resolve, reject) => {
    const projectId = process.env.GCLOUD_PROJECT;
    const region = functions.config().cloudiot.region;
    const registry = functions.config().cloudiot.registry;
    const deviceName =
      `projects/${projectId}/locations/${region}/registries/${registry}/devices/${deviceId}`;
    client.projects.locations.registries.devices.get({ name: deviceName }, (err, resp) => {
      if (err) {
        reject(err);
      } else {
        resolve(resp.data);
      }
    });
  });
}
/**
* Validate that the public key provided by the pending device matches
* the key currently stored in IoT Core for that device id.
*
* Method throws an error if the keys do not match.
*/
/**
 * Check that the public key supplied by a pending device matches the
 * PEM key stored in Cloud IoT Core for that device.
 *
 * The pending key arrives as one unbroken base64 string; it is re-wrapped
 * at 64 characters and framed with PEM markers before comparison.
 *
 * @throws {Error} when the reconstructed PEM does not equal deviceKey
 */
function verifyDeviceKey(pendingDevice, deviceKey) {
  const keyLines = pendingDevice.public_key.match(/(.{1,64})/g);
  const pendingKey = ['-----BEGIN PUBLIC KEY-----']
    .concat(keyLines)
    .concat('-----END PUBLIC KEY-----')
    .join('\n');
  if (deviceKey !== pendingKey) throw new Error(`Public Key Mismatch:\nExpected: ${deviceKey}\nReceived: ${pendingKey}`);
}
/**
* Cloud Function: Verify IoT device and add to user
*/
module.exports = functions.firestore.document('pending/{device}').onWrite(async (change, context) => {
const deviceId = context.params.device;
// Verify this is either a create or update
if (!change.after.exists) {
console.log(`Pending device removed for ${deviceId}`);
return;
}
console.log(`Pending device created for ${deviceId}`);
const pending = change.after.data();
// Create a new Cloud IoT client
const auth = await google.auth.getClient({
scopes: ['https://www.googleapis.com/auth/cloud-platform']
});
const client = google.cloudiot({
version: 'v1',
auth: auth
});
try {
// Verify device does NOT already exist in Firestore
const deviceRef = firestore.doc(`devices/${deviceId}`);
const deviceDoc = await deviceRef.get();
if (deviceDoc.exists) throw new Error(`${deviceId} is already registered to another user`);
// Verify device exists in IoT Core
const result = await getDevice(client, deviceId);
// Verify the device public key
verifyDeviceKey(pending, result.credentials[0].publicKey.key.trim());
// Verify the device type
let configValue = null;
switch (pending.type) {
case 'light':
configValue = require('./default-light.json');
break;<|fim▁hole|> default:
throw new Error(`Invalid device type found in ${deviceId}: ${pending.type}`);
}
// Commit the following changes together
const batch = firestore.batch();
// Insert valid device for the requested owner
const device = {
name: pending.serial_number,
owner: pending.owner,
type: pending.type,
online: false
};
batch.set(deviceRef, device);
// Generate a default configuration
const configRef = firestore.doc(`device-configs/${deviceId}`);
const config = {
owner: pending.owner,
value: configValue
};
batch.set(configRef, config);
// Remove the pending device entry
batch.delete(change.after.ref);
await batch.commit();
console.log(`Added device ${deviceId} for user ${pending.owner}`);
} catch (error) {
// Device does not exist in IoT Core or key doesn't match
console.error('Unable to register new device', error);
}
});<|fim▁end|> | case 'thermostat':
configValue = require('./default-thermostat.json');
break; |
<|file_name|>service_1.go<|end_file_name|><|fim▁begin|>package main
import (
"os"
"os/exec"
"syscall"
)<|fim▁hole|>
binary, lookErr := exec.LookPath("service_1.bash")
if lookErr != nil {
panic(lookErr)
}
args := []string{""}
env := os.Environ()
execErr := syscall.Exec(binary, args, env)
if execErr != nil {
panic(execErr)
}
/*
// START OMIT
curl -d "hi https://europe-west1-cloud-functions-talk-22365.cloudfunctions.net/HelloWorld \
hi says 'Hello, World!'" \
https://europe-west1-cloud-functions-talk-22365.cloudfunctions.net/register
// END OMIT
*/
}<|fim▁end|> |
func main() { |
<|file_name|>utils.py<|end_file_name|><|fim▁begin|># -*- coding: utf-8 -*-
from django.contrib.auth.decorators import user_passes_test
from Aluno.models import Aluno
def check_aluno_exist(user):
if not user.is_authenticated():
return False
try:
aluno = user.aluno_set.get()<|fim▁hole|>
aluno_exist = user_passes_test(lambda u: check_aluno_exist(u))<|fim▁end|> | return True
except Aluno.DoesNotExist:
return False |
<|file_name|>MainActivity.java<|end_file_name|><|fim▁begin|>package com.lee.game;
import android.content.Intent;
import android.content.pm.PackageManager;
import android.content.pm.ResolveInfo;
import android.os.Bundle;
import android.support.v7.widget.LinearLayoutManager;
import android.support.v7.widget.RecyclerView;
import android.text.TextUtils;
import android.view.View;
import android.widget.TextView;
import com.lee.base.activity.BaseActivity;
import com.lee.base.application.PackageNameContainer;
import com.noobyang.log.LogUtil;
import java.text.Collator;
import java.util.ArrayList;
import java.util.Collections;
import java.util.Comparator;<|fim▁hole|>import java.util.List;
import java.util.Map;
import butterknife.BindView;
import butterknife.ButterKnife;
/**
 * Main Activity: browses registered sample-code activities as a folder tree.
 * <p/>
 * Created by LiYang on 2019/4/8.
 */
public class MainActivity extends BaseActivity {

    /** Intent action that sample-code activities declare in their manifest filters. */
    private static final String ACTION_SAMPLE_CODE = "com.lee.main.action.SAMPLE_CODE_GAME";
    /** Intent extra carrying the dot-separated path of the folder being browsed. */
    private static final String EXTRA_NAME_PATH = "com.lee.main.Path";
    /** Separator used when building browse paths. */
    private static final String PATH_DIVIDED_SYMBOLS = ".";
    /** Regex form of the separator, for String.split(). */
    private static final String PATH_DIVIDED_SYMBOLS_REGEX = "\\.";

    @BindView(R.id.tv_path)
    TextView tvPath;            // shows the current browse path, or the app name at root
    @BindView(R.id.rv_sample_code)
    RecyclerView rvSampleCode;  // list of folders / sample activities

    private PackageManager packageManager;
    private List<SampleCodeEntity> sampleCodeEntities;  // backing data for the adapter
    private SampleCodeAdapter sampleCodeAdapter;

    @Override
    protected void onCreate(Bundle savedInstanceState) {
        super.onCreate(savedInstanceState);
        setContentView(R.layout.activity_main);
        ButterKnife.bind(this);
        initData();
        initView();
    }

    /** Re-entered when tapping a folder item relaunches this activity with a new path extra. */
    @Override
    protected void onNewIntent(Intent intent) {
        super.onNewIntent(intent);
        // setIntent so updateSampleCodes() reads the NEW path via getIntent().
        setIntent(intent);
        updateSampleCodes();
    }

    @Override
    protected void onDestroy() {
        super.onDestroy();
    }

    private void initData() {
        packageManager = getPackageManager();
        // NOTE(review): sampleCodeEntities is still null at this point; the adapter
        // presumably tolerates a null list until setData() runs -- confirm in SampleCodeAdapter.
        sampleCodeAdapter = new SampleCodeAdapter(this, sampleCodeEntities, itemClickListener);
    }

    private void initView() {
        LinearLayoutManager layoutManager = new LinearLayoutManager(this);
        layoutManager.setOrientation(LinearLayoutManager.VERTICAL);
        rvSampleCode.setLayoutManager(layoutManager);
        rvSampleCode.setAdapter(sampleCodeAdapter);
        updateSampleCodes();
    }

    /** Rebuilds the entity list for the path in the current intent and refreshes the UI. */
    private void updateSampleCodes() {
        String path = getIntent().getStringExtra(EXTRA_NAME_PATH);
        initSampleCodes(path);
        sampleCodeAdapter.setData(sampleCodeEntities);
        sampleCodeAdapter.notifyDataSetChanged();
        setPathText(path);
    }

    private void setPathText(String path) {
        if (TextUtils.isEmpty(path)) {
            // Root level: fall back to the application name.
            tvPath.setText(R.string.app_name);
        } else {
            tvPath.setText(path);
        }
    }

    /**
     * Populates {@link #sampleCodeEntities} with the entries directly under {@code path}:
     * one activity item per matching activity at exactly this depth, and one folder item
     * per distinct sub-package one level deeper. {@code path} may be null/empty (root).
     */
    protected void initSampleCodes(String path) {
        if (sampleCodeEntities == null) {
            sampleCodeEntities = new ArrayList<>();
        } else {
            sampleCodeEntities.clear();
        }
        List<ResolveInfo> sampleCodeResolveInfoList = getSampleCodeResolveInfoList();
        if (sampleCodeResolveInfoList == null || sampleCodeResolveInfoList.size() == 0) {
            return;
        }
        String[] prefixPaths;
        String currentPrefixPath;
        // Tracks folder labels already added so each sub-package appears once.
        Map<String, Boolean> folderLabel = new HashMap<>();
        String label;
        String[] labelPath;
        String sampleCodeLabel;
        for (ResolveInfo sampleCodeResolveInfo : sampleCodeResolveInfoList) {
            if (TextUtils.isEmpty(path)) {
                prefixPaths = null;
                currentPrefixPath = null;
            } else {
                path = getRelativeName(path);
                prefixPaths = path.split(PATH_DIVIDED_SYMBOLS_REGEX);
                currentPrefixPath = path + PATH_DIVIDED_SYMBOLS;
            }
            label = getRelativeName(sampleCodeResolveInfo.activityInfo.name);
            LogUtil.d("getData currentPrefixPath = " + currentPrefixPath + "---label = " + label);
            if (TextUtils.isEmpty(currentPrefixPath) || label.startsWith(currentPrefixPath)) {
                labelPath = label.split(PATH_DIVIDED_SYMBOLS_REGEX);
                int prefixPathsLen = prefixPaths == null ? 0 : prefixPaths.length;
                sampleCodeLabel = labelPath[prefixPathsLen];
                if (prefixPathsLen == labelPath.length - 1) {
                    // activity
                    addActivityItem(sampleCodeEntities, sampleCodeLabel,
                            sampleCodeResolveInfo.activityInfo.applicationInfo.packageName,
                            sampleCodeResolveInfo.activityInfo.name);
                } else {
                    // folder
                    if (folderLabel.get(sampleCodeLabel) == null) {
                        addFolderItem(sampleCodeEntities, sampleCodeLabel, currentPrefixPath);
                        folderLabel.put(sampleCodeLabel, true);
                    }
                }
            }
        }
        // Alphabetical, locale-aware ordering by title.
        Collections.sort(sampleCodeEntities, comparator);
    }

    /** Strips a known application package prefix from a fully-qualified class name. */
    private String getRelativeName(String className) {
        if (TextUtils.isEmpty(className)) {
            return className;
        }
        for (String packageName : PackageNameContainer.getPackageNames()) {
            if (className.startsWith(packageName + PATH_DIVIDED_SYMBOLS)) {
                // +1 also drops the trailing dot after the package prefix.
                return className.substring(packageName.length() + 1);
            }
        }
        return className;
    }

    /** Queries all activities registered for the sample-code action/category. */
    private List<ResolveInfo> getSampleCodeResolveInfoList() {
        Intent sampleCodeIntent = new Intent(ACTION_SAMPLE_CODE, null);
        sampleCodeIntent.addCategory(Intent.CATEGORY_SAMPLE_CODE);
        return packageManager.queryIntentActivities(sampleCodeIntent, 0);
    }

    /** Orders entities by title using the default locale's collation rules. */
    private final static Comparator<SampleCodeEntity> comparator =
            new Comparator<SampleCodeEntity>() {
                private final Collator collator = Collator.getInstance();

                public int compare(SampleCodeEntity entity1, SampleCodeEntity entity2) {
                    return collator.compare(entity1.getTitle(), entity2.getTitle());
                }
            };

    /** Adds a leaf item whose intent launches the sample activity directly. */
    private void addActivityItem(List<SampleCodeEntity> data, String sampleCodeLabel,
                                 String packageName, String className) {
        Intent activityIntent = new Intent();
        activityIntent.setClassName(packageName, className);
        addItem(data, SampleCodeEntity.SampleCodeType.SAMPLE_CODE_TYPE_ACTIVITY, sampleCodeLabel, activityIntent);
    }

    /** Adds a folder item whose intent relaunches this activity one level deeper. */
    private void addFolderItem(List<SampleCodeEntity> data, String sampleCodeLabel,
                               String currentPrefixPath) {
        Intent folderIntent = new Intent();
        folderIntent.setClass(this, MainActivity.class);
        String path = TextUtils.isEmpty(currentPrefixPath) ? sampleCodeLabel : currentPrefixPath + sampleCodeLabel;
        folderIntent.putExtra(EXTRA_NAME_PATH, path);
        addItem(data, SampleCodeEntity.SampleCodeType.SAMPLE_CODE_TYPE_FOLDER, sampleCodeLabel, folderIntent);
    }

    protected void addItem(List<SampleCodeEntity> data, int type, String title, Intent intent) {
        SampleCodeEntity entity = new SampleCodeEntity(type, title, intent);
        data.add(entity);
    }

    /** Launches the clicked entity's intent (activity or folder drill-down). */
    private SampleCodeAdapter.OnItemClickListener itemClickListener =
            new SampleCodeAdapter.OnItemClickListener() {
                @Override
                public void onItemClick(View view, int position) {
                    SampleCodeEntity entity = sampleCodeEntities.get(position);
                    if (entity != null) {
                        Intent intent = entity.getIntent();
                        intent.addCategory(Intent.CATEGORY_SAMPLE_CODE);
                        startActivity(intent);
                    }
                }
            };
}
<|file_name|>workflow-controller.spec.js<|end_file_name|><|fim▁begin|>'use strict';
/**
* Created by Alex Levshin on 26/11/16.
*/
var RootFolder = process.env.ROOT_FOLDER;
if (!global.rootRequire) {
global.rootRequire = function (name) {
return require(RootFolder + '/' + name);
};
}
var restify = require('restify');
var _ = require('lodash');
var fs = require('fs');
var expect = require('chai').expect;
var ApiPrefix = '/api/v1';
var Promise = require('promise');
var config = rootRequire('config');
var util = require('util');
var WorktyRepositoryCodePath = RootFolder + '/workties-repository';
var SubVersion = config.restapi.getLatestVersion().sub; // YYYY.M.D
// Init the test client using supervisor account (all acl permissions)
var adminClient = restify.createJsonClient({
version: SubVersion,
url: config.restapi.getConnectionString(),
headers: {
'Authorization': config.supervisor.getAuthorizationBasic() // supervisor
},
rejectUnauthorized: false
});
describe('Workflow Rest API', function () {
var WorkflowsPerPage = 3;
var Workflows = [];
var WorktiesPerPage = 2;
var Workties = [];
var WorktiesInstances = [];
var WORKTIES_FILENAMES = ['unsorted/nodejs/unit-tests/without-delay.zip'];
console.log('Run Workflow API tests for version ' + ApiPrefix + '/' + SubVersion);
/**
 * Build an array by invoking the factory once per index 0..count-1.
 * Callers pass promise factories, so the result feeds Promise.all().
 *
 * @param {function(number)} callback factory invoked with each index
 * @param {number} count how many times to invoke it
 * @return {Array} the collected results, in index order
 */
function _createPromises(callback, count) {
    var promises = [];
    var idx = 0;
    while (idx < count) {
        promises.push(callback(idx));
        idx += 1;
    }
    return promises;
}
// Creates workty template #idx (uploading the zipped sample code) and then
// attaches one property to it. Resolves with {res, data} where data is the
// created workty carrying its new propertiesIds.
// NOTE(review): the REST callbacks' `err` is ignored here -- callers assert
// on res.statusCode instead; only synchronous failures (e.g. readFileSync)
// reject the promise.
function _createWorkty(idx) {
    return new Promise(function (resolve, reject) {
        try {
            var compressedCode = fs.readFileSync(WorktyRepositoryCodePath + '/' + WORKTIES_FILENAMES[0]);
            adminClient.post(ApiPrefix + '/workties', {
                name: 'myworkty' + idx,
                desc: 'worktydesc' + idx,
                compressedCode: compressedCode,
                template: true
            }, function (err, req, res, data) {
                var workty = data;
                // Attach a single test property to the freshly created workty.
                adminClient.post(ApiPrefix + '/workties/' + data._id + '/properties', {
                    property: {
                        name: 'PropertyName',
                        value: 'PropertyValue'
                    }
                }, function (err, req, res, data) {
                    workty.propertiesIds = [data];
                    resolve({res: res, data: workty});
                });
            });
        } catch (ex) {
            reject(ex);
        }
    });
}
// Creates workflow fixture #idx via the REST API; resolves with {res, data}.
// REST errors are not rejected -- callers assert on res.statusCode.
function _createWorkflow(idx) {
    return new Promise(function (resolve, reject) {
        try {
            adminClient.post(ApiPrefix + '/workflows', {
                name: 'myworkflow' + idx,
                desc: 'workflowdesc' + idx
            }, function (err, req, res, data) {
                resolve({res: res, data: data});
            });
        } catch (ex) {
            reject(ex);
        }
    });
}
// Creates a workty instance inside the FIRST workflow, bound to workty #idx.
// Resolves with {res, data}; REST errors are left for callers to detect via res.
function _createWorktyInstance(idx) {
    return new Promise(function (resolve, reject) {
        try {
            adminClient.post(ApiPrefix + '/workflows/' + Workflows[0]._id + '/worktiesInstances', {
                name: 'worktyinstance' + idx,
                desc: 'worktyinstance' + idx,
                worktyId: Workties[idx]._id,
                embed: 'properties'
            }, function (err, req, res, data) {
                resolve({res: res, data: data});
            });
        } catch (ex) {
            reject(ex);
        }
    });
}
// Delete workflows and workties
// Deletes workty fixture #idx; resolves with {res} for status-code assertions.
function _deleteWorkty(idx) {
    return new Promise(function (resolve, reject) {
        try {
            adminClient.del(ApiPrefix + '/workties/' + Workties[idx]._id, function (err, req, res, data) {
                resolve({res: res});
            });
        } catch (ex) {
            reject(ex);
        }
    });
}
// Deletes workflow fixture #idx; resolves with {res} for status-code assertions.
function _deleteWorkflow(idx) {
    return new Promise(function (resolve, reject) {
        try {
            adminClient.del(ApiPrefix + '/workflows/' + Workflows[idx]._id, function (err, req, res, data) {
                resolve({res: res});
            });
        } catch (ex) {
            reject(ex);
        }
    });
}
// Run once before the first test case
before(function (done) {
    // Fixture setup, in order: create workties, then workflows, then attach
    // workty instances to the first workflow. Each step asserts HTTP 201 and
    // stashes the created entities in the shared arrays used by the tests.
    Promise.all(_createPromises(_createWorkty, WorktiesPerPage)).then(function (results) { // Create workties
        for (var idx = 0; idx < results.length; idx++) {
            var res = results[idx].res;
            var data = results[idx].data;
            expect(res.statusCode).to.equals(201);
            expect(data).to.not.be.empty;
            Workties.push(data);
        }
        return Promise.all(_createPromises(_createWorkflow, WorkflowsPerPage));
    }).then(function (results) { // Create workflows
        for (var idx = 0; idx < results.length; idx++) {
            var res = results[idx].res;
            var data = results[idx].data;
            expect(res.statusCode).to.equals(201);
            expect(data).to.not.be.empty;
            Workflows.push(data);
        }
        return Promise.all(_createPromises(_createWorktyInstance, WorktiesPerPage));
    }).then(function (results) { // Create workties instances
        for (var idx = 0; idx < results.length; idx++) {
            var res = results[idx].res;
            var data = results[idx].data;
            expect(res.statusCode).to.equals(201);
            expect(data).to.not.be.empty;
            WorktiesInstances.push(data);
        }
    }).done(function (err) {
        // .done() surfaces any assertion failure from the chain as err.
        expect(err).to.be.undefined;
        done();
    });
});
// Run once after the last test case
after(function (done) {
    // Fixture teardown: delete every workty, then every workflow, asserting
    // HTTP 204 (no content) for each deletion.
    Promise.all(_createPromises(_deleteWorkty, WorktiesPerPage)).then(function (results) { // Delete workties
        for (var idx = 0; idx < results.length; idx++) {
            var res = results[idx].res;
            expect(res.statusCode).to.equals(204);
        }
        return Promise.all(_createPromises(_deleteWorkflow, WorkflowsPerPage));
    }).then(function (results) { // Delete workflows
        for (var idx = 0; idx < results.length; idx++) {
            var res = results[idx].res;
            expect(res.statusCode).to.equals(204);
        }
    }).done(function (err) {
        expect(err).to.be.undefined;
        done();
    });
});
expect(res.statusCode).to.equals(200);
expect(data).to.have.length.above(1);
done();
});
});
it('should get 3', function (done) {
adminClient.get(ApiPrefix + '/workflows?page_num=1&per_page=3', function (err, req, res, data) {
expect(err).to.be.null;
expect(res.statusCode).to.equals(200);
expect(data).to.have.length(3);
done();
});
});
it('should get 2', function (done) {
adminClient.get(ApiPrefix + '/workflows?per_page=2', function (err, req, res, data) {
expect(err).to.be.null;
expect(res.statusCode).to.equals(200);
expect(data).to.have.length(2);
done();
});
});
it('should get records-count', function (done) {
adminClient.get(ApiPrefix + '/workflows?per_page=3&count=true', function (err, req, res, data) {
expect(err).to.be.null;
expect(res.statusCode).to.equals(200);
expect(data).to.have.length(3);
expect(res.headers).to.contain.keys('records-count');
expect(res.headers['records-count']).equals('3');
done();
});
});
it('should get sorted', function (done) {
adminClient.get(ApiPrefix + '/workflows?per_page=3&sort=_id', function (err, req, res, data) {
expect(err).to.be.null;
expect(res.statusCode).to.equals(200);
expect(data).to.have.length(3);
expect(data).to.satisfy(function (workflows) {
var currentValue = null;
_.each(workflows, function (workflow) {
if (!currentValue) {
currentValue = workflow._id;
} else {
if (workflow._id <= currentValue) expect(true).to.be.false();
currentValue = workflow._id;
}
});
return true;
});
done();
});
});
it('should get fields', function (done) {
adminClient.get(ApiPrefix + '/workflows?per_page=3&fields=_id,name,desc', function (err, req, res, data) {
expect(err).to.be.null;
expect(res.statusCode).to.equals(200);
expect(data).to.have.length(3);
expect(data).to.satisfy(function (workflows) {
_.each(workflows, function (workflow) {
expect(workflow).to.have.keys(['_id', 'name', 'desc']);
});
return true;
});
done();
});
});
it('should get embed fields', function (done) {
adminClient.get(ApiPrefix + '/workflows?per_page=3&embed=worktiesInstances,account', function (err, req, res, data) {
expect(err).to.be.null;
expect(res.statusCode).to.equals(200);
expect(data).to.have.length(3);
expect(data).to.satisfy(function (workflows) {
_.each(workflows, function (workflow) {
expect(workflow).to.contain.keys('accountId', 'worktiesInstancesIds');
expect(workflow.accountId).to.contain.keys('_id');
if (workflow.worktiesInstancesIds.length > 0) {
expect(workflow.worktiesInstancesIds[0]).to.contain.keys('_id');
}
});
return true;
});
done();
});
});
});
describe('.getById()', function () {
it('should get a 200 response', function (done) {
adminClient.get(ApiPrefix + '/workflows/' + Workflows[0]._id, function (err, req, res, data) {
expect(err).to.be.null;
expect(res.statusCode).to.equals(200);
expect(data).to.not.be.empty;
done();
});
});
it('should get a 500 response not found', function (done) {
adminClient.get(ApiPrefix + '/workflows/' + 'N', function (err, req, res, data) {
expect(err).to.not.be.null;
expect(res.statusCode).to.equals(500);
expect(data).to.not.be.empty;
expect(data).to.have.keys('error');
expect(data.error).to.have.keys(['code', 'error_link', 'message', 'inputParameters']);
expect(data.error.code).to.equals(1);
expect(data.error.error_link).to.not.be.empty;
expect(data.error.message).to.not.be.empty;
done();
});
});
it('should get records-count', function (done) {
adminClient.get(ApiPrefix + '/workflows/' + Workflows[0]._id + '?count=true', function (err, req, res, data) {
expect(err).to.be.null;
expect(res.statusCode).to.equals(200);
expect(data).to.not.be.empty;
expect(res.headers).to.contain.keys('records-count');
expect(res.headers['records-count']).equals('1');
done();
});
});
it('should get fields', function (done) {
adminClient.get(ApiPrefix + '/workflows/' + Workflows[0]._id + '?fields=_id,name,desc', function (err, req, res, data) {
expect(err).to.be.null;
expect(res.statusCode).to.equals(200);
expect(data).to.not.be.empty;
expect(data).to.have.keys(['_id', 'name', 'desc']);
done();
});
});
it('should get embed fields', function (done) {
adminClient.get(ApiPrefix + '/workflows/' + Workflows[0]._id + '?embed=worktiesInstances,account', function (err, req, res, data) {
expect(err).to.be.null;
expect(res.statusCode).to.equals(200);
expect(data).to.not.be.empty;
expect(data).to.contain.keys('accountId', 'worktiesInstancesIds');
expect(data.accountId).to.contain.keys('_id');
if (data.worktiesInstancesIds.length > 0) {
expect(data.worktiesInstancesIds[0]).to.contain.keys('_id');
}
done();
});
});
});
describe('.add()', function () {
it('should get a 409 response', function (done) {
adminClient.post(ApiPrefix + '/workflows', function (err, req, res, data) {
expect(err).to.not.be.null;
expect(res.statusCode).to.equals(409);
var error = JSON.parse(err.message).error;
expect(error.message).to.equals("Validation Error");
expect(error.errors).to.have.length(1);
expect(error.errors[0].message).to.equals("Path `name` is required.");
done();
});
});
it('should get a 201 response', function (done) {
// Create workflow
adminClient.post(ApiPrefix + '/workflows', {
name: 'mytestworkflow',
desc: 'testworkflow'
}, function (err, req, res, data) {
expect(err).to.be.null;
expect(res.statusCode).to.equals(201);
expect(res.headers).to.contain.keys('location');
expect(data).to.not.be.null;
var workflowId = data._id;
expect(res.headers.location).to.have.string('/' + workflowId);
expect(data.name).to.be.equal('mytestworkflow');
expect(data.desc).to.be.equal('testworkflow');
// Delete workflow
adminClient.del(res.headers.location, function (err, req, res, data) {
expect(err).to.be.null;
expect(res.statusCode).to.equals(204);
done();
});
});
});
});
describe('.update()', function () {
it('should get a 400 response', function (done) {
adminClient.put(ApiPrefix + '/workflows/' + Workflows[0]._id, function (err, req, res, data) {
expect(err).to.not.be.null;
expect(res.statusCode).to.equals(400);
var error = JSON.parse(err.message).error;
expect(error.errors).is.empty;
done();
});
});
it('should get a 409 response', function (done) {
adminClient.put(ApiPrefix + '/workflows/' + Workflows[0]._id, {name: ''}, function (err, req, res, data) {
expect(err).to.not.be.null;
expect(res.statusCode).to.equals(409);
var error = JSON.parse(err.message).error;
expect(error.errors).to.have.length(1);
expect(error.errors[0].message).to.equals("Path `name` is required.");
done();
});
});
it('should get a 200 response', function (done) {
adminClient.put(ApiPrefix + '/workflows/' + Workflows[0]._id, {
name: 'mytestworkflow',
desc: 'testworkflow'
}, function (err, req, res, data) {
expect(err).to.be.null;
expect(res.statusCode).to.equals(200);
expect(data).to.not.be.null;
var workflowId = data._id;
expect(workflowId).to.equals(Workflows[0]._id);
expect(data.name).to.be.equal('mytestworkflow');
expect(data.desc).to.be.equal('testworkflow');
done();
});
});
});
describe('.del()', function () {
it('should get a 500 response not found', function (done) {
// Delete workflow
adminClient.del(ApiPrefix + '/workflows/' + Workflows[0]._id + 'N', function (err, req, res, data) {
expect(err).to.not.be.null;
expect(res.statusCode).to.equals(500);
done();
});
});
it('should get a 204 response', function (done) {
// Create workflow
adminClient.post(ApiPrefix + '/workflows', {
name: 'mytestworkflow',
desc: 'testworkflow'
}, function (err, req, res, data) {
expect(err).to.be.null;
expect(res.statusCode).to.equals(201);
expect(res.headers).to.contain.keys('location');
expect(data).to.not.be.null;
var workflowId = data._id;
expect(res.headers.location).to.have.string('/' + workflowId);
// Delete workflow
adminClient.del(ApiPrefix + '/workflows/' + workflowId, function (err, req, res, data) {
expect(err).to.be.null;
expect(data).is.empty;
expect(res.statusCode).to.equals(204);
done();
});
});
});
});
describe('.run()', function () {
it('should get a 200 response', function (done) {
adminClient.put(ApiPrefix + '/workflows/' + Workflows[0]._id + '/running', function (err, req, res, data) {
expect(err).to.be.null;
expect(res.statusCode).to.equals(200);
expect(data).to.not.be.empty;
done();
});
});
it('should get a 500 response not found', function (done) {
adminClient.put(ApiPrefix + '/workflows/' + Workflows[0]._id + 'N' + '/running', function (err, req, res, data) {
expect(err).to.not.be.null;
expect(res.statusCode).to.equals(500);
expect(data).to.not.be.empty;
expect(data).to.have.keys('error');
expect(data.error).to.have.keys(['code', 'error_link', 'message', 'inputParameters']);
expect(data.error.code).to.be.equals(1);
expect(data.error.error_link).to.not.be.empty;
expect(data.error.message).to.not.be.empty;
done();
});
});
describe('multiple workflows', function () {
var WorkflowExtraPerPage = 2;
var WorkflowExtraIds = [];
function _deleteExtraWorkflow(idx) {
return new Promise(function (resolve, reject) {
try {
adminClient.del(ApiPrefix + '/workflows/' + WorkflowExtraIds[idx], function (err, req, res, data) {
resolve({res: res});
});
} catch (ex) {
reject(ex);
}
});
}
// Run once before the first test case
before(function (done) {
Promise.all(_createPromises(_createWorkflow, WorkflowExtraPerPage)).then(function (results) { // Create workflows
for (var idx = 0; idx < results.length; idx++) {
var res = results[idx].res;
var data = results[idx].data;
expect(res.statusCode).to.equals(201);
expect(data).to.not.be.empty;
WorkflowExtraIds.push(data._id);
}
}).done(function (err) {
expect(err).to.be.undefined;
done();
});
});
// Run once after the last test case
after(function (done) {
Promise.all(_createPromises(_deleteExtraWorkflow, WorkflowExtraPerPage)).then(function (results) { // Delete workflows
for (var idx = 0; idx < results.length; idx++) {
var res = results[idx].res;
expect(res.statusCode).to.equals(204);
}
}).done(function (err) {
expect(err).to.be.undefined;
done();
});
});
});
});
describe('.stop()', function () {
it('should get a 200 response', function (done) {
// Run workflow
adminClient.del(ApiPrefix + '/workflows/' + Workflows[0]._id + '/running', function (err, req, res, data) {
expect(err).to.be.null;
expect(res.statusCode).to.equals(200);
done();
});
});
it('should get a 500 response not found', function (done) {
adminClient.del(ApiPrefix + '/workflows/' + Workflows[0]._id + 'N' + '/running', function (err, req, res, data) {
expect(err).to.not.be.null;
expect(res.statusCode).to.equals(500);
expect(data).to.not.be.empty;
expect(data).to.have.keys('error');
expect(data.error).to.have.keys(['code', 'error_link', 'message', 'inputParameters']);
expect(data.error.code).to.be.equals(1);
expect(data.error.error_link).to.not.be.empty;
expect(data.error.message).to.not.be.empty;
done();
});
});
it('should get a 200 response after two stops', function (done) {
// Run workflow
adminClient.put(ApiPrefix + '/workflows/' + Workflows[0]._id + '/running', function (err, req, res, data) {
expect(err).to.be.null;
expect(res.statusCode).to.equals(200);
expect(data).to.not.be.empty;
// Stop workflow twice
adminClient.del(ApiPrefix + '/workflows/' + Workflows[0]._id + '/running', function (err, req, res, data) {
expect(err).to.be.null;
expect(res.statusCode).to.equals(200);
adminClient.del(ApiPrefix + '/workflows/' + Workflows[0]._id + '/running', function (err, req, res, data) {
expect(err).to.be.null;
expect(res.statusCode).to.equals(200);
done();
});
});
});
});
});
describe('.resume()', function () {
it('should get a 200 response', function (done) {
// Resume workflow
adminClient.put(ApiPrefix + '/workflows/' + Workflows[0]._id + '/running', function (err, req, res, data) {
expect(err).to.be.null;
expect(res.statusCode).to.equals(200);
expect(data).to.not.be.empty;
done();
});
});
it('should get a 500 response not found', function (done) {
adminClient.put(ApiPrefix + '/workflows/' + Workflows[0]._id + 'N' + '/running', function (err, req, res, data) {
expect(err).to.not.be.null;
expect(res.statusCode).to.equals(500);
expect(data).to.not.be.empty;
expect(data).to.have.keys('error');
expect(data.error).to.have.keys(['code', 'error_link', 'message', 'inputParameters']);
expect(data.error.code).to.be.equals(1);
expect(data.error.error_link).to.not.be.empty;
expect(data.error.message).to.not.be.empty;
done();
});
});
});
describe('Workties instances', function () {
describe('.getAllWorktiesInstances()', function () {
it('should get a 200 response', function (done) {
adminClient.get(ApiPrefix + '/workflows/' + Workflows[0]._id + '/worktiesInstances', function (err, req, res, data) {
expect(err).to.be.null;
expect(res.statusCode).to.equals(200);
expect(data).to.have.length.above(0);
expect(data[0].workflowId).to.equals(Workflows[0]._id);
done();
});
});
it('should get 2', function (done) {
adminClient.get(ApiPrefix + '/workflows/' + Workflows[0]._id + '/worktiesInstances?per_page=2', function (err, req, res, data) {
expect(err).to.be.null;
expect(res.statusCode).to.equals(200);
expect(data).to.have.length(2);
done();
});
});
it('should get records-count', function (done) {
adminClient.get(ApiPrefix + '/workflows/' + Workflows[0]._id + '/worktiesInstances?per_page=2&count=true', function (err, req, res, data) {
expect(err).to.be.null;
expect(res.statusCode).to.equals(200);
expect(data).to.have.length(2);
expect(res.headers).to.contain.keys('records-count');
expect(res.headers['records-count']).equals('2');
done();
});
});
it('should get fields', function (done) {
adminClient.get(ApiPrefix + '/workflows/' + Workflows[0]._id + '/worktiesInstances?per_page=2&fields=_id,desc,created', function (err, req, res, data) {
expect(err).to.be.null;
expect(res.statusCode).to.equals(200);
expect(data).to.have.length(2);
expect(data).to.satisfy(function (workflowInstances) {
_.each(workflowInstances, function (workflowInstance) {
expect(workflowInstance).to.have.keys(['_id', 'desc', 'created']);
});
return true;
});
done();
});
});
it('should get embed fields', function (done) {
adminClient.get(ApiPrefix + '/workflows/' + Workflows[0]._id + '/worktiesInstances?per_page=2&embed=workflow,state', function (err, req, res, data) {
expect(err).to.be.null;
expect(res.statusCode).to.equals(200);
expect(data).to.have.length(2);
expect(data).to.satisfy(function (workflowInstances) {
_.each(workflowInstances, function (workflowInstance) {
expect(workflowInstance).to.contain.keys('stateId', 'workflowId');
expect(workflowInstance.workflowId).to.contain.keys('_id');
if (workflowInstance.stateId.length > 0) {
expect(workflowInstance.stateId[0]).to.contain.keys('_id');
}
});
return true;
});
done();
});
});
});
describe('.getWorktyInstanceById()', function () {
it('should get a 200 response', function (done) {
adminClient.get(ApiPrefix + '/workflows/' + Workflows[0]._id + '/worktiesInstances/' + WorktiesInstances[0]._id, function (err, req, res, data) {
expect(err).to.be.null;
expect(res.statusCode).to.equals(200);
expect(data).to.not.be.empty;
done();
});
});
it('should get a 500 response not found', function (done) {
adminClient.get(ApiPrefix + '/workflows/' + Workflows[0]._id + '/worktiesInstances/' + WorktiesInstances[0]._id + 'N', function (err, req, res, data) {
expect(err).to.not.be.null;
expect(res.statusCode).to.equals(500);
expect(data).to.not.be.empty;
expect(data).to.have.keys('error');
expect(data.error).to.have.keys(['code', 'error_link', 'message', 'inputParameters']);
expect(data.error.code).to.equals(1);
expect(data.error.error_link).to.not.be.empty;
expect(data.error.message).to.not.be.empty;
done();
});
});
it('should get records-count', function (done) {
adminClient.get(ApiPrefix + '/workflows/' + Workflows[0]._id + '/worktiesInstances/' + WorktiesInstances[0]._id + '?count=true', function (err, req, res, data) {
expect(err).to.be.null;
expect(res.statusCode).to.equals(200);
expect(data).to.not.be.empty;
expect(res.headers).to.contain.keys('records-count');
expect(res.headers['records-count']).equals('1');
done();
});
});
it('should get fields', function (done) {
adminClient.get(ApiPrefix + '/workflows/' + Workflows[0]._id + '/worktiesInstances/' + WorktiesInstances[0]._id + '?fields=_id,desc', function (err, req, res, data) {
expect(err).to.be.null;
expect(res.statusCode).to.equals(200);
expect(data).to.not.be.empty;
expect(data).to.have.keys(['_id', 'desc']);
done();
});
});
it('should get embed fields', function (done) {
adminClient.get(ApiPrefix + '/workflows/' + Workflows[0]._id + '/worktiesInstances/' + WorktiesInstances[0]._id + '?embed=workflow,state', function (err, req, res, data) {
expect(err).to.be.null;
expect(res.statusCode).to.equals(200);
expect(data).to.not.be.empty;
expect(data).to.contain.keys('stateId', 'workflowId');
expect(data.stateId).to.contain.keys('_id');
expect(data.workflowId).to.contain.keys('_id');
expect(data.workflowId._id).to.equals(Workflows[0]._id);
done();
});
});
});
describe('.addWorktyInstance()', function () {
it('should get a 201 response', function (done) {
// Create workty instance
adminClient.post(ApiPrefix + '/workflows/' + Workflows[0]._id + '/worktiesInstances', {
desc: 'descworktyinstance4',
worktyId: Workties[0]._id
}, function (err, req, res, data) {
expect(err).to.be.null;
expect(res.statusCode).to.equals(201);
expect(res.headers).to.contain.keys('location');
expect(data).to.not.be.null;
expect(data.worktyId).to.be.equal(Workties[0]._id);
expect(data.desc).to.be.equal('descworktyinstance4');
// Delete workty instance
adminClient.del(res.headers.location, function (err, req, res, data) {
expect(err).to.be.null;
expect(data).is.empty;
expect(res.statusCode).to.equals(204);
done();
});
});
});
it('should get a 500 response with code 12 position type is unknown', function (done) {
adminClient.post(ApiPrefix + '/workflows/' + Workflows[0]._id + '/worktiesInstances?position_type=unknown', {
desc: 'testworkty',
worktyId: Workties[0]._id
}, function (err, req, res, data) {
expect(err).to.not.be.null;
expect(res.statusCode).to.equals(500);
expect(data).to.have.keys('error');
expect(data.error).to.have.keys(['code', 'error_link', 'message', 'inputParameters']);
expect(data.error.code).to.equals(12);
expect(data.error.error_link).to.not.be.empty;
expect(data.error.message).to.not.be.empty;
done();
});
});
it('should get a 201 response for position type is last', function (done) {
adminClient.post(ApiPrefix + '/workflows/' + Workflows[0]._id + '/worktiesInstances?position_type=last', {
desc: 'testworkty',
worktyId: Workties[0]._id
}, function (err, req, res, data) {
expect(err).to.be.null;
expect(res.statusCode).to.equals(201);
expect(res.headers).to.contain.keys('location');
expect(data).to.not.be.null;
var worktyInstanceId = data._id;
expect(res.headers.location).to.have.string('worktiesInstances/' + worktyInstanceId);
expect(data.desc).to.be.equal('testworkty');
var headerLocation = res.headers.location;
// Get workflow to check workty instance added in last position
adminClient.get(ApiPrefix + '/workflows/' + Workflows[0]._id, function (err, req, res, data) {
expect(err).to.be.null;
expect(res.statusCode).to.equals(200);
expect(data).to.not.be.empty;
expect(data.worktiesInstancesIds).to.satisfy(function (worktiesInstancesIds) {
if (worktiesInstancesIds.length !== 3) {
return false;
}
return worktyInstanceId === worktiesInstancesIds[worktiesInstancesIds.length - 1];
});
// Delete workty instance
adminClient.del(headerLocation, function (err, req, res, data) {
expect(err).to.be.null;
expect(data).is.empty;
expect(res.statusCode).to.equals(204);
done();
});
});
});
});
it('should get a 201 response for position type is first', function (done) {
adminClient.post(ApiPrefix + '/workflows/' + Workflows[0]._id + '/worktiesInstances?position_type=first', {
desc: 'testworkty',
worktyId: Workties[0]._id
}, function (err, req, res, data) {
expect(err).to.be.null;
expect(res.statusCode).to.equals(201);
expect(res.headers).to.contain.keys('location');
expect(data).to.not.be.null;
var worktyInstanceId = data._id;
expect(res.headers.location).to.have.string('worktiesInstances/' + worktyInstanceId);
expect(data.desc).to.be.equal('testworkty');
var headerLocation = res.headers.location;
// Get workflow to check workty instance added in first position
adminClient.get(ApiPrefix + '/workflows/' + Workflows[0]._id, function (err, req, res, data) {
expect(err).to.be.null;
expect(res.statusCode).to.equals(200);
expect(data).to.not.be.empty;
expect(data.worktiesInstancesIds).to.satisfy(function (worktiesInstancesIds) {
if (worktiesInstancesIds.length !== 3) {
return false;
}
return worktyInstanceId === worktiesInstancesIds[0];
});
// Delete workty instance
adminClient.del(headerLocation, function (err, req, res, data) {
expect(err).to.be.null;
expect(data).is.empty;
expect(res.statusCode).to.equals(204);
done();
});
});
});
});
it('should get a 201 response for position index is 0 among 4 values', function (done) {
adminClient.post(ApiPrefix + '/workflows/' + Workflows[0]._id + '/worktiesInstances?position_index=0', {
desc: 'testworkty',
worktyId: Workties[0]._id
}, function (err, req, res, data) {
expect(err).to.be.null;
expect(res.statusCode).to.equals(201);
expect(res.headers).to.contain.keys('location');
expect(data).to.not.be.null;
var worktyInstanceId = data._id;
expect(res.headers.location).to.have.string('worktiesInstances/' + worktyInstanceId);
expect(data.desc).to.be.equal('testworkty');
var headerLocation = res.headers.location;
// Get workflow to check workty instance added by index 1
adminClient.get(ApiPrefix + '/workflows/' + Workflows[0]._id, function (err, req, res, data) {
expect(err).to.be.null;
expect(res.statusCode).to.equals(200);
expect(data).to.not.be.empty;
expect(data.worktiesInstancesIds).to.satisfy(function (worktiesInstancesIds) {
if (worktiesInstancesIds.length !== 3) {
return false;
}
return _.indexOf(worktiesInstancesIds, worktyInstanceId) === 0;
});
// Delete workty instance
adminClient.del(headerLocation, function (err, req, res, data) {
expect(err).to.be.null;
expect(data).is.empty;
expect(res.statusCode).to.equals(204);
done();
});
});
});
});
it('should get a 500 response with code 10 for position index is -1', function (done) {
adminClient.post(ApiPrefix + '/workflows/' + Workflows[0]._id + '/worktiesInstances?position_index=-1', {
desc: 'testworkty',
worktyId: Workties[0]._id
}, function (err, req, res, data) {
expect(err).to.not.be.null;
expect(res.statusCode).to.equals(500);
expect(data).to.have.keys('error');
expect(data.error).to.have.keys(['code', 'error_link', 'message', 'inputParameters']);
expect(data.error.code).to.equals(10);
expect(data.error.error_link).to.not.be.empty;
expect(data.error.message).to.not.be.empty;
done();
});
});
it('should get a 500 response with code 11 for missing position id', function (done) {
adminClient.post(ApiPrefix + '/workflows/' + Workflows[0]._id + '/worktiesInstances?position_id=N', {
desc: 'testworkty',
worktyId: Workties[0]._id
}, function (err, req, res, data) {
expect(err).to.not.be.null;
expect(res.statusCode).to.equals(500);
expect(data).to.have.keys('error');
expect(data.error).to.have.keys(['code', 'error_link', 'message', 'inputParameters']);
expect(data.error.code).to.equals(11);
expect(data.error.error_link).to.not.be.empty;
expect(data.error.message).to.not.be.empty;
done();
});
});
it('should get a 201 response for position id', function (done) {
// Insert workty by index 0
adminClient.post(ApiPrefix + '/workflows/' + Workflows[0]._id + '/worktiesInstances?position_index=0', {
desc: 'testworkty',
worktyId: Workties[0]._id
}, function (err, req, res, data) {
expect(err).to.be.null;
expect(res.statusCode).to.equals(201);
expect(res.headers).to.contain.keys('location');
expect(data).to.not.be.null;
var worktyInstanceId = data._id;
expect(res.headers.location).to.have.string('worktiesInstances/' + worktyInstanceId);
expect(data.desc).to.be.equal('testworkty');
var headerLocationFirst = res.headers.location;
// Get workflow to check workty instance added by index 1
adminClient.get(ApiPrefix + '/workflows/' + Workflows[0]._id, function (err, req, res, data) {
expect(err).to.be.null;
expect(res.statusCode).to.equals(200);
expect(data).to.not.be.empty;
expect(data.worktiesInstancesIds).to.satisfy(function (worktiesInstancesIds) {
if (worktiesInstancesIds.length !== 3) {
return false;
}
return _.indexOf(worktiesInstancesIds, worktyInstanceId) === 0;
});
// Insert workty instance before worktyInstanceId
adminClient.post(ApiPrefix + '/workflows/' + Workflows[0]._id + '/worktiesInstances?position_id=' + worktyInstanceId, {
desc: 'testworkty',
worktyId: Workties[0]._id
}, function (err, req, res, data) {
expect(err).to.be.null;
expect(res.statusCode).to.equals(201);
expect(res.headers).to.contain.keys('location');
expect(data).to.not.be.null;
worktyInstanceId = data._id;
expect(res.headers.location).to.have.string('worktiesInstances/' + worktyInstanceId);
expect(data.desc).to.be.equal('testworkty');
var headerLocation = res.headers.location;
// Get workflow to check workty instance added by index 1
adminClient.get(ApiPrefix + '/workflows/' + Workflows[0]._id, function (err, req, res, data) {
expect(err).to.be.null;
expect(res.statusCode).to.equals(200);
expect(data).to.not.be.empty;
expect(data.worktiesInstancesIds).to.satisfy(function (worktiesInstancesIds) {
if (worktiesInstancesIds.length !== 4) {
return false;
}
return _.indexOf(worktiesInstancesIds, worktyInstanceId) === 0;
});
// Delete first workty instance
adminClient.del(headerLocationFirst, function (err, req, res, data) {
expect(err).to.be.null;
expect(data).is.empty;
expect(res.statusCode).to.equals(204);
// Delete second workty instance
adminClient.del(headerLocation, function (err, req, res, data) {
expect(err).to.be.null;
expect(data).is.empty;
expect(res.statusCode).to.equals(204);
done();
});
});
});
});
});
});
});
it('should get a 500 response with code 1 for missing worktyId', function (done) {
adminClient.post(ApiPrefix + '/workflows/' + Workflows[0]._id + '/worktiesInstances?position_index=-1', {desc: 'testworkty'}, function (err, req, res, data) {
expect(err).to.not.be.null;
expect(res.statusCode).to.equals(409);
expect(data).to.have.keys('error');
expect(data.error).to.have.keys(['code', 'error_link', 'message', 'errors', 'inputParameters']);
expect(data.error.code).is.empty;
expect(data.error.error_link).to.not.be.empty;
expect(data.error.message).to.not.be.empty;
done();
});
});
});
describe('.updateWorktyInstance()', function () {
it('should get a 400 response', function (done) {
adminClient.put(ApiPrefix + '/workflows/' + Workflows[0]._id + '/worktiesInstances/' + WorktiesInstances[0]._id, function (err, req, res, data) {
expect(err).to.not.be.null;
expect(res.statusCode).to.equals(400);
var error = JSON.parse(err.message).error;
expect(error.errors).is.empty;
done();
});
});
it('should get a 200 response', function (done) {
adminClient.put(ApiPrefix + '/workflows/' + Workflows[0]._id + '/worktiesInstances/' + WorktiesInstances[0]._id, {desc: 'updateddesc'}, function (err, req, res, data) {
expect(err).to.be.null;
expect(res.statusCode).to.equals(200);
expect(data).to.not.be.null;
expect(data.desc).to.be.equal('updateddesc');
done();
});
});
});
describe('.delWorktyInstance()', function () {
it('should get a 500 response not found', function (done) {
// Delete workty instance
adminClient.del(ApiPrefix + '/workflows/' + Workflows[0]._id + '/worktiesInstances/' + WorktiesInstances[0]._id + 'N', function (err, req, res, data) {
expect(err).to.not.be.null;
expect(res.statusCode).to.equals(500);
done();
});
});
it('should get a 204 response', function (done) {
// Create workty instance
adminClient.post(ApiPrefix + '/workflows/' + Workflows[0]._id + '/worktiesInstances', {
desc: 'testworkty',
worktyId: Workties[0]._id
}, function (err, req, res, data) {
expect(err).to.be.null;
expect(res.statusCode).to.equals(201);
expect(res.headers).to.contain.keys('location');
expect(data).to.not.be.null;
var workflowId = data.workflowId;
var worktyInstanceId = data._id;
expect(res.headers.location).to.have.string('/' + workflowId);
// Delete workty instance
adminClient.del(ApiPrefix + '/workflows/' + workflowId + '/worktiesInstances/' + worktyInstanceId, function (err, req, res, data) {
expect(err).to.be.null;
expect(data).is.empty;
expect(res.statusCode).to.equals(204);
done();
});
});
});
});
describe('.updateWorktyInstanceProperty()', function () {
it('should get a 400 response', function (done) {
adminClient.put(ApiPrefix + '/workflows/' + Workflows[0]._id + '/worktiesInstances/' + WorktiesInstances[0]._id + '/properties/' + WorktiesInstances[0].propertiesIds[0]._id, function (err, req, res, data) {
expect(err).to.not.be.null;
expect(res.statusCode).to.equals(400);
var error = JSON.parse(err.message).error;
expect(error.errors).is.empty;
done();
});
});
it('should get a 200 response', function (done) {
adminClient.put(ApiPrefix + '/workflows/' + Workflows[0]._id + '/worktiesInstances/' + WorktiesInstances[0]._id + '/properties/' + WorktiesInstances[0].propertiesIds[0]._id, {
name: 'NewPropertyName',
value: 'NewPropertyValue'
}, function (err, req, res, data) {
expect(err).to.be.null;
expect(res.statusCode).to.equals(200);
expect(data.name).to.be.equal('NewPropertyName');
expect(data.value).to.be.equal('NewPropertyValue');
done();
});
});
});
});
});<|fim▁end|> |
describe('.getAll()', function () {
it('should get a 200 response', function (done) {
adminClient.get(ApiPrefix + '/workflows', function (err, req, res, data) { |
<|file_name|>views.py<|end_file_name|><|fim▁begin|>from django.shortcuts import render
from django.middleware.csrf import get_token
from ajaxuploader.views import AjaxFileUploader
from pandora.backends import SignalBasedLocalUploadBackend
from pandora.models import Item
def home(request):
return render(request, 'pandora/home.html', {
'items': Item.objects.all(),
'csrf_token': get_token(request)
})<|fim▁hole|><|fim▁end|> |
import_uploader = AjaxFileUploader(SignalBasedLocalUploadBackend) |
<|file_name|>apps.py<|end_file_name|><|fim▁begin|><|fim▁hole|>from __future__ import unicode_literals
from django.apps import AppConfig
class SequencerConfig(AppConfig):
name = 'sequencer'<|fim▁end|> | |
<|file_name|>dokku-installer.py<|end_file_name|><|fim▁begin|>#!/usr/bin/env python2.7
import cgi
import json
import os
import re
import SimpleHTTPServer
import SocketServer
import subprocess
import sys
import threading
VERSION = 'v0.14.6'
hostname = ''
try:
command = "bash -c '[[ $(dig +short $HOSTNAME) ]] && echo $HOSTNAME || wget -q -O - icanhazip.com'"
hostname = subprocess.check_output(command, shell=True)
if ':' in hostname:
hostname = ''
except subprocess.CalledProcessError:
pass
key_file = os.getenv('KEY_FILE', None)
if os.path.isfile('/home/ec2-user/.ssh/authorized_keys'):
key_file = '/home/ec2-user/.ssh/authorized_keys'
elif os.path.isfile('/home/ubuntu/.ssh/authorized_keys'):
key_file = '/home/ubuntu/.ssh/authorized_keys'
else:
key_file = '/root/.ssh/authorized_keys'
admin_keys = []
if os.path.isfile(key_file):
try:
command = "cat {0}".format(key_file)
admin_keys = subprocess.check_output(command, shell=True).strip().split("\n")
except subprocess.CalledProcessError:
pass
def check_boot():
if 'onboot' not in sys.argv:
return
init_dir = os.getenv('INIT_DIR', '/etc/init')
systemd_dir = os.getenv('SYSTEMD_DIR', '/etc/systemd/system')
nginx_dir = os.getenv('NGINX_DIR', '/etc/nginx/conf.d')
if os.path.exists(init_dir):
with open('{0}/dokku-installer.conf'.format(init_dir), 'w') as f:
f.write("start on runlevel [2345]\n")
f.write("exec {0} selfdestruct\n".format(os.path.abspath(__file__)))
if os.path.exists(systemd_dir):
with open('{0}/dokku-installer.service'.format(systemd_dir), 'w') as f:
f.write("[Unit]\n")
f.write("Description=Dokku web-installer\n")
f.write("\n")
f.write("[Service]\n")
f.write("ExecStart={0} selfdestruct\n".format(os.path.abspath(__file__)))
f.write("\n")
f.write("[Install]\n")
f.write("WantedBy=multi-user.target\n")
f.write("WantedBy=graphical.target\n")
if os.path.exists(nginx_dir):
with open('{0}/dokku-installer.conf'.format(nginx_dir), 'w') as f:
f.write("upstream dokku-installer { server 127.0.0.1:2000; }\n")
f.write("server {\n")
f.write(" listen 80;\n")
f.write(" location / {\n")
f.write(" proxy_pass http://dokku-installer;\n")
f.write(" }\n")
f.write("}\n")
subprocess.call('rm -f /etc/nginx/sites-enabled/*', shell=True)
sys.exit(0)
class GetHandler(SimpleHTTPServer.SimpleHTTPRequestHandler):
def do_GET(self):
content = PAGE.replace('{VERSION}', VERSION)
content = content.replace('{HOSTNAME}', hostname)
content = content.replace('{AUTHORIZED_KEYS_LOCATION}', key_file)
content = content.replace('{ADMIN_KEYS}', "\n".join(admin_keys))
self.send_response(200)
self.end_headers()
self.wfile.write(content)
def do_POST(self):
if self.path not in ['/setup', '/setup/']:
return
params = cgi.FieldStorage(fp=self.rfile,
headers=self.headers,
environ={
'REQUEST_METHOD': 'POST',
'CONTENT_TYPE': self.headers['Content-Type']})
vhost_enable = 'false'
dokku_root = os.getenv('DOKKU_ROOT', '/home/dokku')
if 'vhost' in params and params['vhost'].value == 'true':
vhost_enable = 'true'
with open('{0}/VHOST'.format(dokku_root), 'w') as f:
f.write(params['hostname'].value)
else:
try:
os.remove('{0}/VHOST'.format(dokku_root))
except OSError:
pass
with open('{0}/HOSTNAME'.format(dokku_root), 'w') as f:
f.write(params['hostname'].value)
for (index, key) in enumerate(params['keys'].value.splitlines(), 1):
user = 'admin'
if self.admin_user_exists() is not None:
user = 'web-admin'
if self.web_admin_user_exists() is not None:
index = int(self.web_admin_user_exists()) + 1
elif self.web_admin_user_exists() is None:
index = 1
elif self.admin_user_exists() is None:
pass
else:
index = int(self.admin_user_exists()) + 1
user = user + str(index)
command = ['sshcommand', 'acl-add', 'dokku', user]
proc = subprocess.Popen(command, stdin=subprocess.PIPE)
proc.stdin.write(key)
proc.stdin.close()
proc.wait()
set_debconf_selection('boolean', 'nginx_enable', 'true')
set_debconf_selection('boolean', 'skip_key_file', 'true')
set_debconf_selection('boolean', 'vhost_enable', vhost_enable)
set_debconf_selection('boolean', 'web_config', 'false')
set_debconf_selection('string', 'hostname', params['hostname'].value)
if 'selfdestruct' in sys.argv:
DeleteInstallerThread()
self.send_response(200)
self.end_headers()
self.wfile.write(json.dumps({'status': 'ok'}))
def web_admin_user_exists(self):
return self.user_exists('web-admin(\d+)')
def admin_user_exists(self):
return self.user_exists('admin(\d+)')
def user_exists(self, name):
command = 'dokku ssh-keys:list'
pattern = re.compile(r'NAME="' + name + '"')
proc = subprocess.Popen(command, shell=True, stdout=subprocess.PIPE)
max_num = 0
exists = False
for line in proc.stdout:
m = pattern.search(line)
if m:
# User of the form `user` or `user#` exists
exists = True
max_num = max(max_num, m.group(1))
if exists:
return max_num
else:
return None
def set_debconf_selection(debconf_type, key, value):
found = False
with open('/etc/os-release', 'r') as f:
for line in f:
if 'debian' in line:
found = True
if not found:
return
ps = subprocess.Popen(['echo', 'dokku dokku/{0} {1} {2}'.format(
key, debconf_type, value
)], stdout=subprocess.PIPE)
try:
subprocess.check_output(['debconf-set-selections'], stdin=ps.stdout)
except subprocess.CalledProcessError:
pass
ps.wait()
class DeleteInstallerThread(object):
def __init__(self, interval=1):
thread = threading.Thread(target=self.run, args=())
thread.daemon = True
thread.start()
def run(self):
command = "rm /etc/nginx/conf.d/dokku-installer.conf && /etc/init.d/nginx stop && /etc/init.d/nginx start"
try:
subprocess.call(command, shell=True)
except:
pass
command = "rm -f /etc/init/dokku-installer.conf /etc/systemd/system/dokku-installer.service && (stop dokku-installer || systemctl stop dokku-installer.service)"
try:
subprocess.call(command, shell=True)
except:
pass
def main():
check_boot()
port = int(os.getenv('PORT', 2000))
httpd = SocketServer.TCPServer(("", port), GetHandler)
print "Listening on 0.0.0.0:{0}, CTRL+C to stop".format(port)
httpd.serve_forever()
PAGE = """
<html>
<head>
<meta charset="utf-8" />
<title>Dokku Setup</title>
<link rel="stylesheet" href="https://stackpath.bootstrapcdn.com/bootstrap/4.1.3/css/bootstrap.min.css" integrity="sha384-MCw98/SFnGE8fJT3GXwEOngsV7Zt27NXFoaoApmYm81iuXoPkFOJwJ8ERdknLPMO" crossorigin="anonymous">
<style>
.bd-callout {
padding: 1.25rem;
margin-top: 1.25rem;
margin-bottom: 1.25rem;
border: 1px solid #eee;
border-left-width: .25rem;
border-radius: .25rem;
}
.bd-callout p:last-child {
margin-bottom: 0;
}
.bd-callout-info {
border-left-color: #5bc0de;
}
pre {
font-size: 80%;
margin-bottom: 0;
}<|fim▁hole|> h1 small {
font-size: 50%;
}
h5 {
font-size: 1rem;
}
.container {
width: 640px;
}
.result {
padding-left: 20px;
}
input.form-control, textarea.form-control {
background-color: #fafbfc;
font-size: 14px;
}
input.form-control::placeholder, textarea.form-control::placeholder {
color: #adb2b8
}
</style>
</head>
<body>
<div class="container">
<form id="form" role="form">
<h1 class="pt-3">Dokku Setup <small class="text-muted">{VERSION}</small></h1>
<div class="alert alert-warning small" role="alert">
<strong>Warning:</strong> The SSH key filled out here can grant root access to the server. Please complete the setup as soon as possible.
</div>
<div class="row">
<div class="col">
<h3>Admin Access</h3>
<div class="form-group">
<label for="key">Public SSH Keys</label><br />
<textarea class="form-control" name="keys" rows="5" id="key" placeholder="Begins with 'ssh-rsa', 'ssh-dss', 'ssh-ed25519', 'ecdsa-sha2-nistp256', 'ecdsa-sha2-nistp384', or 'ecdsa-sha2-nistp521'">{ADMIN_KEYS}</textarea>
<small class="form-text text-muted">Public keys allow users to ssh onto the server as the <code>dokku</code> user, as well as remotely execute Dokku commands. They are currently auto-populated from: <code>{AUTHORIZED_KEYS_LOCATION}</code>, and can be changed later via the <a href="http://dokku.viewdocs.io/dokku/deployment/user-management/" target="_blank"><code>dokku ssh-keys</code></a> plugin.</small>
</div>
</div>
</div>
<div class="row">
<div class="col">
<h3>Hostname Configuration</h3>
<div class="form-group">
<label for="hostname">Hostname</label>
<input class="form-control" type="text" id="hostname" name="hostname" value="{HOSTNAME}" placeholder="A hostname or ip address such as {HOSTNAME}" />
<small class="form-text text-muted">This will be used as the default host for all applications, and can be changed later via the <a href="http://dokku.viewdocs.io/dokku/configuration/domains/" target="_blank"><code>dokku domains:set-global</code></a> command.</small>
</div>
<div class="form-check">
<input class="form-check-input" type="checkbox" id="vhost" name="vhost" value="true">
<label class="form-check-label" for="vhost">Use virtualhost naming for apps</label>
<small class="form-text text-muted">When enabled, Nginx will be run on port 80 and proxy requests to apps based on hostname.</small>
<small class="form-text text-muted">When disabled, a specific port will be setup for each application on first deploy, and requests to that port will be proxied to the relevant app.</small>
</div>
<div class="bd-callout bd-callout-info">
<h5>What will app URLs look like?</h5>
<pre><code id="example">http://hostname:port</code></pre>
</div>
</div>
</div>
<button type="button" onclick="setup()" class="btn btn-primary">Finish Setup</button> <span class="result"></span>
</form>
</div>
<div id="error-output"></div>
<script>
var $ = document.querySelector.bind(document)
function setup() {
if ($("#key").value.trim() == "") {
alert("Your admin public key cannot be blank.")
return
}
if ($("#hostname").value.trim() == "") {
alert("Your hostname cannot be blank.")
return
}
var data = new FormData($("#form"))
var inputs = [].slice.call(document.querySelectorAll("input, textarea, button"))
inputs.forEach(function (input) {
input.disabled = true
})
var result = $(".result")
fetch("/setup", {method: "POST", body: data})
.then(function(response) {
if (response.ok) {
return response.json()
} else {
throw new Error('Server returned error')
}
})
.then(function(response) {
result.classList.add("text-success");
result.textContent = "Success! Redirecting in 3 seconds. .."
setTimeout(function() {
window.location.href = "http://dokku.viewdocs.io/dokku~{VERSION}/deployment/application-deployment/";
}, 3000);
})
.catch(function (error) {
result.classList.add("text-danger");
result.textContent = "Could not send the request"
})
}
function update() {
if ($("#vhost").matches(":checked") && $("#hostname").value.match(/^(\d{1,3}\.){3}\d{1,3}$/)) {
alert("In order to use virtualhost naming, the hostname must not be an IP but a valid domain name.")
$("#vhost").checked = false;
}
if ($("#vhost").matches(':checked')) {
$("#example").textContent = "http://<app-name>."+$("#hostname").value
} else {
$("#example").textContent = "http://"+$("#hostname").value+":<app-port>"
}
}
$("#vhost").addEventListener("change", update);
$("#hostname").addEventListener("input", update);
update();
</script>
</body>
</html>
"""
if __name__ == "__main__":
main()<|fim▁end|> | |
<|file_name|>010-libphotofloat.js<|end_file_name|><|fim▁begin|>(function() {
/* constructor */
function PhotoFloat() {
this.albumCache = [];
}
/* public member functions */
PhotoFloat.prototype.album = function(subalbum, callback, error) {
var cacheKey, ajaxOptions, self;
if (typeof subalbum.photos !== "undefined" && subalbum.photos !== null) {
callback(subalbum);
return;
}
if (Object.prototype.toString.call(subalbum).slice(8, -1) === "String")
cacheKey = subalbum;
else
cacheKey = PhotoFloat.cachePath(subalbum.parent.path + "/" + subalbum.path);
if (this.albumCache.hasOwnProperty(cacheKey)) {
callback(this.albumCache[cacheKey]);
return;
}
self = this;
ajaxOptions = {
type: "GET",
dataType: "json",
url: "cache/" + cacheKey + ".json",
success: function(album) {
var i;
for (i = 0; i < album.albums.length; ++i)
album.albums[i].parent = album;
for (i = 0; i < album.photos.length; ++i)
album.photos[i].parent = album;
self.albumCache[cacheKey] = album;
callback(album);
}
};
if (typeof error !== "undefined" && error !== null) {
ajaxOptions.error = function(jqXHR, textStatus, errorThrown) {
error(jqXHR.status);
};
}
$.ajax(ajaxOptions);
};
PhotoFloat.prototype.albumPhoto = function(subalbum, callback, error) {
var nextAlbum, self;
self = this;
nextAlbum = function(album) {
//var index = Math.floor(Math.random() * (album.photos.length + album.albums.length));
if (1==1 && album.main != null && album.main != "") {
var index = 0;
for (index = 0; index < album.photos.length; ++index) {
if (PhotoFloat.cachePath(album.photos[index].name) === PhotoFloat.cachePath(album.main)) {
break;
}
}
callback(album, album.photos[index]);
}
else{
var index = 0;
if (album.photos.length > 0) {
index = album.photos.length - 1;
}
if (index >= album.photos.length) {
index -= album.photos.length;
self.album(album.albums[index], nextAlbum, error);
} else
callback(album, album.photos[index]);
}
};
if (typeof subalbum.photos !== "undefined" && subalbum.photos !== null)
nextAlbum(subalbum);
else
this.album(subalbum, nextAlbum, error);
};
PhotoFloat.prototype.parseHash = function(hash, callback, error) {
var index, album, photo;
hash = PhotoFloat.cleanHash(hash);
index = hash.lastIndexOf("/");
if (!hash.length) {
album = PhotoFloat.cachePath("root");
photo = null;
} else if (index !== -1 && index !== hash.length - 1) {
photo = hash.substring(index + 1);
album = hash.substring(0, index);
} else {
album = hash;
photo = null;
}
this.album(album, function(theAlbum) {
var i = -1;
if (photo !== null) {
for (i = 0; i < theAlbum.photos.length; ++i) {
if (PhotoFloat.cachePath(theAlbum.photos[i].name) === photo) {
photo = theAlbum.photos[i];<|fim▁hole|> }
}
if (i >= theAlbum.photos.length) {
photo = null;
i = -1;
}
}
callback(theAlbum, photo, i);
}, error);
};
PhotoFloat.prototype.authenticate = function(password, result) {
$.ajax({
type: "GET",
dataType: "text",
url: "auth?username=photos&password=" + password,
success: function() {
result(true);
},
error: function() {
result(false);
}
});
};
/* static functions */
PhotoFloat.cachePath = function(path) {
if (path === "")
return "root";
if (path.charAt(0) === "/")
path = path.substring(1);
path = path
.replace(/ /g, "_")
.replace(/\//g, "-")
.replace(/\(/g, "")
.replace(/\)/g, "")
.replace(/#/g, "")
.replace(/&/g, "")
.replace(/,/g, "")
.replace(/\[/g, "")
.replace(/\]/g, "")
.replace(/"/g, "")
.replace(/'/g, "")
.replace(/_-_/g, "-")
.toLowerCase();
while (path.indexOf("--") !== -1)
path = path.replace(/--/g, "-");
while (path.indexOf("__") !== -1)
path = path.replace(/__/g, "_");
return path;
};
PhotoFloat.photoHash = function(album, photo) {
return PhotoFloat.albumHash(album) + "/" + PhotoFloat.cachePath(photo.name);
};
PhotoFloat.albumHash = function(album) {
if (typeof album.photos !== "undefined" && album.photos !== null)
return PhotoFloat.cachePath(album.path);
return PhotoFloat.cachePath(album.parent.path + "/" + album.path);
};
PhotoFloat.photoPath = function(album, photo, size, square) {
var suffix;
if (square)
suffix = size.toString() + "s";
else
suffix = size.toString();
return "cache/" + PhotoFloat.cachePath(PhotoFloat.photoHash(album, photo) + "_" + suffix + ".jpg");
};
PhotoFloat.originalPhotoPath = function(album, photo) {
return "albums/" + album.path + "/" + photo.name;
};
PhotoFloat.trimExtension = function(name) {
var index = name.lastIndexOf(".");
if (index !== -1)
return name.substring(0, index);
return name;
};
PhotoFloat.cleanHash = function(hash) {
while (hash.length) {
if (hash.charAt(0) === "#")
hash = hash.substring(1);
else if (hash.charAt(0) === "!")
hash = hash.substring(1);
else if (hash.charAt(0) === "/")
hash = hash.substring(1);
else if (hash.substring(0, 3) === "%21")
hash = hash.substring(3);
else if (hash.charAt(hash.length - 1) === "/")
hash = hash.substring(0, hash.length - 1);
else
break;
}
return hash;
};
	/* make static methods callable as member functions */
	PhotoFloat.prototype.cachePath = PhotoFloat.cachePath;
	PhotoFloat.prototype.photoHash = PhotoFloat.photoHash;
	PhotoFloat.prototype.albumHash = PhotoFloat.albumHash;
	PhotoFloat.prototype.photoPath = PhotoFloat.photoPath;
	PhotoFloat.prototype.originalPhotoPath = PhotoFloat.originalPhotoPath;
	PhotoFloat.prototype.trimExtension = PhotoFloat.trimExtension;
	PhotoFloat.prototype.cleanHash = PhotoFloat.cleanHash;
	/* expose class globally */
	window.PhotoFloat = PhotoFloat;
}());
<|file_name|>TestModuleLoadedNotifys.py<|end_file_name|><|fim▁begin|>"""
Test how many times newly loaded binaries are notified;
they should be delivered in batches instead of one-by-one.
"""
from __future__ import print_function
import lldb
from lldbsuite.test.decorators import *
from lldbsuite.test.lldbtest import *
from lldbsuite.test import lldbutil
class ModuleLoadedNotifysTestCase(TestBase):
    """Verify that newly loaded binaries are announced in batched events."""

    mydir = TestBase.compute_mydir(__file__)
    NO_DEBUG_INFO_TESTCASE = True

    # DynamicLoaderDarwin should batch up notifications about
    # newly added/removed libraries. Other DynamicLoaders may
    # not be written this way.
    @skipUnlessDarwin
    def setUp(self):
        # Call super's setUp().
        TestBase.setUp(self)
        # Find the line number to break inside main().
        self.line = line_number('main.cpp', '// breakpoint')

    def test_launch_notifications(self):
        """Test that lldb broadcasts newly loaded libraries in batches."""
        self.build()
        exe = self.getBuildArtifact("a.out")
        self.dbg.SetAsync(False)

        # Listen for module load/unload events on the target broadcaster.
        listener = self.dbg.GetListener()
        listener.StartListeningForEventClass(
            self.dbg,
            lldb.SBTarget.GetBroadcasterClassName(),
            lldb.SBTarget.eBroadcastBitModulesLoaded | lldb.SBTarget.eBroadcastBitModulesUnloaded)

        # Create a target by the debugger.
        target = self.dbg.CreateTarget(exe)
        self.assertTrue(target, VALID_TARGET)

        # break on main
        breakpoint = target.BreakpointCreateByName('main', 'a.out')

        event = lldb.SBEvent()
        # CreateTarget() generated modules-loaded events; consume them & toss.
        # (The original loop body was the bare expression `True`, a no-op;
        # `pass` states the intent explicitly.)
        while listener.GetNextEvent(event):
            pass

        error = lldb.SBError()
        flags = target.GetLaunchInfo().GetLaunchFlags()
        process = target.Launch(listener,
                                None,      # argv
                                None,      # envp
                                None,      # stdin_path
                                None,      # stdout_path
                                None,      # stderr_path
                                None,      # working directory
                                flags,     # launch flags
                                False,     # Stop at entry
                                error)     # error
        # assertEqual reports both values on failure, unlike
        # assertTrue(a == b) which only says "False is not True".
        self.assertEqual(process.GetState(), lldb.eStateStopped,
                         PROCESS_STOPPED)

        # Tally how many load/unload events arrived and how many shared
        # libraries they carried in total.
        total_solibs_added = 0
        total_solibs_removed = 0
        total_modules_added_events = 0
        total_modules_removed_events = 0
        while listener.GetNextEvent(event):
            if lldb.SBTarget.EventIsTargetEvent(event):
                if event.GetType() == lldb.SBTarget.eBroadcastBitModulesLoaded:
                    solib_count = lldb.SBTarget.GetNumModulesFromEvent(event)
                    total_modules_added_events += 1
                    total_solibs_added += solib_count
                    if self.TraceOn():
                        # print all of the binaries that have been added
                        added_files = []
                        i = 0
                        while i < solib_count:
                            module = lldb.SBTarget.GetModuleAtIndexFromEvent(i, event)
                            added_files.append(module.GetFileSpec().GetFilename())
                            i = i + 1
                        print("Loaded files: %s" % (', '.join(added_files)))
                if event.GetType() == lldb.SBTarget.eBroadcastBitModulesUnloaded:
                    solib_count = lldb.SBTarget.GetNumModulesFromEvent(event)
                    total_modules_removed_events += 1
                    total_solibs_removed += solib_count
                    if self.TraceOn():
                        # print all of the binaries that have been removed
                        removed_files = []
                        i = 0
                        while i < solib_count:
                            module = lldb.SBTarget.GetModuleAtIndexFromEvent(i, event)
                            removed_files.append(module.GetFileSpec().GetFilename())
                            i = i + 1
                        print("Unloaded files: %s" % (', '.join(removed_files)))

        # This is testing that we get back a small number of events with the loaded
        # binaries in batches. Check that we got back more than 1 solib per event.
        # In practice on Darwin today, we get back two events for a do-nothing c
        # program: a.out and dyld, and then all the rest of the system libraries.
        avg_solibs_added_per_event = int(float(total_solibs_added) / float(total_modules_added_events))
        self.assertGreater(avg_solibs_added_per_event, 1)
<|file_name|>0001_initial.py<|end_file_name|><|fim▁begin|># Generated by Django 3.0.13 on 2021-05-19 21:21
<|fim▁hole|>
class Migration(migrations.Migration):
    # Auto-generated initial migration (Django 3.0.13): creates the
    # products_priceoffer table backing the ProductPriceOffer model.
    initial = True
    dependencies = [
        ('products', '0001_initial'),
        migrations.swappable_dependency(settings.AUTH_USER_MODEL),
    ]
    operations = [
        migrations.CreateModel(
            name='ProductPriceOffer',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('status', models.CharField(choices=[('not_reviewed', 'Not reviewed'), ('processing', 'Processing'), ('canceled', 'Canceled'), ('completed', 'Completed')], default='not_reviewed', max_length=50, verbose_name='Status')),
                ('name', models.CharField(max_length=255, verbose_name='Name')),
                ('mobile', models.CharField(max_length=255, verbose_name='Mobile phone')),
                ('email', models.EmailField(max_length=255, verbose_name='Email')),
                ('text', models.TextField(max_length=1000, verbose_name='Offer')),
                ('date_created', models.DateTimeField(auto_now_add=True, verbose_name='Date created')),
                ('product', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='price_offers', to='products.Product', verbose_name='Product')),
                ('user', models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.SET_NULL, related_name='offers', to=settings.AUTH_USER_MODEL, verbose_name='User')),
            ],
            options={
                'verbose_name': 'Product price offer',
                'verbose_name_plural': 'Product price offers',
                'db_table': 'products_priceoffer',
                'ordering': ['-date_created'],
            },
        ),
    ]
from django.db import migrations, models
import django.db.models.deletion
|
<|file_name|>localfileview.py<|end_file_name|><|fim▁begin|>#!/usr/bin/env python
# -*- coding: utf-8 -*-
#
# Copyright 2009-2010 Zuza Software Foundation
#
# This file is part of Virtaal.
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, see <http://www.gnu.org/licenses/>.
import os.path
import gtk
import logging
import pango
from gtk import gdk
from locale import strcoll
from translate.lang import factory as lang_factory
from translate.storage import factory as store_factory
from virtaal.common.pan_app import ui_language
from virtaal.views.baseview import BaseView
from virtaal.views import rendering
from virtaal.views.theme import current_theme
class LocalFileView:
    """
    Class that manages the localfile terminology plug-in's GUI presense and interaction.

    Installs two items into the main window's Edit menu ("Terminology
    Files..." and "Add Term...") and owns the two dialogs they open.
    """
    # INITIALIZERS #
    def __init__(self, model):
        self.term_model = model
        self.controller = model.controller
        self.mainview = model.controller.main_controller.view
        # (widget, handler_id) pairs, kept so destroy() can disconnect them.
        self._signal_ids = []
        self._setup_menus()
        self.addterm = TermAddDialog(model=model)
        self.fileselect = FileSelectDialog(model=model)
    # METHODS #
    def _setup_menus(self):
        # Create (or reuse) the two menu items, placed after "mnu_placnext".
        mnu_transfer = self.mainview.gui.get_widget('mnu_placnext')
        self.mnui_edit = self.mainview.gui.get_widget('menuitem_edit')
        self.menu = self.mnui_edit.get_submenu()
        self.mnu_select_files, _menu = self.mainview.find_menu_item(_('Terminology _Files...'), self.mnui_edit)
        if not self.mnu_select_files:
            self.mnu_select_files = self.mainview.append_menu_item(_('Terminology _Files...'), self.mnui_edit, after=mnu_transfer)
        self._signal_ids.append((
            self.mnu_select_files,
            self.mnu_select_files.connect('activate', self._on_select_term_files)
        ))
        self.mnu_add_term, _menu = self.mainview.find_menu_item(_('Add _Term...'), self.mnui_edit)
        if not self.mnu_add_term:
            self.mnu_add_term = self.mainview.append_menu_item(_('Add _Term...'), self.mnui_edit, after=mnu_transfer)
        self._signal_ids.append((
            self.mnu_add_term,
            self.mnu_add_term.connect('activate', self._on_add_term)
        ))
        # Bind Ctrl+T to the "Add Term" action via the accel map.
        gtk.accel_map_add_entry("<Virtaal>/Terminology/Add Term", gtk.keysyms.t, gdk.CONTROL_MASK)
        accel_group = self.menu.get_accel_group()
        if accel_group is None:
            accel_group = gtk.AccelGroup()
            self.menu.set_accel_group(accel_group)
        self.mnu_add_term.set_accel_path("<Virtaal>/Terminology/Add Term")
        # NOTE(review): set_accel_group() is called again unconditionally
        # here — appears redundant when the group was already attached above.
        self.menu.set_accel_group(accel_group)
    def destroy(self):
        # Undo everything _setup_menus() did: disconnect handlers and
        # remove the menu items.
        for gobj, signal_id in self._signal_ids:
            gobj.disconnect(signal_id)
        self.menu.remove(self.mnu_select_files)
        self.menu.remove(self.mnu_add_term)
    # EVENT HANDLERS #
    def _on_add_term(self, menuitem):
        self.addterm.run(parent=self.mainview.main_window)
    def _on_select_term_files(self, menuitem):
        self.fileselect.run(parent=self.mainview.main_window)
class FileSelectDialog:
    """
    Wrapper for the selection dialog, created in Glade, to manage the list of
    files used by this plug-in.

    Shows the configured terminology files in a tree view with a radio
    "Extendable" column marking the single file new terms are appended to.
    """
    # Column indices of the backing ListStore.
    COL_FILE, COL_EXTEND = range(2)
    # INITIALIZERS #
    def __init__(self, model):
        self.controller = model.controller
        self.term_model = model
        self.gladefilename, self.gui = BaseView.load_glade_file(
            ["virtaal", "virtaal.glade"],
            root='TermFilesDlg',
            domain='virtaal'
        )
        self._get_widgets()
        self._init_treeview()
        self._init_add_chooser()
    def _get_widgets(self):
        # Bind the named Glade widgets as attributes and hook up signals.
        widget_names = ('btn_add_file', 'btn_remove_file', 'btn_open_termfile', 'tvw_termfiles')
        for name in widget_names:
            setattr(self, name, self.gui.get_widget(name))
        self.dialog = self.gui.get_widget('TermFilesDlg')
        self.btn_add_file.connect('clicked', self._on_add_file_clicked)
        self.btn_remove_file.connect('clicked', self._on_remove_file_clicked)
        self.btn_open_termfile.connect('clicked', self._on_open_termfile_clicked)
        self.tvw_termfiles.get_selection().connect('changed', self._on_selection_changed)
    def _init_treeview(self):
        # ListStore columns: file path (str), "extendable" flag (bool).
        self.lst_files = gtk.ListStore(str, bool)
        self.tvw_termfiles.set_model(self.lst_files)
        cell = gtk.CellRendererText()
        cell.props.ellipsize = pango.ELLIPSIZE_MIDDLE
        col = gtk.TreeViewColumn(_('File'))
        col.pack_start(cell)
        col.add_attribute(cell, 'text', self.COL_FILE)
        col.set_expand(True)
        col.set_sort_column_id(0)
        self.tvw_termfiles.append_column(col)
        # Radio-style toggle: exactly one file may be "extendable".
        cell = gtk.CellRendererToggle()
        cell.set_radio(True)
        cell.connect('toggled', self._on_toggle)
        col = gtk.TreeViewColumn(_('Extendable'))
        col.pack_start(cell)
        col.add_attribute(cell, 'active', self.COL_EXTEND)
        col.set_expand(False)
        self.tvw_termfiles.append_column(col)
        extend_file = self.term_model.config.get('extendfile', '')
        files = self.term_model.config['files']
        for f in files:
            self.lst_files.append([f, f == extend_file])
        # If there was no extend file, select the first one
        for row in self.lst_files:
            if row[self.COL_EXTEND]:
                break
        else:
            itr = self.lst_files.get_iter_first()
            if itr and self.lst_files.iter_is_valid(itr):
                self.lst_files.set_value(itr, self.COL_EXTEND, True)
                self.term_model.config['extendfile'] = self.lst_files.get_value(itr, self.COL_FILE)
                self.term_model.save_config()
    def _init_add_chooser(self):
        # Build the "Add Files" chooser with one filter per supported
        # translation-store format plus an "all supported" aggregate.
        # The following code was mostly copied from virtaal.views.MainView._create_dialogs()
        dlg = gtk.FileChooserDialog(
            _('Add Files'),
            self.controller.main_controller.view.main_window,
            gtk.FILE_CHOOSER_ACTION_OPEN,
            (gtk.STOCK_CANCEL, gtk.RESPONSE_CANCEL, gtk.STOCK_OPEN, gtk.RESPONSE_OK)
        )
        dlg.set_default_response(gtk.RESPONSE_OK)
        all_supported_filter = gtk.FileFilter()
        all_supported_filter.set_name(_("All Supported Files"))
        dlg.add_filter(all_supported_filter)
        supported_files_dict = dict([ (_(name), (extension, mimetype)) for name, extension, mimetype in store_factory.supported_files() ])
        supported_file_names = supported_files_dict.keys()
        supported_file_names.sort(cmp=strcoll)
        for name in supported_file_names:
            extensions, mimetypes = supported_files_dict[name]
            #XXX: we can't open generic .csv formats, so listing it is probably
            # more harmful than good.
            if "csv" in extensions:
                continue
            new_filter = gtk.FileFilter()
            new_filter.set_name(name)
            if extensions:
                for extension in extensions:
                    new_filter.add_pattern("*." + extension)
                    all_supported_filter.add_pattern("*." + extension)
                    # Also match compressed variants, e.g. "*.po.gz".
                    for compress_extension in store_factory.decompressclass.keys():
                        new_filter.add_pattern("*.%s.%s" % (extension, compress_extension))
                        all_supported_filter.add_pattern("*.%s.%s" % (extension, compress_extension))
            if mimetypes:
                for mimetype in mimetypes:
                    new_filter.add_mime_type(mimetype)
                    all_supported_filter.add_mime_type(mimetype)
            dlg.add_filter(new_filter)
        all_filter = gtk.FileFilter()
        all_filter.set_name(_("All Files"))
        all_filter.add_pattern("*")
        dlg.add_filter(all_filter)
        dlg.set_select_multiple(True)
        self.add_chooser = dlg
    # METHODS #
    def clear_selection(self):
        self.tvw_termfiles.get_selection().unselect_all()
    def run(self, parent=None):
        # Show the dialog modally over `parent` (when given).
        if isinstance(parent, gtk.Widget):
            self.dialog.set_transient_for(parent)
        self.clear_selection()
        self.dialog.show_all()
        self.dialog.run()
        self.dialog.hide()
    # EVENT HANDLERS #
    def _on_add_file_clicked(self, button):
        self.add_chooser.show_all()
        response = self.add_chooser.run()
        self.add_chooser.hide()
        if response != gtk.RESPONSE_OK:
            return
        mainview = self.term_model.controller.main_controller.view
        currfiles = [row[self.COL_FILE] for row in self.lst_files]
        for filename in self.add_chooser.get_filenames():
            if filename in currfiles:
                continue
            # Try and open filename as a translation store
            try:
                if not os.path.isfile(filename):
                    raise IOError(_('"%s" is not a usable file.') % filename)
                # Opened purely to validate the file; the result is discarded.
                store = store_factory.getobject(filename)
                currfiles.append(filename)
                self.lst_files.append([filename, False])
            except Exception, exc:
                message = _('Unable to load %(filename)s:\n\n%(errormsg)s') % {'filename': filename, 'errormsg': str(exc)}
                mainview.show_error_dialog(title=_('Error opening file'), message=message)
        self.term_model.config['files'] = currfiles
        self.term_model.save_config()
        self.term_model.load_files() # FIXME: This could be optimized to only load and add the new selected files.
    def _on_remove_file_clicked(self, button):
        model, selected = self.tvw_termfiles.get_selection().get_selected()
        if not selected:
            return
        remfile = model.get_value(selected, self.COL_FILE)
        extend = model.get_value(selected, self.COL_EXTEND)
        self.term_model.config['files'].remove(remfile)
        if extend:
            # The removed file was the extend target: fall back to the
            # first remaining file, if any.
            self.term_model.config['extendfile'] = ''
            itr = model.get_iter_first()
            if itr and model.iter_is_valid(itr):
                model.set_value(itr, self.COL_EXTEND, True)
                self.term_model.config['extendfile'] = model.get_value(itr, self.COL_FILE)
        self.term_model.save_config()
        self.term_model.load_files() # FIXME: This could be optimized to only remove the selected file from the terminology matcher.
        model.remove(selected)
    def _on_open_termfile_clicked(self, button):
        selection = self.tvw_termfiles.get_selection()
        model, itr = selection.get_selected()
        if itr is None:
            return
        selected_file = model.get_value(itr, self.COL_FILE)
        self.term_model.controller.main_controller.open_file(selected_file)
    def _on_selection_changed(self, treesel):
        # Open/Remove only make sense with a row selected.
        model, itr = treesel.get_selected()
        enabled = itr is not None
        self.btn_open_termfile.set_sensitive(enabled)
        self.btn_remove_file.set_sensitive(enabled)
    def _on_toggle(self, renderer, path):
        # Radio semantics: mark the toggled row, clear all others.
        toggled_file = self.lst_files.get_value(self.lst_files.get_iter(path), self.COL_FILE)
        itr = self.lst_files.get_iter_first()
        while itr is not None and self.lst_files.iter_is_valid(itr):
            self.lst_files.set_value(itr, self.COL_EXTEND, self.lst_files.get_value(itr, self.COL_FILE) == toggled_file)
            itr = self.lst_files.iter_next(itr)
        self.term_model.config['extendfile'] = toggled_file
        self.term_model.save_config()
class TermAddDialog:
    """
    Wrapper for the dialog used to add a new term to the terminology file.

    Pre-fills source/target from the current unit's text selection and
    appends the new unit to the configured "extendable" terminology store.
    """
    # INITIALIZERS #
    def __init__(self, model):
        self.term_model = model
        self.lang_controller = model.controller.main_controller.lang_controller
        self.unit_controller = model.controller.main_controller.unit_controller
        self.gladefilename, self.gui = BaseView.load_glade_file(
            ["virtaal", "virtaal.glade"],
            root='TermAddDlg',
            domain='virtaal'
        )
        self._get_widgets()
    def _get_widgets(self):
        # Bind the named Glade widgets as attributes.
        widget_names = (
            'btn_add_term', 'cmb_termfile', 'eb_add_term_errors', 'ent_source',
            'ent_target', 'lbl_add_term_errors', 'lbl_srclang', 'lbl_tgtlang',
            'txt_comment'
        )

        for name in widget_names:
            setattr(self, name, self.gui.get_widget(name))
        self.dialog = self.gui.get_widget('TermAddDlg')
        # Combo box listing the candidate terminology files.
        cellr = gtk.CellRendererText()
        cellr.props.ellipsize = pango.ELLIPSIZE_MIDDLE
        self.lst_termfiles = gtk.ListStore(str)
        self.cmb_termfile.set_model(self.lst_termfiles)
        self.cmb_termfile.pack_start(cellr)
        self.cmb_termfile.add_attribute(cellr, 'text', 0)
        # Re-validate on every keystroke in either entry.
        self.ent_source.connect('changed', self._on_entry_changed)
        self.ent_target.connect('changed', self._on_entry_changed)
    # METHODS #
    def add_term_unit(self, source, target):
        # Append a source/target pair (plus optional comment) to the
        # terminology store currently selected in the combo box.
        filename = self.cmb_termfile.get_active_text()
        store = self.term_model.get_store_for_filename(filename)
        if store is None:
            logging.debug('No terminology store to extend :(')
            return
        unit = store.addsourceunit(source)
        unit.target = target
        buff = self.txt_comment.get_buffer()
        comments = buff.get_text(buff.get_start_iter(), buff.get_end_iter())
        if comments:
            unit.addnote(comments)
        store.save()
        # Make the new term immediately available to the matcher.
        self.term_model.matcher.extendtm(unit)
        #logging.debug('Added new term: [%s] => [%s], file=%s' % (source, target, store.filename))
    def reset(self):
        # Re-initialize all dialog state: pre-fill the entries from the
        # first text selection found in the current unit's views.
        unitview = self.unit_controller.view
        source_text = u''
        for src in unitview.sources:
            selection = src.buffer.get_selection_bounds()
            if selection:
                source_text = src.get_text(*selection)
                break
        self.ent_source.modify_font(rendering.get_source_font_description())
        self.ent_source.set_text(source_text.strip())
        target_text = u''
        for tgt in unitview.targets:
            selection = tgt.buffer.get_selection_bounds()
            if selection:
                target_text = tgt.get_text(*selection)
                break
        self.ent_target.modify_font(rendering.get_target_font_description())
        self.ent_target.set_text(target_text.strip())
        self.txt_comment.get_buffer().set_text('')
        self.eb_add_term_errors.hide()
        self.btn_add_term.props.sensitive = True
        self.lbl_srclang.set_text_with_mnemonic(_(u'_Source term — %(langname)s') % {'langname': self.lang_controller.source_lang.name})
        self.lbl_tgtlang.set_text_with_mnemonic(_(u'_Target term — %(langname)s') % {'langname': self.lang_controller.target_lang.name})
        # Repopulate the file combo, preselecting the "extendfile".
        self.lst_termfiles.clear()
        extendfile = self.term_model.config.get('extendfile', None)
        select_index = -1
        i = 0
        for f in self.term_model.config['files']:
            if f == extendfile:
                select_index = i
            self.lst_termfiles.append([f])
            i += 1
        if select_index >= 0:
            self.cmb_termfile.set_active(select_index)
    def run(self, parent=None):
        # Show the dialog; on OK, commit the entered term.
        self.reset()
        if isinstance(parent, gtk.Widget):
            self.dialog.set_transient_for(parent)
        self.dialog.show()
        self._on_entry_changed(None)
        self.ent_source.grab_focus()
        response = self.dialog.run()
        self.dialog.hide()
        if response != gtk.RESPONSE_OK:
            return
        self.add_term_unit(self.ent_source.get_text(), self.ent_target.get_text())
    # EVENT HANDLERS #
    def _on_entry_changed(self, entry):
        # Validate the current source/target pair: an identical existing
        # entry blocks the Add button; same-source entries only warn.
        self.btn_add_term.props.sensitive = True
        self.eb_add_term_errors.hide()
        src_text = self.ent_source.get_text()
        tgt_text = self.ent_target.get_text()
        dup = self.term_model.get_duplicates(src_text, tgt_text)
        if dup:
            self.lbl_add_term_errors.set_text(_('Identical entry already exists.'))
            self.eb_add_term_errors.modify_bg(gtk.STATE_NORMAL, gdk.color_parse(current_theme['warning_bg']))
            self.eb_add_term_errors.show_all()
            self.btn_add_term.props.sensitive = False
            return
        same_src_units = self.term_model.get_units_with_source(src_text)
        if src_text and same_src_units:
            # We want to separate multiple terms with the correct list
            # separator for the UI language:
            separator = lang_factory.getlanguage(ui_language).listseperator
            #l10n: The variable is an existing term formatted for emphasis. The default is bold formatting, but you can remove/change the markup if needed. Leave it unchanged if you are unsure.
            translations = separator.join([_('<b>%s</b>') % (u.target) for u in same_src_units])
            errormsg = _('Existing translations: %(translations)s') % {
                'translations': translations
            }
            self.lbl_add_term_errors.set_markup(errormsg)
            self.eb_add_term_errors.modify_bg(gtk.STATE_NORMAL, gdk.color_parse(current_theme['warning_bg']))
            self.eb_add_term_errors.show_all()
            return
<|file_name|>sqLiteProviderTests.js<|end_file_name|><|fim▁begin|>$(document).ready(function () {
if (!$data.StorageProviderLoader.isSupported('sqLite')) return;
module("sqLiteProviderTest");
test("memberDefinition converter", function(){
$data.EntityContext.extend('$conv.ConverterTest', {
Items: { type: $data.EntitySet, elementType: $data.Entity.extend('$conv.Item', {
Id: { type: 'int', key: true, computed: true },
Value: { type: 'string', converter: {
sqLite: {
fromDb: function(value){
return 'Value #' + value;
},
toDb: function(value){
return value.replace('Value #', '');
}
}
}
}
})
}
});
stop();
var c = new $conv.ConverterTest({ name: 'sqLite', databaseName: 'conv', dbCreation: $data.storageProviders.DbCreationType.DropAllExistingTables });
c.onReady(function(db){
db.Items.add({ Value: 'Value #3' });
db.saveChanges(function(cnt){
equal(cnt, 1, 'not 1 item saved');
db.Items.toArray(function(r){
equal(r.length, 1, 'not 1 item in table');
r = r[0];
ok(r instanceof $conv.Item, 'not $conv.Item');
equal(r.Value, 'Value #3', 'bad value');
start();
})
});
});
});
test("simpleFieldDataTypeTest", function () {
var context = $data.Class.define("ProviderTestContext", $data.EntityContext, null, {
SimpleDataTypes: {
dataType: $data.EntitySet, elementType: $data.Class.define("SimpleDataType", $data.Entity, null, {
i0: { dataType: 'integer' },
i1: { dataType: 'int' },
b0: { dataType: 'bool' },
b1: { dataType: 'boolean' },
t0: { dataType: 'text' },
t1: { dataType: 'string' },
bl: { dataType: 'blob' },
n0: { dataType: 'number' },
d1: { dataType: 'datetime' },
d2: { dataType: 'date' }
}, null)
}
}, null);
var c = new context({ databaseName: "sqLiteProviderTest_createDb", name: "sqLite", dbCreation: $data.storageProviders.DbCreationType.DropAllExistingTables })
var provType = $data.storageProviders.sqLite.SqLiteStorageProvider;
var prov = new provType({ databaseName: "ProviderTestDb", name: "sqLite" });
var sql = prov.createSqlFromStorageModel(c._storageModel[0]);
equal(sql, 'CREATE TABLE IF NOT EXISTS [SimpleDataTypes] ([i0] INTEGER, [i1] INTEGER, [b0] INTEGER, [b1] INTEGER, [t0] TEXT, [t1] TEXT, [bl] BLOB, [n0] REAL, [d1] REAL, [d2] REAL);', 'create table function faild');
});
test("requiredFieldTest", function () {
var context = $data.Class.define("ProviderTestContext", $data.EntityContext, null, {
SimpleDataTypes: {
dataType: $data.EntitySet, elementType: $data.Class.define("SimpleDataType", $data.Entity, null, {
i0: { dataType: 'integer', required: true },
i1: { dataType: 'int', required: false },
b0: { dataType: 'bool', required: true },
b1: { dataType: 'boolean', required: false },
t0: { dataType: 'text', required: true },
t1: { dataType: 'string', required: false },
bl0: { dataType: 'blob', required: true },
bl1: { dataType: 'blob', required: false },
n0: { dataType: 'number', required: true },
n1: { dataType: 'number', required: false },
d1: { dataType: 'datetime', required: true },
d2: { dataType: 'date', required: false }
}, null)
},
RequireWithKeys: {
dataType: $data.EntitySet, elementType: $data.Class.define("RequireWithKey", $data.Entity, null, {
i0: { dataType: 'integer', required: true, key: true },
i1: { dataType: 'int', required: true }
}, null)
},
RequireWithMultipleKeys: {
dataType: $data.EntitySet, elementType: $data.Class.define("RequireWithMultipleKey", $data.Entity, null, {
i0: { dataType: 'integer', required: true, key: true },
i1: { dataType: 'int', required: true, key: true }
}, null)
},
RequireWithComputeds: {
dataType: $data.EntitySet, elementType: $data.Class.define("RequireWithComputed", $data.Entity, null, {
i0: { dataType: 'integer', required: true, key: true, computed: true },
i1: { dataType: 'int', required: true }
}, null)
}
}, null);
var c = new context({ databaseName: "sqLiteProviderTest_createDb", name: "sqLite", dbCreation: $data.storageProviders.DbCreationType.DropAllExistingTables })
var provType = $data.storageProviders.sqLite.SqLiteStorageProvider;
var prov = new provType({ databaseName: "ProviderTestDb", name: "sqLite" });
var sql = prov.createSqlFromStorageModel(c._storageModel[0]);
equal(sql, 'CREATE TABLE IF NOT EXISTS [SimpleDataTypes] ([i0] INTEGER NOT NULL, [i1] INTEGER, [b0] INTEGER NOT NULL, [b1] INTEGER, [t0] TEXT NOT NULL, [t1] TEXT, [bl0] BLOB NOT NULL, [bl1] BLOB, [n0] REAL NOT NULL, [n1] REAL, [d1] REAL NOT NULL, [d2] REAL);', 'create table function faild');
var sql = prov.createSqlFromStorageModel(c._storageModel[1]);
equal(sql, 'CREATE TABLE IF NOT EXISTS [RequireWithKeys] ([i0] INTEGER NOT NULL, [i1] INTEGER NOT NULL,PRIMARY KEY ([i0]));', 'required with key');
var sql = prov.createSqlFromStorageModel(c._storageModel[2]);
equal(sql, 'CREATE TABLE IF NOT EXISTS [RequireWithMultipleKeys] ([i0] INTEGER NOT NULL, [i1] INTEGER NOT NULL,PRIMARY KEY ([i0], [i1]));', 'required with multiple key');
var sql = prov.createSqlFromStorageModel(c._storageModel[3]);
equal(sql, 'CREATE TABLE IF NOT EXISTS [RequireWithComputeds] ([i0] INTEGER PRIMARY KEY AUTOINCREMENT, [i1] INTEGER NOT NULL);', 'required with computed field');
});
module("sqLiteProviderTest_createDb");
test("attach_db", function () {
var context = $data.Class.define("ProviderTestContext", $data.EntityContext, null, {
Table1Items: {
dataType: $data.EntitySet, elementType: $data.Class.define("TestEntity", $data.Entity, null, {
fld1: { dataType: 'integer', key: true },
fld2: { dataType: 'string' }
}, null)
}
}, null);
var context2 = $data.Class.define("ProviderTestContext", $data.EntityContext, null, {
Table1Items: {
dataType: $data.EntitySet, elementType: $data.Class.define("TestEntity", $data.Entity, null, {
fld1: { dataType: 'integer', key: true },
fld2: { dataType: 'string' }
}, null)
}
}, null);
stop(2);
expect(14);
var c = new context({ databaseName: "sqLiteProviderTest_createDb", name: "sqLite", dbCreation: $data.storageProviders.DbCreationType.DropAllExistingTables }).onReady(function (db) {
$data.Db1 = db;
$data.Db1.name = 'db1';
db.Table1Items.add(new db.Table1Items.createNew({ fld1: 1, fld2: 'alma' }));
db.Table1Items.add(new db.Table1Items.createNew({ fld1: 2, fld2: 'alma2' }));
db.saveChanges(function () {
db.Table1Items.where(function (item) { return item.fld1 > 0; }, null).orderBy(function (item) { return item.fld1; }).toArray(function (result) {
start();
equal(result.length, 2, "Db cleanup error");
equal(result[0].fld1, 1, "Inconsistency data in db");
equal(result[0].fld2, 'alma', "Inconsistency data in db");
equal(result[1].fld1, 2, "Inconsistency data in db");
equal(result[1].fld2, 'alma2', "Inconsistency data in db");
new context2({ databaseName: "sqLiteProviderTest_createDb", name: "sqLite", dbCreation: $data.storageProviders.DbCreationType.DropTableIfChanged }).onReady(function (reconnectedDb) {
$data.Db2 = reconnectedDb;
$data.Db2.name = 'db2';
reconnectedDb.Table1Items.add(new reconnectedDb.Table1Items.createNew({ fld1: 3, fld2: 'alma3' }));
reconnectedDb.Table1Items.add(new reconnectedDb.Table1Items.createNew({ fld1: 4, fld2: 'alma4' }));
reconnectedDb.saveChanges(function () {
reconnectedDb.Table1Items.where(function (item) { return item.fld1 > 0; }, null).orderBy(function (item) { return item.fld1; }).toArray(function (result2) {
start();
equal(result2.length, 4, "Db cleanup error");
equal(result2[0].fld1, 1, "Inconsistency data in db");
equal(result2[0].fld2, 'alma', "Inconsistency data in db");
equal(result2[1].fld1, 2, "Inconsistency data in db");
equal(result2[1].fld2, 'alma2', "Inconsistency data in db");
equal(result2[2].fld1, 3, "Inconsistency data in db");
equal(result2[2].fld2, 'alma3', "Inconsistency data in db");
equal(result2[3].fld1, 4, "Inconsistency data in db");
equal(result2[3].fld2, 'alma4', "Inconsistency data in db");
});
});
});
});
});
});
});
// schema_extend: verifies that reconnecting to the same sqLite database with an
// EXTENDED schema (a brand-new Table2Items entity set) using DropTableIfChanged
// keeps all existing rows of the unchanged Table1Items table and creates the
// new table alongside it.
test("schema_extend", function () {
    // Original context: a single Table1Items entity set.
    var context = $data.Class.define("ProviderTestContext", $data.EntityContext, null, {
        Table1Items: {
            dataType: $data.EntitySet, elementType: $data.Class.define("TestEntity", $data.Entity, null, {
                fld1: { dataType: 'integer', key: true },
                fld2: { dataType: 'string' }
            }, null)
        }
    }, null);
    // Extended context: same Table1Items definition plus a new Table2Items set.
    var context2 = $data.Class.define("ProviderTestContext", $data.EntityContext, null, {
        Table1Items: {
            dataType: $data.EntitySet, elementType: $data.Class.define("TestEntity", $data.Entity, null, {
                fld1: { dataType: 'integer', key: true },
                fld2: { dataType: 'string' }
            }, null)
        },
        Table2Items: {
            dataType: $data.EntitySet, elementType: $data.Class.define("TestEntity2", $data.Entity, null, {
                fld1: { dataType: 'integer', key: true },
                fld2: { dataType: 'string' }
            }, null)
        }
    }, null);
    // Three async checkpoints (one per toArray callback) and 17 assertions total.
    stop(3);
    expect(17);
    // First connection drops everything so the test starts from a clean database.
    var c = new context({ databaseName: "sqLiteProviderTest_createDb", name: "sqLite", dbCreation: $data.storageProviders.DbCreationType.DropAllExistingTables }).onReady(function (db) {
        $data.Db1 = db;
        $data.Db1.name = 'db1';
        db.Table1Items.add(new db.Table1Items.createNew({ fld1: 1, fld2: 'alma' }));
        db.Table1Items.add(new db.Table1Items.createNew({ fld1: 2, fld2: 'alma2' }));
        db.saveChanges(function () {
            // Sanity check: the two seed rows are present before reconnecting.
            db.Table1Items.where(function (item, param) { return item.fld1 > 0; }, null).orderBy(function (item) { return item.fld1; }).toArray(function (result) {
                start();
                equal(result.length, 2, "Db cleanup error");
                equal(result[0].fld1, 1, "Inconsistency data in db");
                equal(result[0].fld2, 'alma', "Inconsistency data in db");
                equal(result[1].fld1, 2, "Inconsistency data in db");
                equal(result[1].fld2, 'alma2', "Inconsistency data in db");
                // Reconnect with the extended schema; DropTableIfChanged must keep
                // Table1Items intact because its definition is unchanged.
                var c2 = new context2({ databaseName: "sqLiteProviderTest_createDb", name: "sqLite", dbCreation: $data.storageProviders.DbCreationType.DropTableIfChanged }).onReady(function (reconnectedDb) {
                    $data.Db2 = reconnectedDb;
                    $data.Db2.name = 'db2';
                    reconnectedDb.Table1Items.add(new reconnectedDb.Table1Items.createNew({ fld1: 3, fld2: 'alma3' }));
                    reconnectedDb.Table1Items.add(new reconnectedDb.Table1Items.createNew({ fld1: 4, fld2: 'alma4' }));
                    reconnectedDb.Table2Items.add(new reconnectedDb.Table2Items.createNew({ fld1: 1, fld2: 'alma' }));
                    reconnectedDb.saveChanges(function () {
                        // Table1Items should now hold the original rows (1, 2) plus the new ones (3, 4).
                        reconnectedDb.Table1Items.where(function (item, param) { return item.fld1 > 0; }, null).orderBy(function (item) { return item.fld1; }).toArray(function (result2) {
                            start();
                            equal(result2.length, 4, "Db cleanup error");
                            equal(result2[0].fld1, 1, "Inconsistency data in db");
                            equal(result2[0].fld2, 'alma', "Inconsistency data in db");
                            equal(result2[1].fld1, 2, "Inconsistency data in db");
                            equal(result2[1].fld2, 'alma2', "Inconsistency data in db");
                            equal(result2[2].fld1, 3, "Inconsistency data in db");
                            equal(result2[2].fld2, 'alma3', "Inconsistency data in db");
                            equal(result2[3].fld1, 4, "Inconsistency data in db");
                            equal(result2[3].fld2, 'alma4', "Inconsistency data in db");
                        });
                        // The freshly created Table2Items should contain exactly the one row added above.
                        reconnectedDb.Table2Items.where(function (item, param) { return item.fld1 > 0; }, null).orderBy(function (item) { return item.fld1; }).toArray(function (result3) {
                            start();
                            equal(result3.length, 1, "Db cleanup error");
                            equal(result3[0].fld1, 1, "Inconsistency data in db");
                            equal(result3[0].fld2, 'alma', "Inconsistency data in db");
                        });
                    });
                });
            });
        });
    });
});
// schema_changed: verifies that reconnecting with a CHANGED entity definition
// (Table1Items gains an extra fld3 column) using DropTableIfChanged drops and
// recreates the table, so the rows inserted by the first context are lost and
// only the rows inserted after reconnection remain.
test("schema_changed", function () {
    // Original context: Table1Items with two fields.
    var context = $data.Class.define("ProviderTestContext", $data.EntityContext, null, {
        Table1Items: {
            dataType: $data.EntitySet, elementType: $data.Class.define("TestEntity", $data.Entity, null, {
                fld1: { dataType: 'integer', key: true },
                fld2: { dataType: 'string' }
            }, null)
        }
    }, null);
    // Changed context: same entity set but with an additional fld3 column.
    var context2 = $data.Class.define("ProviderTestContext", $data.EntityContext, null, {
        Table1Items: {
            dataType: $data.EntitySet, elementType: $data.Class.define("TestEntity", $data.Entity, null, {
                fld1: { dataType: 'integer', key: true },
                fld2: { dataType: 'string' },
                fld3: { dataType: 'string' }
            }, null)
        }
    }, null);
    // Two async checkpoints (one per toArray callback) and 10 assertions total.
    stop(2);
    expect(10);
    // First connection drops everything so the test starts from a clean database.
    var c = new context({ databaseName: "sqLiteProviderTest_createDb", name: "sqLite", dbCreation: $data.storageProviders.DbCreationType.DropAllExistingTables }).onReady(function (db) {
        $data.Db1 = db;
        $data.Db1.name = 'db1';
        db.Table1Items.add(new db.Table1Items.createNew({ fld1: 1, fld2: 'alma' }));
        db.Table1Items.add(new db.Table1Items.createNew({ fld1: 2, fld2: 'alma2' }));
        db.saveChanges(function () {
            // Sanity check: the two seed rows are present before reconnecting.
            db.Table1Items.where(function (item, param) { return item.fld1 > 0; }, null).orderBy(function (item) { return item.fld1; }).toArray(function (result) {
                start();
                equal(result.length, 2, "Db cleanup error");
                equal(result[0].fld1, 1, "Inconsistency data in db");
                equal(result[0].fld2, 'alma', "Inconsistency data in db");
                equal(result[1].fld1, 2, "Inconsistency data in db");
                equal(result[1].fld2, 'alma2', "Inconsistency data in db");
                // Reconnect with the changed schema; DropTableIfChanged must rebuild
                // Table1Items, discarding rows 1 and 2.
                var c2 = new context2({ databaseName: "sqLiteProviderTest_createDb", name: "sqLite", dbCreation: $data.storageProviders.DbCreationType.DropTableIfChanged }).onReady(function (reconnectedDb) {
                    $data.Db2 = reconnectedDb;
                    $data.Db2.name = 'db2';
                    reconnectedDb.Table1Items.add(new reconnectedDb.Table1Items.createNew({ fld1: 3, fld2: 'alma3' }));
                    reconnectedDb.Table1Items.add(new reconnectedDb.Table1Items.createNew({ fld1: 4, fld2: 'alma4' }));
                    reconnectedDb.saveChanges(function () {
                        // Only the post-reconnect rows (3, 4) should survive the rebuild.
                        reconnectedDb.Table1Items.where(function (item, param) { return item.fld1 > 0; }, null).orderBy(function (item) { return item.fld1; }).toArray(function (result2) {
                            start();
                            equal(result2.length, 2, "Db cleanup error");
                            equal(result2[0].fld1, 3, "Inconsistency data in db");
                            equal(result2[0].fld2, 'alma3', "Inconsistency data in db");
                            equal(result2[1].fld1, 4, "Inconsistency data in db");
                            equal(result2[1].fld2, 'alma4', "Inconsistency data in db");
                        });
                    });
                });
            });
        });
    });
});
module("sqLiteProviderTest_relations");
test("relation_with_simple_int_pk", function () {
var context = $data.Class.define("ProviderTestContext", $data.EntityContext, null, {
Persons: {
dataType: $data.EntitySet, elementType: $data.Class.define("Person", $data.Entity, null, {
Id: { dataType: 'integer', key: true },
Name: { dataType: 'string' },
MyBlogs: { dataType: $data.EntitySet, elementType: "Blog", inverseProperty: 'Owner' }
//EditableBlogs: { dataType: 'EntitySet', elementType: "Blog", foreignKey: 'Administrators' },
//DefaultBlog: { dataType: 'Blog' }
}, null)
},
Blogs: {
dataType: $data.EntitySet, elementType: $data.Class.define("Blog", $data.Entity, null, {
Id: { dataType: 'integer', key: true },
Title: { dataType: 'string' },
Owner: { dataType: 'Person', inverseProperty: 'MyBlogs' }
//Administrators: { dataType: 'EntitySet', elementType: "Blog", foreignKey: 'EditableBlogs' },
//DefaultUser: { dataType: 'Person', foreignKey: 'DefaultBlog' }
}, null)
}
}, null);
stop(1);
expect(1);
var c = new context({ databaseName: "sqLiteProviderTest_relations", name: "sqLite", dbCreation: $data.storageProviders.DbCreationType.DropAllExistingTables }).onReady(function (db) {
$data.db = db;
var person1 = new db.Persons.createNew({ Id: 1, Name: 'person1' });
$data.person1 = person1;
//console.dir(person1);
var person2 = new db.Persons.createNew({ Id: 2, Name: 'person2' });
var person3 = new db.Persons.createNew({ Id: 3, Name: 'person3' });
db.Persons.add(person1);
db.Persons.add(person2);
db.Persons.add(person3);
var blog1 = new db.Blogs.createNew({ Id: 1, Title: 'blog1' });
var blog2 = new db.Blogs.createNew({ Id: 2, Title: 'blog2' });
var blog3 = new db.Blogs.createNew({ Id: 3, Title: 'blog3' });
db.Blogs.add(blog1);
db.Blogs.add(blog2);
db.Blogs.add(blog3);
person1.DefaultBlog = blog3;
db.saveChanges(function () {
start();
ok(true, 'Initialize faild');
var person4 = new db.Persons.createNew({ Id: 3, Name: 'person3', DefaultBlog: blog1 });
//console.dir(person4);
});
});<|fim▁hole|> test("filter_in_subselect", function () {
var context = $data.EntityContext.extend("ProviderTestContext", {
Categories: {
type: $data.EntitySet, elementType: $data.Entity.extend("Category", {
Id: { type: 'int', key: true, computed: true },
Title: { type: 'string' },
Articles: { type: $data.EntitySet, elementType: "Article", inverseProperty: 'Category' }
})
},
Articles: {
type: $data.EntitySet, elementType: $data.Entity.extend("Article", {
Id: { type: 'int', key: true, computed: true },
Title: { type: 'string' },
Category: { type: 'Category', required: true }
})
}
});
stop(2);
expect(1);
var c = new context({ databaseName: "sqLiteProviderTest_subselect", name: "sqLite", dbCreation: $data.storageProviders.DbCreationType.DropAllExistingTables }).onReady(function (db) {
var catA = new Category({ Title: 'CatA' });
var catB = new Category({ Title: 'CatB' });
db.Categories.add(catA);
db.Categories.add(catB);
var artA = new Article({ Title: 'ArtA', Category: catA });
var artB = new Article({ Title: 'ArtB', Category: catA });
var artC = new Article({ Title: 'ArtC', Category: catB });
db.Articles.add(artA);
db.Articles.add(artB);
db.Articles.add(artC);
db.saveChanges(function () {
start(1);
try{
db.Categories
.filter(function(item){
return item.Id in this.category;
}, {
category: db.Articles
.map(function(item){ return item.Category.Id; })
})
.toArray(function(){
start(1);
ok(true, 'Query failed');
});
}catch(e){
start(1);
ok(false, 'Query failed');
}
});
});
});
/*test("computedFieldTest", function () {
expect(6);
var context = $data.Class.define("ProviderTestContext", $data.EntityContext, null, {
ComputedWithPrimaryKeys: { dataType: $data.EntitySet, elementType: $data.Class.define("ComputedWithPrimaryKey", $data.Entity, null, {
i0: { dataType: 'integer', key: true, computed: true },
i1: { dataType: 'int' },
i2: { dataType: 'int' }
}, null)
},
TwoPrimaryKeys: { dataType: $data.EntitySet, elementType: $data.Class.define("TwoPrimaryKey", null, null, {
i0: { dataType: 'integer', key: true },
i1: { dataType: 'int', key: true },
i2: { dataType: 'int' }
}, null)
},
TwoPrimaryKeyWithComputed1s: { dataType: $data.EntitySet, elementType: $data.Class.define("TwoPrimaryKeyWithComputed1", $data.Entity, null, {
i0: { dataType: 'integer', key: true, computed: true },
i1: { dataType: 'int', key: true },
i2: { dataType: 'int' }
}, null)
},
TwoPrimaryKeyWithComputed2s: { dataType: $data.EntitySet, elementType: $data.Class.define("TwoPrimaryKeyWithComputed2", $data.Entity, null, {
i0: { dataType: 'integer', computed: true },
i1: { dataType: 'int', key: true, computed: true },
i2: { dataType: 'int' }
}, null)
},
ComputedFieldWithoutKeys: { dataType: $data.EntitySet, elementType: $data.Class.define("ComputedFieldWithoutKey", $data.Entity, null, {
i0: { dataType: 'integer' },
i1: { dataType: 'int', computed: true },
i2: { dataType: 'int' }
}, null)
},
MultipleComputedAndKeys: { dataType: $data.EntitySet, elementType: $data.Class.define("MultipleComputedAndKey", $data.Entity, null, {
i0: { dataType: 'integer', key: true, computed: true },
i1: { dataType: 'int', key: true, computed: true },
i2: { dataType: 'int' }
}, null)
}
}, null);
var c = new context({ databaseName: "sqLiteProviderTest_createDb", name: "sqLite", dbCreation: $data.storageProviders.DbCreationType.DropAllExistingTables })
var provType = $data.storageProviders.sqLite.SqLiteStorageProvider;
var prov = new provType({ databaseName: "ProviderTestDb", name: "sqLite" });
var sql1 = prov.createSqlFromStorageModel(context.ComputedWithPrimaryKeys);
equal(sql1, 'CREATE TABLE IF NOT EXISTS ComputedWithPrimaryKeys (i0 INTEGER PRIMARY KEY AUTOINCREMENT, i1 INTEGER, i2 INTEGER);', 'primary key with computed field faild');
var sql2 = prov.createSqlFromStorageModel(c.TwoPrimaryKeys);
equal(sql2, 'CREATE TABLE IF NOT EXISTS TwoPrimaryKeys (i0 INTEGER, i1 INTEGER, i2 INTEGER,PRIMARY KEY (i0, i1));', 'two primary key faild');
raises(function () { prov.createSqlFromStorageModel(c.TwoPrimaryKeyWithComputed1s) }, Exception, 'multiple primary key with computed field');
raises(function () { prov.createSqlFromStorageModel(c.TwoPrimaryKeyWithComputed2s) }, Exception, 'multiple primary key with computed field');
raises(function () { prov.createSqlFromStorageModel(c.ComputedFieldWithoutKeys) }, Exception, 'computed field without key field');
//raises(function () { prov.createSqlFromStorageModel(c.MultipleComputedAndKeys) }, Exception, 'multiple computed & key fields');
});*/
});<|fim▁end|> | });
module("sqLiteProviderTest_in_subselect"); |
<|file_name|>__init__.py<|end_file_name|><|fim▁begin|># -*- encoding: utf-8 -*-
"""
leap/email/imap/tests/__init__.py
----------------------------------
Module intialization file for leap.mx.tests, a module containing unittesting
code, using twisted.trial, for testing leap_mx.
@authors: Kali Kaneko, <[email protected]>
@license: GPLv3, see included LICENSE file
@copyright: © 2013 Kali Kaneko, see COPYLEFT file
"""
import os
import u1db
from leap.common.testing.basetest import BaseLeapTest
from leap.soledad.client import Soledad
from leap.soledad.common.document import SoledadDocument
__all__ = ['test_imap']
def run():
    """Placeholder entry point for the test suite; intentionally a no-op.

    Original note: "xxx fill me in".
    """
    pass
# -----------------------------------------------------------------------------
# Some tests inherit from BaseSoledadTest in order to have a working Soledad
# instance in each test.
# -----------------------------------------------------------------------------
class BaseSoledadIMAPTest(BaseLeapTest):
    """
    Instantiates GPG and Soledad for usage in LeapIMAPServer tests.

    Copied from BaseSoledadTest, but moving setup to classmethod.
    """

    def setUp(self):
        """Create two scratch u1db databases and a server-mocked Soledad."""
        # open test dbs (both live under the per-test temp directory)
        self.db1_file = os.path.join(
            self.tempdir, "db1.u1db")
        self.db2_file = os.path.join(
            self.tempdir, "db2.u1db")
        self._db1 = u1db.open(self.db1_file, create=True,
                              document_factory=SoledadDocument)
        self._db2 = u1db.open(self.db2_file, create=True,
                              document_factory=SoledadDocument)

        # soledad config info: empty server_url / no cert keeps everything local
        self.email = '[email protected]'
        secrets_path = os.path.join(
            self.tempdir, Soledad.STORAGE_SECRETS_FILE_NAME)
        local_db_path = os.path.join(
            self.tempdir, Soledad.LOCAL_DATABASE_FILE_NAME)
        server_url = ''
        cert_file = None
        self._soledad = self._soledad_instance(
            self.email, '123',
            secrets_path=secrets_path,
            local_db_path=local_db_path,
            server_url=server_url,
            cert_file=cert_file)

    def _soledad_instance(self, uuid, passphrase, secrets_path, local_db_path,
                          server_url, cert_file):
        """
        Return a Soledad instance for tests.
        """
        # BUGFIX: ``Mock`` was referenced below without ever being imported,
        # raising NameError at runtime.  Import it locally, preferring the
        # stdlib location (Python 3) with a fallback to the standalone
        # ``mock`` package (Python 2).
        try:
            from unittest.mock import Mock
        except ImportError:
            from mock import Mock

        # mock key fetching and storing so Soledad doesn't fail when trying to
        # reach the server.
        Soledad._fetch_keys_from_shared_db = Mock(return_value=None)
        Soledad._assert_keys_in_shared_db = Mock(return_value=None)

        # instantiate soledad
        def _put_doc_side_effect(doc):
            # remember the last document "stored" for later inspection
            self._doc_put = doc

        class MockSharedDB(object):
            # stand-in for the shared recovery database: reads return nothing,
            # writes are captured via _put_doc_side_effect
            get_doc = Mock(return_value=None)
            put_doc = Mock(side_effect=_put_doc_side_effect)

            def __call__(self):
                return self

        Soledad._shared_db = MockSharedDB()

        return Soledad(
            uuid,
            passphrase,
            secrets_path=secrets_path,
            local_db_path=local_db_path,
            server_url=server_url,
            cert_file=cert_file,
        )

    def tearDown(self):
        """Close the scratch databases and the Soledad instance."""
        self._db1.close()
        self._db2.close()
        self._soledad.close()
# Key material for testing
KEY_FINGERPRINT = "E36E738D69173C13D709E44F2F455E2824D18DDF"
PUBLIC_KEY = """
-----BEGIN PGP PUBLIC KEY BLOCK-----
Version: GnuPG v1.4.10 (GNU/Linux)
mQINBFC9+dkBEADNRfwV23TWEoGc/x0wWH1P7PlXt8MnC2Z1kKaKKmfnglVrpOiz
iLWoiU58sfZ0L5vHkzXHXCBf6Eiy/EtUIvdiWAn+yASJ1mk5jZTBKO/WMAHD8wTO
zpMsFmWyg3xc4DkmFa9KQ5EVU0o/nqPeyQxNMQN7px5pPwrJtJFmPxnxm+aDkPYx
irDmz/4DeDNqXliazGJKw7efqBdlwTHkl9Akw2gwy178pmsKwHHEMOBOFFvX61AT
huKqHYmlCGSliwbrJppTG7jc1/ls3itrK+CWTg4txREkSpEVmfcASvw/ZqLbjgfs
d/INMwXnR9U81O8+7LT6yw/ca4ppcFoJD7/XJbkRiML6+bJ4Dakiy6i727BzV17g
wI1zqNvm5rAhtALKfACha6YO43aJzairO4II1wxVHvRDHZn2IuKDDephQ3Ii7/vb
hUOf6XCSmchkAcpKXUOvbxm1yfB1LRa64mMc2RcZxf4mW7KQkulBsdV5QG2276lv
U2UUy2IutXcGP5nXC+f6sJJGJeEToKJ57yiO/VWJFjKN8SvP+7AYsQSqINUuEf6H
T5gCPCraGMkTUTPXrREvu7NOohU78q6zZNaL3GW8ai7eSeANSuQ8Vzffx7Wd8Y7i
Pw9sYj0SMFs1UgjbuL6pO5ueHh+qyumbtAq2K0Bci0kqOcU4E9fNtdiovQARAQAB
tBxMZWFwIFRlc3QgS2V5IDxsZWFwQGxlYXAuc2U+iQI3BBMBCAAhBQJQvfnZAhsD
BQsJCAcDBRUKCQgLBRYCAwEAAh4BAheAAAoJEC9FXigk0Y3fT7EQAKH3IuRniOpb
T/DDIgwwjz3oxB/W0DDMyPXowlhSOuM0rgGfntBpBb3boezEXwL86NPQxNGGruF5
hkmecSiuPSvOmQlqlS95NGQp6hNG0YaKColh+Q5NTspFXCAkFch9oqUje0LdxfSP
QfV9UpeEvGyPmk1I9EJV/YDmZ4+Djge1d7qhVZInz4Rx1NrSyF/Tc2EC0VpjQFsU
Y9Kb2YBBR7ivG6DBc8ty0jJXi7B4WjkFcUEJviQpMF2dCLdonCehYs1PqsN1N7j+
eFjQd+hqVMJgYuSGKjvuAEfClM6MQw7+FmFwMyLgK/Ew/DttHEDCri77SPSkOGSI
txCzhTg6798f6mJr7WcXmHX1w1Vcib5FfZ8vTDFVhz/XgAgArdhPo9V6/1dgSSiB
KPQ/spsco6u5imdOhckERE0lnAYvVT6KE81TKuhF/b23u7x+Wdew6kK0EQhYA7wy
7LmlaNXc7rMBQJ9Z60CJ4JDtatBWZ0kNrt2VfdDHVdqBTOpl0CraNUjWE5YMDasr
K2dF5IX8D3uuYtpZnxqg0KzyLg0tzL0tvOL1C2iudgZUISZNPKbS0z0v+afuAAnx
2pTC3uezbh2Jt8SWTLhll4i0P4Ps5kZ6HQUO56O+/Z1cWovX+mQekYFmERySDR9n
3k1uAwLilJmRmepGmvYbB8HloV8HqwgguQINBFC9+dkBEAC0I/xn1uborMgDvBtf
H0sEhwnXBC849/32zic6udB6/3Efk9nzbSpL3FSOuXITZsZgCHPkKarnoQ2ztMcS
sh1ke1C5gQGms75UVmM/nS+2YI4vY8OX/GC/on2vUyncqdH+bR6xH5hx4NbWpfTs
iQHmz5C6zzS/kuabGdZyKRaZHt23WQ7JX/4zpjqbC99DjHcP9BSk7tJ8wI4bkMYD
uFVQdT9O6HwyKGYwUU4sAQRAj7XCTGvVbT0dpgJwH4RmrEtJoHAx4Whg8mJ710E0
GCmzf2jqkNuOw76ivgk27Kge+Hw00jmJjQhHY0yVbiaoJwcRrPKzaSjEVNgrpgP3
lXPRGQArgESsIOTeVVHQ8fhK2YtTeCY9rIiO+L0OX2xo9HK7hfHZZWL6rqymXdyS
fhzh/f6IPyHFWnvj7Brl7DR8heMikygcJqv+ed2yx7iLyCUJ10g12I48+aEj1aLe
dP7lna32iY8/Z0SHQLNH6PXO9SlPcq2aFUgKqE75A/0FMk7CunzU1OWr2ZtTLNO1
WT/13LfOhhuEq9jTyTosn0WxBjJKq18lnhzCXlaw6EAtbA7CUwsD3CTPR56aAXFK
3I7KXOVAqggrvMe5Tpdg5drfYpI8hZovL5aAgb+7Y5ta10TcJdUhS5K3kFAWe/td
U0cmWUMDP1UMSQ5Jg6JIQVWhSwARAQABiQIfBBgBCAAJBQJQvfnZAhsMAAoJEC9F
Xigk0Y3fRwsP/i0ElYCyxeLpWJTwo1iCLkMKz2yX1lFVa9nT1BVTPOQwr/IAc5OX
NdtbJ14fUsKL5pWgW8OmrXtwZm1y4euI1RPWWubG01ouzwnGzv26UcuHeqC5orZj
cOnKtL40y8VGMm8LoicVkRJH8blPORCnaLjdOtmA3rx/v2EXrJpSa3AhOy0ZSRXk
ZSrK68AVNwamHRoBSYyo0AtaXnkPX4+tmO8X8BPfj125IljubvwZPIW9VWR9UqCE
VPfDR1XKegVb6VStIywF7kmrknM1C5qUY28rdZYWgKorw01hBGV4jTW0cqde3N51
XT1jnIAa+NoXUM9uQoGYMiwrL7vNsLlyyiW5ayDyV92H/rIuiqhFgbJsHTlsm7I8
oGheR784BagAA1NIKD1qEO9T6Kz9lzlDaeWS5AUKeXrb7ZJLI1TTCIZx5/DxjLqM
Tt/RFBpVo9geZQrvLUqLAMwdaUvDXC2c6DaCPXTh65oCZj/hqzlJHH+RoTWWzKI+
BjXxgUWF9EmZUBrg68DSmI+9wuDFsjZ51BcqvJwxyfxtTaWhdoYqH/UQS+D1FP3/
diZHHlzwVwPICzM9ooNTgbrcDzyxRkIVqsVwBq7EtzcvgYUyX53yG25Giy6YQaQ2
ZtQ/VymwFL3XdUWV6B/hU4PVAFvO3qlOtdJ6TpE+nEWgcWjCv5g7RjXX
=MuOY
-----END PGP PUBLIC KEY BLOCK-----
"""
PRIVATE_KEY = """
-----BEGIN PGP PRIVATE KEY BLOCK-----
Version: GnuPG v1.4.10 (GNU/Linux)
lQcYBFC9+dkBEADNRfwV23TWEoGc/x0wWH1P7PlXt8MnC2Z1kKaKKmfnglVrpOiz
iLWoiU58sfZ0L5vHkzXHXCBf6Eiy/EtUIvdiWAn+yASJ1mk5jZTBKO/WMAHD8wTO
zpMsFmWyg3xc4DkmFa9KQ5EVU0o/nqPeyQxNMQN7px5pPwrJtJFmPxnxm+aDkPYx
irDmz/4DeDNqXliazGJKw7efqBdlwTHkl9Akw2gwy178pmsKwHHEMOBOFFvX61AT<|fim▁hole|>d/INMwXnR9U81O8+7LT6yw/ca4ppcFoJD7/XJbkRiML6+bJ4Dakiy6i727BzV17g
wI1zqNvm5rAhtALKfACha6YO43aJzairO4II1wxVHvRDHZn2IuKDDephQ3Ii7/vb
hUOf6XCSmchkAcpKXUOvbxm1yfB1LRa64mMc2RcZxf4mW7KQkulBsdV5QG2276lv
U2UUy2IutXcGP5nXC+f6sJJGJeEToKJ57yiO/VWJFjKN8SvP+7AYsQSqINUuEf6H
T5gCPCraGMkTUTPXrREvu7NOohU78q6zZNaL3GW8ai7eSeANSuQ8Vzffx7Wd8Y7i
Pw9sYj0SMFs1UgjbuL6pO5ueHh+qyumbtAq2K0Bci0kqOcU4E9fNtdiovQARAQAB
AA/+JHtlL39G1wsH9R6UEfUQJGXR9MiIiwZoKcnRB2o8+DS+OLjg0JOh8XehtuCs
E/8oGQKtQqa5bEIstX7IZoYmYFiUQi9LOzIblmp2vxOm+HKkxa4JszWci2/ZmC3t
KtaA4adl9XVnshoQ7pijuCMUKB3naBEOAxd8s9d/JeReGIYkJErdrnVfNk5N71Ds
FmH5Ll3XtEDvgBUQP3nkA6QFjpsaB94FHjL3gDwum/cxzj6pCglcvHOzEhfY0Ddb
J967FozQTaf2JW3O+w3LOqtcKWpq87B7+O61tVidQPSSuzPjCtFF0D2LC9R/Hpky
KTMQ6CaKja4MPhjwywd4QPcHGYSqjMpflvJqi+kYIt8psUK/YswWjnr3r4fbuqVY
VhtiHvnBHQjz135lUqWvEz4hM3Xpnxydx7aRlv5NlevK8+YIO5oFbWbGNTWsPZI5
jpoFBpSsnR1Q5tnvtNHauvoWV+XN2qAOBTG+/nEbDYH6Ak3aaE9jrpTdYh0CotYF
q7csANsDy3JvkAzeU6WnYpsHHaAjqOGyiZGsLej1UcXPFMosE/aUo4WQhiS8Zx2c
zOVKOi/X5vQ2GdNT9Qolz8AriwzsvFR+bxPzyd8V6ALwDsoXvwEYinYBKK8j0OPv
OOihSR6HVsuP9NUZNU9ewiGzte/+/r6pNXHvR7wTQ8EWLcEIAN6Zyrb0bHZTIlxt
VWur/Ht2mIZrBaO50qmM5RD3T5oXzWXi/pjLrIpBMfeZR9DWfwQwjYzwqi7pxtYx
nJvbMuY505rfnMoYxb4J+cpRXV8MS7Dr1vjjLVUC9KiwSbM3gg6emfd2yuA93ihv
Pe3mffzLIiQa4mRE3wtGcioC43nWuV2K2e1KjxeFg07JhrezA/1Cak505ab/tmvP
4YmjR5c44+yL/YcQ3HdFgs4mV+nVbptRXvRcPpolJsgxPccGNdvHhsoR4gwXMS3F
RRPD2z6x8xeN73Q4KH3bm01swQdwFBZbWVfmUGLxvN7leCdfs9+iFJyqHiCIB6Iv
mQfp8F0IAOwSo8JhWN+V1dwML4EkIrM8wUb4yecNLkyR6TpPH/qXx4PxVMC+vy6x
sCtjeHIwKE+9vqnlhd5zOYh7qYXEJtYwdeDDmDbL8oks1LFfd+FyAuZXY33DLwn0
cRYsr2OEZmaajqUB3NVmj3H4uJBN9+paFHyFSXrH68K1Fk2o3n+RSf2EiX+eICwI
L6rqoF5sSVUghBWdNegV7qfy4anwTQwrIMGjgU5S6PKW0Dr/3iO5z3qQpGPAj5OW
ATqPWkDICLbObPxD5cJlyyNE2wCA9VVc6/1d6w4EVwSq9h3/WTpATEreXXxTGptd
LNiTA1nmakBYNO2Iyo3djhaqBdWjk+EIAKtVEnJH9FAVwWOvaj1RoZMA5DnDMo7e
SnhrCXl8AL7Z1WInEaybasTJXn1uQ8xY52Ua4b8cbuEKRKzw/70NesFRoMLYoHTO
dyeszvhoDHberpGRTciVmpMu7Hyi33rM31K9epA4ib6QbbCHnxkWOZB+Bhgj1hJ8
xb4RBYWiWpAYcg0+DAC3w9gfxQhtUlZPIbmbrBmrVkO2GVGUj8kH6k4UV6kUHEGY
HQWQR0HcbKcXW81ZXCCD0l7ROuEWQtTe5Jw7dJ4/QFuqZnPutXVRNOZqpl6eRShw
7X2/a29VXBpmHA95a88rSQsL+qm7Fb3prqRmuMCtrUZgFz7HLSTuUMR867QcTGVh
cCBUZXN0IEtleSA8bGVhcEBsZWFwLnNlPokCNwQTAQgAIQUCUL352QIbAwULCQgH
AwUVCgkICwUWAgMBAAIeAQIXgAAKCRAvRV4oJNGN30+xEACh9yLkZ4jqW0/wwyIM
MI896MQf1tAwzMj16MJYUjrjNK4Bn57QaQW926HsxF8C/OjT0MTRhq7heYZJnnEo
rj0rzpkJapUveTRkKeoTRtGGigqJYfkOTU7KRVwgJBXIfaKlI3tC3cX0j0H1fVKX
hLxsj5pNSPRCVf2A5mePg44HtXe6oVWSJ8+EcdTa0shf03NhAtFaY0BbFGPSm9mA
QUe4rxugwXPLctIyV4uweFo5BXFBCb4kKTBdnQi3aJwnoWLNT6rDdTe4/nhY0Hfo
alTCYGLkhio77gBHwpTOjEMO/hZhcDMi4CvxMPw7bRxAwq4u+0j0pDhkiLcQs4U4
Ou/fH+pia+1nF5h19cNVXIm+RX2fL0wxVYc/14AIAK3YT6PVev9XYEkogSj0P7Kb
HKOruYpnToXJBERNJZwGL1U+ihPNUyroRf29t7u8flnXsOpCtBEIWAO8Muy5pWjV
3O6zAUCfWetAieCQ7WrQVmdJDa7dlX3Qx1XagUzqZdAq2jVI1hOWDA2rKytnReSF
/A97rmLaWZ8aoNCs8i4NLcy9Lbzi9QtornYGVCEmTTym0tM9L/mn7gAJ8dqUwt7n
s24dibfElky4ZZeItD+D7OZGeh0FDuejvv2dXFqL1/pkHpGBZhEckg0fZ95NbgMC
4pSZkZnqRpr2GwfB5aFfB6sIIJ0HGARQvfnZARAAtCP8Z9bm6KzIA7wbXx9LBIcJ
1wQvOPf99s4nOrnQev9xH5PZ820qS9xUjrlyE2bGYAhz5Cmq56ENs7THErIdZHtQ
uYEBprO+VFZjP50vtmCOL2PDl/xgv6J9r1Mp3KnR/m0esR+YceDW1qX07IkB5s+Q
us80v5LmmxnWcikWmR7dt1kOyV/+M6Y6mwvfQ4x3D/QUpO7SfMCOG5DGA7hVUHU/
Tuh8MihmMFFOLAEEQI+1wkxr1W09HaYCcB+EZqxLSaBwMeFoYPJie9dBNBgps39o
6pDbjsO+or4JNuyoHvh8NNI5iY0IR2NMlW4mqCcHEazys2koxFTYK6YD95Vz0RkA
K4BErCDk3lVR0PH4StmLU3gmPayIjvi9Dl9saPRyu4Xx2WVi+q6spl3ckn4c4f3+
iD8hxVp74+wa5ew0fIXjIpMoHCar/nndsse4i8glCddINdiOPPmhI9Wi3nT+5Z2t
9omPP2dEh0CzR+j1zvUpT3KtmhVICqhO+QP9BTJOwrp81NTlq9mbUyzTtVk/9dy3
zoYbhKvY08k6LJ9FsQYySqtfJZ4cwl5WsOhALWwOwlMLA9wkz0eemgFxStyOylzl
QKoIK7zHuU6XYOXa32KSPIWaLy+WgIG/u2ObWtdE3CXVIUuSt5BQFnv7XVNHJllD
Az9VDEkOSYOiSEFVoUsAEQEAAQAP/1AagnZQZyzHDEgw4QELAspYHCWLXE5aZInX
wTUJhK31IgIXNn9bJ0hFiSpQR2xeMs9oYtRuPOu0P8oOFMn4/z374fkjZy8QVY3e
PlL+3EUeqYtkMwlGNmVw5a/NbNuNfm5Darb7pEfbYd1gPcni4MAYw7R2SG/57GbC
9gucvspHIfOSfBNLBthDzmK8xEKe1yD2eimfc2T7IRYb6hmkYfeds5GsqvGI6mwI
85h4uUHWRc5JOlhVM6yX8hSWx0L60Z3DZLChmc8maWnFXd7C8eQ6P1azJJbW71Ih
7CoK0XW4LE82vlQurSRFgTwfl7wFYszW2bOzCuhHDDtYnwH86Nsu0DC78ZVRnvxn
E8Ke/AJgrdhIOo4UAyR+aZD2+2mKd7/waOUTUrUtTzc7i8N3YXGi/EIaNReBXaq+
ZNOp24BlFzRp+FCF/pptDW9HjPdiV09x0DgICmeZS4Gq/4vFFIahWctg52NGebT0
Idxngjj+xDtLaZlLQoOz0n5ByjO/Wi0ANmMv1sMKCHhGvdaSws2/PbMR2r4caj8m
KXpIgdinM/wUzHJ5pZyF2U/qejsRj8Kw8KH/tfX4JCLhiaP/mgeTuWGDHeZQERAT
xPmRFHaLP9/ZhvGNh6okIYtrKjWTLGoXvKLHcrKNisBLSq+P2WeFrlme1vjvJMo/
jPwLT5o9CADQmcbKZ+QQ1ZM9v99iDZol7SAMZX43JC019sx6GK0u6xouJBcLfeB4
OXacTgmSYdTa9RM9fbfVpti01tJ84LV2SyL/VJq/enJF4XQPSynT/tFTn1PAor6o
tEAAd8fjKdJ6LnD5wb92SPHfQfXqI84rFEO8rUNIE/1ErT6DYifDzVCbfD2KZdoF
cOSp7TpD77sY1bs74ocBX5ejKtd+aH99D78bJSMM4pSDZsIEwnomkBHTziubPwJb
OwnATy0LmSMAWOw5rKbsh5nfwCiUTM20xp0t5JeXd+wPVWbpWqI2EnkCEN+RJr9i
7dp/ymDQ+Yt5wrsN3NwoyiexPOG91WQVCADdErHsnglVZZq9Z8Wx7KwecGCUurJ2
H6lKudv5YOxPnAzqZS5HbpZd/nRTMZh2rdXCr5m2YOuewyYjvM757AkmUpM09zJX
MQ1S67/UX2y8/74TcRF97Ncx9HeELs92innBRXoFitnNguvcO6Esx4BTe1OdU6qR
ER3zAmVf22Le9ciXbu24DN4mleOH+OmBx7X2PqJSYW9GAMTsRB081R6EWKH7romQ
waxFrZ4DJzZ9ltyosEJn5F32StyLrFxpcrdLUoEaclZCv2qka7sZvi0EvovDVEBU
e10jOx9AOwf8Gj2ufhquQ6qgVYCzbP+YrodtkFrXRS3IsljIchj1M2ffB/0bfoUs
rtER9pLvYzCjBPg8IfGLw0o754Qbhh/ReplCRTusP/fQMybvCvfxreS3oyEriu/G
GufRomjewZ8EMHDIgUsLcYo2UHZsfF7tcazgxMGmMvazp4r8vpgrvW/8fIN/6Adu
tF+WjWDTvJLFJCe6O+BFJOWrssNrrra1zGtLC1s8s+Wfpe+bGPL5zpHeebGTwH1U
22eqgJArlEKxrfarz7W5+uHZJHSjF/K9ZvunLGD0n9GOPMpji3UO3zeM8IYoWn7E
/EWK1XbjnssNemeeTZ+sDh+qrD7BOi+vCX1IyBxbfqnQfJZvmcPWpruy1UsO+aIC
0GY8Jr3OL69dDQ21jueJAh8EGAEIAAkFAlC9+dkCGwwACgkQL0VeKCTRjd9HCw/+
LQSVgLLF4ulYlPCjWIIuQwrPbJfWUVVr2dPUFVM85DCv8gBzk5c121snXh9Swovm
laBbw6ate3BmbXLh64jVE9Za5sbTWi7PCcbO/bpRy4d6oLmitmNw6cq0vjTLxUYy
bwuiJxWREkfxuU85EKdouN062YDevH+/YResmlJrcCE7LRlJFeRlKsrrwBU3BqYd
GgFJjKjQC1peeQ9fj62Y7xfwE9+PXbkiWO5u/Bk8hb1VZH1SoIRU98NHVcp6BVvp
VK0jLAXuSauSczULmpRjbyt1lhaAqivDTWEEZXiNNbRyp17c3nVdPWOcgBr42hdQ
z25CgZgyLCsvu82wuXLKJblrIPJX3Yf+si6KqEWBsmwdOWybsjygaF5HvzgFqAAD
U0goPWoQ71PorP2XOUNp5ZLkBQp5etvtkksjVNMIhnHn8PGMuoxO39EUGlWj2B5l
Cu8tSosAzB1pS8NcLZzoNoI9dOHrmgJmP+GrOUkcf5GhNZbMoj4GNfGBRYX0SZlQ
GuDrwNKYj73C4MWyNnnUFyq8nDHJ/G1NpaF2hiof9RBL4PUU/f92JkceXPBXA8gL
Mz2ig1OButwPPLFGQhWqxXAGrsS3Ny+BhTJfnfIbbkaLLphBpDZm1D9XKbAUvdd1
RZXoH+FTg9UAW87eqU610npOkT6cRaBxaMK/mDtGNdc=
=JTFu
-----END PGP PRIVATE KEY BLOCK-----
"""<|fim▁end|> | huKqHYmlCGSliwbrJppTG7jc1/ls3itrK+CWTg4txREkSpEVmfcASvw/ZqLbjgfs |
<|file_name|>webData.py<|end_file_name|><|fim▁begin|># -*- coding: utf-8 -*-
# <nbformat>3.0</nbformat>
# <headingcell level=1>
# Intercity Python API Development
# <codecell>
from bs4 import BeautifulSoup
import requests
import pickle
loadSite = requests.get('http://www.intercity.co.nz/')
siteData = loadSite.content
blehData = siteData.split()
blehData[0:20]
siteData.swapcase()
print siteData.find('a')
omgSite = BeautifulSoup(siteData)
linkZite = omgSite.text
# <codecell>
pickle.dump(linkZite, open('outpuz.txt', 'wb'))
# <codecell>
dizTxt = open('outpuz.txt', 'r')
dizTxt.read()
# <codecell>
def save(linkZite):
    """Write the whitespace-normalized lines of ``linkZite`` to ``save.txt``.

    Each element of ``linkZite`` is split on whitespace and the tokens are
    re-joined with single spaces, one output line per input element.

    BUGFIX: the original body referenced ``line`` although the loop variable
    was ``linz``, wrote through ``savefilz`` (wrong case, NameError), and
    passed a list to ``write`` (TypeError).
    """
    saveFilz = open('save.txt', 'w')
    try:
        for linz in linkZite:
            values = linz.split()
            # join the tokens back into a single writable string
            saveFilz.write(' '.join(values) + '\n')
    finally:
        # always release the file handle, even on a bad input element
        saveFilz.close()
# <codecell>
print linkZite
# <codecell>
print omgSite.unwrap
# <codecell>
omgSite.encode
# <codecell>
savzSite = omgSite.find_all(id=True)
# <codecell>
sortSite = linkSite[0:30]
# <codecell>
print daSite.next_element
# <codecell>
daSite = sortSite[15]
# <codecell>
linkSite = omgSite.find_all('a')
# <codecell>
saveLinkz = open('htmldoc', 'w')
saveLinkz.write(siteData)
saveLinkz.close()
# <codecell>
openLinkz = open('htmldoc', 'r')
openLinkz.read()
# <codecell>
print omgSite.extract()
# <codecell>
print omgSite.setup
# <codecell>
print omgSite.title
# <codecell>
print omgSite.wrap
# <codecell>
print omgSite.body
# <codecell>
print omgSite.head
# <codecell>
print omgSite.currentTag()
# <codecell>
print omgSite.prettify
# <codecell>
# <codecell>
# <codecell>
print loadSite.url
# <codecell>
beaut = BeautifulSoup(loadSite)
# <codecell>
reTweetz = open('testing.txt', 'w')
reTweetz.write('Fixed request')
reTweetz.close()
# <codecell>
daTweetz = open('testing.txt', 'r')
daTweetz.read()
# <codecell>
print diemLink
# <codecell>
for data in loadSite:
mixData = BeautifulSoup(data)
diemLink = mixData.a
print diemLink
seioLink = mixData.findAll('a')
print seioLink
print(mixData.get_text())
# <codecell>
mixOpen = open('outputz', 'r')
mixOpen.read()
# <codecell>
%%bash
git add .
git commit -m daTweetz
# <codecell>
%%bash
git push https://github.com/wcmckee/intercity
# <codecell>
testing = []
# <codecell><|fim▁hole|>testing.append(daTweetz)
# <codecell>
print testing
# <codecell>
for site in loadSite:
# <codecell>
for site in loadSite:
daLink = []
dafile = open('output', 'w')
daLink.append(site)
inter = BeautifulSoup(site)
daLink.append(inter)
geter = inter.text
daLink.append(geter)
beuLink = BeautifulSoup(daLink[0])
print beuLink.a
# <codecell>
for site in loadSite:
print'print site'
inter = BeautifulSoup(site)
print inter.titlefor site in loadSite:
print'print site'
inter = BeautifulSoup(site)
print inter.title
# <codecell>
for site in loadSite:
print'print site'
inter = BeautifulSoup(site)
print inter.title
# <codecell>
# <codecell>
print inter
# <codecell>
print inter
# <headingcell level=2>
# Timetable
# <codecell>
loadUrl = requests.get('http://www.intercity.co.nz/travel-info/timetable/')
# <codecell>
for da in loadUrl:
print da.title()
# <codecell>
selz = BeautifulSoup(da)
# <codecell>
print selz.title
# <codecell>
timez = BeautifulSoup(loadUrl)
# <codecell>
nakedSite = requests.get('http://nakedbus.com/nz/bus/')
# <codecell>
for naked in nakedSite:
print naked
# <codecell>
# <codecell><|fim▁end|> | |
<|file_name|>__init__.py<|end_file_name|><|fim▁begin|># -*- coding: utf-8 -*-
__title__ = 'pywebtask'
__version__ = '0.1.8'
__build__ = 0x000108
__author__ = 'Sebastián José Seba'
__license__ = 'MIT'
__copyright__ = 'Copyright 2016 Sebastián José Seba'
from .webtasks import run, run_file
# Set default logging handler to avoid "No handler found" warnings.
import logging
try: # Python 2.7+
from logging import NullHandler
except ImportError:
class NullHandler(logging.Handler):
def emit(self, record):<|fim▁hole|><|fim▁end|> | pass
logging.getLogger(__name__).addHandler(NullHandler()) |
<|file_name|>preview.ts<|end_file_name|><|fim▁begin|>import {
document,
HTMLElement,
__STORYBOOK_STORY_STORE__ as storyStore,
__STORYBOOK_CLIENT_API__ as clientApi,
} from 'global';
import qs from 'qs';
import addons from '@storybook/addons';
import { STORY_CHANGED, SELECT_STORY } from '@storybook/core-events';
import { toId } from '@storybook/csf';
import { logger } from '@storybook/client-logger';
// Navigation target addressed by its unique story id.
interface ParamsId {
  storyId: string;
}
// Navigation target addressed by kind (component title) plus story name.
interface ParamsCombo {
  kind: string;
  story: string;
}
// Ask the Storybook manager to select another story by emitting a
// SELECT_STORY event on the shared addons channel.
export const navigate = (params: ParamsId | ParamsCombo) => {
  return addons.getChannel().emit(SELECT_STORY, params);
};
// Build an absolute preview URL for the given story id, carrying over every
// other query-string parameter of the current location unchanged.
const generateUrl = (id: string) => {
  const location = document.location;
  const existingQuery = qs.parse(location.search, { ignoreQueryPrefix: true });
  const search = qs.stringify({ ...existingQuery, id }, { encode: false });
  const base = location.origin + location.pathname;
  return `${base}?${search}`;
};
// Curried helper: resolves `value` either directly (plain string) or, when it
// is a function, by invoking it with the captured `args`.
const valueOrCall = (args: string[]) => {
  return (value: string | ((...args: string[]) => string)) => {
    if (typeof value === 'function') {
      return value(...args);
    }
    return value;
  };
};
// Returns a callback that navigates to another story. `idOrKindInput` may be
// a full story id or a kind (component title); `storyInput` a story name.
// Either argument may also be a function, in which case it is resolved with
// the callback's runtime arguments (see valueOrCall).
export const linkTo = (
  idOrKindInput: string,
  storyInput?: string | ((...args: any[]) => string)
) => (...args: any[]) => {
  const resolver = valueOrCall(args);
  // Story currently rendered — used as the fallback kind when only a story
  // name is given.
  const { storyId } = storyStore.getSelection();
  const current = storyStore.fromId(storyId) || {};
  const kindVal = resolver(idOrKindInput);
  const storyVal = resolver(storyInput);
  // First try interpreting kindVal as a complete story id.
  const fromid = storyStore.fromId(kindVal);
  const item =
    fromid ||
    // Otherwise scan all registered stories, matching on whichever of
    // kind/story was supplied (missing kind defaults to the current kind).
    clientApi.raw().find((i: any) => {
      if (kindVal && storyVal) {
        return i.kind === kindVal && i.story === storyVal;
      }
      if (!kindVal && storyVal) {
        return i.kind === current.kind && i.story === storyVal;
      }
      if (kindVal && !storyVal) {
        return i.kind === kindVal;
      }
      if (!kindVal && !storyVal) {
        return i.kind === current.kind;
      }
      return false;
    });
  if (item) {
    navigate({
      kind: item.kind,
      story: item.story,
    });
  } else {
    // No matching story registered — log instead of throwing.
    logger.error('could not navigate to provided story');
  }
};
export const hrefTo = (kind: string, name: string): Promise<string> => {
return new Promise(resolve => {
const { storyId } = storyStore.getSelection();<|fim▁hole|> });
};
// Delegated click handler: intercepts clicks on elements carrying
// data-sb-kind / data-sb-story attributes and turns them into story
// navigation instead of a normal link activation.
const linksListener = (e: Event) => {
  if (!(e.target instanceof HTMLElement)) {
    return;
  }
  const element = e.target as HTMLElement;
  const kind = element.dataset.sbKind;
  const story = element.dataset.sbStory;
  if (!kind && !story) {
    return;
  }
  e.preventDefault();
  navigate({ kind, story });
};
const on = () => {
if (!hasListener) {
hasListener = true;
document.addEventListener('click', linksListener);
}
};
const off = () => {
if (hasListener) {
hasListener = false;
document.removeEventListener('click', linksListener);
}
};
export const withLinks = (storyFn: () => void) => {
on();
addons.getChannel().once(STORY_CHANGED, off);
return storyFn();
};<|fim▁end|> | const current = storyStore.fromId(storyId);
resolve(generateUrl(toId(kind || current.kind, name))); |
<|file_name|>ctaBacktesting.py<|end_file_name|><|fim▁begin|># encoding: UTF-8
'''
本文件中包含的是CTA模块的回测引擎,回测引擎的API和CTA引擎一致,
可以使用和实盘相同的代码进行回测。
'''
from datetime import datetime, timedelta
from collections import OrderedDict
from itertools import product
import pymongo
# import MySQLdb
import json
import os
import cPickle
import csv
from ctaBase import *
from ctaSetting import *
from eventEngine import *
from vtConstant import *
from vtGateway import VtOrderData, VtTradeData
from vtFunction import loadMongoSetting
import logging
import copy
import pandas as pd
from ctaBase import *
########################################################################
class BacktestingEngine(object):
    """
    CTA backtesting engine.

    Exposes the same API as the live strategy engine so identical strategy
    code can run both in backtesting and in production.

    Modified by IncenseLee:
    1. Added MySQL database support.
    2. Changed data loading to a batched, lazy-loading scheme.
    3. Added bar-mode backtesting driven by csv files.
    4. Added tick-mode spread (arbitrage) backtesting merged from csv files.
    5. Added an EventEngine; newBar fires an OnBar event so an external
       host can render bars while the backtest runs.
    """
    # Data-replay granularity.
    TICK_MODE = 'tick'  # 数据模式,逐Tick回测  (replay tick by tick)
    BAR_MODE = 'bar'  # 数据模式,逐Bar回测  (replay bar by bar)
    # P&L accounting modes.
    REALTIME_MODE = 'RealTime'  # 逐笔交易计算资金 — mark capital trade-by-trade so strategies can size positions from equity
    FINAL_MODE = 'Final'  # 最后才统计交易 — settle all trades at the end; not suitable for percent-of-equity sizing
# ----------------------------------------------------------------------
def __init__(self, eventEngine=None, initCapital = 100000):
"""Constructor"""
self.eventEngine = eventEngine
# 本地停止单编号计数
self.stopOrderCount = 0
# stopOrderID = STOPORDERPREFIX + str(stopOrderCount)
# 本地停止单字典
# key为stopOrderID,value为stopOrder对象
self.stopOrderDict = {} # 停止单撤销后不会从本字典中删除
self.workingStopOrderDict = {} # 停止单撤销后会从本字典中删除
# 引擎类型为回测
self.engineType = ENGINETYPE_BACKTESTING
# 回测相关
self.strategy = None # 回测策略
self.mode = self.BAR_MODE # 回测模式,默认为K线
self.slippage = 0 # 回测时假设的滑点
self.rate = 0 # 回测时假设的佣金比例(适用于百分比佣金)
self.size = 1 # 合约大小,默认为1
self.dbClient = None # 数据库客户端
self.dbCursor = None # 数据库指针
self.historyData = [] # 历史数据的列表,回测用
self.initData = [] # 初始化用的数据
self.backtestingData = [] # 回测用的数据
self.dbName = '' # 回测数据库名
self.symbol = '' # 回测集合名
self.dataStartDate = None # 回测数据开始日期,datetime对象
self.dataEndDate = None # 回测数据结束日期,datetime对象
self.strategyStartDate = None # 策略启动日期(即前面的数据用于初始化),datetime对象
self.limitOrderDict = OrderedDict() # 限价单字典
self.workingLimitOrderDict = OrderedDict() # 活动限价单字典,用于进行撮合用
self.limitOrderCount = 0 # 限价单编号
# 持仓缓存字典
# key为vtSymbol,value为PositionBuffer对象
self.posBufferDict = {}
<|fim▁hole|> self.logList = [] # 日志记录
# 当前最新数据,用于模拟成交用
self.tick = None
self.bar = None
self.dt = None # 最新的时间
self.gatewayName = u'BackTest'
# csvFile相关
self.barTimeInterval = 60 # csv文件,属于K线类型,K线的周期(秒数),缺省是1分钟
# 回测计算相关
self.calculateMode = self.FINAL_MODE
self.usageCompounding = False # 是否使用简单复利 (只针对FINAL_MODE有效)
self.initCapital = 100000 # 期初资金
self.capital = self.initCapital # 资金 (相当于Balance)
self.maxCapital = self.initCapital # 资金最高净值
# 费用情况
self.avaliable = self.initCapital
self.percent = EMPTY_FLOAT
self.percentLimit = 30 # 投资仓位比例上限
self.maxPnl = 0 # 最高盈利
self.minPnl = 0 # 最大亏损
self.maxVolume = 1 # 最大仓位数
self.winningResult = 0 # 盈利次数
self.losingResult = 0 # 亏损次数
self.totalResult = 0 # 总成交数量
self.totalWinning = 0 # 总盈利
self.totalLosing = 0 # 总亏损
self.totalTurnover = 0 # 总成交金额(合约面值)
self.totalCommission = 0 # 总手续费
self.totalSlippage = 0 # 总滑点
self.timeList = [] # 时间序列
self.pnlList = [] # 每笔盈亏序列
self.capitalList = [] # 盈亏汇总的时间序列
self.drawdownList = [] # 回撤的时间序列
self.drawdownRateList = [] # 最大回撤比例的时间序列
self.exportTradeList = [] # 导出交易记录列表
self.fixCommission = EMPTY_FLOAT # 固定交易费用
def getAccountInfo(self):
"""返回账号的实时权益,可用资金,仓位比例,投资仓位比例上限"""
if self.capital == EMPTY_FLOAT:
self.percent = EMPTY_FLOAT
return self.capital, self.avaliable, self.percent, self.percentLimit
# ----------------------------------------------------------------------
def setStartDate(self, startDate='20100416', initDays=10):
"""设置回测的启动日期"""
self.dataStartDate = datetime.strptime(startDate, '%Y%m%d')
# 初始化天数
initTimeDelta = timedelta(initDays)
self.strategyStartDate = self.dataStartDate + initTimeDelta
# ----------------------------------------------------------------------
def setEndDate(self, endDate=''):
"""设置回测的结束日期"""
if endDate:
self.dataEndDate = datetime.strptime(endDate, '%Y%m%d')
else:
self.dataEndDate = datetime.now()
def setMinDiff(self, minDiff):
"""设置回测品种的最小跳价,用于修正数据"""
self.minDiff = minDiff
# ----------------------------------------------------------------------
def setBacktestingMode(self, mode):
"""设置回测模式"""
self.mode = mode
# ----------------------------------------------------------------------
def setDatabase(self, dbName, symbol):
"""设置历史数据所用的数据库"""
self.dbName = dbName
self.symbol = symbol
# ----------------------------------------------------------------------
    def loadHistoryDataFromMongo(self):
        """Load history data from MongoDB.

        Fills self.initData with the warm-up window
        [dataStartDate, strategyStartDate) and leaves self.dbCursor pointing
        at the replay window for lazy iteration by runBacktesting().
        """
        host, port, log = loadMongoSetting()

        self.dbClient = pymongo.MongoClient(host, port)
        collection = self.dbClient[self.dbName][self.symbol]

        self.output(u'开始载入数据')

        # Pick the data class for the configured replay mode.
        # NOTE(review): `func` is assigned here but never used in this method;
        # replay happens later in runBacktesting().
        if self.mode == self.BAR_MODE:
            dataClass = CtaBarData
            func = self.newBar
        else:
            dataClass = CtaTickData
            func = self.newTick

        # Warm-up data used for strategy initialization.
        flt = {'datetime': {'$gte': self.dataStartDate,
                            '$lt': self.strategyStartDate}}
        initCursor = collection.find(flt)

        # Materialize the warm-up cursor into self.initData.
        for d in initCursor:
            data = dataClass()
            data.__dict__ = d
            self.initData.append(data)

        # Replay data: from strategy start, optionally bounded by the end date.
        if not self.dataEndDate:
            flt = {'datetime': {'$gte': self.strategyStartDate}}  # data filter
        else:
            flt = {'datetime': {'$gte': self.strategyStartDate,
                                '$lte': self.dataEndDate}}
        self.dbCursor = collection.find(flt)

        self.output(u'载入完成,数据量:%s' % (initCursor.count() + self.dbCursor.count()))
# ----------------------------------------------------------------------
    def connectMysql(self):
        """Connect to MySQL using settings read from mysql_connect.json.

        Sets self.__mysqlConnection / self.__mysqlConnected on success;
        logs and returns silently on any failure.
        NOTE(review): the module-level `import MySQLdb` is commented out, so
        this method would raise NameError if actually called — confirm.
        """
        # Load the json settings file.
        fileName = 'mysql_connect.json'
        try:
            f = file(fileName)
        except IOError:
            self.writeCtaLog(u'回测引擎读取Mysql_connect.json失败')
            return

        # Parse the json settings.
        setting = json.load(f)
        try:
            mysql_host = str(setting['host'])
            mysql_port = int(setting['port'])
            mysql_user = str(setting['user'])
            mysql_passwd = str(setting['passwd'])
            mysql_db = str(setting['db'])
        # NOTE(review): a missing key raises KeyError, not IOError — this
        # handler would not catch it; confirm intended behavior.
        except IOError:
            self.writeCtaLog(u'回测引擎读取Mysql_connect.json,连接配置缺少字段,请检查')
            return

        try:
            self.__mysqlConnection = MySQLdb.connect(host=mysql_host, user=mysql_user,
                                                     passwd=mysql_passwd, db=mysql_db, port=mysql_port)
            self.__mysqlConnected = True
            self.writeCtaLog(u'回测引擎连接MysqlDB成功')
        except Exception:
            self.writeCtaLog(u'回测引擎连接MysqlDB失败')
# ----------------------------------------------------------------------
    def loadDataHistoryFromMysql(self, symbol, startDate, endDate):
        """Load historical tick data from MySQL into self.historyData.

        Loading too much data at once can fail; keep the window under half
        a year. Results are cached locally and reloaded from cache when
        available.
        """
        if not endDate:
            endDate = datetime.today()

        # Check for a local cache first.
        if self.__loadDataHistoryFromLocalCache(symbol, startDate, endDate):
            self.writeCtaLog(u'历史TICK数据从Cache载入')
            return

        # Query MySQL in fixed-size date windows.
        intervalDays = 10

        for i in range(0, (endDate - startDate).days + 1, intervalDays):
            d1 = startDate + timedelta(days=i)

            if (endDate - d1).days > 10:
                d2 = startDate + timedelta(days=i + intervalDays - 1)
            else:
                d2 = endDate

            # Pull one window from MySQL (appends to self.historyData).
            self.__qryDataHistoryFromMysql(symbol, d1, d2)

        self.writeCtaLog(u'历史TICK数据共载入{0}条'.format(len(self.historyData)))

        # Save the combined result to the local cache.
        self.__saveDataHistoryToLocalCache(symbol, startDate, endDate)
def __loadDataHistoryFromLocalCache(self, symbol, startDate, endDate):
"""看本地缓存是否存在
added by IncenseLee
"""
# 运行路径下cache子目录
cacheFolder = os.getcwd() + '/cache'
# cache文件
cacheFile = u'{0}/{1}_{2}_{3}.pickle'. \
format(cacheFolder, symbol, startDate.strftime('%Y-%m-%d'), endDate.strftime('%Y-%m-%d'))
if not os.path.isfile(cacheFile):
return False
else:
try:
# 从cache文件加载
cache = open(cacheFile, mode='r')
self.historyData = cPickle.load(cache)
cache.close()
return True
except Exception as e:
self.writeCtaLog(u'读取文件{0}失败'.format(cacheFile))
return False
def __saveDataHistoryToLocalCache(self, symbol, startDate, endDate):
"""保存本地缓存
added by IncenseLee
"""
# 运行路径下cache子目录
cacheFolder = os.getcwd() + '/cache'
# 创建cache子目录
if not os.path.isdir(cacheFolder):
os.mkdir(cacheFolder)
# cache 文件名
cacheFile = u'{0}/{1}_{2}_{3}.pickle'. \
format(cacheFolder, symbol, startDate.strftime('%Y-%m-%d'), endDate.strftime('%Y-%m-%d'))
# 重复存在 返回
if os.path.isfile(cacheFile):
return False
else:
# 写入cache文件
cache = open(cacheFile, mode='w')
cPickle.dump(self.historyData, cache)
cache.close()
return True
# ----------------------------------------------------------------------
    def __qryDataHistoryFromMysql(self, symbol, startDate, endDate):
        """Query historical tick data from MySQL and append it to self.historyData.

        added by IncenseLee
        """
        try:
            self.connectMysql()
            if self.__mysqlConnected:
                # Dict cursor so each row comes back keyed by column alias.
                cur = self.__mysqlConnection.cursor(MySQLdb.cursors.DictCursor)

                # NOTE(review): the first branch queries table TB_{0}MI while the
                # other two query TB__{0}MI (double underscore) — likely a typo in
                # one of them; confirm against the actual schema.
                if endDate:
                    # startDate ~ endDate window
                    sqlstring = ' select \'{0}\' as InstrumentID, str_to_date(concat(ndate,\' \', ntime),' \
                                '\'%Y-%m-%d %H:%i:%s\') as UpdateTime,price as LastPrice,vol as Volume, day_vol as DayVolume,' \
                                'position_vol as OpenInterest,bid1_price as BidPrice1,bid1_vol as BidVolume1, ' \
                                'sell1_price as AskPrice1, sell1_vol as AskVolume1 from TB_{0}MI ' \
                                'where ndate between cast(\'{1}\' as date) and cast(\'{2}\' as date) order by UpdateTime'. \
                        format(symbol, startDate, endDate)

                elif startDate:
                    # startDate ~ now
                    sqlstring = ' select \'{0}\' as InstrumentID,str_to_date(concat(ndate,\' \', ntime),' \
                                '\'%Y-%m-%d %H:%i:%s\') as UpdateTime,price as LastPrice,vol as Volume, day_vol as DayVolume,' \
                                'position_vol as OpenInterest,bid1_price as BidPrice1,bid1_vol as BidVolume1, ' \
                                'sell1_price as AskPrice1, sell1_vol as AskVolume1 from TB__{0}MI ' \
                                'where ndate > cast(\'{1}\' as date) order by UpdateTime'. \
                        format(symbol, startDate)

                else:
                    # everything
                    sqlstring = ' select \'{0}\' as InstrumentID,str_to_date(concat(ndate,\' \', ntime),' \
                                '\'%Y-%m-%d %H:%i:%s\') as UpdateTime,price as LastPrice,vol as Volume, day_vol as DayVolume,' \
                                'position_vol as OpenInterest,bid1_price as BidPrice1,bid1_vol as BidVolume1, ' \
                                'sell1_price as AskPrice1, sell1_vol as AskVolume1 from TB__{0}MI order by UpdateTime'. \
                        format(symbol)

                self.writeCtaLog(sqlstring)

                # Execute the query.
                count = cur.execute(sqlstring)
                self.writeCtaLog(u'历史TICK数据共{0}条'.format(count))

                # Fetch in batches to bound memory use.
                fetch_counts = 0
                fetch_size = 1000

                while True:
                    results = cur.fetchmany(fetch_size)
                    if not results:
                        break
                    fetch_counts = fetch_counts + len(results)
                    if not self.historyData:
                        self.historyData = results
                    else:
                        self.historyData = self.historyData + results

                self.writeCtaLog(u'{1}~{2}历史TICK数据载入共{0}条'.format(fetch_counts, startDate, endDate))

            else:
                self.writeCtaLog(u'MysqlDB未连接,请检查')

        except MySQLdb.Error as e:
            self.writeCtaLog(u'MysqlDB载入数据失败,请检查.Error {0}'.format(e))
    def __dataToTick(self, data):
        """Convert a MySQL result row (dict) into a CtaTickData object.

        added by IncenseLee
        """
        tick = CtaTickData()
        symbol = data['InstrumentID']
        tick.symbol = symbol

        # Populate the tick object.
        tick.vtSymbol = symbol
        # tick.openPrice = data['OpenPrice']
        # tick.highPrice = data['HighestPrice']
        # tick.lowPrice = data['LowestPrice']
        tick.lastPrice = float(data['LastPrice'])

        # bug fix:
        # CTP's intraday volume is cumulative over the trading day, whereas the
        # database 'Volume' column is recomputed by the data vendor. Use
        # DayVolume to match live CTP behavior.
        # tick.volume = data['Volume']
        tick.volume = data['DayVolume']
        tick.openInterest = data['OpenInterest']

        # tick.upperLimit = data['UpperLimitPrice']
        # tick.lowerLimit = data['LowerLimitPrice']

        tick.datetime = data['UpdateTime']
        tick.date = tick.datetime.strftime('%Y-%m-%d')
        tick.time = tick.datetime.strftime('%H:%M:%S')

        # The database has no tradingDay column; approximate it with the
        # calendar date during backtesting.
        tick.tradingDay = tick.date

        tick.bidPrice1 = float(data['BidPrice1'])
        # tick.bidPrice2 = data['BidPrice2']
        # tick.bidPrice3 = data['BidPrice3']
        # tick.bidPrice4 = data['BidPrice4']
        # tick.bidPrice5 = data['BidPrice5']

        tick.askPrice1 = float(data['AskPrice1'])
        # tick.askPrice2 = data['AskPrice2']
        # tick.askPrice3 = data['AskPrice3']
        # tick.askPrice4 = data['AskPrice4']
        # tick.askPrice5 = data['AskPrice5']

        tick.bidVolume1 = data['BidVolume1']
        # tick.bidVolume2 = data['BidVolume2']
        # tick.bidVolume3 = data['BidVolume3']
        # tick.bidVolume4 = data['BidVolume4']
        # tick.bidVolume5 = data['BidVolume5']

        tick.askVolume1 = data['AskVolume1']
        # tick.askVolume2 = data['AskVolume2']
        # tick.askVolume3 = data['AskVolume3']
        # tick.askVolume4 = data['AskVolume4']
        # tick.askVolume5 = data['AskVolume5']

        return tick
# ----------------------------------------------------------------------
def getMysqlDeltaDate(self, symbol, startDate, decreaseDays):
"""从mysql库中获取交易日前若干天
added by IncenseLee
"""
try:
if self.__mysqlConnected:
# 获取mysql指针
cur = self.__mysqlConnection.cursor()
sqlstring = 'select distinct ndate from TB_{0}MI where ndate < ' \
'cast(\'{1}\' as date) order by ndate desc limit {2},1'.format(symbol, startDate,
decreaseDays - 1)
# self.writeCtaLog(sqlstring)
count = cur.execute(sqlstring)
if count > 0:
# 提取第一条记录
result = cur.fetchone()
return result[0]
else:
self.writeCtaLog(u'MysqlDB没有查询结果,请检查日期')
else:
self.writeCtaLog(u'MysqlDB未连接,请检查')
except MySQLdb.Error as e:
self.writeCtaLog(u'MysqlDB载入数据失败,请检查.Error {0}: {1}'.format(e.arg[0], e.arg[1]))
# 出错后缺省返回
return startDate - timedelta(days=3)
# ----------------------------------------------------------------------
    def runBackTestingWithArbTickFile(self, mainPath, arbSymbol):
        """Run a spread (arbitrage) backtest from local tick csv files.

        :param arbSymbol: spread symbol, e.g. 'SP rb1610&rb1701'.

        added by IncenseLee

        Raw ticks are stored in a day-session directory and a night-session
        directory, one file per contract per day, e.g.::

            Z:\\ticks\\SHFE\\201606\\RB\\0601\\RB1610.txt, RB1701.txt, ...
            Z:\\ticks\\SHFE_night\\201606\\RB\\0601\\...

        The night directory is keyed by calendar date, not trading day.
        For every day between start and end date, the day session is loaded
        first, then the night session. Leg1 and Leg2 ticks are merged into
        spread ticks and pushed through the strategy's onTick.

        NOTE(review): str.index() raises ValueError when the substring is
        missing, so a malformed arbSymbol aborts with an exception rather
        than the logged warning — confirm intended.
        """
        self.capital = self.initCapital  # reset starting capital

        if len(arbSymbol) < 1:
            self.writeCtaLog(u'套利合约为空')
            return

        if not (arbSymbol.upper().index("SP") == 0 and arbSymbol.index(" ") > 0 and arbSymbol.index("&") > 0):
            self.writeCtaLog(u'套利合约格式不符合')
            return

        # Extract Leg1 / Leg2 from e.g. 'SP rb1610&rb1701'.
        legs = arbSymbol[arbSymbol.index(" "):]
        leg1 = legs[1:legs.index("&")]
        leg2 = legs[legs.index("&") + 1:]
        self.writeCtaLog(u'Leg1:{0},Leg2:{1}'.format(leg1, leg2))

        if not self.dataStartDate:
            self.writeCtaLog(u'回测开始日期未设置。')
            return

        # e.g. RB
        if len(self.symbol) < 1:
            self.writeCtaLog(u'回测对象未设置。')
            return

        if not self.dataEndDate:
            self.dataEndDate = datetime.today()

        # Spread replay is tick-only.
        if self.mode == self.BAR_MODE:
            self.writeCtaLog(u'本回测仅支持tick模式')
            return

        testdays = (self.dataEndDate - self.dataStartDate).days

        if testdays < 1:
            self.writeCtaLog(u'回测时间不足')
            return

        for i in range(0, testdays):
            testday = self.dataStartDate + timedelta(days=i)
            self.output(u'回测日期:{0}'.format(testday))
            # Day session.
            self.__loadArbTicks(mainPath, testday, leg1, leg2)
            # Night session.
            self.__loadArbTicks(mainPath + '_night', testday, leg1, leg2)
    def __loadArbTicks(self, mainPath, testday, leg1, leg2):
        """Build spread ticks for one day from the two legs' tick files and
        push them through the strategy.

        Tries the local cache first; otherwise reads both legs' txt files,
        joins them on the (millisecond-adjusted) timestamp, computes bid/ask
        spreads, caches the result, and replays it via newTick().
        """
        self.writeCtaLog(u'加载回测日期:{0}\{1}的价差tick'.format(mainPath, testday))
        cachefilename = u'{0}_{1}_{2}_{3}_{4}'.format(self.symbol, leg1, leg2, mainPath, testday.strftime('%Y%m%d'))
        arbTicks = self.__loadArbTicksFromLocalCache(cachefilename)
        dt = None
        if len(arbTicks) < 1:
            leg1File = u'z:\\ticks\\{0}\\{1}\\{2}\\{3}\\{4}.txt' \
                .format(mainPath, testday.strftime('%Y%m'), self.symbol, testday.strftime('%m%d'), leg1)
            if not os.path.isfile(leg1File):
                self.writeCtaLog(u'{0}文件不存在'.format(leg1File))
                return
            leg2File = u'z:\\ticks\\{0}\\{1}\\{2}\\{3}\\{4}.txt' \
                .format(mainPath, testday.strftime('%Y%m'), self.symbol, testday.strftime('%m%d'), leg2)
            if not os.path.isfile(leg2File):
                self.writeCtaLog(u'{0}文件不存在'.format(leg2File))
                return
            # First read all leg2 ticks into a dict keyed by 'date time'.
            leg2Ticks = {}
            leg2CsvReadFile = file(leg2File, 'rb')
            # reader = csv.DictReader((line.replace('\0',' ') for line in leg2CsvReadFile), delimiter=",")
            reader = csv.DictReader(leg2CsvReadFile, delimiter=",")
            self.writeCtaLog(u'加载{0}'.format(leg2File))
            for row in reader:
                tick = CtaTickData()
                tick.vtSymbol = self.symbol
                tick.symbol = self.symbol
                tick.date = testday.strftime('%Y%m%d')
                tick.tradingDay = tick.date
                tick.time = row['Time']
                try:
                    tick.datetime = datetime.strptime(tick.date + ' ' + tick.time, '%Y%m%d %H:%M:%S.%f')
                except Exception as ex:
                    self.writeCtaError(u'日期转换错误:{0},{1}:{2}'.format(tick.date + ' ' + tick.time, Exception, ex))
                    continue
                # Millisecond disambiguation: two ticks sharing the same
                # whole second get 0 and 500 microseconds respectively.
                # NOTE(review): replace(microsecond=500) is 500 µs, while the
                # original comment says 500 ms (= microsecond=500000) — confirm.
                if tick.datetime.replace(microsecond=0) == dt:
                    # Same second as previous tick: bump to 500.
                    tick.datetime = tick.datetime.replace(microsecond=500)
                    tick.time = tick.datetime.strftime('%H:%M:%S.%f')
                else:
                    tick.datetime = tick.datetime.replace(microsecond=0)
                    tick.time = tick.datetime.strftime('%H:%M:%S.%f')
                dt = tick.datetime
                tick.lastPrice = float(row['LastPrice'])
                tick.volume = int(float(row['LVolume']))
                tick.bidPrice1 = float(row['BidPrice'])  # bid (lower price)
                tick.bidVolume1 = int(float(row['BidVolume']))
                tick.askPrice1 = float(row['AskPrice'])  # ask (higher price)
                tick.askVolume1 = int(float(row['AskVolume']))
                # Drop limit-up / limit-down placeholder rows.
                if (tick.bidPrice1 == float('1.79769E308') and tick.bidVolume1 == 0) \
                        or (tick.askPrice1 == float('1.79769E308') and tick.askVolume1 == 0):
                    continue
                dtStr = tick.date + ' ' + tick.time
                if dtStr in leg2Ticks:
                    self.writeCtaError(u'日内数据重复,异常,数据时间为:{0}'.format(dtStr))
                else:
                    leg2Ticks[dtStr] = tick
            leg1CsvReadFile = file(leg1File, 'rb')
            # reader = csv.DictReader((line.replace('\0',' ') for line in leg1CsvReadFile), delimiter=",")
            reader = csv.DictReader(leg1CsvReadFile, delimiter=",")
            self.writeCtaLog(u'加载{0}'.format(leg1File))
            dt = None
            for row in reader:
                arbTick = CtaTickData()
                arbTick.date = testday.strftime('%Y%m%d')
                arbTick.time = row['Time']
                try:
                    arbTick.datetime = datetime.strptime(arbTick.date + ' ' + arbTick.time, '%Y%m%d %H:%M:%S.%f')
                except Exception as ex:
                    self.writeCtaError(u'日期转换错误:{0},{1}:{2}'.format(arbTick.date + ' ' + arbTick.time, Exception, ex))
                    continue
                # Same millisecond disambiguation as leg2 above.
                if arbTick.datetime.replace(microsecond=0) == dt:
                    arbTick.datetime = arbTick.datetime.replace(microsecond=500)
                    arbTick.time = arbTick.datetime.strftime('%H:%M:%S.%f')
                else:
                    arbTick.datetime = arbTick.datetime.replace(microsecond=0)
                    arbTick.time = arbTick.datetime.strftime('%H:%M:%S.%f')
                dt = arbTick.datetime
                dtStr = ' '.join([arbTick.date, arbTick.time])
                # Only timestamps present in BOTH legs produce a spread tick.
                if dtStr in leg2Ticks:
                    leg2Tick = leg2Ticks[dtStr]
                    arbTick.vtSymbol = self.symbol
                    arbTick.symbol = self.symbol
                    arbTick.lastPrice = EMPTY_FLOAT
                    arbTick.volume = EMPTY_INT
                    leg1AskPrice1 = float(row['AskPrice'])
                    leg1AskVolume1 = int(float(row['AskVolume']))
                    leg1BidPrice1 = float(row['BidPrice'])
                    leg1BidVolume1 = int(float(row['BidVolume']))
                    # Drop limit-up / limit-down placeholder rows.
                    if ((leg1AskPrice1 == float('1.79769E308') or leg1AskPrice1 == 0) and leg1AskVolume1 == 0) \
                            or ((leg1BidPrice1 == float('1.79769E308') or leg1BidPrice1 == 0) and leg1BidVolume1 == 0):
                        continue
                    # Spread ask = leg1.ask - leg2.bid; volume = min of the two.
                    arbTick.askPrice1 = leg1AskPrice1 - leg2Tick.bidPrice1
                    arbTick.askVolume1 = min(leg1AskVolume1, leg2Tick.bidVolume1)
                    # Spread bid = leg1.bid - leg2.ask; volume = min of the two.
                    arbTick.bidPrice1 = leg1BidPrice1 - leg2Tick.askPrice1
                    arbTick.bidVolume1 = min(leg1BidVolume1, leg2Tick.askVolume1)
                    arbTicks.append(arbTick)
                    del leg2Ticks[dtStr]
            # Cache the merged result for later runs.
            if len(arbTicks) > 0:
                self.__saveArbTicksToLocalCache(cachefilename, arbTicks)
        for t in arbTicks:
            # Push each spread tick through the engine / strategy.
            self.newTick(t)
def __loadArbTicksFromLocalCache(self, filename):
"""从本地缓存中,加载数据"""
# 运行路径下cache子目录
cacheFolder = os.getcwd() + '/cache'
# cache文件
cacheFile = u'{0}/{1}.pickle'. \
format(cacheFolder, filename)
if not os.path.isfile(cacheFile):
return []
else:
# 从cache文件加载
cache = open(cacheFile, mode='r')
l = cPickle.load(cache)
cache.close()
return l
def __saveArbTicksToLocalCache(self, filename, arbticks):
"""保存价差tick到本地缓存目录"""
# 运行路径下cache子目录
cacheFolder = os.getcwd() + '/cache'
# 创建cache子目录
if not os.path.isdir(cacheFolder):
os.mkdir(cacheFolder)
# cache 文件名
cacheFile = u'{0}/{1}.pickle'. \
format(cacheFolder, filename)
# 重复存在 返回
if os.path.isfile(cacheFile):
return False
else:
# 写入cache文件
cache = open(cacheFile, mode='w')
cPickle.dump(arbticks, cache)
cache.close()
return True
    def runBackTestingWithNonStrArbTickFile(self, leg1MainPath, leg2MainPath, leg1Symbol, leg2Symbol):
        """Run a two-leg backtest from local tick csv files without building
        a synthetic spread tick.

        :param leg1MainPath: market directory of the leg1 contract.
        :param leg2MainPath: market directory of the leg2 contract.
        :param leg1Symbol: leg1 contract symbol.
        :param leg2Symbol: leg2 contract symbol.

        added by IncenseLee

        Raw ticks are stored in a day-session directory and a night-session
        directory, one file per contract per day, e.g.::

            Z:\\ticks\\SHFE\\201606\\RB\\0601\\RB1610.txt, RB1701.txt, ...
            Z:\\ticks\\SHFE_night\\201606\\RB\\0601\\...

        The night directory is keyed by calendar date, not trading day.
        For every day between start and end date, the day session is loaded
        first, then the night session. Both legs' ticks are pushed to the
        strategy's onTick one by one in timestamp order.
        """
        self.capital = self.initCapital  # reset starting capital

        if not self.dataStartDate:
            self.writeCtaLog(u'回测开始日期未设置。')
            return

        # e.g. RB
        if len(self.symbol) < 1:
            self.writeCtaLog(u'回测对象未设置。')
            return

        if not self.dataEndDate:
            self.dataEndDate = datetime.today()

        # Two-leg replay is tick-only.
        if self.mode == self.BAR_MODE:
            self.writeCtaLog(u'本回测仅支持tick模式')
            return

        testdays = (self.dataEndDate - self.dataStartDate).days

        if testdays < 1:
            self.writeCtaLog(u'回测时间不足')
            return

        for i in range(0, testdays):
            testday = self.dataStartDate + timedelta(days=i)
            self.output(u'回测日期:{0}'.format(testday))
            # Day session.
            self.__loadNotStdArbTicks(leg1MainPath, leg2MainPath, testday, leg1Symbol, leg2Symbol)
            # Night session.
            self.__loadNotStdArbTicks(leg1MainPath + '_night', leg2MainPath + '_night', testday, leg1Symbol, leg2Symbol)
    def __loadTicksFromFile(self, filepath, tickDate, vtSymbol):
        """Read one contract's ticks from a txt/csv file.

        :return: OrderedDict mapping 'YYYYMMDD HH:MM:SS.ffffff' -> CtaTickData,
            in file order; empty when the file is missing.
        NOTE(review): the file handle is never closed — leaks one handle per
        call; confirm acceptable for batch runs.
        """
        # Accumulate ticks keyed by their (disambiguated) timestamp string.
        ticks = OrderedDict()

        if not os.path.isfile(filepath):
            self.writeCtaLog(u'{0}文件不存在'.format(filepath))
            return ticks

        dt = None
        csvReadFile = file(filepath, 'rb')

        reader = csv.DictReader(csvReadFile, delimiter=",")
        self.writeCtaLog(u'加载{0}'.format(filepath))
        for row in reader:
            tick = CtaTickData()

            tick.vtSymbol = vtSymbol
            tick.symbol = vtSymbol

            tick.date = tickDate.strftime('%Y%m%d')
            tick.tradingDay = tick.date
            tick.time = row['Time']

            try:
                tick.datetime = datetime.strptime(tick.date + ' ' + tick.time, '%Y%m%d %H:%M:%S.%f')
            except Exception as ex:
                self.writeCtaError(u'日期转换错误:{0},{1}:{2}'.format(tick.date + ' ' + tick.time, Exception, ex))
                continue

            # Millisecond disambiguation: two ticks in the same whole second
            # get 0 and 500 microseconds respectively.
            # NOTE(review): replace(microsecond=500) is 500 µs, while the
            # original comment says 500 ms (= microsecond=500000) — confirm.
            if tick.datetime.replace(microsecond=0) == dt:
                # Same second as previous tick: bump to 500.
                tick.datetime = tick.datetime.replace(microsecond=500)
                tick.time = tick.datetime.strftime('%H:%M:%S.%f')
            else:
                tick.datetime = tick.datetime.replace(microsecond=0)
                tick.time = tick.datetime.strftime('%H:%M:%S.%f')

            dt = tick.datetime

            tick.lastPrice = float(row['LastPrice'])
            tick.volume = int(float(row['LVolume']))
            tick.bidPrice1 = float(row['BidPrice'])  # bid (lower price)
            tick.bidVolume1 = int(float(row['BidVolume']))
            tick.askPrice1 = float(row['AskPrice'])  # ask (higher price)
            tick.askVolume1 = int(float(row['AskVolume']))

            # Drop limit-up / limit-down placeholder rows.
            if (tick.bidPrice1 == float('1.79769E308') and tick.bidVolume1 == 0) \
                    or (tick.askPrice1 == float('1.79769E308') and tick.askVolume1 == 0):
                continue

            dtStr = tick.date + ' ' + tick.time
            if dtStr in ticks:
                self.writeCtaError(u'日内数据重复,异常,数据时间为:{0}'.format(dtStr))
            else:
                ticks[dtStr] = tick

        return ticks
def __loadNotStdArbTicks(self, leg1MainPath, leg2MainPath, testday, leg1Symbol, leg2Symbol):
self.writeCtaLog(u'加载回测日期:{0}的价差tick'.format(testday))
leg1File = u'z:\\ticks\\{0}\\{1}\\{2}\\{3}\\{4}.txt' \
.format(leg1MainPath, testday.strftime('%Y%m'), self.symbol, testday.strftime('%m%d'), leg1Symbol)
if not os.path.isfile(leg1File):
self.writeCtaLog(u'{0}文件不存在'.format(leg1File))
return
leg2File = u'z:\\ticks\\{0}\\{1}\\{2}\\{3}\\{4}.txt' \
.format(leg2MainPath, testday.strftime('%Y%m'), self.symbol, testday.strftime('%m%d'), leg2Symbol)
if not os.path.isfile(leg2File):
self.writeCtaLog(u'{0}文件不存在'.format(leg2File))
return
leg1Ticks = self.__loadTicksFromFile(filepath=leg1File, tickDate=testday, vtSymbol=leg1Symbol)
if len(leg1Ticks) == 0:
self.writeCtaLog(u'{0}读取tick数为空'.format(leg1File))
return
leg2Ticks = self.__loadTicksFromFile(filepath=leg2File, tickDate=testday, vtSymbol=leg2Symbol)
if len(leg2Ticks) == 0:
self.writeCtaLog(u'{0}读取tick数为空'.format(leg1File))
return
leg1_tick = None
leg2_tick = None
while not (len(leg1Ticks) == 0 or len(leg2Ticks) == 0):
if leg1_tick is None and len(leg1Ticks) > 0:
leg1_tick = leg1Ticks.popitem(last=False)
if leg2_tick is None and len(leg2Ticks) > 0:
leg2_tick = leg2Ticks.popitem(last=False)
if leg1_tick is None and leg2_tick is not None:
self.newTick(leg2_tick[1])
leg2_tick = None
elif leg1_tick is not None and leg2_tick is None:
self.newTick(leg1_tick[1])
leg1_tick = None
elif leg1_tick is not None and leg2_tick is not None:
leg1 = leg1_tick[1]
leg2 = leg2_tick[1]
if leg1.datetime <= leg2.datetime:
self.newTick(leg1)
leg1_tick = None
else:
self.newTick(leg2)
leg2_tick = None
    def runBackTestingWithNonStrArbTickFile2(self, leg1MainPath, leg2MainPath, leg1Symbol, leg2Symbol):
        """Run a two-leg backtest from local tick csv files (vendor-format
        data purchased from taobao / S&P).

        :param leg1MainPath: market directory of the leg1 contract.
        :param leg2MainPath: market directory of the leg2 contract.
        :param leg1Symbol: leg1 contract symbol.
        :param leg2Symbol: leg2 contract symbol.

        added by IncenseLee

        Raw ticks live in one directory per trading day containing every
        contract of that market, e.g.::

            E:\\ticks\\SQ\\201606\\20160601\\RB10.csv, RB01.csv, ...

        For every day between start and end date, both legs' ticks are
        pushed to the strategy's onTick one by one in timestamp order.
        """
        self.capital = self.initCapital  # reset starting capital

        if not self.dataStartDate:
            self.writeCtaLog(u'回测开始日期未设置。')
            return

        # e.g. RB
        if len(self.symbol) < 1:
            self.writeCtaLog(u'回测对象未设置。')
            return

        if not self.dataEndDate:
            self.dataEndDate = datetime.today()

        # Two-leg replay is tick-only.
        if self.mode == self.BAR_MODE:
            self.writeCtaLog(u'本回测仅支持tick模式')
            return

        testdays = (self.dataEndDate - self.dataStartDate).days

        if testdays < 1:
            self.writeCtaLog(u'回测时间不足')
            return

        for i in range(0, testdays):
            testday = self.dataStartDate + timedelta(days=i)
            self.output(u'回测日期:{0}'.format(testday))
            # Load and replay this day's data.
            self.__loadNotStdArbTicks2(leg1MainPath, leg2MainPath, testday, leg1Symbol, leg2Symbol)
def __loadTicksFromFile2(self, filepath, tickDate, vtSymbol):
"""从csv文件中UnicodeDictReader读取tick"""
# 先读取数据到Dict,以日期时间为key
ticks = OrderedDict()
if not os.path.isfile(filepath):
self.writeCtaLog(u'{0}文件不存在'.format(filepath))
return ticks
dt = None
csvReadFile = file(filepath, 'rb')
df = pd.read_csv(filepath, encoding='gbk')
df.columns = ['date', 'time', 'lastPrice', 'lastVolume', 'totalInterest', 'position',
'bidPrice1', 'bidVolume1', 'bidPrice2', 'bidVolume2', 'bidPrice3', 'bidVolume3',
'askPrice1', 'askVolume1', 'askPrice2', 'askVolume2', 'askPrice3', 'askVolume3', 'BS']
self.writeCtaLog(u'加载{0}'.format(filepath))
for i in range(0, len(df)):
# 日期, 时间, 成交价, 成交量, 总量, 属性(持仓增减), B1价, B1量, B2价, B2量, B3价, B3量, S1价, S1量, S2价, S2量, S3价, S3量, BS
# 0 1 2 3 4 5 6 7 8 9 10 11 12 13 14 15 16 17 18
row = df.iloc[i].to_dict()
tick = CtaTickData()
tick.vtSymbol = vtSymbol
tick.symbol = vtSymbol
tick.date = row['date']
tick.tradingDay = tickDate.strftime('%Y%m%d')
tick.time = row['time']
try:
tick.datetime = datetime.strptime(tick.date + ' ' + tick.time, '%Y-%m-%d %H:%M:%S')
except Exception as ex:
self.writeCtaError(u'日期转换错误:{0},{1}:{2}'.format(tick.date + ' ' + tick.time, Exception, ex))
continue
tick.date = tick.datetime.strftime('%Y%m%d')
# 修正毫秒
if tick.datetime.replace(microsecond=0) == dt:
# 与上一个tick的时间(去除毫秒后)相同,修改为500毫秒
tick.datetime = tick.datetime.replace(microsecond=500)
tick.time = tick.datetime.strftime('%H:%M:%S.%f')
else:
tick.datetime = tick.datetime.replace(microsecond=0)
tick.time = tick.datetime.strftime('%H:%M:%S.%f')
dt = tick.datetime
tick.lastPrice = float(row['lastPrice'])
tick.volume = int(float(row['lastVolume']))
tick.bidPrice1 = float(row['bidPrice1']) # 叫买价(价格低)
tick.bidVolume1 = int(float(row['bidVolume1']))
tick.askPrice1 = float(row['askPrice1']) # 叫卖价(价格高)
tick.askVolume1 = int(float(row['askVolume1']))
# 排除涨停/跌停的数据
if (tick.bidPrice1 == float('1.79769E308') and tick.bidVolume1 == 0) \
or (tick.askPrice1 == float('1.79769E308') and tick.askVolume1 == 0):
continue
dtStr = tick.date + ' ' + tick.time
if dtStr in ticks:
self.writeCtaError(u'日内数据重复,异常,数据时间为:{0}'.format(dtStr))
else:
ticks[dtStr] = tick
return ticks
def __loadNotStdArbTicks2(self, leg1MainPath, leg2MainPath, testday, leg1Symbol, leg2Symbol):
self.writeCtaLog(u'加载回测日期:{0}的价差tick'.format(testday))
# E:\Ticks\SQ\2014\201401\20140102\ag01_20140102.csv
leg1File = u'e:\\ticks\\{0}\\{1}\\{2}\\{3}\\{4}{5}_{3}.csv' \
.format(leg1MainPath, testday.strftime('%Y'), testday.strftime('%Y%m'), testday.strftime('%Y%m%d'),
self.symbol, leg1Symbol[-2:])
if not os.path.isfile(leg1File):
self.writeCtaLog(u'{0}文件不存在'.format(leg1File))
return
leg2File = u'e:\\ticks\\{0}\\{1}\\{2}\\{3}\\{4}{5}_{3}.csv' \
.format(leg2MainPath, testday.strftime('%Y'), testday.strftime('%Y%m'), testday.strftime('%Y%m%d'),
self.symbol, leg2Symbol[-2:])
if not os.path.isfile(leg2File):
self.writeCtaLog(u'{0}文件不存在'.format(leg2File))
return
leg1Ticks = self.__loadTicksFromFile2(filepath=leg1File, tickDate=testday, vtSymbol=leg1Symbol)
if len(leg1Ticks) == 0:
self.writeCtaLog(u'{0}读取tick数为空'.format(leg1File))
return
leg2Ticks = self.__loadTicksFromFile2(filepath=leg2File, tickDate=testday, vtSymbol=leg2Symbol)
if len(leg2Ticks) == 0:
self.writeCtaLog(u'{0}读取tick数为空'.format(leg1File))
return
leg1_tick = None
leg2_tick = None
while not (len(leg1Ticks) == 0 or len(leg2Ticks) == 0):
if leg1_tick is None and len(leg1Ticks) > 0:
leg1_tick = leg1Ticks.popitem(last=False)
if leg2_tick is None and len(leg2Ticks) > 0:
leg2_tick = leg2Ticks.popitem(last=False)
if leg1_tick is None and leg2_tick is not None:
self.newTick(leg2_tick[1])
leg2_tick = None
elif leg1_tick is not None and leg2_tick is None:
self.newTick(leg1_tick[1])
leg1_tick = None
elif leg1_tick is not None and leg2_tick is not None:
leg1 = leg1_tick[1]
leg2 = leg2_tick[1]
if leg1.datetime <= leg2.datetime:
self.newTick(leg1)
leg1_tick = None
else:
self.newTick(leg2)
leg2_tick = None
# ----------------------------------------------------------------------
def runBackTestingWithBarFile(self, filename):
"""运行回测(使用本地csv数据)
added by IncenseLee
"""
self.capital = self.initCapital # 更新设置期初资金
if not filename:
self.writeCtaLog(u'请指定回测数据文件')
return
if not self.dataStartDate:
self.writeCtaLog(u'回测开始日期未设置。')
return
if not self.dataEndDate:
self.dataEndDate = datetime.today()
import os
if not os.path.isfile(filename):
self.writeCtaLog(u'{0}文件不存在'.format(filename))
if len(self.symbol) < 1:
self.writeCtaLog(u'回测对象未设置。')
return
# 首先根据回测模式,确认要使用的数据类
if not self.mode == self.BAR_MODE:
self.writeCtaLog(u'文件仅支持bar模式,若扩展tick模式,需要修改本方法')
return
self.output(u'开始回测')
# self.strategy.inited = True
self.strategy.onInit()
self.output(u'策略初始化完成')
self.strategy.trading = True
self.strategy.onStart()
self.output(u'策略启动完成')
self.output(u'开始回放数据')
import csv
csvfile = file(filename, 'rb')
reader = csv.DictReader((line.replace('\0', '') for line in csvfile), delimiter=",")
for row in reader:
try:
bar = CtaBarData()
bar.symbol = self.symbol
bar.vtSymbol = self.symbol
# 从tb导出的csv文件
# bar.open = float(row['Open'])
# bar.high = float(row['High'])
# bar.low = float(row['Low'])
# bar.close = float(row['Close'])
# bar.volume = float(row['TotalVolume'])#
# barEndTime = datetime.strptime(row['Date']+' ' + row['Time'], '%Y/%m/%d %H:%M:%S')
# 从ricequant导出的csv文件
bar.open = float(row['open'])
bar.high = float(row['high'])
bar.low = float(row['low'])
bar.close = float(row['close'])
bar.volume = float(row['volume'])
barEndTime = datetime.strptime(row['index'], '%Y-%m-%d %H:%M:%S')
bar.tradingDay = row['trading_date']
# 使用Bar的开始时间作为datetime
bar.datetime = barEndTime - timedelta(seconds=self.barTimeInterval)
bar.date = bar.datetime.strftime('%Y-%m-%d')
bar.time = bar.datetime.strftime('%H:%M:%S')
if not (bar.datetime < self.dataStartDate or bar.datetime >= self.dataEndDate):
self.newBar(bar)
except Exception as ex:
self.writeCtaLog(u'{0}:{1}'.format(Exception, ex))
continue
# ----------------------------------------------------------------------
    def runBacktestingWithMysql(self):
        """Run a tick-mode backtest fed from MySQL.

        added by IncenseLee

        Loads history in 10-day windows, converts each row to a tick, crosses
        pending orders, then pushes the tick to the strategy.
        """
        self.capital = self.initCapital  # reset starting capital

        if not self.dataStartDate:
            self.writeCtaLog(u'回测开始日期未设置。')
            return

        if not self.dataEndDate:
            self.dataEndDate = datetime.today()

        if len(self.symbol) < 1:
            self.writeCtaLog(u'回测对象未设置。')
            return

        # Pick the data class for the configured replay mode.
        # NOTE(review): dataClass/func are assigned but unused below —
        # replay is hard-wired to ticks via __dataToTick/onTick.
        if self.mode == self.BAR_MODE:
            dataClass = CtaBarData
            func = self.newBar
        else:
            dataClass = CtaTickData
            func = self.newTick

        self.output(u'开始回测')

        # self.strategy.inited = True
        self.strategy.onInit()
        self.output(u'策略初始化完成')

        self.strategy.trading = True
        self.strategy.onStart()
        self.output(u'策略启动完成')

        self.output(u'开始回放数据')

        # Replay in fixed-size date windows to bound memory.
        intervalDays = 10

        for i in range(0, (self.dataEndDate - self.dataStartDate).days + 1, intervalDays):
            d1 = self.dataStartDate + timedelta(days=i)

            if (self.dataEndDate - d1).days > intervalDays:
                d2 = self.dataStartDate + timedelta(days=i + intervalDays - 1)
            else:
                d2 = self.dataEndDate

            # Pull this window's history from MySQL.
            self.loadDataHistoryFromMysql(self.symbol, d1, d2)

            self.output(u'数据日期:{0} => {1}'.format(d1, d2))
            # Push the rows one by one.
            for data in self.historyData:
                # Keep the latest tick.
                self.tick = self.__dataToTick(data)
                self.dt = self.tick.datetime

                # Cross pending orders before the strategy sees the tick.
                self.crossLimitOrder()
                self.crossStopOrder()

                # Feed the strategy.
                self.strategy.onTick(self.tick)

            # Release this window's history.
            self.historyData = []

        self.output(u'数据回放结束')
# ----------------------------------------------------------------------
def runBacktesting(self):
    """Run the backtest using data already loaded from MongoDB.

    Rehydrates each cursor document into a bar/tick object (depending on
    self.mode) and pushes it through newBar()/newTick().
    """
    self.capital = self.initCapital  # reset equity to the initial capital
    # Load history data.
    # self.loadHistoryData()
    self.loadHistoryDataFromMongo()
    # Pick the data class and push function for the backtest mode.
    if self.mode == self.BAR_MODE:
        dataClass = CtaBarData
        func = self.newBar
    else:
        dataClass = CtaTickData
        func = self.newTick
    self.output(u'开始回测')
    self.strategy.inited = True
    self.strategy.onInit()
    self.output(u'策略初始化完成')
    self.strategy.trading = True
    self.strategy.onStart()
    self.output(u'策略启动完成')
    self.output(u'开始回放数据')
    # Rehydrate each DB document and replay it.
    for d in self.dbCursor:
        data = dataClass()
        data.__dict__ = d
        func(data)
    self.output(u'数据回放结束')
def __sendOnBarEvent(self, bar):
    """Publish the bar on the event engine; no-op when no engine is set."""
    if self.eventEngine is None:
        return
    event = Event(type_=EVENT_ON_BAR + '_' + self.symbol)
    event.dict_['data'] = bar
    self.eventEngine.put(event)
# ----------------------------------------------------------------------
def newBar(self, bar):
    """Process a new K-line: match pending orders, then notify the strategy."""
    self.bar = bar
    self.dt = bar.datetime
    self.crossLimitOrder()  # match limit orders first
    self.crossStopOrder()  # then match stop orders
    self.strategy.onBar(bar)  # push the bar into the strategy
    self.__sendOnBarEvent(bar)  # publish the bar as an event
# ----------------------------------------------------------------------
def newTick(self, tick):
    """Process a new tick: match pending orders, then notify the strategy."""
    self.tick = tick
    self.dt = tick.datetime
    self.crossLimitOrder()  # match limit orders first
    self.crossStopOrder()  # then match stop orders
    self.strategy.onTick(tick)  # push the tick into the strategy
# ----------------------------------------------------------------------
def initStrategy(self, strategyClass, setting=None):
    """Instantiate the strategy under test.

    setting holds the strategy parameters; omit it to use the defaults
    declared on the strategy class itself.
    """
    self.strategy = strategyClass(self, setting)
    self.strategy.name = self.strategy.className
# ----------------------------------------------------------------------
def sendOrder(self, vtSymbol, orderType, price, volume, strategy):
    """Simulated limit-order submission; returns the order's dict key."""
    self.writeCtaLog(u'{0},{1},{2}@{3}'.format(vtSymbol, orderType, price, volume))
    self.limitOrderCount += 1
    orderID = str(self.limitOrderCount)

    order = VtOrderData()
    order.vtSymbol = vtSymbol
    order.price = price
    order.totalVolume = volume
    order.status = STATUS_NOTTRADED  # submitted, nothing traded yet
    order.orderID = orderID
    order.vtOrderID = orderID
    order.orderTime = str(self.dt)
    # added by IncenseLee
    order.gatewayName = self.gatewayName

    # Map the CTA order type onto exchange direction/offset.
    ctaTypeMap = {
        CTAORDER_BUY: (DIRECTION_LONG, OFFSET_OPEN),
        CTAORDER_SELL: (DIRECTION_SHORT, OFFSET_CLOSE),
        CTAORDER_SHORT: (DIRECTION_SHORT, OFFSET_OPEN),
        CTAORDER_COVER: (DIRECTION_LONG, OFFSET_CLOSE),
    }
    if orderType in ctaTypeMap:
        order.direction, order.offset = ctaTypeMap[orderType]

    # modified by IncenseLee
    key = u'{0}.{1}'.format(order.gatewayName, orderID)
    # Register in the working and historical limit-order books.
    self.workingLimitOrderDict[key] = order
    self.limitOrderDict[key] = order
    return key
# ----------------------------------------------------------------------
def cancelOrder(self, vtOrderID):
    """Cancel one working limit order by id; unknown ids are ignored."""
    order = self.workingLimitOrderDict.pop(vtOrderID, None)
    if order is not None:
        order.status = STATUS_CANCELLED
        order.cancelTime = str(self.dt)
def cancelOrders(self, symbol, offset=EMPTY_STRING):
    """Cancel all working limit orders for a symbol.

    symbol: contract whose orders should be cancelled.
    offset: when given, only orders with this offset are cancelled;
            the default EMPTY_STRING cancels regardless of offset.
    """
    self.writeCtaLog(u'从所有订单中撤销{0}\{1}'.format(offset, symbol))
    # Bug fix: iterate over a snapshot of the keys — entries are deleted
    # inside the loop, which raises RuntimeError on a live dict view
    # under Python 3 (Python 2's .keys() already returned a list).
    for vtOrderID in list(self.workingLimitOrderDict.keys()):
        order = self.workingLimitOrderDict[vtOrderID]
        if offset == EMPTY_STRING:
            offsetCond = True
        else:
            offsetCond = order.offset == offset
        if order.symbol == symbol and offsetCond:
            self.writeCtaLog(
                u'撤销订单:{0},{1} {2}@{3}'.format(vtOrderID, order.direction, order.price, order.totalVolume))
            order.status = STATUS_CANCELLED
            order.cancelTime = str(self.dt)
            del self.workingLimitOrderDict[vtOrderID]
# ----------------------------------------------------------------------
def sendStopOrder(self, vtSymbol, orderType, price, volume, strategy):
    """Submit a locally-simulated stop order; returns its stopOrderID."""
    self.stopOrderCount += 1
    stopOrderID = STOPORDERPREFIX + str(self.stopOrderCount)

    so = StopOrder()
    so.vtSymbol = vtSymbol
    so.price = price
    so.volume = volume
    so.strategy = strategy
    so.stopOrderID = stopOrderID
    so.status = STOPORDER_WAITING

    # Map the CTA order type onto exchange direction/offset.
    ctaTypeMap = {
        CTAORDER_BUY: (DIRECTION_LONG, OFFSET_OPEN),
        CTAORDER_SELL: (DIRECTION_SHORT, OFFSET_CLOSE),
        CTAORDER_SHORT: (DIRECTION_SHORT, OFFSET_OPEN),
        CTAORDER_COVER: (DIRECTION_LONG, OFFSET_CLOSE),
    }
    if orderType in ctaTypeMap:
        so.direction, so.offset = ctaTypeMap[orderType]

    # Register in the historical and working stop-order books.
    self.stopOrderDict[stopOrderID] = so
    self.workingStopOrderDict[stopOrderID] = so
    return stopOrderID
# ----------------------------------------------------------------------
def cancelStopOrder(self, stopOrderID):
    """Cancel one working stop order by id; unknown ids are ignored."""
    so = self.workingStopOrderDict.pop(stopOrderID, None)
    if so is not None:
        so.status = STOPORDER_CANCELLED
# ----------------------------------------------------------------------
def crossLimitOrder(self):
    """Match all working limit orders against the latest bar/tick.

    Bar mode uses the bar's low/high as the worst crossable prices and
    the open as the best achievable fill; tick mode uses ask1/bid1.
    Filled orders are removed, trades recorded, and both are pushed to
    the strategy via onTrade()/onOrder().
    """
    # Determine the crossing prices first.
    if self.mode == self.BAR_MODE:
        buyCrossPrice = self.bar.low        # a buy fills if its limit >= this
        sellCrossPrice = self.bar.high      # a sell fills if its limit <= this
        buyBestCrossPrice = self.bar.open   # best achievable buy fill
        sellBestCrossPrice = self.bar.open  # best achievable sell fill
        vtSymbol = self.bar.vtSymbol
    else:
        buyCrossPrice = self.tick.askPrice1
        sellCrossPrice = self.tick.bidPrice1
        buyBestCrossPrice = self.tick.askPrice1
        sellBestCrossPrice = self.tick.bidPrice1
        vtSymbol = self.tick.vtSymbol
    # Bug fix: iterate over a snapshot — filled orders are deleted inside
    # the loop, which raises RuntimeError on a live dict view under
    # Python 3 (Python 2's .items() already returned a list).
    for orderID, order in list(self.workingLimitOrderDict.items()):
        # Would this order cross?
        buyCross = order.direction == DIRECTION_LONG and order.price >= buyCrossPrice and vtSymbol == order.vtSymbol
        sellCross = order.direction == DIRECTION_SHORT and order.price <= sellCrossPrice and vtSymbol == order.vtSymbol
        # A fill occurred.
        if buyCross or sellCross:
            # Record the trade.
            self.tradeCount += 1  # trade ids are sequential
            tradeID = str(self.tradeCount)
            trade = VtTradeData()
            trade.vtSymbol = order.vtSymbol
            trade.tradeID = tradeID
            trade.vtTradeID = tradeID
            trade.orderID = order.orderID
            trade.vtOrderID = order.orderID
            trade.direction = order.direction
            trade.offset = order.offset
            # Buy example:
            # 1. current bar OHLC = 100, 125, 90, 110
            # 2. a limit-105 buy was placed as this bar opened
            # 3. the realistic fill is 100 (the market's best price), not 105
            if buyCross:
                trade.price = min(order.price, buyBestCrossPrice)
                self.strategy.pos += order.totalVolume
            else:
                trade.price = max(order.price, sellBestCrossPrice)
                self.strategy.pos -= order.totalVolume
            trade.volume = order.totalVolume
            trade.tradeTime = str(self.dt)
            trade.dt = self.dt
            self.strategy.onTrade(trade)
            self.tradeDict[tradeID] = trade
            self.writeCtaLog(u'TradeId:{0}'.format(tradeID))
            # Push the order update.
            order.tradedVolume = order.totalVolume
            order.status = STATUS_ALLTRADED
            self.strategy.onOrder(order)
            # Remove the filled order from the working book.
            try:
                del self.workingLimitOrderDict[orderID]
            except Exception as ex:
                self.writeCtaError(u'{0}:{1}'.format(Exception, ex))
            # Real-time equity calculation mode.
            if self.calculateMode == self.REALTIME_MODE:
                self.realtimeCalculate()
# ----------------------------------------------------------------------
def crossStopOrder(self):
    """Match all working stop orders against the latest bar/tick.

    Stop orders trigger on the opposite rule to limit orders: a buy stop
    triggers once price rises to its level, a sell stop once it falls.
    A triggered stop becomes an immediately fully-traded synthetic order.
    """
    # Determine the crossing prices (opposite rule to limit orders).
    if self.mode == self.BAR_MODE:
        buyCrossPrice = self.bar.high   # a buy stop at/below this triggers
        sellCrossPrice = self.bar.low   # a sell stop at/above this triggers
        bestCrossPrice = self.bar.open  # fill bound: buy can't beat, sell can't exceed
        vtSymbol = self.bar.vtSymbol
    else:
        buyCrossPrice = self.tick.lastPrice
        sellCrossPrice = self.tick.lastPrice
        bestCrossPrice = self.tick.lastPrice
        vtSymbol = self.tick.vtSymbol
    # Bug fix: iterate over a snapshot — triggered stops are deleted
    # inside the loop, which raises RuntimeError on a live dict view
    # under Python 3 (Python 2's .items() already returned a list).
    for stopOrderID, so in list(self.workingStopOrderDict.items()):
        # Would this stop trigger?
        buyCross = so.direction == DIRECTION_LONG and so.price <= buyCrossPrice and vtSymbol == so.vtSymbol
        sellCross = so.direction == DIRECTION_SHORT and so.price >= sellCrossPrice and vtSymbol == so.vtSymbol
        # A trigger occurred.
        if buyCross or sellCross:
            # Record the trade.
            self.tradeCount += 1  # trade ids are sequential
            tradeID = str(self.tradeCount)
            trade = VtTradeData()
            trade.vtSymbol = so.vtSymbol
            trade.tradeID = tradeID
            trade.vtTradeID = tradeID
            if buyCross:
                self.strategy.pos += so.volume
                trade.price = max(bestCrossPrice, so.price)
            else:
                self.strategy.pos -= so.volume
                trade.price = min(bestCrossPrice, so.price)
            self.limitOrderCount += 1
            orderID = str(self.limitOrderCount)
            trade.orderID = orderID
            trade.vtOrderID = orderID
            trade.direction = so.direction
            trade.offset = so.offset
            trade.volume = so.volume
            trade.tradeTime = str(self.dt)
            trade.dt = self.dt
            self.strategy.onTrade(trade)
            self.tradeDict[tradeID] = trade
            # Push the (synthetic, fully-traded) order update.
            so.status = STOPORDER_TRIGGERED
            order = VtOrderData()
            order.vtSymbol = so.vtSymbol
            order.symbol = so.vtSymbol
            order.orderID = orderID
            order.vtOrderID = orderID
            order.direction = so.direction
            order.offset = so.offset
            order.price = so.price
            order.totalVolume = so.volume
            order.tradedVolume = so.volume
            order.status = STATUS_ALLTRADED
            order.orderTime = trade.tradeTime
            self.strategy.onOrder(order)
            self.limitOrderDict[orderID] = order
            # Remove the triggered stop from the working book.
            del self.workingStopOrderDict[stopOrderID]
            # Real-time equity calculation mode.
            if self.calculateMode == self.REALTIME_MODE:
                self.realtimeCalculate()
# ----------------------------------------------------------------------
def insertData(self, dbName, collectionName, data):
    """No-op: database writes are disabled during backtesting so shared
    live-trading code paths cannot corrupt stored data."""
    pass
# ----------------------------------------------------------------------
def loadBar(self, dbName, collectionName, startDate):
    """Return the pre-loaded init bar list; db arguments are ignored."""
    return self.initData
# ----------------------------------------------------------------------
def loadTick(self, dbName, collectionName, startDate):
    """Return the pre-loaded init tick list; db arguments are ignored."""
    return self.initData
# ----------------------------------------------------------------------
def writeCtaLog(self, content):
    """Record one log line via the logging module."""
    # log = str(self.dt) + ' ' + content
    # self.logList.append(log)
    # Write to the local log file.
    logging.info(content)
def writeCtaError(self, content):
    """Report an error: echo to console and record it in the log."""
    self.output(content)
    self.writeCtaLog(content)
# ----------------------------------------------------------------------
def output(self, content):
"""输出内容"""
print str(datetime.now()) + "\t" + content
#TODO understand this function
## this is where strategy runs
def realtimeCalculate(self):
    """Compute trading results in real time as fills arrive.

    Pairs every closing trade in self.tradeDict against outstanding
    opening trades (FIFO per symbol, splitting partial closes), records
    each round-trip as a TradingResult, removes fully-consumed trades
    from tradeDict, then updates margin occupancy, equity, drawdown and
    the accumulated statistics.

    Bug fixes vs. original:
    - the short-position occupancy loop incremented occupyLongVolume
      instead of occupyShortVolume, so short occupancy was always 0;
    - tradeDict is iterated via a key snapshot because entries are
      deleted during iteration (RuntimeError on Python 3 dict views).
    """
    resultDict = OrderedDict()  # closed round-trip results keyed by entry time
    longTrade = []              # open long entries not yet closed
    shortTrade = []             # open short entries not yet closed
    longid = EMPTY_STRING
    shortid = EMPTY_STRING
    # Walk the trades in insertion order; snapshot keys (see docstring).
    for tradeid in list(self.tradeDict.keys()):
        trade = self.tradeDict[tradeid]
        # Long-direction trade
        if trade.direction == DIRECTION_LONG:
            # No open shorts: this is a fresh long entry.
            if not shortTrade:
                longTrade.append(trade)
                longid = tradeid
            # Otherwise this long closes existing shorts (cover).
            else:
                gId = tradeid  # trade group id (one group per multi-lot close)
                gr = None      # combined group result
                coverVolume = trade.volume
                while coverVolume > 0:
                    if len(shortTrade) == 0:
                        self.writeCtaError(u'异常,没有开空仓的数据')
                        break
                    pop_indexs = [i for i, val in enumerate(shortTrade) if val.vtSymbol == trade.vtSymbol]
                    if len(pop_indexs) < 1:
                        self.writeCtaLog(u'没有对应的symbol:{0}开空仓数据'.format(trade.vtSymbol))
                        break
                    pop_index = pop_indexs[0]
                    # Take the oldest matching open short.
                    entryTrade = shortTrade.pop(pop_index)
                    # Entry volume fully consumed by this close.
                    if coverVolume >= entryTrade.volume:
                        self.writeCtaLog(
                            u'coverVolume:{0} >= entryTrade.volume:{1}'.format(coverVolume, entryTrade.volume))
                        coverVolume = coverVolume - entryTrade.volume
                        result = TradingResult(entryTrade.price, trade.price, -entryTrade.volume,
                                               self.rate, self.slippage, self.size,
                                               groupId=gId, fixcommission=self.fixCommission)
                        t = {}
                        t['vtSymbol'] = entryTrade.vtSymbol
                        t['OpenTime'] = entryTrade.tradeTime
                        t['OpenPrice'] = entryTrade.price
                        t['Direction'] = u'Short'
                        t['CloseTime'] = trade.tradeTime
                        t['ClosePrice'] = trade.price
                        t['Volume'] = entryTrade.volume
                        t['Profit'] = result.pnl
                        self.exportTradeList.append(t)
                        self.writeCtaLog(u'{6} [{7}:开空{0},short:{1}]-[{8}:平空{2},cover:{3},vol:{4}],净盈亏:{5}'
                                         .format(entryTrade.tradeTime, entryTrade.price,
                                                 trade.tradeTime, trade.price, entryTrade.volume, result.pnl,
                                                 gId, shortid, tradeid))
                        if type(gr) == type(None):
                            if coverVolume > 0:
                                # More entries will join: start a group.
                                gr = copy.deepcopy(result)
                                # Drop the consumed short entry.
                                del self.tradeDict[entryTrade.tradeID]
                            else:
                                # Standalone round-trip.
                                resultDict[entryTrade.dt] = result
                                # Drop the closing trade,
                                del self.tradeDict[trade.tradeID]
                                # and the consumed short entry.
                                del self.tradeDict[entryTrade.tradeID]
                        else:
                            # Accumulate into the group result.
                            gr.turnover = gr.turnover + result.turnover
                            gr.commission = gr.commission + result.commission
                            gr.slippage = gr.slippage + result.slippage
                            gr.pnl = gr.pnl + result.pnl
                            # Drop the consumed short entry.
                            del self.tradeDict[entryTrade.tradeID]
                            # Entire close satisfied: finalize the group.
                            if coverVolume == 0:
                                gr.volume = trade.volume
                                resultDict[entryTrade.dt] = gr
                                # Drop the closing trade.
                                del self.tradeDict[trade.tradeID]
                    # Entry volume exceeds close volume: split the entry.
                    else:
                        self.writeCtaLog(
                            u'Short volume:{0} > Cover volume:{1},需要更新减少tradeDict的数量。'.format(entryTrade.volume,
                                                                                             coverVolume))
                        shortVolume = entryTrade.volume - coverVolume
                        result = TradingResult(entryTrade.price, trade.price, -coverVolume,
                                               self.rate, self.slippage, self.size,
                                               groupId=gId, fixcommission=self.fixCommission)
                        t = {}
                        t['vtSymbol'] = entryTrade.vtSymbol
                        t['OpenTime'] = entryTrade.tradeTime
                        t['OpenPrice'] = entryTrade.price
                        t['Direction'] = u'Short'
                        t['CloseTime'] = trade.tradeTime
                        t['ClosePrice'] = trade.price
                        t['Volume'] = coverVolume
                        t['Profit'] = result.pnl
                        self.exportTradeList.append(t)
                        self.writeCtaLog(u'{6} [{7}:开空{0},short:{1}]-[{8}:平空{2},cover:{3},vol:{4}],净盈亏:{5}'
                                         .format(entryTrade.tradeTime, entryTrade.price,
                                                 trade.tradeTime, trade.price, coverVolume, result.pnl,
                                                 gId, shortid, tradeid))
                        # Shrink the entry and push it back as still open.
                        entryTrade.volume = shortVolume
                        shortTrade.append(entryTrade)
                        coverVolume = 0
                        if type(gr) == type(None):
                            resultDict[entryTrade.dt] = result
                        else:
                            # Accumulate into the group result.
                            gr.turnover = gr.turnover + result.turnover
                            gr.commission = gr.commission + result.commission
                            gr.slippage = gr.slippage + result.slippage
                            gr.pnl = gr.pnl + result.pnl
                            gr.volume = trade.volume
                            resultDict[entryTrade.dt] = gr
                        # Drop the closing trade.
                        del self.tradeDict[trade.tradeID]
                if type(gr) != type(None):
                    self.writeCtaLog(u'组合净盈亏:{0}'.format(gr.pnl))
                self.writeCtaLog(u'-------------')
        # Short-direction trade
        else:
            # No open longs: this is a fresh short entry.
            if not longTrade:
                shortTrade.append(trade)
                shortid = tradeid
            # Otherwise this short closes existing longs (sell).
            else:
                gId = tradeid  # trade group id (one group per multi-lot close)
                gr = None      # combined group result
                sellVolume = trade.volume
                self.output(u'多平:{0}'.format(sellVolume))
                self.writeCtaLog(u'多平:{0}'.format(sellVolume))
                while sellVolume > 0:
                    if len(longTrade) == 0:
                        self.writeCtaError(u'异常,没有开多单')
                        break
                    pop_indexs = [i for i, val in enumerate(longTrade) if val.vtSymbol == trade.vtSymbol]
                    if len(pop_indexs) < 1:
                        self.writeCtaLog(u'没有对应的symbol{0}开多仓数据'.format(trade.vtSymbol))
                        break
                    pop_index = pop_indexs[0]
                    entryTrade = longTrade.pop(pop_index)
                    # Entry volume fully consumed by this close.
                    if sellVolume >= entryTrade.volume:
                        self.writeCtaLog(
                            u'Sell Volume:{0} >= Entry Volume:{1}'.format(sellVolume, entryTrade.volume))
                        sellVolume = sellVolume - entryTrade.volume
                        result = TradingResult(entryTrade.price, trade.price, entryTrade.volume,
                                               self.rate, self.slippage, self.size,
                                               groupId=gId, fixcommission=self.fixCommission)
                        t = {}
                        t['vtSymbol'] = entryTrade.vtSymbol
                        t['OpenTime'] = entryTrade.tradeTime
                        t['OpenPrice'] = entryTrade.price
                        t['Direction'] = u'Long'
                        t['CloseTime'] = trade.tradeTime
                        t['ClosePrice'] = trade.price
                        t['Volume'] = entryTrade.volume
                        t['Profit'] = result.pnl
                        self.exportTradeList.append(t)
                        self.writeCtaLog(u'{6} [{7}:开多{0},buy:{1}]-[{8}.平多{2},sell:{3},vol:{4}],净盈亏:{5}'
                                         .format(entryTrade.tradeTime, entryTrade.price,
                                                 trade.tradeTime, trade.price, entryTrade.volume, result.pnl,
                                                 gId, longid, tradeid))
                        if type(gr) == type(None):
                            if sellVolume > 0:
                                # More entries will join: start a group.
                                gr = copy.deepcopy(result)
                                # Drop the consumed long entry.
                                del self.tradeDict[entryTrade.tradeID]
                            else:
                                # Standalone round-trip.
                                resultDict[entryTrade.dt] = result
                                # Drop the closing trade,
                                del self.tradeDict[trade.tradeID]
                                # and the consumed long entry.
                                del self.tradeDict[entryTrade.tradeID]
                        else:
                            # Accumulate into the group result.
                            gr.turnover = gr.turnover + result.turnover
                            gr.commission = gr.commission + result.commission
                            gr.slippage = gr.slippage + result.slippage
                            gr.pnl = gr.pnl + result.pnl
                            # Drop the consumed long entry.
                            del self.tradeDict[entryTrade.tradeID]
                            if sellVolume == 0:
                                gr.volume = trade.volume
                                resultDict[entryTrade.dt] = gr
                                # Drop the closing trade.
                                del self.tradeDict[trade.tradeID]
                    # Entry volume exceeds close volume: split the entry.
                    else:
                        longVolume = entryTrade.volume - sellVolume
                        self.writeCtaLog(u'Long Volume:{0} > sell Volume:{1}'.format(entryTrade.volume, sellVolume))
                        result = TradingResult(entryTrade.price, trade.price, sellVolume,
                                               self.rate, self.slippage, self.size,
                                               groupId=gId, fixcommission=self.fixCommission)
                        t = {}
                        t['vtSymbol'] = entryTrade.vtSymbol
                        t['OpenTime'] = entryTrade.tradeTime
                        t['OpenPrice'] = entryTrade.price
                        t['Direction'] = u'Long'
                        t['CloseTime'] = trade.tradeTime
                        t['ClosePrice'] = trade.price
                        t['Volume'] = sellVolume
                        t['Profit'] = result.pnl
                        self.exportTradeList.append(t)
                        self.writeCtaLog(u'{6} [{7}:开多{0},buy:{1}]-[{8}.平多{2},sell:{3},vol:{4}],净盈亏:{5}'
                                         .format(entryTrade.tradeTime, entryTrade.price,
                                                 trade.tradeTime, trade.price, sellVolume, result.pnl,
                                                 gId, longid, tradeid))
                        # Shrink the entry and push it back as still open.
                        entryTrade.volume = longVolume
                        longTrade.append(entryTrade)
                        sellVolume = 0
                        if type(gr) == type(None):
                            resultDict[entryTrade.dt] = result
                        else:
                            # Accumulate into the group result.
                            gr.turnover = gr.turnover + result.turnover
                            gr.commission = gr.commission + result.commission
                            gr.slippage = gr.slippage + result.slippage
                            gr.pnl = gr.pnl + result.pnl
                            gr.volume = trade.volume
                            resultDict[entryTrade.dt] = gr
                        # Drop the closing trade.
                        del self.tradeDict[trade.tradeID]
                if type(gr) != type(None):
                    self.writeCtaLog(u'组合净盈亏:{0}'.format(gr.pnl))
                self.writeCtaLog(u'-------------')
    # Margin occupancy from the still-open positions.
    occupyMoney = EMPTY_FLOAT
    occupyLongVolume = EMPTY_INT
    occupyShortVolume = EMPTY_INT
    if len(longTrade) > 0:  # open long positions remain
        for t in longTrade:
            # occupied margin = price * lots * contract size * margin ratio
            if t.vtSymbol in BZJ_DL:
                occupyMoney += t.price * abs(t.volume) * self.size * BZJ_DL[t.vtSymbol]
                occupyLongVolume += abs(t.volume)
            if t.vtSymbol in BZJ_ZZ:
                occupyMoney += t.price * abs(t.volume) * self.size * BZJ_ZZ[t.vtSymbol]
                occupyLongVolume += abs(t.volume)
            if t.vtSymbol in BZJ_SQ:
                occupyMoney += t.price * abs(t.volume) * self.size * BZJ_SQ[t.vtSymbol]
                occupyLongVolume += abs(t.volume)
    if len(shortTrade) > 0:  # open short positions remain
        for t in shortTrade:
            # Bug fix: this loop previously incremented occupyLongVolume,
            # leaving short occupancy permanently zero.
            if t.vtSymbol in BZJ_DL:
                occupyMoney += t.price * abs(t.volume) * self.size * BZJ_DL[t.vtSymbol]
                occupyShortVolume += abs(t.volume)
            if t.vtSymbol in BZJ_ZZ:
                occupyMoney += t.price * abs(t.volume) * self.size * BZJ_ZZ[t.vtSymbol]
                occupyShortVolume += abs(t.volume)
            if t.vtSymbol in BZJ_SQ:
                occupyMoney += t.price * abs(t.volume) * self.size * BZJ_SQ[t.vtSymbol]
                occupyShortVolume += abs(t.volume)
    # TODO: the per-symbol margin lookup may be incomplete; original
    # flat-rate version kept below for reference:
    # if len(shortTrade) > 0:
    #     for t in shortTrade:
    #         occupyMoney += t.price * abs(t.volume) * self.size * 0.11
    #         occupyLongVolume += abs(t.volume)
    self.output(u'occupyLongVolume:{0},occupyShortVolume:{1}'.format(occupyLongVolume, occupyShortVolume))
    self.writeCtaLog(u'occupyLongVolume:{0},occupyShortVolume:{1}'.format(occupyLongVolume, occupyShortVolume))
    # Peak position size.
    self.maxVolume = max(self.maxVolume, max(occupyLongVolume, occupyShortVolume))
    # Remaining available funds.
    self.avaliable = self.capital - occupyMoney
    # Percentage of capital tied up as margin.
    self.percent = round(float(occupyMoney * 100 / self.capital), 2)
    # Nothing was closed this round: just report open positions.
    if not resultDict:
        if len(longTrade) > 0:
            msg = u'持多仓{0},资金占用:{1},仓位比例:{2}'.format(occupyLongVolume, occupyMoney, self.percent)
            self.output(msg)
            self.writeCtaLog(msg)
        elif len(shortTrade) > 0:
            msg = u'持空仓{0},资金占用:{1},仓位比例:{2}'.format(occupyShortVolume, occupyMoney, self.percent)
            self.output(msg)
            self.writeCtaLog(msg)
        return
    # Aggregate statistics over the closed results.
    for time, result in resultDict.items():
        if result.pnl > 0:
            self.winningResult += 1
            self.totalWinning += result.pnl
        else:
            self.losingResult += 1
            self.totalLosing += result.pnl
        self.capital += result.pnl
        self.maxCapital = max(self.capital, self.maxCapital)
        # self.maxVolume = max(self.maxVolume, result.volume)
        drawdown = self.capital - self.maxCapital
        drawdownRate = round(float(drawdown * 100 / self.maxCapital), 4)
        self.pnlList.append(result.pnl)
        self.timeList.append(time)
        self.capitalList.append(self.capital)
        self.drawdownList.append(drawdown)
        self.drawdownRateList.append(drawdownRate)
        self.totalResult += 1
        self.totalTurnover += result.turnover
        self.totalCommission += result.commission
        self.totalSlippage += result.slippage
        self.output(u'[{5}],{6} Vol:{0},盈亏:{1},回撤:{2}/{3},权益:{4}'.
                    format(abs(result.volume), result.pnl, drawdown,
                           drawdownRate, self.capital, result.groupId, time))
    # Recompute availability against the updated capital.
    self.avaliable = self.capital - occupyMoney
    self.percent = round(float(occupyMoney * 100 / self.capital), 2)
# ----------------------------------------------------------------------
def calculateBacktestingResult(self):
    """Compute the backtest result from the recorded trades.

    Modified by Incense Lee:
    - supports step-wise position building (e.g. 1-lot entry + 5 add-ons,
      closed by a single 6-lot exit counts as 6 round-trips); unequal
      add-on sizes are not verified (too complex);
    - supports trade groups (1-lot entries / multi-lot exits; under
      compounding only the final PnL is scaled);
    - tracks initial equity, per-trade equity, available funds and
      position ratio.

    Returns the statistics dict from getResult(), or {} when there were
    no trades.

    Bug fixes vs. original:
    - `self.capningital` typo (AttributeError) corrected to `self.capital`;
    - the method now returns the result dict on the success path (it
      previously returned None, crashing runOptimization's d[targetName]);
    - tradeDict keys are snapshotted for safe Python 3 iteration.
    """
    self.output(u'计算回测结果')
    # First pass: pair entries and exits into per-round-trip results.
    resultDict = OrderedDict()  # round-trip results keyed by entry time
    longTrade = []              # open long entries not yet closed
    shortTrade = []             # open short entries not yet closed
    i = 1
    tradeUnit = 1
    longid = EMPTY_STRING
    shortid = EMPTY_STRING
    for tradeid in list(self.tradeDict.keys()):
        trade = self.tradeDict[tradeid]
        # Long-direction trade
        if trade.direction == DIRECTION_LONG:
            # No open shorts: fresh long entry.
            if not shortTrade:
                longTrade.append(trade)
                longid = tradeid
            # Otherwise this long closes shorts (cover).
            else:
                gId = i   # trade group id (one group per multi-lot close)
                gt = 1    # number of round-trips in the group
                gr = None # combined group result
                if trade.volume > tradeUnit:
                    self.writeCtaLog(u'平仓数{0},组合编号:{1}'.format(trade.volume, gId))
                    gt = int(trade.volume / tradeUnit)
                for tv in range(gt):
                    entryTrade = shortTrade.pop(0)
                    result = TradingResult(entryTrade.price, trade.price, -tradeUnit,
                                           self.rate, self.slippage, self.size,
                                           groupId=gId, fixcommission=self.fixCommission)
                    if tv == 0:
                        if gt == 1:
                            resultDict[entryTrade.dt] = result
                        else:
                            gr = copy.deepcopy(result)
                    else:
                        gr.turnover = gr.turnover + result.turnover
                        gr.commission = gr.commission + result.commission
                        gr.slippage = gr.slippage + result.slippage
                        gr.pnl = gr.pnl + result.pnl
                        if tv == gt - 1:
                            gr.volume = trade.volume
                            resultDict[entryTrade.dt] = gr
                    t = {}
                    # NOTE(review): tradeTime is set via str(self.dt) in
                    # sendOrder/crossStopOrder, on which strftime would
                    # fail — confirm the data source supplies datetimes.
                    t['OpenTime'] = entryTrade.tradeTime.strftime('%Y/%m/%d %H:%M:%S')
                    t['OpenPrice'] = entryTrade.price
                    t['Direction'] = u'Short'
                    t['CloseTime'] = trade.tradeTime.strftime('%Y/%m/%d %H:%M:%S')
                    t['ClosePrice'] = trade.price
                    t['Volume'] = tradeUnit
                    t['Profit'] = result.pnl
                    self.exportTradeList.append(t)
                    self.writeCtaLog(u'{9}@{6} [{7}:开空{0},short:{1}]-[{8}:平空{2},cover:{3},vol:{4}],净盈亏:{5}'
                                     .format(entryTrade.tradeTime, entryTrade.price,
                                             trade.tradeTime, trade.price, tradeUnit, result.pnl,
                                             i, shortid, tradeid, gId))
                    i = i + 1
                if type(gr) != type(None):
                    self.writeCtaLog(u'组合净盈亏:{0}'.format(gr.pnl))
                self.writeCtaLog(u'-------------')
        # Short-direction trade
        else:
            # No open longs: fresh short entry.
            if not longTrade:
                shortTrade.append(trade)
                shortid = tradeid
            # Otherwise this short closes longs (sell).
            else:
                gId = i   # trade group id (one group per multi-lot close)
                gt = 1    # number of round-trips in the group
                gr = None # combined group result
                if trade.volume > tradeUnit:
                    self.writeCtaLog(u'平仓数{0},组合编号:{1}'.format(trade.volume, gId))
                    gt = int(trade.volume / tradeUnit)
                for tv in range(gt):
                    entryTrade = longTrade.pop(0)
                    result = TradingResult(entryTrade.price, trade.price, tradeUnit,
                                           self.rate, self.slippage, self.size,
                                           groupId=gId, fixcommission=self.fixCommission)
                    if tv == 0:
                        if gt == 1:
                            resultDict[entryTrade.dt] = result
                        else:
                            gr = copy.deepcopy(result)
                    else:
                        gr.turnover = gr.turnover + result.turnover
                        gr.commission = gr.commission + result.commission
                        gr.slippage = gr.slippage + result.slippage
                        gr.pnl = gr.pnl + result.pnl
                        if tv == gt - 1:
                            gr.volume = trade.volume
                            resultDict[entryTrade.dt] = gr
                    t = {}
                    t['OpenTime'] = entryTrade.tradeTime.strftime('%Y/%m/%d %H:%M:%S')
                    t['OpenPrice'] = entryTrade.price
                    t['Direction'] = u'Long'
                    t['CloseTime'] = trade.tradeTime.strftime('%Y/%m/%d %H:%M:%S')
                    t['ClosePrice'] = trade.price
                    t['Volume'] = tradeUnit
                    t['Profit'] = result.pnl
                    self.exportTradeList.append(t)
                    self.writeCtaLog(u'{9}@{6} [{7}:开多{0},buy:{1}]-[{8}.平多{2},sell:{3},vol:{4}],净盈亏:{5}'
                                     .format(entryTrade.tradeTime, entryTrade.price,
                                             trade.tradeTime, trade.price, tradeUnit, result.pnl,
                                             i, longid, tradeid, gId))
                    i = i + 1
                if type(gr) != type(None):
                    self.writeCtaLog(u'组合净盈亏:{0}'.format(gr.pnl))
                self.writeCtaLog(u'-------------')
    # No closed trades at all.
    if not resultDict:
        self.output(u'无交易结果')
        return {}
    # Second pass: accumulate the PnL curve, drawdown, etc.
    """
    initCapital = 40000     # 期初资金
    capital = initCapital   # 资金
    maxCapital = initCapital          # 资金最高净值
    maxPnl = 0              # 最高盈利
    minPnl = 0              # 最大亏损
    maxVolume = 1             # 最大仓位数
    wins = 0
    totalResult = 0             # 总成交数量
    totalTurnover = 0           # 总成交金额(合约面值)
    totalCommission = 0         # 总手续费
    totalSlippage = 0           # 总滑点
    timeList = []               # 时间序列
    pnlList = []                # 每笔盈亏序列
    capitalList = []            # 盈亏汇总的时间序列
    drawdownList = []           # 回撤的时间序列
    drawdownRateList = []       # 最大回撤比例的时间序列
    """
    drawdown = 0      # current drawdown
    compounding = 1   # naive compounding factor (scale lots as equity multiplies)
    for time, result in resultDict.items():
        # Optional naive compounding.
        if self.usageCompounding:
            compounding = int(self.capital / self.initCapital)
        if result.pnl > 0:
            self.winningResult += 1
            self.totalWinning += result.pnl
        else:
            self.losingResult += 1
            self.totalLosing += result.pnl
        self.capital += result.pnl * compounding
        self.maxCapital = max(self.capital, self.maxCapital)
        self.maxVolume = max(self.maxVolume, result.volume * compounding)
        drawdown = self.capital - self.maxCapital
        drawdownRate = round(float(drawdown * 100 / self.maxCapital), 4)
        self.pnlList.append(result.pnl * compounding)
        self.timeList.append(time)
        # Bug fix: original appended self.capningital (typo).
        self.capitalList.append(self.capital)
        self.drawdownList.append(drawdown)
        self.drawdownRateList.append(drawdownRate)
        self.totalResult += 1
        self.totalTurnover += result.turnover * compounding
        self.totalCommission += result.commission * compounding
        self.totalSlippage += result.slippage * compounding
    # Bug fix: return the statistics so callers (e.g. runOptimization)
    # can index the result dict; original implicitly returned None here.
    return self.getResult()
# ---------------------------------------------------------------------
def exportTradeResult(self):
    """Export the closed-trade list to a timestamped CSV file.

    Writes ./TestLogs/Output_<YYYYmmdd_HHMM>.csv relative to the current
    working directory; does nothing when no trades were recorded.
    """
    if not self.exportTradeList:
        return
    csvOutputFile = os.getcwd() + '/TestLogs/Output_{0}.csv'.format(datetime.now().strftime('%Y%m%d_%H%M'))
    import csv
    fieldnames = ['vtSymbol', 'OpenTime', 'OpenPrice', 'Direction', 'CloseTime', 'ClosePrice', 'Volume', 'Profit']
    # Bug fix: open via a context manager so the handle is flushed and
    # closed even on error (the original used the Python-2-only file()
    # builtin and never closed the handle).
    with open(csvOutputFile, 'wb') as csvWriteFile:
        writer = csv.DictWriter(f=csvWriteFile, fieldnames=fieldnames, dialect='excel')
        writer.writeheader()
        for row in self.exportTradeList:
            writer.writerow(row)
def getResult(self):
    """Collect backtest statistics into a dict; {} when there are no trades.

    Keys include initCapital, capital (net PnL), maxCapital, maxPnl,
    minPnl, maxVolume, totalResult, totalTurnover, totalCommission,
    totalSlippage, the time/pnl/capital/drawdown series, winningRate,
    averageWinning, averageLosing and profitLossRatio.
    """
    d = {}
    d['initCapital'] = self.initCapital  # initial capital
    d['capital'] = self.capital - self.initCapital  # net profit/loss
    d['maxCapital'] = self.maxCapital  # peak account equity
    if len(self.pnlList) == 0:
        return {}
    d['maxPnl'] = max(self.pnlList)  # largest single-trade win
    d['minPnl'] = min(self.pnlList)  # largest single-trade loss
    d['maxVolume'] = self.maxVolume
    d['totalResult'] = self.totalResult
    d['totalTurnover'] = self.totalTurnover
    d['totalCommission'] = self.totalCommission
    d['totalSlippage'] = self.totalSlippage
    d['timeList'] = self.timeList
    d['pnlList'] = self.pnlList
    d['capitalList'] = self.capitalList
    d['drawdownList'] = self.drawdownList
    d['drawdownRateList'] = self.drawdownRateList
    d['winningRate'] = round(100 * self.winningResult / len(self.pnlList), 4)
    averageWinning = 0  # defaults when there are no wins/losses
    averageLosing = 0
    profitLossRatio = 0
    if self.winningResult:
        averageWinning = self.totalWinning / self.winningResult  # mean win per trade
    if self.losingResult:
        averageLosing = self.totalLosing / self.losingResult  # mean loss per trade
    if averageLosing:
        profitLossRatio = -averageWinning / averageLosing  # win/loss ratio
    d['averageWinning'] = averageWinning
    d['averageLosing'] = averageLosing
    d['profitLossRatio'] = profitLossRatio
    return d
# ----------------------------------------------------------------------
def showBacktestingResult(self):
    """Print the backtest summary, export the trade CSV and plot curves."""
    # In realtime mode the statistics were accumulated during replay.
    if self.calculateMode != self.REALTIME_MODE:
        self.calculateBacktestingResult()
    d = self.getResult()
    if len(d) == 0:
        self.output(u'无交易结果')
        return
    # Export the trade list.
    self.exportTradeResult()
    # Print the summary.
    self.output('-' * 30)
    self.output(u'第一笔交易:\t%s' % d['timeList'][0])
    self.output(u'最后一笔交易:\t%s' % d['timeList'][-1])
    self.output(u'总交易次数:\t%s' % formatNumber(d['totalResult']))
    self.output(u'期初资金:\t%s' % formatNumber(d['initCapital']))
    self.output(u'总盈亏:\t%s' % formatNumber(d['capital']))
    self.output(u'资金最高净值:\t%s' % formatNumber(d['maxCapital']))
    self.output(u'每笔最大盈利:\t%s' % formatNumber(d['maxPnl']))
    self.output(u'每笔最大亏损:\t%s' % formatNumber(d['minPnl']))
    self.output(u'净值最大回撤: \t%s' % formatNumber(min(d['drawdownList'])))
    self.output(u'净值最大回撤率: \t%s' % formatNumber(min(d['drawdownRateList'])))
    self.output(u'胜率:\t%s' % formatNumber(d['winningRate']))
    self.output(u'盈利交易平均值\t%s' % formatNumber(d['averageWinning']))
    self.output(u'亏损交易平均值\t%s' % formatNumber(d['averageLosing']))
    self.output(u'盈亏比:\t%s' % formatNumber(d['profitLossRatio']))
    self.output(u'最大持仓:\t%s' % formatNumber(d['maxVolume']))
    self.output(u'平均每笔盈利:\t%s' % formatNumber(d['capital'] / d['totalResult']))
    self.output(u'盈利总额:\t%s' % formatNumber(d['capital']))
    self.output(u'平均每笔滑点成本:\t%s' % formatNumber(d['totalSlippage'] / d['totalResult']))
    self.output(u'滑点成本总额:\t%s' % formatNumber(d['totalSlippage']))
    self.output(u'平均每笔佣金:\t%s' % formatNumber(d['totalCommission'] / d['totalResult']))
    self.output(u'佣金总额:\t%s' % formatNumber(d['totalCommission']))
    # Plot equity, drawdown and PnL distribution.
    import matplotlib.pyplot as plt
    pCapital = plt.subplot(3, 1, 1)
    pCapital.set_ylabel("capital")
    pCapital.plot(d['capitalList'])
    pDD = plt.subplot(3, 1, 2)
    pDD.set_ylabel("DD")
    pDD.bar(range(len(d['drawdownList'])), d['drawdownList'])
    pPnl = plt.subplot(3, 1, 3)
    pPnl.set_ylabel("pnl")
    pPnl.hist(d['pnlList'], bins=50)
    plt.show()
# ----------------------------------------------------------------------
def putStrategyEvent(self, name):
    """No-op: strategy-update events are ignored during backtesting."""
    pass
# ----------------------------------------------------------------------
def setSlippage(self, slippage):
    """Set the per-side slippage in price points."""
    self.slippage = slippage
# ----------------------------------------------------------------------
def setSize(self, size):
    """Set the contract multiplier (value per point per lot)."""
    self.size = size
# ----------------------------------------------------------------------
def setRate(self, rate):
    """Set the commission rate as a fraction of turnover."""
    self.rate = float(rate)
# ----------------------------------------------------------------------
def runOptimization(self, strategyClass, optimizationSetting):
    """Grid-search strategy parameters and print results, best first.

    optimizationSetting supplies the parameter grid and the name of the
    result field to optimize; every combination runs one full backtest.
    """
    # Build the parameter combinations.
    settingList = optimizationSetting.generateSetting()
    targetName = optimizationSetting.optimizeTarget
    # Validate the optimization setup.
    if not settingList or not targetName:
        self.output(u'优化设置有问题,请检查')
        # Bug fix: abort here — the original warned but fell through and
        # kept running with an invalid/empty setup.
        return
    # Sweep every combination.
    resultList = []
    for setting in settingList:
        self.clearBacktestingResult()
        self.output('-' * 30)
        self.output('setting: %s' % str(setting))
        self.initStrategy(strategyClass, setting)
        self.runBacktesting()
        d = self.calculateBacktestingResult()
        try:
            targetValue = d[targetName]
        except KeyError:
            targetValue = 0
        resultList.append(([str(setting)], targetValue))
    # Report, best target value first.
    resultList.sort(reverse=True, key=lambda result: result[1])
    self.output('-' * 30)
    self.output(u'优化结果:')
    for result in resultList:
        self.output(u'%s: %s' % (result[0], result[1]))
# ----------------------------------------------------------------------
def clearBacktestingResult(self):
    """Reset all order/trade bookkeeping before a fresh backtest run."""
    # Reset the sequential id counters.
    self.limitOrderCount = 0   # limit orders
    self.stopOrderCount = 0    # stop orders
    self.tradeCount = 0        # trades
    # Empty every book in place (callers may hold references).
    for book in (self.limitOrderDict, self.workingLimitOrderDict,
                 self.stopOrderDict, self.workingStopOrderDict,
                 self.tradeDict):
        book.clear()
#add by xy 14 Aug 2017
#get lot shares by money
def moneyPerLot(self, price, symbol):
    """Margin required for one lot of `symbol` at `price`.

    Looks the symbol up in the DL/ZZ/SQ margin-ratio tables; returns
    None when the symbol is unknown to all three.
    """
    oneLotM = None
    for marginTable in (BZJ_DL, BZJ_ZZ, BZJ_SQ):
        if symbol in marginTable:
            oneLotM = price * self.size * marginTable[symbol]
    return oneLotM
########################################################################
class TradingResult(object):
    """Profit/loss record for one round-trip trade."""

    # ----------------------------------------------------------------------
    def __init__(self, entry, exit, volume, rate, slippage, size, groupId, fixcommission=EMPTY_FLOAT):
        """Compute turnover, commission, slippage cost and net PnL."""
        self.entry = entry        # open price
        self.exit = exit          # close price
        self.volume = volume      # traded lots, sign encodes direction
        self.groupId = groupId    # parent trade id for multi-lot closes
        self.turnover = (entry + exit) * size  # traded notional
        # Fixed per-lot commission when configured, else rate on turnover.
        if fixcommission:
            self.commission = fixcommission * volume
        else:
            self.commission = round(float(self.turnover * rate), 4)
        self.slippage = slippage * 2 * size  # slippage paid on both legs
        # Net PnL after commission and slippage.
        self.pnl = (exit - entry) * volume * size - self.commission - self.slippage
########################################################################
class OptimizationSetting(object):
    """Optimization settings: parameter grid plus the target field name."""

    # ----------------------------------------------------------------------
    def __init__(self):
        """Constructor"""
        self.paramDict = OrderedDict()
        self.optimizeTarget = ''  # name of the result field to optimize

    # ----------------------------------------------------------------------
    def addParameter(self, name, start, end, step):
        """Add a parameter sweeping [start, end] with the given step.

        NOTE(review): end must be strictly greater than start, so a
        single fixed value cannot be expressed — confirm this is intended.
        """
        if end <= start:
            print u'参数起始点必须小于终止点'
            return
        if step <= 0:
            print u'参数布进必须大于0'
            return
        l = []
        param = start
        while param <= end:
            l.append(param)
            param += step
        self.paramDict[name] = l

    # ----------------------------------------------------------------------
    def generateSetting(self):
        """Build the cartesian product of all parameter lists as dicts."""
        # Parameter names in insertion order.
        nameList = self.paramDict.keys()
        paramList = self.paramDict.values()
        # itertools.product yields every value combination.
        productList = list(product(*paramList))
        # Zip each combination back with its parameter names.
        settingList = []
        for p in productList:
            d = dict(zip(nameList, p))
            settingList.append(d)
        return settingList

    # ----------------------------------------------------------------------
    def setOptimizeTarget(self, target):
        """Set the result field name used as the optimization target."""
        self.optimizeTarget = target
# ----------------------------------------------------------------------
def formatNumber(n):
"""格式化数字到字符串"""
n = round(n, 2) # 保留两位小数
return format(n, ',') # 加上千分符
if __name__ == '__main__':
# 以下内容是一段回测脚本的演示,用户可以根据自己的需求修改
# 建议使用ipython notebook或者spyder来做回测
# 同样可以在命令模式下进行回测(一行一行输入运行)
from ctaDemo import *
# 创建回测引擎
engine = BacktestingEngine()
# 设置引擎的回测模式为K线
engine.setBacktestingMode(engine.BAR_MODE)
# 设置回测用的数据起始日期
engine.setStartDate('20110101')
# 载入历史数据到引擎中
engine.setDatabase(MINUTE_DB_NAME, 'IF0000')
# 设置产品相关参数
engine.setSlippage(0.2) # 股指1跳
engine.setRate(0.3 / 10000) # 万0.3
engine.setSize(300) # 股指合约大小
# 在引擎中创建策略对象
engine.initStrategy(DoubleEmaDemo, {})
# 开始跑回测
engine.runBacktesting()
# 显示回测结果
# spyder或者ipython notebook中运行时,会弹出盈亏曲线图
# 直接在cmd中回测则只会打印一些回测数值
engine.showBacktestingResult()<|fim▁end|> | self.tradeCount = 0 # 成交编号
self.tradeDict = OrderedDict() # 成交字典
|
<|file_name|>test_rotation.rs<|end_file_name|><|fim▁begin|>extern crate mp4parse_capi;
use std::io::Read;
use mp4parse_capi::*;
extern fn buf_read(buf: *mut u8, size: usize, userdata: *mut std::os::raw::c_void) -> isize {
let input: &mut std::fs::File = unsafe { &mut *(userdata as *mut _) };
let mut buf = unsafe { std::slice::from_raw_parts_mut(buf, size) };
match input.read(&mut buf) {
Ok(n) => n as isize,
Err(_) => -1,
}
}
#[test]
fn parse_rotation() {
let mut file = std::fs::File::open("tests/video_rotation_90.mp4").expect("Unknown file");
let io = Mp4parseIo {
read: Some(buf_read),
userdata: &mut file as *mut _ as *mut std::os::raw::c_void
};
unsafe {
let parser = mp4parse_new(&io);<|fim▁hole|> assert_eq!(rv, Mp4parseStatus::Ok);
let mut counts: u32 = 0;
rv = mp4parse_get_track_count(parser, &mut counts);
assert_eq!(rv, Mp4parseStatus::Ok);
assert_eq!(counts, 1);
let mut video = Mp4parseTrackVideoInfo::default();
let rv = mp4parse_get_track_video_info(parser, 0, &mut video);
assert_eq!(rv, Mp4parseStatus::Ok);
assert_eq!(video.rotation, 90);
mp4parse_free(parser);
}
}<|fim▁end|> |
let mut rv = mp4parse_read(parser); |
<|file_name|>condition.go<|end_file_name|><|fim▁begin|>// Copyright (c) 2016 Timo Savola. All rights reserved.
// Use of this source code is governed by a BSD-style
// license that can be found in the LICENSE file.
package condition
type C int
const (
Eq = C(iota)
Ne
GeS
GtS
GeU
GtU
LeS
LtS
LeU
LtU<|fim▁hole|> OrderedAndGe
OrderedAndGt
OrderedAndLe
OrderedAndLt
UnorderedOrEq
UnorderedOrNe
UnorderedOrGe
UnorderedOrGt
UnorderedOrLe
UnorderedOrLt
)
const (
MinOrderedAndCondition = OrderedAndEq
MinUnorderedOrCondition = UnorderedOrEq
)
var Inverted = [22]C{
Eq: Ne,
Ne: Eq,
GeS: LtS,
GtS: LeS,
GeU: LtU,
GtU: LeU,
LeS: GtS,
LtS: GeS,
LeU: GtU,
LtU: GeU,
OrderedAndEq: UnorderedOrNe,
OrderedAndNe: UnorderedOrEq,
OrderedAndGe: UnorderedOrLt,
OrderedAndGt: UnorderedOrLe,
OrderedAndLe: UnorderedOrGt,
OrderedAndLt: UnorderedOrGe,
UnorderedOrEq: OrderedAndNe,
UnorderedOrNe: OrderedAndEq,
UnorderedOrGe: OrderedAndLt,
UnorderedOrGt: OrderedAndLe,
UnorderedOrLe: OrderedAndGt,
UnorderedOrLt: OrderedAndGe,
}
var strings = []string{
Eq: "equal",
Ne: "not-equal",
GeS: "signed greater-or-equal",
GtS: "signed greater",
GeU: "unsigned greater-or-equal",
GtU: "unsigned greater",
LeS: "signed less-or-equal",
LtS: "signed less",
LeU: "unsigned less-or-equal",
LtU: "unsigned less",
OrderedAndEq: "ordered-and-equal",
OrderedAndNe: "ordered-and-not-equal",
OrderedAndGe: "ordered-and-greater-or-equal",
OrderedAndGt: "ordered-and-greater",
OrderedAndLe: "ordered-and-less-or-equal",
OrderedAndLt: "ordered-and-less",
UnorderedOrEq: "unordered-or-equal",
UnorderedOrNe: "unordered-or-not-equal",
UnorderedOrGe: "unordered-or-greater-or-equal",
UnorderedOrGt: "unordered-or-greater",
UnorderedOrLe: "unordered-or-less-or-equal",
UnorderedOrLt: "unordered-or-less",
}
func (f C) String() string {
if i := int(f); i < len(strings) {
return strings[i]
} else {
return "<invalid condition>"
}
}<|fim▁end|> |
OrderedAndEq
OrderedAndNe |
<|file_name|>NetworkTest.py<|end_file_name|><|fim▁begin|>#####################################################################
# -*- coding: iso-8859-1 -*- #
# #
# Frets on Fire #
# Copyright (C) 2006 Sami Kyöstilä #
# #
# This program is free software; you can redistribute it and/or #
# modify it under the terms of the GNU General Public License #
# as published by the Free Software Foundation; either version 2 #
# of the License, or (at your option) any later version. #
# #
# This program is distributed in the hope that it will be useful, #
# but WITHOUT ANY WARRANTY; without even the implied warranty of #
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the #
# GNU General Public License for more details. #
# #
# You should have received a copy of the GNU General Public License #
# along with this program; if not, write to the Free Software #<|fim▁hole|># Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, #
# MA 02110-1301, USA. #
#####################################################################
import unittest
import Network
import time
class TestConnection(Network.Connection):
def handlePacket(self, packet):
self.packet = packet
class TestServer(Network.Server):
def createConnection(self, sock):
return TestConnection(sock)
class NetworkTest(unittest.TestCase):
def testHandshake(self):
s = TestServer()
c = TestConnection()
c.connect("localhost")
c.sendPacket("moikka")
Network.communicate(100)
client = s.clients.values()[0]
assert client.packet == "moikka"
assert client.id == 1
def tearDown(self):
Network.shutdown()
if __name__ == "__main__":
unittest.main()<|fim▁end|> | |
<|file_name|>RegressionTree.py<|end_file_name|><|fim▁begin|># regression tree
# input is a dataframe of features
# the corresponding y value(called labels here) is the scores for each document
import pandas as pd
import numpy as np
from multiprocessing import Pool
from itertools import repeat
import scipy
import scipy.optimize
node_id = 0
def get_splitting_points(args):
# given a list
# return a list of possible splitting values
attribute, col = args
attribute.sort()
possible_split = []
for i in range(len(attribute)-1):
if attribute[i] != attribute[i+1]:
possible_split.append(np.mean((attribute[i],attribute[i+1])))
return possible_split, col
# create a dictionary, key is the attribute number, value is whole list of possible splits for that column
def find_best_split_parallel(args):
best_ls = 1000000
best_split = None
best_children = None
split_point, data, label = args
key,possible_split = split_point
for split in possible_split:
children = split_children(data, label, key, split)
#weighted average of left and right ls
ls = len(children[1])*least_square(children[1])/len(label) + len(children[3])*least_square(children[3])/len(label)
if ls < best_ls:
best_ls = ls
best_split = (key, split)
best_children = children
return best_ls, best_split, best_children
def find_best_split(data, label, split_points):
# split_points is a dictionary of possible splitting values
# return the best split
best_ls = 1000000
best_split = None
best_children = None
pool = Pool()
for ls, split, children in pool.map(find_best_split_parallel, zip(split_points.items(), repeat(data), repeat(label))):
if ls < best_ls:
best_ls = ls
best_split = split
best_children = children
pool.close()
return best_split, best_children # return a tuple(attribute, value)
def split_children(data, label, key, split):
left_index = [index for index in xrange(len(data.iloc[:,key])) if data.iloc[index,key] < split]
right_index = [index for index in xrange(len(data.iloc[:,key])) if data.iloc[index,key] >= split]
left_data = data.iloc[left_index,:]
right_data = data.iloc[right_index,:]
left_label = [label[i] for i in left_index]
right_label =[label[i] for i in right_index]
return left_data, left_label, right_data, right_label
def least_square(label):
if not len(label):
return 0
return (np.sum(label)**2)/len(set(label))
def create_leaf(label):
global node_id
node_id += 1<|fim▁hole|> 'right':None,
'is_leaf':True,
'index':node_id}
leaf['value'] = round(np.mean(label),3)
return leaf
def find_splits_parallel(args):
var_space, label, col = args
# var_space = data.iloc[:,col].tolist()
return scipy.optimize.fminbound(error_function, min(var_space), max(var_space), args = (col, var_space, label), full_output = 1)
# return,
# if not min_error or error < min_error:
# min_error = error
# split_var = col
# min_split = split
def create_tree(data, all_pos_split, label, max_depth, ideal_ls, current_depth = 0):
remaining_features = all_pos_split
#stopping conditions
if sum([len(v)!= 0 for v in remaining_features.values()]) == 0:
# If there are no remaining features to consider, make current node a leaf node
return create_leaf(label)
# #Additional stopping condition (limit tree depth)
elif current_depth > max_depth:
return create_leaf(label)
#######
min_error = None
split_var = None
min_split = None
var_spaces = [data.iloc[:,col].tolist() for col in xrange(data.shape[1])]
cols = [col for col in xrange(data.shape[1])]
pool = Pool()
for split, error, ierr, numf in pool.map(find_splits_parallel, zip(var_spaces, repeat(label), cols)):
if not min_error or error < min_error:
min_error = error
split_var = col
min_split = split
pool.close()
splitting_feature = (split_var, min_split)
children = split_children(data, label, split_var, min_split)
left_data, left_label, right_data, right_label = children
if len(left_label) == 0 or len(right_label) == 0:
return create_leaf(label)
left_least_square = least_square(left_label)
# Create a leaf node if the split is "perfect"
if left_least_square < ideal_ls:
return create_leaf(left_label)
if least_square(right_label) < ideal_ls:
return create_leaf(right_label)
# recurse on children
left_tree = create_tree(left_data, remaining_features, left_label, max_depth, ideal_ls, current_depth +1)
right_tree = create_tree(right_data, remaining_features, right_label, max_depth, ideal_ls, current_depth +1)
return {'is_leaf' : False,
'value' : None,
'splitting_feature': splitting_feature,
'left' : left_tree,
'right' : right_tree,
'index' : None}
def error_function(split_point, split_var, data, label):
data1 = []
data2 = []
for i in xrange(len(data)):
temp_dat = data[i]
if temp_dat <= split_point:
data1.append(label[i])
else:
data2.append(label[i])
return least_square(data1) + least_square(data2)
def make_prediction(tree, x, annotate = False):
if tree['is_leaf']:
if annotate:
print "At leaf, predicting %s" % tree['value']
return tree['value']
else:
# the splitting value of x.
split_feature_value = x[tree['splitting_feature'][0]]
if annotate:
print "Split on %s = %s" % (tree['splitting_feature'], split_feature_value)
if split_feature_value < tree['splitting_feature'][1]:
return make_prediction(tree['left'], x, annotate)
else:
return make_prediction(tree['right'], x, annotate)
class RegressionTree:
def __init__(self, training_data, labels, max_depth=5, ideal_ls=100):
self.training_data = training_data
self.labels = labels
self.max_depth = max_depth
self.ideal_ls = ideal_ls
self.tree = None
def fit(self):
global node_id
node_id = 0
all_pos_split = {}
pool = Pool()
splitting_data = [self.training_data.iloc[:,col].tolist() for col in xrange(self.training_data.shape[1])]
cols = [col for col in xrange(self.training_data.shape[1])]
for dat, col in pool.map(get_splitting_points, zip(splitting_data, cols)):
all_pos_split[col] = dat
pool.close()
self.tree = create_tree(self.training_data, all_pos_split, self.labels, self.max_depth, self.ideal_ls)
def predict(self, test):
prediction = np.array([make_prediction(self.tree, x) for x in test])
return prediction
if __name__ == '__main__':
#read in data, label
data = pd.read_excel("mlr06.xls")
test = [[478, 184, 40, 74, 11, 31], [1000,10000,10000,10000,10000,1000,100000]]
label = data['X7']
del data['X7']
model = RegressionTree(data, label)
model.fit()
print model.predict(test)<|fim▁end|> | leaf = {'splittng_feature': None,
'left': None, |
<|file_name|>cleanup.py<|end_file_name|><|fim▁begin|>"""Cleanup script."""
from grr.lib import export_utils
# After you do this the UI complains a little, but creating a new hunt fixes it.
hunts = aff4.FACTORY.Open("aff4:/hunts/")
for hunt in hunts.ListChildren():<|fim▁hole|> cutoff = rdfvalue.RDFDatetime().Now() - rdfvalue.Duration("2h")
if fd.Get(fd.Schema.PING) < cutoff:
aff4.FACTORY.Delete(fd.urn)
# Delete all flows
for client in export_utils.GetAllClients():
aff4.FACTORY.Delete(client.Add("flows"))<|fim▁end|> | aff4.FACTORY.Delete(hunt)
# Delete clients that haven't polled in for 2hours
for fd in aff4.FACTORY.MultiOpen(export_utils.GetAllClients()): |
<|file_name|>enrollmentTermsApi.js<|end_file_name|><|fim▁begin|>//
// Copyright (C) 2016 - present Instructure, Inc.
//
// This file is part of Canvas.
//
// Canvas is free software: you can redistribute it and/or modify it under
// the terms of the GNU Affero General Public License as published by the Free
// Software Foundation, version 3 of the License.
//
// Canvas is distributed in the hope that it will be useful, but WITHOUT ANY
// WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR
// A PARTICULAR PURPOSE. See the GNU Affero General Public License for more
// details.
//<|fim▁hole|>
import _ from 'underscore'
import Depaginate from 'jsx/shared/CheatDepaginator'
const listUrl = () => ENV.ENROLLMENT_TERMS_URL
const deserializeTerms = termGroups =>
_.flatten(
_.map(termGroups, group =>
_.map(group.enrollment_terms, (term) => {
const groupID = term.grading_period_group_id
const newGroupID = _.isNumber(groupID) ? groupID.toString() : groupID
return {
id: term.id.toString(),
name: term.name,
startAt: term.start_at ? new Date(term.start_at) : null,
endAt: term.end_at ? new Date(term.end_at) : null,
createdAt: term.created_at ? new Date(term.created_at) : null,
gradingPeriodGroupId: newGroupID,
}
})
)
)
export default {
list (terms) {
return new Promise((resolve, reject) => {
Depaginate(listUrl())
.then(response => resolve(deserializeTerms(response)))
.fail(error => reject(error))
})
}
}<|fim▁end|> | // You should have received a copy of the GNU Affero General Public License along
// with this program. If not, see <http://www.gnu.org/licenses/>. |
<|file_name|>WMSOverlayServer.py<|end_file_name|><|fim▁begin|>from kvmap.code.projections import *
from urllib2 import urlopen
from httplib import HTTPConnection
from threading import Thread
from kivy.logger import Logger
from kivy.loader import Loader
from os.path import join, dirname
import time, os
import hashlib
try:
from pyproj import Proj
from xml.etree import ElementTree as ET
except:
pass
class WMSOverlayServer(object):
cache = {}
available_maptype = dict(roadmap='Roadmap') # default
type = "wms"
'''Generic WMS server'''
def __init__(self, progress_callback=None):
self.progress_callback = progress_callback
def setProgressCallback(self, progress_callback):
self.progress_callback = progress_callback
def getInfo(self, lat, lon, epsilon):
return None
def get(self, parent, width, height):
self.bl = parent.bottom_left
self.tr = parent.top_right
self.zoom = parent.zoom
url = self.geturl(self.bl[0], self.bl[1], self.tr[0], self.tr[1], self.zoom, width, height)
if not url:
return None
key = hashlib.md5(url).hexdigest()
if key in self.cache:
return self.cache[key]
try:
image = Loader.image('http://' + self.provider_host + url, progress_callback=self.progress_callback)
self.cache[key] = image
except Exception, e:
Logger.error('OverlayServer could not find (or read) image %s [%s]' % (url, e))
image = None
def getLegendGraphic(self):
if self.legend is None and not self.triedlegend:
self.triedlegend = True
layer = self.layer
if "," in layer:
layer = layer[layer.rindex(",") + 1:]
if self.legendlayer:
layer = self.legendlayer
url = self.baseurl + "?REQUEST=GetLegendGraphic&VERSION=1.0.0&FORMAT=image/png&LAYER=%s&ext=.png" % (layer)
try:
print 'http://' + self.provider_host + url
image = Loader.image('http://' + self.provider_host + url)
self.legend = image
except Exception, e:
Logger.error('OverlayServer could not find LEGENDGRAPHICS for %s %s' % (self.baseurl, layer))
return self.legend
def xy_to_co(self, lat, lon):
if self.customBounds:
x, y = latlon_to_custom(lat, lon, self.bounds)
elif self.isPLatLon: # patch for android - does not require pyproj library
x, y = lon, lat
elif self.isPGoogle: # patch for android - does not require pyproj library<|fim▁hole|> x, y = transform(pLatlon, self.projection, lon, lat)
return x, y
def co_to_ll(self, x, y):
if self.customBounds:
u, v = custom_to_unit(lat, lon, self.bounds)
l, m = unit_to_latlon(u, v)
elif self.isPLatLon: # patch for android - does not require pyproj library
l, m = y, x
elif self.isPGoogle: # patch for android - does not require pyproj library
l, m = google_to_latlon (y, x)
else:
l, m = transform(self.projection, pLatlon, y, x)
return l, m
def geturl(self, lat1, lon1, lat2, lon2, zoom, w, h):
try:
x1, y1 = self.xy_to_co(lat1, lon1)
x2, y2 = self.xy_to_co(lat2, lon2)
return self.url + "&BBOX=%f,%f,%f,%f&WIDTH=%i&HEIGHT=%i&ext=.png" % (x1, y1, x2, y2, w, h)
except RuntimeError, e:
return None
def parseLayer(self, layer, data):
try:
name = layer.find("Name").text
except:
name = None
srss = layer.findall("SRS")
if name: # and srss:
data[name] = map(lambda x:x.text, srss)
if self.debug:
print "Provider %s provides layer %s in projections %s" % (self.provider_host, name, data[name])
subs = layer.findall("Layer")
for sub in subs:
self.parseLayer(sub, data)
def initFromGetCapabilities(self, host, baseurl, layer=None, index=0, srs=None):
self.debug = (layer == None) and (index == 0)
# GetCapabilities (Layers + SRS)
if layer is None or srs is None:
capabilities = urlopen(host + baseurl + "?SERVICE=WMS&VERSION=1.1.1&Request=GetCapabilities").read().strip()
try:
tree = ET.fromstring(capabilities)
if self.debug:
ET.dump(tree)
layers = tree.findall("Capability/Layer") # TODO: proper parsing of cascading layers and their SRS
data = {}
for l in layers:
self.parseLayer(l, data)
# Choose Layer and SRS by (alphabetical) index
if layer is None:
layer = sorted(data.keys())[index]
if srs is None:
srs = sorted(data[layer])[0]
except:
pass
print "Displaying from %s/%s: layer %s in SRS %s." % (host, baseurl, layer, srs)
# generate tile URL and init projection by EPSG code
self.layer = layer
self.baseurl = baseurl
self.url = baseurl + "?LAYERS=%s&SRS=%s&FORMAT=image/png&TRANSPARENT=TRUE&SERVICE=WMS&VERSION=1.1.1&REQUEST=GetMap&STYLES=" % (layer, srs)
self.isPGoogle = False
self.isPLatLon = False
self.legend = None
self.legendlayer = None
self.triedlegend = False
if srs == "EPSG:4326":
self.isPLatLon = True
elif srs == "EPSG:900913" or srs == "EPSG:3857":
self.isPGoogle = True
try:
self.projection = pGoogle
except:
pass
else:
try:
self.projection = Proj(init=srs)
except:
pass<|fim▁end|> | x, y = latlon_to_google (lat, lon)
else: |
<|file_name|>test_generic.py<|end_file_name|><|fim▁begin|># Copyright 2016 Red Hat, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import mock
from ironic.common import driver_factory
from ironic.common import exception
from ironic.conductor import task_manager
from ironic.drivers import base as driver_base
from ironic.drivers.modules import agent
from ironic.drivers.modules import fake
from ironic.drivers.modules import inspector
from ironic.drivers.modules import iscsi_deploy
from ironic.drivers.modules import noop
from ironic.drivers.modules import pxe
from ironic.tests.unit.db import base as db_base
from ironic.tests.unit.objects import utils as obj_utils
<|fim▁hole|> super(ManualManagementHardwareTestCase, self).setUp()
self.config(enabled_hardware_types=['manual-management'],
enabled_power_interfaces=['fake'],
enabled_management_interfaces=['fake'],
enabled_inspect_interfaces=['no-inspect'])
self.config(enabled=True, group='inspector')
def test_default_interfaces(self):
node = obj_utils.create_test_node(self.context,
driver='manual-management')
with task_manager.acquire(self.context, node.id) as task:
self.assertIsInstance(task.driver.management, fake.FakeManagement)
self.assertIsInstance(task.driver.power, fake.FakePower)
self.assertIsInstance(task.driver.boot, pxe.PXEBoot)
self.assertIsInstance(task.driver.deploy, iscsi_deploy.ISCSIDeploy)
self.assertIsInstance(task.driver.inspect, noop.NoInspect)
self.assertIsInstance(task.driver.raid, noop.NoRAID)
def test_supported_interfaces(self):
self.config(enabled_inspect_interfaces=['inspector', 'no-inspect'])
node = obj_utils.create_test_node(self.context,
driver='manual-management',
deploy_interface='direct',
raid_interface='agent')
with task_manager.acquire(self.context, node.id) as task:
self.assertIsInstance(task.driver.management, fake.FakeManagement)
self.assertIsInstance(task.driver.power, fake.FakePower)
self.assertIsInstance(task.driver.boot, pxe.PXEBoot)
self.assertIsInstance(task.driver.deploy, agent.AgentDeploy)
self.assertIsInstance(task.driver.inspect, inspector.Inspector)
self.assertIsInstance(task.driver.raid, agent.AgentRAID)
def test_get_properties(self):
# These properties are from vendor (agent) and boot (pxe) interfaces
expected_prop_keys = [
'deploy_forces_oob_reboot', 'deploy_kernel', 'deploy_ramdisk']
hardware_type = driver_factory.get_hardware_type("manual-management")
properties = hardware_type.get_properties()
self.assertEqual(sorted(expected_prop_keys), sorted(properties.keys()))
@mock.patch.object(driver_factory, 'default_interface', autospec=True)
def test_get_properties_none(self, mock_def_iface):
hardware_type = driver_factory.get_hardware_type("manual-management")
mock_def_iface.side_effect = exception.NoValidDefaultForInterface("no")
properties = hardware_type.get_properties()
self.assertEqual({}, properties)
self.assertEqual(len(driver_base.ALL_INTERFACES),
mock_def_iface.call_count)<|fim▁end|> | class ManualManagementHardwareTestCase(db_base.DbTestCase):
def setUp(self): |
<|file_name|>file-type.js<|end_file_name|><|fim▁begin|>'use strict';
const toBytes = s => [...s].map(c => c.charCodeAt(0));
const xpiZipFilename = toBytes('META-INF/mozilla.rsa');
const oxmlContentTypes = toBytes('[Content_Types].xml');
const oxmlRels = toBytes('_rels/.rels');
const fileType = input => {
const buf = input instanceof Uint8Array ? input : new Uint8Array(input);
if (!(buf && buf.length > 1)) {
return null;
}
const check = (header, options) => {
options = Object.assign({
offset: 0
}, options);
for (let i = 0; i < header.length; i++) {
// If a bitmask is set
if (options.mask) {
// If header doesn't equal `buf` with bits masked off
if (header[i] !== (options.mask[i] & buf[i + options.offset])) {
return false;
}
} else if (header[i] !== buf[i + options.offset]) {
return false;
}
}
return true;
};
const checkString = (header, options) => check(toBytes(header), options);
if (check([0xFF, 0xD8, 0xFF])) {
return {
ext: 'jpg',
mime: 'image/jpeg'
};
}
if (check([0x89, 0x50, 0x4E, 0x47, 0x0D, 0x0A, 0x1A, 0x0A])) {
return {
ext: 'png',
mime: 'image/png'
};
}
if (check([0x47, 0x49, 0x46])) {
return {
ext: 'gif',
mime: 'image/gif'
};
}
if (check([0x57, 0x45, 0x42, 0x50], {offset: 8})) {
return {
ext: 'webp',
mime: 'image/webp'
};
}
if (check([0x46, 0x4C, 0x49, 0x46])) {
return {
ext: 'flif',
mime: 'image/flif'
};
}
// Needs to be before `tif` check
if (
(check([0x49, 0x49, 0x2A, 0x0]) || check([0x4D, 0x4D, 0x0, 0x2A])) &&
check([0x43, 0x52], {offset: 8})
) {
return {
ext: 'cr2',
mime: 'image/x-canon-cr2'
};
}
if (
check([0x49, 0x49, 0x2A, 0x0]) ||
check([0x4D, 0x4D, 0x0, 0x2A])
) {
return {
ext: 'tif',
mime: 'image/tiff'
};
}
if (check([0x42, 0x4D])) {
return {
ext: 'bmp',
mime: 'image/bmp'
};
}
if (check([0x49, 0x49, 0xBC])) {
return {
ext: 'jxr',
mime: 'image/vnd.ms-photo'
};
}
if (check([0x38, 0x42, 0x50, 0x53])) {
return {
ext: 'psd',
mime: 'image/vnd.adobe.photoshop'
};
}
// Zip-based file formats
// Need to be before the `zip` check
if (check([0x50, 0x4B, 0x3, 0x4])) {
if (
check([0x6D, 0x69, 0x6D, 0x65, 0x74, 0x79, 0x70, 0x65, 0x61, 0x70, 0x70, 0x6C, 0x69, 0x63, 0x61, 0x74, 0x69, 0x6F, 0x6E, 0x2F, 0x65, 0x70, 0x75, 0x62, 0x2B, 0x7A, 0x69, 0x70], {offset: 30})
) {
return {
ext: 'epub',
mime: 'application/epub+zip'
};<|fim▁hole|> }
// Assumes signed `.xpi` from addons.mozilla.org
if (check(xpiZipFilename, {offset: 30})) {
return {
ext: 'xpi',
mime: 'application/x-xpinstall'
};
}
if (checkString('mimetypeapplication/vnd.oasis.opendocument.text', {offset: 30})) {
return {
ext: 'odt',
mime: 'application/vnd.oasis.opendocument.text'
};
}
if (checkString('mimetypeapplication/vnd.oasis.opendocument.spreadsheet', {offset: 30})) {
return {
ext: 'ods',
mime: 'application/vnd.oasis.opendocument.spreadsheet'
};
}
if (checkString('mimetypeapplication/vnd.oasis.opendocument.presentation', {offset: 30})) {
return {
ext: 'odp',
mime: 'application/vnd.oasis.opendocument.presentation'
};
}
// https://github.com/file/file/blob/master/magic/Magdir/msooxml
if (check(oxmlContentTypes, {offset: 30}) || check(oxmlRels, {offset: 30})) {
const sliced = buf.subarray(4, 4 + 2000);
const nextZipHeaderIndex = arr => arr.findIndex((el, i, arr) => arr[i] === 0x50 && arr[i + 1] === 0x4B && arr[i + 2] === 0x3 && arr[i + 3] === 0x4);
const header2Pos = nextZipHeaderIndex(sliced);
if (header2Pos !== -1) {
const slicedAgain = buf.subarray(header2Pos + 8, header2Pos + 8 + 1000);
const header3Pos = nextZipHeaderIndex(slicedAgain);
if (header3Pos !== -1) {
const offset = 8 + header2Pos + header3Pos + 30;
if (checkString('word/', {offset})) {
return {
ext: 'docx',
mime: 'application/vnd.openxmlformats-officedocument.wordprocessingml.document'
};
}
if (checkString('ppt/', {offset})) {
return {
ext: 'pptx',
mime: 'application/vnd.openxmlformats-officedocument.presentationml.presentation'
};
}
if (checkString('xl/', {offset})) {
return {
ext: 'xlsx',
mime: 'application/vnd.openxmlformats-officedocument.spreadsheetml.sheet'
};
}
}
}
}
}
if (
check([0x50, 0x4B]) &&
(buf[2] === 0x3 || buf[2] === 0x5 || buf[2] === 0x7) &&
(buf[3] === 0x4 || buf[3] === 0x6 || buf[3] === 0x8)
) {
return {
ext: 'zip',
mime: 'application/zip'
};
}
if (check([0x75, 0x73, 0x74, 0x61, 0x72], {offset: 257})) {
return {
ext: 'tar',
mime: 'application/x-tar'
};
}
if (
check([0x52, 0x61, 0x72, 0x21, 0x1A, 0x7]) &&
(buf[6] === 0x0 || buf[6] === 0x1)
) {
return {
ext: 'rar',
mime: 'application/x-rar-compressed'
};
}
if (check([0x1F, 0x8B, 0x8])) {
return {
ext: 'gz',
mime: 'application/gzip'
};
}
if (check([0x42, 0x5A, 0x68])) {
return {
ext: 'bz2',
mime: 'application/x-bzip2'
};
}
if (check([0x37, 0x7A, 0xBC, 0xAF, 0x27, 0x1C])) {
return {
ext: '7z',
mime: 'application/x-7z-compressed'
};
}
if (check([0x78, 0x01])) {
return {
ext: 'dmg',
mime: 'application/x-apple-diskimage'
};
}
if (check([0x33, 0x67, 0x70, 0x35]) || // 3gp5
(
check([0x0, 0x0, 0x0]) && check([0x66, 0x74, 0x79, 0x70], {offset: 4}) &&
(
check([0x6D, 0x70, 0x34, 0x31], {offset: 8}) || // MP41
check([0x6D, 0x70, 0x34, 0x32], {offset: 8}) || // MP42
check([0x69, 0x73, 0x6F, 0x6D], {offset: 8}) || // ISOM
check([0x69, 0x73, 0x6F, 0x32], {offset: 8}) || // ISO2
check([0x6D, 0x6D, 0x70, 0x34], {offset: 8}) || // MMP4
check([0x4D, 0x34, 0x56], {offset: 8}) || // M4V
check([0x64, 0x61, 0x73, 0x68], {offset: 8}) // DASH
)
)) {
return {
ext: 'mp4',
mime: 'video/mp4'
};
}
if (check([0x4D, 0x54, 0x68, 0x64])) {
return {
ext: 'mid',
mime: 'audio/midi'
};
}
// https://github.com/threatstack/libmagic/blob/master/magic/Magdir/matroska
if (check([0x1A, 0x45, 0xDF, 0xA3])) {
const sliced = buf.subarray(4, 4 + 4096);
const idPos = sliced.findIndex((el, i, arr) => arr[i] === 0x42 && arr[i + 1] === 0x82);
if (idPos !== -1) {
const docTypePos = idPos + 3;
const findDocType = type => [...type].every((c, i) => sliced[docTypePos + i] === c.charCodeAt(0));
if (findDocType('matroska')) {
return {
ext: 'mkv',
mime: 'video/x-matroska'
};
}
if (findDocType('webm')) {
return {
ext: 'webm',
mime: 'video/webm'
};
}
}
}
if (check([0x0, 0x0, 0x0, 0x14, 0x66, 0x74, 0x79, 0x70, 0x71, 0x74, 0x20, 0x20]) ||
check([0x66, 0x72, 0x65, 0x65], {offset: 4}) ||
check([0x66, 0x74, 0x79, 0x70, 0x71, 0x74, 0x20, 0x20], {offset: 4}) ||
check([0x6D, 0x64, 0x61, 0x74], {offset: 4}) || // MJPEG
check([0x77, 0x69, 0x64, 0x65], {offset: 4})) {
return {
ext: 'mov',
mime: 'video/quicktime'
};
}
// RIFF file format which might be AVI, WAV, QCP, etc
if (check([0x52, 0x49, 0x46, 0x46])) {
if (check([0x41, 0x56, 0x49], {offset: 8})) {
return {
ext: 'avi',
mime: 'video/x-msvideo'
};
}
if (check([0x57, 0x41, 0x56, 0x45], {offset: 8})) {
return {
ext: 'wav',
mime: 'audio/x-wav'
};
}
// QLCM, QCP file
if (check([0x51, 0x4C, 0x43, 0x4D], {offset: 8})) {
return {
ext: 'qcp',
mime: 'audio/qcelp'
};
}
}
if (check([0x30, 0x26, 0xB2, 0x75, 0x8E, 0x66, 0xCF, 0x11, 0xA6, 0xD9])) {
return {
ext: 'wmv',
mime: 'video/x-ms-wmv'
};
}
if (
check([0x0, 0x0, 0x1, 0xBA]) ||
check([0x0, 0x0, 0x1, 0xB3])
) {
return {
ext: 'mpg',
mime: 'video/mpeg'
};
}
if (check([0x66, 0x74, 0x79, 0x70, 0x33, 0x67], {offset: 4})) {
return {
ext: '3gp',
mime: 'video/3gpp'
};
}
// Check for MPEG header at different starting offsets
for (let start = 0; start < 2 && start < (buf.length - 16); start++) {
if (
check([0x49, 0x44, 0x33], {offset: start}) || // ID3 header
check([0xFF, 0xE2], {offset: start, mask: [0xFF, 0xE2]}) // MPEG 1 or 2 Layer 3 header
) {
return {
ext: 'mp3',
mime: 'audio/mpeg'
};
}
if (
check([0xFF, 0xE4], {offset: start, mask: [0xFF, 0xE4]}) // MPEG 1 or 2 Layer 2 header
) {
return {
ext: 'mp2',
mime: 'audio/mpeg'
};
}
if (
check([0xFF, 0xF8], {offset: start, mask: [0xFF, 0xFC]}) // MPEG 2 layer 0 using ADTS
) {
return {
ext: 'mp2',
mime: 'audio/mpeg'
};
}
if (
check([0xFF, 0xF0], {offset: start, mask: [0xFF, 0xFC]}) // MPEG 4 layer 0 using ADTS
) {
return {
ext: 'mp4',
mime: 'audio/mpeg'
};
}
}
if (
check([0x66, 0x74, 0x79, 0x70, 0x4D, 0x34, 0x41], {offset: 4}) ||
check([0x4D, 0x34, 0x41, 0x20])
) {
return {
ext: 'm4a',
mime: 'audio/m4a'
};
}
// Needs to be before `ogg` check
if (check([0x4F, 0x70, 0x75, 0x73, 0x48, 0x65, 0x61, 0x64], {offset: 28})) {
return {
ext: 'opus',
mime: 'audio/opus'
};
}
// If 'OggS' in first bytes, then OGG container
if (check([0x4F, 0x67, 0x67, 0x53])) {
// This is a OGG container
// If ' theora' in header.
if (check([0x80, 0x74, 0x68, 0x65, 0x6F, 0x72, 0x61], {offset: 28})) {
return {
ext: 'ogv',
mime: 'video/ogg'
};
}
// If '\x01video' in header.
if (check([0x01, 0x76, 0x69, 0x64, 0x65, 0x6F, 0x00], {offset: 28})) {
return {
ext: 'ogm',
mime: 'video/ogg'
};
}
// If ' FLAC' in header https://xiph.org/flac/faq.html
if (check([0x7F, 0x46, 0x4C, 0x41, 0x43], {offset: 28})) {
return {
ext: 'oga',
mime: 'audio/ogg'
};
}
// 'Speex ' in header https://en.wikipedia.org/wiki/Speex
if (check([0x53, 0x70, 0x65, 0x65, 0x78, 0x20, 0x20], {offset: 28})) {
return {
ext: 'spx',
mime: 'audio/ogg'
};
}
// If '\x01vorbis' in header
if (check([0x01, 0x76, 0x6F, 0x72, 0x62, 0x69, 0x73], {offset: 28})) {
return {
ext: 'ogg',
mime: 'audio/ogg'
};
}
// Default OGG container https://www.iana.org/assignments/media-types/application/ogg
return {
ext: 'ogx',
mime: 'application/ogg'
};
}
if (check([0x66, 0x4C, 0x61, 0x43])) {
return {
ext: 'flac',
mime: 'audio/x-flac'
};
}
if (check([0x23, 0x21, 0x41, 0x4D, 0x52, 0x0A])) {
return {
ext: 'amr',
mime: 'audio/amr'
};
}
if (check([0x25, 0x50, 0x44, 0x46])) {
return {
ext: 'pdf',
mime: 'application/pdf'
};
}
if (check([0x4D, 0x5A])) {
return {
ext: 'exe',
mime: 'application/x-msdownload'
};
}
if (
(buf[0] === 0x43 || buf[0] === 0x46) &&
check([0x57, 0x53], {offset: 1})
) {
return {
ext: 'swf',
mime: 'application/x-shockwave-flash'
};
}
if (check([0x7B, 0x5C, 0x72, 0x74, 0x66])) {
return {
ext: 'rtf',
mime: 'application/rtf'
};
}
if (check([0x00, 0x61, 0x73, 0x6D])) {
return {
ext: 'wasm',
mime: 'application/wasm'
};
}
if (
check([0x77, 0x4F, 0x46, 0x46]) &&
(
check([0x00, 0x01, 0x00, 0x00], {offset: 4}) ||
check([0x4F, 0x54, 0x54, 0x4F], {offset: 4})
)
) {
return {
ext: 'woff',
mime: 'font/woff'
};
}
if (
check([0x77, 0x4F, 0x46, 0x32]) &&
(
check([0x00, 0x01, 0x00, 0x00], {offset: 4}) ||
check([0x4F, 0x54, 0x54, 0x4F], {offset: 4})
)
) {
return {
ext: 'woff2',
mime: 'font/woff2'
};
}
if (
check([0x4C, 0x50], {offset: 34}) &&
(
check([0x00, 0x00, 0x01], {offset: 8}) ||
check([0x01, 0x00, 0x02], {offset: 8}) ||
check([0x02, 0x00, 0x02], {offset: 8})
)
) {
return {
ext: 'eot',
mime: 'application/octet-stream'
};
}
if (check([0x00, 0x01, 0x00, 0x00, 0x00])) {
return {
ext: 'ttf',
mime: 'font/ttf'
};
}
if (check([0x4F, 0x54, 0x54, 0x4F, 0x00])) {
return {
ext: 'otf',
mime: 'font/otf'
};
}
if (check([0x00, 0x00, 0x01, 0x00])) {
return {
ext: 'ico',
mime: 'image/x-icon'
};
}
if (check([0x00, 0x00, 0x02, 0x00])) {
return {
ext: 'cur',
mime: 'image/x-icon'
};
}
if (check([0x46, 0x4C, 0x56, 0x01])) {
return {
ext: 'flv',
mime: 'video/x-flv'
};
}
if (check([0x25, 0x21])) {
return {
ext: 'ps',
mime: 'application/postscript'
};
}
if (check([0xFD, 0x37, 0x7A, 0x58, 0x5A, 0x00])) {
return {
ext: 'xz',
mime: 'application/x-xz'
};
}
if (check([0x53, 0x51, 0x4C, 0x69])) {
return {
ext: 'sqlite',
mime: 'application/x-sqlite3'
};
}
if (check([0x4E, 0x45, 0x53, 0x1A])) {
return {
ext: 'nes',
mime: 'application/x-nintendo-nes-rom'
};
}
if (check([0x43, 0x72, 0x32, 0x34])) {
return {
ext: 'crx',
mime: 'application/x-google-chrome-extension'
};
}
if (
check([0x4D, 0x53, 0x43, 0x46]) ||
check([0x49, 0x53, 0x63, 0x28])
) {
return {
ext: 'cab',
mime: 'application/vnd.ms-cab-compressed'
};
}
// Needs to be before `ar` check
if (check([0x21, 0x3C, 0x61, 0x72, 0x63, 0x68, 0x3E, 0x0A, 0x64, 0x65, 0x62, 0x69, 0x61, 0x6E, 0x2D, 0x62, 0x69, 0x6E, 0x61, 0x72, 0x79])) {
return {
ext: 'deb',
mime: 'application/x-deb'
};
}
if (check([0x21, 0x3C, 0x61, 0x72, 0x63, 0x68, 0x3E])) {
return {
ext: 'ar',
mime: 'application/x-unix-archive'
};
}
if (check([0xED, 0xAB, 0xEE, 0xDB])) {
return {
ext: 'rpm',
mime: 'application/x-rpm'
};
}
if (
check([0x1F, 0xA0]) ||
check([0x1F, 0x9D])
) {
return {
ext: 'Z',
mime: 'application/x-compress'
};
}
if (check([0x4C, 0x5A, 0x49, 0x50])) {
return {
ext: 'lz',
mime: 'application/x-lzip'
};
}
if (check([0xD0, 0xCF, 0x11, 0xE0, 0xA1, 0xB1, 0x1A, 0xE1])) {
return {
ext: 'msi',
mime: 'application/x-msi'
};
}
if (check([0x06, 0x0E, 0x2B, 0x34, 0x02, 0x05, 0x01, 0x01, 0x0D, 0x01, 0x02, 0x01, 0x01, 0x02])) {
return {
ext: 'mxf',
mime: 'application/mxf'
};
}
if (check([0x47], {offset: 4}) && (check([0x47], {offset: 192}) || check([0x47], {offset: 196}))) {
return {
ext: 'mts',
mime: 'video/mp2t'
};
}
if (check([0x42, 0x4C, 0x45, 0x4E, 0x44, 0x45, 0x52])) {
return {
ext: 'blend',
mime: 'application/x-blender'
};
}
if (check([0x42, 0x50, 0x47, 0xFB])) {
return {
ext: 'bpg',
mime: 'image/bpg'
};
}
if (check([0x00, 0x00, 0x00, 0x0C, 0x6A, 0x50, 0x20, 0x20, 0x0D, 0x0A, 0x87, 0x0A])) {
// JPEG-2000 family
if (check([0x6A, 0x70, 0x32, 0x20], {offset: 20})) {
return {
ext: 'jp2',
mime: 'image/jp2'
};
}
if (check([0x6A, 0x70, 0x78, 0x20], {offset: 20})) {
return {
ext: 'jpx',
mime: 'image/jpx'
};
}
if (check([0x6A, 0x70, 0x6D, 0x20], {offset: 20})) {
return {
ext: 'jpm',
mime: 'image/jpm'
};
}
if (check([0x6D, 0x6A, 0x70, 0x32], {offset: 20})) {
return {
ext: 'mj2',
mime: 'image/mj2'
};
}
}
if (check([0x46, 0x4F, 0x52, 0x4D, 0x00])) {
return {
ext: 'aif',
mime: 'audio/aiff'
};
}
if (checkString('<?xml ')) {
return {
ext: 'xml',
mime: 'application/xml'
};
}
if (check([0x42, 0x4F, 0x4F, 0x4B, 0x4D, 0x4F, 0x42, 0x49], {offset: 60})) {
return {
ext: 'mobi',
mime: 'application/x-mobipocket-ebook'
};
}
// File Type Box (https://en.wikipedia.org/wiki/ISO_base_media_file_format)
if (check([0x66, 0x74, 0x79, 0x70], {offset: 4})) {
if (check([0x6D, 0x69, 0x66, 0x31], {offset: 8})) {
return {
ext: 'heic',
mime: 'image/heif'
};
}
if (check([0x6D, 0x73, 0x66, 0x31], {offset: 8})) {
return {
ext: 'heic',
mime: 'image/heif-sequence'
};
}
if (check([0x68, 0x65, 0x69, 0x63], {offset: 8}) || check([0x68, 0x65, 0x69, 0x78], {offset: 8})) {
return {
ext: 'heic',
mime: 'image/heic'
};
}
if (check([0x68, 0x65, 0x76, 0x63], {offset: 8}) || check([0x68, 0x65, 0x76, 0x78], {offset: 8})) {
return {
ext: 'heic',
mime: 'image/heic-sequence'
};
}
}
return null;
};<|fim▁end|> | |
<|file_name|>control.rs<|end_file_name|><|fim▁begin|>use cgmath;
use claymore_scene::Transform;
pub type MousePos = (i32, i32);
pub struct Control {
rotate_speed: f32,
move_speed: f32,
zoom_speed: f32,
rotate_base: Option<(MousePos, Transform<f32>)>,
move_base: Option<(MousePos, cgmath::Vector3<f32>)>,
last_pos: MousePos,
space: Transform<f32>,
}
impl Control {
/// Creates a camera controller with the given rotate/move/zoom
/// sensitivities, operating relative to the reference `space` transform.
pub fn new(rot_speed: f32, move_speed: f32, zoom_speed: f32,
           space: Transform<f32>) -> Control {
    Control {
        rotate_speed: rot_speed,
        move_speed: move_speed,
        zoom_speed: zoom_speed,
        // No drag is in progress until one of the *_capture methods runs.
        rotate_base: None,
        move_base: None,
        last_pos: (0, 0),
        space: space,
    }
}
pub fn rot_capture(&mut self, transform: &Transform<f32>) {
self.rotate_base = Some((self.last_pos, transform.clone()));<|fim▁hole|> pub fn rot_release(&mut self) {
self.rotate_base = None;
}
/// Begins a pan drag: remembers the current cursor position and the
/// transform's displacement so `position` can offset from this baseline.
pub fn move_capture(&mut self, transform: &Transform<f32>) {
    self.move_base = Some((self.last_pos, transform.disp));
}
/// Ends the pan drag; subsequent `position` calls no longer translate.
pub fn move_release(&mut self) {
    self.move_base = None;
}
/// Handles a mouse-move event: records the cursor position and, while a
/// rotate or pan drag is active, updates `transform` relative to the
/// state captured at the start of the drag.
pub fn position(&mut self, coords: MousePos,
                transform: &mut Transform<f32>) {
    self.last_pos = coords;
    // Active rotation drag: rotate about the reference space's Z axis by
    // an angle proportional to the horizontal cursor travel.
    match self.rotate_base {
        Some((ref base_pos, ref base_transform)) => {
            use cgmath::Transform;
            // p' = Mp * Tc^ * (Tr * Rz * Tr^) * p
            // Tx = (Tr * Rz^ * Tr^) * Tc
            let path = (coords.0 - base_pos.0) as f32 * -self.rotate_speed;
            let rotation = cgmath::Decomposed {
                scale: 1.0,
                rot: cgmath::Rotation3::from_axis_angle(
                    &cgmath::vec3(0.0, 0.0, 1.0), cgmath::rad(path)),
                disp: cgmath::zero(),
            };
            // Conjugate the Z rotation by `space` so it happens in the
            // reference frame, then apply it on top of the captured pose.
            let space_inv = self.space.invert().unwrap();
            let relative = self.space.concat(&rotation.concat(&space_inv));
            *transform = relative.concat(base_transform);
        },
        None => (),
    }
    // Active pan drag: translate in the camera's local XY plane by the
    // cursor offset from the drag start, scaled by `move_speed`.
    match self.move_base {
        Some((base_pos, ref base_disp)) => {
            use cgmath::{Vector, Rotation};
            let local_vector = cgmath::vec3(
                -(coords.0 - base_pos.0) as f32,
                (coords.1 - base_pos.1) as f32,
                0.0).mul_s(self.move_speed);
            // Rotate the screen-space offset into world space before
            // adding it to the captured displacement.
            let cam_vector = transform.rot.rotate_vector(&local_vector);
            transform.disp = base_disp.add_v(&cam_vector);
        },
        None => (),
    }
}
/// Handles mouse-wheel input: zooms by translating the transform along
/// its own local Z axis, scaled by `zoom_speed` (the sign of `shift`
/// selects zoom direction).
pub fn wheel(&mut self, shift: f64, transform: &mut Transform<f32>) {
    use cgmath::{Vector, Transform};
    // Local +Z expressed in world space.
    let vector = transform.transform_vector(&cgmath::vec3(0.0, 0.0, 1.0));
    transform.disp.add_self_v(&vector.mul_s(shift as f32 * -self.zoom_speed));
}
}<|fim▁end|> | }
|
<|file_name|>side-panel.e2e.ts<|end_file_name|><|fim▁begin|>import { browser, by, element } from 'protractor';
describe('App', () => {
beforeEach(() => {
// change hash depending on router LocationStrategy
browser.get('/#/home');
});
it('should have a title', () => {
let subject = browser.getTitle();
let result = 'Chroma An Interactive Palette tool';
expect(subject).toEqual(result);
});
it('should have `your content here` x-large', () => {
let subject = element(by.css('[x-large]')).getText();
let result = 'Your Content Here';
expect(subject).toEqual(result);
});
<|fim▁hole|><|fim▁end|> | }); |
<|file_name|>BuildAndRun.py<|end_file_name|><|fim▁begin|>macimport os
import subprocess
name = "gobuildmaster"
current_hash = ""
for line in os.popen("md5sum " + name).readlines():
current_hash = line.split(' ')[0]
# Move the old version over
for line in os.popen('cp ' + name + ' old' + name).readlines():
print line.strip()
# Rebuild
for line in os.popen('go build').readlines():<|fim▁hole|>size_1 = os.path.getsize('./old' + name)
size_2 = os.path.getsize('./' + name)
lines = os.popen('ps -ef | grep ' + name).readlines()
running = False
for line in lines:
if "./" + name in line:
running = True
new_hash = ""
for line in os.popen("md5sum " + name).readlines():
new_hash = line.split(' ')[0]
if size_1 != size_2 or new_hash != current_hash or not running:
if not running:
for line in os.popen('cat out.txt | mail -E -s "Crash Report ' + name + '" [email protected]').readlines():
pass
for line in os.popen('echo "" > out.txt').readlines():
pass
for line in os.popen('killall ' + name).readlines():
pass
subprocess.Popen(['./' + name])<|fim▁end|> | print line.strip()
|
<|file_name|>dashboard.js<|end_file_name|><|fim▁begin|>var Index = function () {
return {
//main function to initiate the module
// Entry point: registers a responsive handler so the calendar and the
// vector maps are re-laid-out whenever the viewport size changes.
init: function () {
    App.addResponsiveHandler(function () {
        Index.initCalendar();
        // Resize every vector map to its container's current width.
        jQuery('.vmaps').each(function () {
            var map = jQuery(this);
            map.width(map.parent().width());
        });
    });
},
// Initializes the JQVMap regional-statistics widget: builds one hidden
// vector map per region and wires tab links that toggle which map shows.
initJQVMAP: function () {
    // Shows only the map whose element id suffix matches `name`.
    var showMap = function (name) {
        jQuery('.vmaps').hide();
        jQuery('#vmap_' + name).show();
    };
    // Builds the vector map for `name` using the shared styling options.
    // NOTE(review): `sample_data` is expected to be a global provided by a
    // demo data script — confirm it is loaded before this runs.
    var setMap = function (name) {
        var data = {
            map: 'world_en',
            backgroundColor: null,
            borderColor: '#333333',
            borderOpacity: 0.5,
            borderWidth: 1,
            color: '#c6c6c6',
            enableZoom: true,
            hoverColor: '#c9dfaf',
            hoverOpacity: null,
            values: sample_data,
            normalizeFunction: 'linear',
            scaleColors: ['#b6da93', '#909cae'],
            selectedColor: '#c9dfaf',
            selectedRegion: null,
            showTooltip: true,
            onLabelShow: function (event, label, code) {
            },
            onRegionOver: function (event, code) {
                // Suppress hover behavior for Canada only.
                if (code == 'ca') {
                    event.preventDefault();
                }
            },
            onRegionClick: function (element, code, region) {
                var message = 'You clicked "' + region + '" which has the code: ' + code.toUpperCase();
                alert(message);
            }
        };
        // Each region uses its own JQVMap data file, e.g. 'usa_en'.
        data.map = name + '_en';
        var map = jQuery('#vmap_' + name);
        if (!map) {
            return;
        }
        // Size to the grandparent, render once, then keep hidden until the
        // corresponding tab is selected via showMap().
        map.width(map.parent().parent().width());
        map.show();
        map.vectorMap(data);
        map.hide();
    };
    setMap("world");
    setMap("usa");
    setMap("europe");
    setMap("russia");
    setMap("germany");
    showMap("world");
    jQuery('#regional_stat_world').click(function () {
        showMap("world");
    });
    jQuery('#regional_stat_usa').click(function () {
        showMap("usa");
    });
    jQuery('#regional_stat_europe').click(function () {
        showMap("europe");
    });
    jQuery('#regional_stat_russia').click(function () {
        showMap("russia");
    });
    jQuery('#regional_stat_germany').click(function () {
        showMap("germany");
    });
    // Swap the loading placeholder for the rendered content.
    $('#region_statistics_loading').hide();
    $('#region_statistics_content').show();
},
// Builds (or rebuilds) the fullCalendar widget, choosing a compact header
// layout for narrow/mobile widths and honoring right-to-left layouts.
initCalendar: function () {
    // Bail out when the fullCalendar plugin is not loaded.
    if (!jQuery().fullCalendar) {
        return;
    }
    var that = $("#calendar");
    // Server endpoint supplying the event feed.
    var feeds = ["EventAction!getListEvent"];
    var h = {};
    if (that.width() <= 400) {
        // Narrow container: single-row mobile header arrangement.
        that.addClass("mobile");
        h = {
            left: 'title, prev, next',
            center: '',
            right: 'today,month,agendaWeek,agendaDay'
        };
    } else {
        that.removeClass("mobile");
        // Mirror the header controls for RTL locales.
        if (App.isRTL()) {
            h = {
                right: 'title',
                center: '',
                left: 'prev,next,today,month,agendaWeek,agendaDay'
            };
        } else {
            h = {
                left: 'title',
                center: '',
                right: 'prev,next,today,month,agendaWeek,agendaDay'
            };
        }
    }
    // destroy the calendar
    that.fullCalendar('destroy');
    //re-initialize the calendar
    // NOTE(review): `calendar` is assigned without `var`, creating/using a
    // global — presumably intentional for access elsewhere; confirm.
    calendar = that.fullCalendar({
        disableDragging: true,
        header: h,
        editable: false,
        eventSources : [
            {
                url: feeds[0],
                color: "white",
                textColor: "black",
                error: function() {
                    window.alert('Fetching events error', feeds[0]);
                }
            }
        ]
    });
},
initChat: function () {
var cont = $('#chats');
var list = $('.chats', cont);
var form = $('.chat-form', cont);
var input = $('input', form);
var btn = $('.btn', form);
var handleClick = function (e) {
e.preventDefault();
var text = input.val();
if (text.length == 0) {
return;
}
var time = new Date();
var time_str = time.toString('MMM dd, yyyy hh:mm');
var tpl = '';
tpl += '<li class="out">';
tpl += '<img class="avatar" alt="" src="assets/img/avatar1.jpg"/>';
tpl += '<div class="message">';
tpl += '<span class="arrow"></span>';
tpl += '<a href="#" class="name">Bob Nilson</a> ';
tpl += '<span class="datetime">at ' + time_str + '</span>';
tpl += '<span class="body">';
tpl += text;
tpl += '</span>';
tpl += '</div>';
tpl += '</li>';
var msg = list.append(tpl);
input.val("");
$('.scroller', cont).slimScroll({
scrollTo: list.height()
});
}
/*
$('.scroller', cont).slimScroll({
scrollTo: list.height()
});
*/
btn.click(handleClick);
input.keypress(function (e) {
if (e.which == 13) {
handleClick();
return false; //<---- Add this line
}
});
},
initIntro: function () {
if ($.cookie('intro_show')) {
return;
}
$.cookie('intro_show', 1);
setTimeout(function () {
var unique_id = $.gritter.add({
// (string | mandatory) the heading of the notification
title: 'Meet Metronic!',
// (string | mandatory) the text inside the notification
text: 'Metronic is a brand new Responsive Admin Dashboard Template you have always been looking for!',
// (string | optional) the image to display on the left
image: './assets/img/avatar1.jpg',
// (bool | optional) if you want it to fade out on its own or just sit there
sticky: true,
// (int | optional) the time you want it to be alive for before fading out
time: '',
// (string | optional) the class name you want to apply to that specific message
class_name: 'my-sticky-class'
});
// You can have it return a unique id, this can be used to manually remove it later using
setTimeout(function () {
$.gritter.remove(unique_id, {
fade: true,
speed: 'slow'
});
}, 12000);
}, 2000);
setTimeout(function () {
var unique_id = $.gritter.add({
// (string | mandatory) the heading of the notification
title: 'Buy Metronic!',
// (string | mandatory) the text inside the notification
text: 'Metronic comes with a huge collection of reusable and easy customizable UI components and plugins. Buy Metronic today!',
// (string | optional) the image to display on the left
image: './assets/img/avatar1.jpg',
// (bool | optional) if you want it to fade out on its own or just sit there
sticky: true,
// (int | optional) the time you want it to be alive for before fading out
time: '',
// (string | optional) the class name you want to apply to that specific message
class_name: 'my-sticky-class'
});
// You can have it return a unique id, this can be used to manually remove it later using
setTimeout(function () {
$.gritter.remove(unique_id, {
fade: true,
speed: 'slow'
});
}, 13000);
}, 8000);
setTimeout(function () {
$('#styler').pulsate({
color: "#bb3319",
repeat: 10
});
$.extend($.gritter.options, {
position: 'top-left'
});
var unique_id = $.gritter.add({
position: 'top-left',
// (string | mandatory) the heading of the notification
title: 'Customize Metronic!',
// (string | mandatory) the text inside the notification
text: 'Metronic allows you to easily customize the theme colors and layout settings.',
// (string | optional) the image to display on the left
image1: './assets/img/avatar1.png',
// (bool | optional) if you want it to fade out on its own or just sit there
sticky: true,
// (int | optional) the time you want it to be alive for before fading out
time: '',
// (string | optional) the class name you want to apply to that specific message
class_name: 'my-sticky-class'
});
$.extend($.gritter.options, {
position: 'top-right'
});
// You can have it return a unique id, this can be used to manually remove it later using
setTimeout(function () {
$.gritter.remove(unique_id, {
fade: true,
speed: 'slow'
});
}, 15000);
}, 23000);
setTimeout(function () {
$.extend($.gritter.options, {
position: 'top-left'
});
var unique_id = $.gritter.add({
// (string | mandatory) the heading of the notification
title: 'Notification',
// (string | mandatory) the text inside the notification
text: 'You have 3 new notifications.',
// (string | optional) the image to display on the left
image1: './assets/img/image1.jpg',
// (bool | optional) if you want it to fade out on its own or just sit there
sticky: true,
// (int | optional) the time you want it to be alive for before fading out
time: '',
// (string | optional) the class name you want to apply to that specific message
class_name: 'my-sticky-class'
});
setTimeout(function () {
$.gritter.remove(unique_id, {
fade: true,
speed: 'slow'
});
}, 4000);
$.extend($.gritter.options, {
position: 'top-right'
});
var number = $('#header_notification_bar .badge').text();
number = parseInt(number);
number = number + 3;
$('#header_notification_bar .badge').text(number);
$('#header_notification_bar').pulsate({
color: "#66bce6",
repeat: 5
});
}, 40000);
setTimeout(function () {
$.extend($.gritter.options, {
position: 'top-left'<|fim▁hole|> title: 'Inbox',
// (string | mandatory) the text inside the notification
text: 'You have 2 new messages in your inbox.',
// (string | optional) the image to display on the left
image1: './assets/img/avatar1.jpg',
// (bool | optional) if you want it to fade out on its own or just sit there
sticky: true,
// (int | optional) the time you want it to be alive for before fading out
time: '',
// (string | optional) the class name you want to apply to that specific message
class_name: 'my-sticky-class'
});
$.extend($.gritter.options, {
position: 'top-right'
});
setTimeout(function () {
$.gritter.remove(unique_id, {
fade: true,
speed: 'slow'
});
}, 4000);
var number = $('#header_inbox_bar .badge').text();
number = parseInt(number);
number = number + 2;
$('#header_inbox_bar .badge').text(number);
$('#header_inbox_bar').pulsate({
color: "#dd5131",
repeat: 5
});
}, 60000);
}
};
}();<|fim▁end|> | });
var unique_id = $.gritter.add({
// (string | mandatory) the heading of the notification |
<|file_name|>taskConfiguration.ts<|end_file_name|><|fim▁begin|>/*---------------------------------------------------------------------------------------------
* Copyright (c) Microsoft Corporation. All rights reserved.
* Licensed under the MIT License. See License.txt in the project root for license information.
*--------------------------------------------------------------------------------------------*/
import * as nls from 'vs/nls';
import * as Objects from 'vs/base/common/objects';
import { IStringDictionary } from 'vs/base/common/collections';
import { Platform } from 'vs/base/common/platform';
import * as Types from 'vs/base/common/types';
import * as UUID from 'vs/base/common/uuid';
import { ValidationStatus, IProblemReporter as IProblemReporterBase } from 'vs/base/common/parsers';
import {
NamedProblemMatcher, ProblemMatcher, ProblemMatcherParser, Config as ProblemMatcherConfig,
isNamedProblemMatcher, ProblemMatcherRegistry
} from 'vs/workbench/parts/tasks/common/problemMatcher';
import { IWorkspaceFolder } from 'vs/platform/workspace/common/workspace';
import * as Tasks from '../common/tasks';
import { TaskDefinitionRegistry } from '../common/taskDefinitionRegistry';
import { TaskDefinition } from 'vs/workbench/parts/tasks/node/tasks';
import { ConfiguredInput } from 'vs/workbench/services/configurationResolver/common/configurationResolver';
export const enum ShellQuoting {
/**
* Default is character escaping.
*/
escape = 1,
/**
* Default is strong quoting
*/
strong = 2,
/**
* Default is weak quoting.
*/
weak = 3
}
export interface ShellQuotingOptions {
/**
* The character used to do character escaping.
*/
escape?: string | {
escapeChar: string;
charsToEscape: string;
};
/**
* The character used for string quoting.
*/
strong?: string;
/**
* The character used for weak quoting.
*/
weak?: string;
}
export interface ShellConfiguration {
executable?: string;
args?: string[];
quoting?: ShellQuotingOptions;
}
export interface CommandOptionsConfig {
/**
* The current working directory of the executed program or shell.
* If omitted VSCode's current workspace root is used.
*/
cwd?: string;
/**
* The additional environment of the executed program or shell. If omitted
* the parent process' environment is used.
*/
env?: IStringDictionary<string>;
/**
* The shell configuration;
*/
shell?: ShellConfiguration;
}
export interface PresentationOptionsConfig {
/**
* Controls whether the terminal executing a task is brought to front or not.
* Defaults to `RevealKind.Always`.
*/
reveal?: string;
/**
* Controls whether the executed command is printed to the output window or terminal as well.
*/
echo?: boolean;
/**
* Controls whether the terminal is focus when this task is executed
*/
focus?: boolean;
/**
* Controls whether the task runs in a new terminal
*/
panel?: string;
/**
* Controls whether to show the "Terminal will be reused by tasks, press any key to close it" message.
*/
showReuseMessage?: boolean;
/**
* Controls whether the terminal should be cleared before running the task.
*/
clear?: boolean;
/**
* Controls whether the task is executed in a specific terminal group using split panes.
*/
group?: string;
}
export interface RunOptionsConfig {
reevaluateOnRerun?: boolean;
runOn?: string;
}
export interface TaskIdentifier {
type?: string;
[name: string]: any;
}
export namespace TaskIdentifier {
	/**
	 * Type guard: a value is a TaskIdentifier when it is defined and
	 * carries a string `type` property.
	 */
	export function is(value: any): value is TaskIdentifier {
		if (value === undefined) {
			return false;
		}
		const candidate: TaskIdentifier = value;
		return Types.isString(candidate.type);
	}
}
export interface LegacyTaskProperties {
/**
* @deprecated Use `isBackground` instead.
* Whether the executed command is kept alive and is watching the file system.
*/
isWatching?: boolean;
/**
* @deprecated Use `group` instead.
* Whether this task maps to the default build command.
*/
isBuildCommand?: boolean;
/**
* @deprecated Use `group` instead.
* Whether this task maps to the default test command.
*/
isTestCommand?: boolean;
}
export interface LegacyCommandProperties {
/**
* Whether this is a shell or process
*/
type?: string;
/**
* @deprecated Use presentation options
* Controls whether the output view of the running tasks is brought to front or not.
* See BaseTaskRunnerConfiguration#showOutput for details.
*/
showOutput?: string;
/**
* @deprecated Use presentation options
* Controls whether the executed command is printed to the output windows as well.
*/
echoCommand?: boolean;
/**
* @deprecated Use presentation instead
*/
terminal?: PresentationOptionsConfig;
/**
* @deprecated Use inline commands.
* See BaseTaskRunnerConfiguration#suppressTaskName for details.
*/
suppressTaskName?: boolean;
/**
* Some commands require that the task argument is highlighted with a special
* prefix (e.g. /t: for msbuild). This property can be used to control such
* a prefix.
*/
taskSelector?: string;
/**
* @deprecated use the task type instead.
* Specifies whether the command is a shell command and therefore must
* be executed in a shell interpreter (e.g. cmd.exe, bash, ...).
*
* Defaults to false if omitted.
*/
isShellCommand?: boolean | ShellConfiguration;
}
export type CommandString = string | string[] | { value: string | string[], quoting: 'escape' | 'strong' | 'weak' };
export namespace CommandString {
	/**
	 * Normalizes a CommandString to plain text: string arrays are joined
	 * with single spaces, quoted forms are unwrapped first.
	 */
	export function value(value: CommandString): string {
		if (Types.isString(value)) {
			return value;
		}
		if (Types.isStringArray(value)) {
			return value.join(' ');
		}
		const inner = value.value;
		return Types.isString(inner) ? inner : inner.join(' ');
	}
}
export interface BaseCommandProperties {
/**
* The command to be executed. Can be an external program or a shell
* command.
*/
command?: CommandString;
/**
* The command options used when the command is executed. Can be omitted.
*/
options?: CommandOptionsConfig;
/**
* The arguments passed to the command or additional arguments passed to the
* command when using a global command.
*/
args?: CommandString[];
}
export interface CommandProperties extends BaseCommandProperties {
/**
* Windows specific command properties
*/
windows?: BaseCommandProperties;
/**
* OSX specific command properties
*/
osx?: BaseCommandProperties;
/**
* linux specific command properties
*/
linux?: BaseCommandProperties;
}
/**
 * Raw configuration shape for a task's group assignment (e.g. 'build' or
 * 'test') and whether the task is the default of that group.
 */
export interface GroupKind {
	kind?: string;
	isDefault?: boolean;
}
export interface ConfigurationProperties {
/**
* The task's name
*/
taskName?: string;
/**
* The UI label used for the task.
*/
label?: string;
/**
* An optional indentifier which can be used to reference a task
* in a dependsOn or other attributes.
*/
identifier?: string;
/**
* Whether the executed command is kept alive and runs in the background.
*/
isBackground?: boolean;
/**
* Whether the task should prompt on close for confirmation if running.
*/
promptOnClose?: boolean;
/**
* Defines the group the task belongs too.
*/
group?: string | GroupKind;
/**
* The other tasks the task depend on
*/
dependsOn?: string | TaskIdentifier | Array<string | TaskIdentifier>;
/**
* Controls the behavior of the used terminal
*/
presentation?: PresentationOptionsConfig;
/**
* Controls shell options.
*/
options?: CommandOptionsConfig;
/**
* The problem matcher(s) to use to capture problems in the tasks
* output.
*/
problemMatcher?: ProblemMatcherConfig.ProblemMatcherType;
/**
* Task run options. Control run related properties.
*/
runOptions?: RunOptionsConfig;
}
export interface CustomTask extends CommandProperties, ConfigurationProperties {
/**
* Custom tasks have the type CUSTOMIZED_TASK_TYPE
*/
type?: string;
}
export interface ConfiguringTask extends ConfigurationProperties {
/**
* The contributed type of the task
*/
type?: string;
}
/**
* The base task runner configuration
*/
export interface BaseTaskRunnerConfiguration {
/**
* The command to be executed. Can be an external program or a shell
* command.
*/
command?: CommandString;
/**
* @deprecated Use type instead
*
* Specifies whether the command is a shell command and therefore must
* be executed in a shell interpreter (e.g. cmd.exe, bash, ...).
*
* Defaults to false if omitted.
*/
isShellCommand?: boolean;
/**
* The task type
*/
type?: string;
/**
* The command options used when the command is executed. Can be omitted.
*/
options?: CommandOptionsConfig;
/**
* The arguments passed to the command. Can be omitted.
*/
args?: CommandString[];
/**
* Controls whether the output view of the running tasks is brought to front or not.
* Valid values are:
* "always": bring the output window always to front when a task is executed.
* "silent": only bring it to front if no problem matcher is defined for the task executed.
* "never": never bring the output window to front.
*
* If omitted "always" is used.
*/
showOutput?: string;
/**
* Controls whether the executed command is printed to the output windows as well.
*/
echoCommand?: boolean;
/**
* The group
*/
group?: string | GroupKind;
/**
* Controls the behavior of the used terminal
*/
presentation?: PresentationOptionsConfig;
/**
* If set to false the task name is added as an additional argument to the
* command when executed. If set to true the task name is suppressed. If
* omitted false is used.
*/
suppressTaskName?: boolean;
/**
* Some commands require that the task argument is highlighted with a special
* prefix (e.g. /t: for msbuild). This property can be used to control such
* a prefix.
*/
taskSelector?: string;
/**
* The problem matcher(s) to used if a global command is exucuted (e.g. no tasks
* are defined). A tasks.json file can either contain a global problemMatcher
* property or a tasks property but not both.
*/
problemMatcher?: ProblemMatcherConfig.ProblemMatcherType;
/**
* @deprecated Use `isBackground` instead.
*
* Specifies whether a global command is a watching the filesystem. A task.json
* file can either contain a global isWatching property or a tasks property
* but not both.
*/
isWatching?: boolean;
/**
* Specifies whether a global command is a background task.
*/
isBackground?: boolean;
/**
* Whether the task should prompt on close for confirmation if running.
*/
promptOnClose?: boolean;
/**
* The configuration of the available tasks. A tasks.json file can either
* contain a global problemMatcher property or a tasks property but not both.
*/
tasks?: Array<CustomTask | ConfiguringTask>;
/**
* Problem matcher declarations.
*/
declares?: ProblemMatcherConfig.NamedProblemMatcher[];
/**
* Optional user input variables.
*/
inputs?: ConfiguredInput[];
}
/**
* A configuration of an external build system. BuildConfiguration.buildSystem
* must be set to 'program'
*/
export interface ExternalTaskRunnerConfiguration extends BaseTaskRunnerConfiguration {
_runner?: string;
/**
* Determines the runner to use
*/
runner?: string;
/**
* The config's version number
*/
version: string;
/**
* Windows specific task configuration
*/
windows?: BaseTaskRunnerConfiguration;
/**
* Mac specific task configuration
*/
osx?: BaseTaskRunnerConfiguration;
/**
* Linux speciif task configuration
*/
linux?: BaseTaskRunnerConfiguration;
}
enum ProblemMatcherKind {
Unknown,
String,
ProblemMatcher,
Array
}
const EMPTY_ARRAY: any[] = [];
Object.freeze(EMPTY_ARRAY);
/**
 * Copies `source[key]` onto `target[key]` whenever the source provides a
 * defined value; an undefined source value leaves the target untouched.
 */
function assignProperty<T, K extends keyof T>(target: T, source: Partial<T>, key: K) {
	const candidate = source[key];
	if (candidate === undefined) {
		return;
	}
	target[key] = candidate!;
}
/**
 * Fills `target[key]` from `source[key]` only when the target does not
 * already have a defined value (existing target values win).
 */
function fillProperty<T, K extends keyof T>(target: T, source: Partial<T>, key: K) {
	if (target[key] !== undefined) {
		return;
	}
	const candidate = source[key];
	if (candidate !== undefined) {
		target[key] = candidate!;
	}
}
interface ParserType<T> {
isEmpty(value: T | undefined): boolean;
assignProperties(target: T | undefined, source: T | undefined): T | undefined;
fillProperties(target: T | undefined, source: T | undefined): T | undefined;
fillDefaults(value: T | undefined, context: ParseContext): T | undefined;
freeze(value: T): Readonly<T> | undefined;
}
interface MetaData<T, U> {
property: keyof T;
type?: ParserType<U>;
}
/**
 * Tests whether `value` contributes no data for the given property
 * metadata: true when the value itself is null/undefined, when no
 * metadata is supplied, or when every listed property is absent or an
 * empty array.
 */
function _isEmpty<T>(this: void, value: T, properties: MetaData<T, any>[] | undefined): boolean {
	if (value === undefined || value === null || properties === undefined) {
		return true;
	}
	for (let meta of properties) {
		let property = value[meta.property];
		if (property !== undefined && property !== null) {
			// Typed properties delegate the emptiness test to their parser.
			if (meta.type !== undefined && !meta.type.isEmpty(property)) {
				return false;
			} else if (!Array.isArray(property) || property.length > 0) {
				// NOTE(review): when a typed property IS empty per its parser,
				// control still reaches this branch and a non-array value makes
				// the object count as non-empty — confirm this is intended.
				return false;
			}
		}
	}
	return true;
}
/**
 * Merges the listed properties of `source` onto `target`, with source
 * values winning over existing target values. If either side is empty
 * (per `_isEmpty`) the other object is returned unchanged. Mutates and
 * returns `target`.
 */
function _assignProperties<T>(this: void, target: T, source: T, properties: MetaData<T, any>[]): T {
	if (_isEmpty(source, properties)) {
		return target;
	}
	if (_isEmpty(target, properties)) {
		return source;
	}
	for (let meta of properties) {
		let property = meta.property;
		let value: any;
		if (meta.type !== undefined) {
			// Typed properties are merged recursively by their parser.
			value = meta.type.assignProperties(target[property], source[property]);
		} else {
			value = source[property];
		}
		if (value !== undefined && value !== null) {
			target[property] = value;
		}
	}
	return target;
}
/**
 * Fills only the properties missing on `target` from `source` (existing
 * target values win — the inverse precedence of `_assignProperties`). If
 * either side is empty the other object is returned unchanged. Mutates
 * and returns `target`.
 */
function _fillProperties<T>(this: void, target: T, source: T, properties: MetaData<T, any>[] | undefined): T {
	if (_isEmpty(source, properties)) {
		return target;
	}
	if (_isEmpty(target, properties)) {
		return source;
	}
	for (let meta of properties!) {
		let property = meta.property;
		let value: any;
		if (meta.type) {
			// Typed properties are filled recursively by their parser.
			value = meta.type.fillProperties(target[property], source[property]);
		} else if (target[property] === undefined) {
			value = source[property];
		}
		if (value !== undefined && value !== null) {
			target[property] = value;
		}
	}
	return target;
}
// Fills unset properties of `target` from `defaults`. Frozen targets are
// treated as final and returned untouched; a missing target is answered with
// a private deep copy of the defaults (or undefined when there are none).
function _fillDefaults<T>(this: void, target: T, defaults: T, properties: MetaData<T, any>[], context: ParseContext): T | undefined {
	if (target && Object.isFrozen(target)) {
		return target;
	}
	if (target === undefined || target === null) {
		return (defaults !== undefined && defaults !== null)
			? Objects.deepClone(defaults)
			: undefined;
	}
	for (const meta of properties) {
		const key = meta.property;
		if (target[key] !== undefined) {
			// An explicit user value always wins over a default.
			continue;
		}
		// Typed properties compute their own defaults; untyped ones are
		// copied straight from the defaults object.
		const fallback: any = meta.type
			? meta.type.fillDefaults(target[key], context)
			: defaults[key];
		if (fallback !== undefined && fallback !== null) {
			target[key] = fallback;
		}
	}
	return target;
}
// Deep-freezes `target`: nested typed properties are frozen through their
// parsers first, then the object itself. Already-frozen objects are returned
// as-is; null/undefined input yields undefined.
function _freeze<T>(this: void, target: T, properties: MetaData<T, any>[]): Readonly<T> | undefined {
	if (target === undefined || target === null) {
		return undefined;
	}
	if (Object.isFrozen(target)) {
		return target;
	}
	for (const meta of properties) {
		if (!meta.type) {
			continue;
		}
		const nested = target[meta.property];
		if (nested) {
			meta.type.freeze(nested);
		}
	}
	Object.freeze(target);
	return target;
}
export namespace RunOnOptions {
	// Maps the tasks.json string form onto Tasks.RunOnOptions. Anything
	// unrecognized — including undefined/empty — falls back to `default`.
	export function fromString(value: string | undefined): Tasks.RunOnOptions {
		if (!value) {
			return Tasks.RunOnOptions.default;
		}
		return value.toLowerCase() === 'folderopen'
			? Tasks.RunOnOptions.folderOpen
			: Tasks.RunOnOptions.default;
	}
}
export namespace RunOptions {
	// Converts the raw runOptions section into Tasks.RunOptions, applying the
	// documented defaults (reevaluateOnRerun: true, runOn: default).
	export function fromConfiguration(value: RunOptionsConfig | undefined): Tasks.RunOptions {
		return {
			// Fix: previously `value ? value.reevaluateOnRerun : true`
			// produced `undefined` when a runOptions object was present but
			// omitted reevaluateOnRerun; the documented default is `true`.
			reevaluateOnRerun: value && value.reevaluateOnRerun !== undefined ? value.reevaluateOnRerun : true,
			runOn: value ? RunOnOptions.fromString(value.runOn) : Tasks.RunOnOptions.default
		};
	}
}
// Mutable bag of state shared by all converters while one tasks.json
// configuration is being parsed.
class ParseContext {
	// Workspace folder the configuration belongs to.
	workspaceFolder: IWorkspaceFolder;
	// Sink for user-facing parse errors and warnings.
	problemReporter: IProblemReporter;
	// Problem matchers declared in the configuration's declare scope.
	namedProblemMatchers: IStringDictionary<NamedProblemMatcher>;
	// Maps task names to stable UUIDs across reloads.
	uuidMap: UUIDMap;
	// Execution engine (process vs. terminal) the tasks will run with.
	engine: Tasks.ExecutionEngine;
	// Schema version (1.0 vs. 2.0) of the configuration being parsed.
	schemaVersion: Tasks.JsonSchemaVersion;
	// Platform the configuration is evaluated for.
	platform: Platform;
	// Issues collected while loading the current task.
	taskLoadIssues: string[];
}
namespace ShellConfiguration {
	const properties: MetaData<Tasks.ShellConfiguration, void>[] = [{ property: 'executable' }, { property: 'args' }, { property: 'quoting' }];
	// Type guard: an object counts as a shell configuration when it names an
	// executable or supplies shell arguments.
	export function is(value: any): value is ShellConfiguration {
		const candidate: ShellConfiguration = value;
		return candidate && (Types.isString(candidate.executable) || Types.isStringArray(candidate.args));
	}
	// Builds a Tasks.ShellConfiguration from the raw shape. Arguments and
	// quoting options are copied so the result does not alias the input.
	export function from(this: void, config: ShellConfiguration | undefined, context: ParseContext): Tasks.ShellConfiguration | undefined {
		if (!is(config)) {
			return undefined;
		}
		const shell: ShellConfiguration = {};
		if (config.executable !== undefined) {
			shell.executable = config.executable;
		}
		if (config.args !== undefined) {
			shell.args = config.args.slice();
		}
		if (config.quoting !== undefined) {
			shell.quoting = Objects.deepClone(config.quoting);
		}
		return shell;
	}
	export function isEmpty(this: void, value: Tasks.ShellConfiguration): boolean {
		return _isEmpty(value, properties);
	}
	export function assignProperties(this: void, target: Tasks.ShellConfiguration | undefined, source: Tasks.ShellConfiguration | undefined): Tasks.ShellConfiguration | undefined {
		return _assignProperties(target, source, properties);
	}
	export function fillProperties(this: void, target: Tasks.ShellConfiguration, source: Tasks.ShellConfiguration): Tasks.ShellConfiguration {
		return _fillProperties(target, source, properties);
	}
	// Shell configurations carry no defaults of their own.
	export function fillDefaults(this: void, value: Tasks.ShellConfiguration, context: ParseContext): Tasks.ShellConfiguration {
		return value;
	}
	export function freeze(this: void, value: Tasks.ShellConfiguration): Readonly<Tasks.ShellConfiguration> | undefined {
		return value ? Object.freeze(value) : undefined;
	}
}
// Parser for the command-level `options` section (cwd / env / shell).
namespace CommandOptions {
	const properties: MetaData<Tasks.CommandOptions, Tasks.ShellConfiguration>[] = [{ property: 'cwd' }, { property: 'env' }, { property: 'shell', type: ShellConfiguration }];
	const defaults: CommandOptionsConfig = { cwd: '${workspaceFolder}' };
	// Converts raw options into Tasks.CommandOptions. A non-string cwd is
	// dropped with a task-load warning; env is deep-cloned so later mutation
	// of the input cannot leak through.
	export function from(this: void, options: CommandOptionsConfig, context: ParseContext): Tasks.CommandOptions | undefined {
		let result: Tasks.CommandOptions = {};
		if (options.cwd !== undefined) {
			if (Types.isString(options.cwd)) {
				result.cwd = options.cwd;
			} else {
				context.taskLoadIssues.push(nls.localize('ConfigurationParser.invalidCWD', 'Warning: options.cwd must be of type string. Ignoring value {0}\n', options.cwd));
			}
		}
		if (options.env !== undefined) {
			result.env = Objects.deepClone(options.env);
		}
		result.shell = ShellConfiguration.from(options.shell, context);
		return isEmpty(result) ? undefined : result;
	}
	export function isEmpty(value: Tasks.CommandOptions | undefined): boolean {
		return _isEmpty(value, properties);
	}
	// Merges source onto target with source winning. Environments are merged
	// key by key (source entries overwrite target entries); shell configs
	// merge recursively via ShellConfiguration.
	export function assignProperties(target: Tasks.CommandOptions | undefined, source: Tasks.CommandOptions | undefined): Tasks.CommandOptions | undefined {
		if ((source === undefined) || isEmpty(source)) {
			return target;
		}
		if ((target === undefined) || isEmpty(target)) {
			return source;
		}
		assignProperty(target, source, 'cwd');
		if (target.env === undefined) {
			target.env = source.env;
		} else if (source.env !== undefined) {
			// Build a fresh prototype-less map; source keys are applied last
			// so they overwrite duplicates from target.
			let env: { [key: string]: string; } = Object.create(null);
			if (target.env !== undefined) {
				Object.keys(target.env).forEach(key => env[key] = target.env![key]);
			}
			if (source.env !== undefined) {
				Object.keys(source.env).forEach(key => env[key] = source.env![key]);
			}
			target.env = env;
		}
		target.shell = ShellConfiguration.assignProperties(target.shell, source.shell);
		return target;
	}
	// Merges source onto target with target winning (metadata-driven).
	export function fillProperties(target: Tasks.CommandOptions | undefined, source: Tasks.CommandOptions | undefined): Tasks.CommandOptions | undefined {
		return _fillProperties(target, source, properties);
	}
	// Applies the default cwd of '${workspaceFolder}' where unset.
	export function fillDefaults(value: Tasks.CommandOptions | undefined, context: ParseContext): Tasks.CommandOptions | undefined {
		return _fillDefaults(value, defaults, properties, context);
	}
	export function freeze(value: Tasks.CommandOptions): Readonly<Tasks.CommandOptions> | undefined {
		return _freeze(value, properties);
	}
}
namespace CommandConfiguration {
	// Parser for terminal presentation settings (echo/reveal/focus/...).
	export namespace PresentationOptions {
		const properties: MetaData<Tasks.PresentationOptions, void>[] = [{ property: 'echo' }, { property: 'reveal' }, { property: 'focus' }, { property: 'panel' }, { property: 'showReuseMessage' }, { property: 'clear' }, { property: 'group' }];
		interface PresentationOptionsShape extends LegacyCommandProperties {
			presentation?: PresentationOptionsConfig;
		}
		// Reads presentation settings from the modern `presentation` object,
		// the legacy `terminal` object, or the legacy top-level
		// echoCommand/showOutput properties (the object form overrides the
		// legacy props). Returns undefined when none of them is present.
		export function from(this: void, config: PresentationOptionsShape, context: ParseContext): Tasks.PresentationOptions | undefined {
			let echo: boolean;
			let reveal: Tasks.RevealKind;
			let focus: boolean;
			let panel: Tasks.PanelKind;
			let showReuseMessage: boolean;
			let clear: boolean;
			let group: string | undefined;
			let hasProps = false;
			if (Types.isBoolean(config.echoCommand)) {
				echo = config.echoCommand;
				hasProps = true;
			}
			if (Types.isString(config.showOutput)) {
				reveal = Tasks.RevealKind.fromString(config.showOutput);
				hasProps = true;
			}
			let presentation = config.presentation || config.terminal;
			if (presentation) {
				if (Types.isBoolean(presentation.echo)) {
					echo = presentation.echo;
				}
				if (Types.isString(presentation.reveal)) {
					reveal = Tasks.RevealKind.fromString(presentation.reveal);
				}
				if (Types.isBoolean(presentation.focus)) {
					focus = presentation.focus;
				}
				if (Types.isString(presentation.panel)) {
					panel = Tasks.PanelKind.fromString(presentation.panel);
				}
				if (Types.isBoolean(presentation.showReuseMessage)) {
					showReuseMessage = presentation.showReuseMessage;
				}
				if (Types.isBoolean(presentation.clear)) {
					clear = presentation.clear;
				}
				if (Types.isString(presentation.group)) {
					group = presentation.group;
				}
				hasProps = true;
			}
			if (!hasProps) {
				return undefined;
			}
			// The non-null assertions intentionally allow individual fields
			// to stay undefined here; fillDefaults supplies them later.
			return { echo: echo!, reveal: reveal!, focus: focus!, panel: panel!, showReuseMessage: showReuseMessage!, clear: clear!, group };
		}
		export function assignProperties(target: Tasks.PresentationOptions, source: Tasks.PresentationOptions | undefined): Tasks.PresentationOptions | undefined {
			return _assignProperties(target, source, properties);
		}
		export function fillProperties(target: Tasks.PresentationOptions, source: Tasks.PresentationOptions | undefined): Tasks.PresentationOptions | undefined {
			return _fillProperties(target, source, properties);
		}
		// Defaults: echo only for the terminal engine; reveal always; shared panel.
		export function fillDefaults(value: Tasks.PresentationOptions, context: ParseContext): Tasks.PresentationOptions | undefined {
			let defaultEcho = context.engine === Tasks.ExecutionEngine.Terminal ? true : false;
			return _fillDefaults(value, { echo: defaultEcho, reveal: Tasks.RevealKind.Always, focus: false, panel: Tasks.PanelKind.Shared, showReuseMessage: true, clear: false }, properties, context);
		}
		export function freeze(value: Tasks.PresentationOptions): Readonly<Tasks.PresentationOptions> | undefined {
			return _freeze(value, properties);
		}
		export function isEmpty(this: void, value: Tasks.PresentationOptions): boolean {
			return _isEmpty(value, properties);
		}
	}
namespace ShellString {
export function from(this: void, value: CommandString | undefined): Tasks.CommandString | undefined {
if (value === undefined || value === null) {
return undefined;
}
if (Types.isString(value)) {
return value;
} else if (Types.isStringArray(value)) {
return value.join(' ');
} else {
let quoting = Tasks.ShellQuoting.from(value.quoting);
let result = Types.isString(value.value) ? value.value : Types.isStringArray(value.value) ? value.value.join(' ') : undefined;
if (result) {
return {
value: result,
quoting: quoting
};
} else {
return undefined;
}
}
}
}
	// Raw per-OS command shape: modern properties plus legacy aliases.
	interface BaseCommandConfiguationShape extends BaseCommandProperties, LegacyCommandProperties {
	}
	// Full command shape with optional per-platform overrides.
	interface CommandConfiguationShape extends BaseCommandConfiguationShape {
		windows?: BaseCommandConfiguationShape;
		osx?: BaseCommandConfiguationShape;
		linux?: BaseCommandConfiguationShape;
	}
	// Metadata driving the generic merge/fill/freeze helpers for commands.
	const properties: MetaData<Tasks.CommandConfiguration, any>[] = [
		{ property: 'runtime' }, { property: 'name' }, { property: 'options', type: CommandOptions },
		{ property: 'args' }, { property: 'taskSelector' }, { property: 'suppressTaskName' },
		{ property: 'presentation', type: PresentationOptions }
	];
	// Parses the command configuration, then overlays the section matching
	// the current platform (windows/osx/linux). In 2.0.0 schema the platform
	// section's args replace the base args instead of being appended.
	export function from(this: void, config: CommandConfiguationShape, context: ParseContext): Tasks.CommandConfiguration | undefined {
		let result: Tasks.CommandConfiguration = fromBase(config, context)!;
		let osConfig: Tasks.CommandConfiguration | undefined = undefined;
		if (config.windows && context.platform === Platform.Windows) {
			osConfig = fromBase(config.windows, context);
		} else if (config.osx && context.platform === Platform.Mac) {
			osConfig = fromBase(config.osx, context);
		} else if (config.linux && context.platform === Platform.Linux) {
			osConfig = fromBase(config.linux, context);
		}
		if (osConfig) {
			result = assignProperties(result, osConfig, context.schemaVersion === Tasks.JsonSchemaVersion.V2_0_0);
		}
		return isEmpty(result) ? undefined : result;
	}
	// Parses one (possibly platform-specific) command section. The runtime is
	// resolved in order: explicit `type`, then the legacy `isShellCommand`
	// boolean/object — so the legacy property overrides `type` when both are
	// present. Invalid args are skipped with a task-load error.
	function fromBase(this: void, config: BaseCommandConfiguationShape, context: ParseContext): Tasks.CommandConfiguration | undefined {
		let name: Tasks.CommandString = ShellString.from(config.command)!;
		let runtime: Tasks.RuntimeType;
		if (Types.isString(config.type)) {
			if (config.type === 'shell' || config.type === 'process') {
				runtime = Tasks.RuntimeType.fromString(config.type);
			}
		}
		let isShellConfiguration = ShellConfiguration.is(config.isShellCommand);
		if (Types.isBoolean(config.isShellCommand) || isShellConfiguration) {
			runtime = Tasks.RuntimeType.Shell;
		} else if (config.isShellCommand !== undefined) {
			// Truthy non-boolean values still force the shell runtime.
			runtime = !!config.isShellCommand ? Tasks.RuntimeType.Shell : Tasks.RuntimeType.Process;
		}
		let result: Tasks.CommandConfiguration = {
			name: name!,
			runtime: runtime!,
			presentation: PresentationOptions.from(config, context)!
		};
		if (config.args !== undefined) {
			result.args = [];
			for (let arg of config.args) {
				let converted = ShellString.from(arg);
				if (converted !== undefined) {
					result.args.push(converted);
				} else {
					context.taskLoadIssues.push(
						nls.localize(
							'ConfigurationParser.inValidArg',
							'Error: command argument must either be a string or a quoted string. Provided value is:\n{0}',
							arg ? JSON.stringify(arg, undefined, 4) : 'undefined'
						));
				}
			}
		}
		if (config.options !== undefined) {
			result.options = CommandOptions.from(config.options, context);
			// A shell configuration given via the legacy isShellCommand object
			// is folded into options.shell; it is only honored by the
			// terminal engine, hence the warning for the process engine.
			if (result.options && result.options.shell === undefined && isShellConfiguration) {
				result.options.shell = ShellConfiguration.from(config.isShellCommand as ShellConfiguration, context);
				if (context.engine !== Tasks.ExecutionEngine.Terminal) {
					context.taskLoadIssues.push(nls.localize('ConfigurationParser.noShell', 'Warning: shell configuration is only supported when executing tasks in the terminal.'));
				}
			}
		}
		if (Types.isString(config.taskSelector)) {
			result.taskSelector = config.taskSelector;
		}
		if (Types.isBoolean(config.suppressTaskName)) {
			result.suppressTaskName = config.suppressTaskName;
		}
		return isEmpty(result) ? undefined : result;
	}
	// True when a concrete command (name) is configured.
	export function hasCommand(value: Tasks.CommandConfiguration): boolean {
		return value && !!value.name;
	}
	export function isEmpty(value: Tasks.CommandConfiguration | undefined): boolean {
		return _isEmpty(value, properties);
	}
	// Merges source onto target with source winning. Args are either replaced
	// (overwriteArgs, used for 2.0.0 platform sections) or appended; nested
	// presentation/options merge through their own parsers.
	export function assignProperties(target: Tasks.CommandConfiguration, source: Tasks.CommandConfiguration, overwriteArgs: boolean): Tasks.CommandConfiguration {
		if (isEmpty(source)) {
			return target;
		}
		if (isEmpty(target)) {
			return source;
		}
		assignProperty(target, source, 'name');
		assignProperty(target, source, 'runtime');
		assignProperty(target, source, 'taskSelector');
		assignProperty(target, source, 'suppressTaskName');
		if (source.args !== undefined) {
			if (target.args === undefined || overwriteArgs) {
				target.args = source.args;
			} else {
				target.args = target.args.concat(source.args);
			}
		}
		target.presentation = PresentationOptions.assignProperties(target.presentation!, source.presentation)!;
		target.options = CommandOptions.assignProperties(target.options, source.options);
		return target;
	}
	// Merges source onto target with target winning (metadata-driven).
	export function fillProperties(target: Tasks.CommandConfiguration, source: Tasks.CommandConfiguration): Tasks.CommandConfiguration {
		return _fillProperties(target, source, properties);
	}
	// Applies the global command definition to a task that lacks its own
	// command. When the task inherits the global command name, the task name
	// (optionally prefixed by taskSelector) is appended to the args unless
	// suppressed; the task's own args come after the global ones.
	export function fillGlobals(target: Tasks.CommandConfiguration, source: Tasks.CommandConfiguration | undefined, taskName: string | undefined): Tasks.CommandConfiguration {
		if ((source === undefined) || isEmpty(source)) {
			return target;
		}
		target = target || {
			name: undefined,
			runtime: undefined,
			presentation: undefined
		};
		if (target.name === undefined) {
			fillProperty(target, source, 'name');
			fillProperty(target, source, 'taskSelector');
			fillProperty(target, source, 'suppressTaskName');
			let args: Tasks.CommandString[] = source.args ? source.args.slice() : [];
			if (!target.suppressTaskName && taskName) {
				if (target.taskSelector !== undefined) {
					args.push(target.taskSelector + taskName);
				} else {
					args.push(taskName);
				}
			}
			if (target.args) {
				args = args.concat(target.args);
			}
			target.args = args;
		}
		fillProperty(target, source, 'runtime');
		target.presentation = PresentationOptions.fillProperties(target.presentation!, source.presentation)!;
		target.options = CommandOptions.fillProperties(target.options, source.options);
		return target;
	}
	// Fills remaining defaults: runtime defaults to Process when a command
	// name exists, args to an empty array, suppressTaskName to false.
	export function fillDefaults(value: Tasks.CommandConfiguration | undefined, context: ParseContext): void {
		if (!value || Object.isFrozen(value)) {
			return;
		}
		if (value.name !== undefined && value.runtime === undefined) {
			value.runtime = Tasks.RuntimeType.Process;
		}
		value.presentation = PresentationOptions.fillDefaults(value.presentation!, context)!;
		if (!isEmpty(value)) {
			value.options = CommandOptions.fillDefaults(value.options, context);
		}
		if (value.args === undefined) {
			value.args = EMPTY_ARRAY;
		}
		if (value.suppressTaskName === undefined) {
			value.suppressTaskName = false;
		}
	}
	export function freeze(value: Tasks.CommandConfiguration): Readonly<Tasks.CommandConfiguration> | undefined {
		return _freeze(value, properties);
	}
}
// Converts the problemMatcher section(s) of a task configuration into
// resolved ProblemMatcher instances.
namespace ProblemMatcherConverter {
	// Parses matchers from the configuration's declare scope into a
	// name-indexed dictionary; unnamed matchers are reported as errors.
	export function namedFrom(this: void, declares: ProblemMatcherConfig.NamedProblemMatcher[] | undefined, context: ParseContext): IStringDictionary<NamedProblemMatcher> {
		let result: IStringDictionary<NamedProblemMatcher> = Object.create(null);
		if (!Types.isArray(declares)) {
			return result;
		}
		(<ProblemMatcherConfig.NamedProblemMatcher[]>declares).forEach((value) => {
			let namedProblemMatcher = (new ProblemMatcherParser(context.problemReporter)).parse(value);
			if (isNamedProblemMatcher(namedProblemMatcher)) {
				result[namedProblemMatcher.name] = namedProblemMatcher;
			} else {
				context.problemReporter.error(nls.localize('ConfigurationParser.noName', 'Error: Problem Matcher in declare scope must have a name:\n{0}\n', JSON.stringify(value, undefined, 4)));
			}
		});
		return result;
	}
	// Resolves a problemMatcher value — a '$name' reference, an inline
	// matcher object, or an array of either — into ProblemMatcher instances.
	// Unknown shapes produce a warning and an empty result.
	export function from(this: void, config: ProblemMatcherConfig.ProblemMatcherType | undefined, context: ParseContext): ProblemMatcher[] {
		let result: ProblemMatcher[] = [];
		if (config === undefined) {
			return result;
		}
		let kind = getProblemMatcherKind(config);
		if (kind === ProblemMatcherKind.Unknown) {
			context.problemReporter.warn(nls.localize(
				'ConfigurationParser.unknownMatcherKind',
				'Warning: the defined problem matcher is unknown. Supported types are string | ProblemMatcher | Array<string | ProblemMatcher>.\n{0}\n',
				JSON.stringify(config, null, 4)));
			return result;
		} else if (kind === ProblemMatcherKind.String || kind === ProblemMatcherKind.ProblemMatcher) {
			let matcher = resolveProblemMatcher(config as ProblemMatcherConfig.ProblemMatcher, context);
			if (matcher) {
				result.push(matcher);
			}
		} else if (kind === ProblemMatcherKind.Array) {
			let problemMatchers = <(string | ProblemMatcherConfig.ProblemMatcher)[]>config;
			problemMatchers.forEach(problemMatcher => {
				let matcher = resolveProblemMatcher(problemMatcher, context);
				if (matcher) {
					result.push(matcher);
				}
			});
		}
		return result;
	}
	// Classifies the raw problemMatcher value by its JSON shape.
	function getProblemMatcherKind(this: void, value: ProblemMatcherConfig.ProblemMatcherType): ProblemMatcherKind {
		if (Types.isString(value)) {
			return ProblemMatcherKind.String;
		} else if (Types.isArray(value)) {
			return ProblemMatcherKind.Array;
		} else if (!Types.isUndefined(value)) {
			return ProblemMatcherKind.ProblemMatcher;
		} else {
			return ProblemMatcherKind.Unknown;
		}
	}
	// Resolves a single matcher: '$name' references look up the global
	// registry first, then the configuration's declared matchers (cloned,
	// with the name stripped); objects are parsed inline.
	function resolveProblemMatcher(this: void, value: string | ProblemMatcherConfig.ProblemMatcher, context: ParseContext): ProblemMatcher | undefined {
		if (Types.isString(value)) {
			let variableName = <string>value;
			if (variableName.length > 1 && variableName[0] === '$') {
				variableName = variableName.substring(1);
				let global = ProblemMatcherRegistry.get(variableName);
				if (global) {
					return Objects.deepClone(global);
				}
				let localProblemMatcher = context.namedProblemMatchers[variableName];
				if (localProblemMatcher) {
					localProblemMatcher = Objects.deepClone(localProblemMatcher);
					// remove the name
					delete localProblemMatcher.name;
					return localProblemMatcher;
				}
			}
			context.taskLoadIssues.push(nls.localize('ConfigurationParser.invalidVaraibleReference', 'Error: Invalid problemMatcher reference: {0}\n', value));
			return undefined;
		} else {
			let json = <ProblemMatcherConfig.ProblemMatcher>value;
			return new ProblemMatcherParser(context.problemReporter).parse(json);
		}
	}
}
// Template source descriptor shared by all tasks parsed from the workspace
// configuration; `config` is filled in per task.
const source: Partial<Tasks.TaskSource> = {
	kind: Tasks.TaskSourceKind.Workspace,
	label: 'Workspace',
	config: undefined
};
namespace GroupKind {
	// Parses the `group` property: either a bare group name or an object of
	// the form { kind, isDefault }. Returns the group name plus whether the
	// task is the default entry for that group; undefined for unknown groups.
	export function from(this: void, external: string | GroupKind | undefined): [string, Tasks.GroupType] | undefined {
		if (external === undefined) {
			return undefined;
		}
		if (Types.isString(external)) {
			return Tasks.TaskGroup.is(external) ? [external, Tasks.GroupType.user] : undefined;
		}
		if (!Types.isString(external.kind) || !Tasks.TaskGroup.is(external.kind)) {
			return undefined;
		}
		const kind: string = external.kind;
		return [kind, external.isDefault ? Tasks.GroupType.default : Tasks.GroupType.user];
	}
}
namespace TaskDependency {
	// Resolves a dependsOn entry — a plain task name or a task identifier
	// object — into a TaskDependency scoped to the current workspace folder.
	// Unrecognized shapes yield undefined.
	export function from(this: void, external: string | TaskIdentifier, context: ParseContext): Tasks.TaskDependency | undefined {
		if (Types.isString(external)) {
			return { workspaceFolder: context.workspaceFolder, task: external };
		}
		if (TaskIdentifier.is(external)) {
			return { workspaceFolder: context.workspaceFolder, task: TaskDefinition.createTaskIdentifier(external as Tasks.TaskIdentifier, context.problemReporter) };
		}
		return undefined;
	}
}
// Parser for the properties shared by custom and configuring tasks
// (name, group, dependsOn, presentation, problem matchers, ...).
namespace ConfigurationProperties {
	const properties: MetaData<Tasks.ConfigurationProperties, any>[] = [
		{ property: 'name' }, { property: 'identifier' }, { property: 'group' }, { property: 'isBackground' },
		{ property: 'promptOnClose' }, { property: 'dependsOn' },
		{ property: 'presentation', type: CommandConfiguration.PresentationOptions }, { property: 'problemMatchers' }
	];
	// Converts the raw shape. In 2.0.0 schema `label` overrides the legacy
	// `taskName`. Presentation and command options are only picked up when
	// includeCommandOptions is set (configuring tasks, not custom tasks).
	export function from(this: void, external: ConfigurationProperties, context: ParseContext, includeCommandOptions: boolean): Tasks.ConfigurationProperties | undefined {
		if (!external) {
			return undefined;
		}
		let result: Tasks.ConfigurationProperties = {};
		if (Types.isString(external.taskName)) {
			result.name = external.taskName;
		}
		if (Types.isString(external.label) && context.schemaVersion === Tasks.JsonSchemaVersion.V2_0_0) {
			result.name = external.label;
		}
		if (Types.isString(external.identifier)) {
			result.identifier = external.identifier;
		}
		if (external.isBackground !== undefined) {
			result.isBackground = !!external.isBackground;
		}
		if (external.promptOnClose !== undefined) {
			result.promptOnClose = !!external.promptOnClose;
		}
		if (external.group !== undefined) {
			// Bare string groups are user groups; object groups may mark the
			// task as the group default (handled by GroupKind.from).
			if (Types.isString(external.group) && Tasks.TaskGroup.is(external.group)) {
				result.group = external.group;
				result.groupType = Tasks.GroupType.user;
			} else {
				let values = GroupKind.from(external.group);
				if (values) {
					result.group = values[0];
					result.groupType = values[1];
				}
			}
		}
		if (external.dependsOn !== undefined) {
			if (Types.isArray(external.dependsOn)) {
				result.dependsOn = external.dependsOn.reduce((dependencies: Tasks.TaskDependency[], item): Tasks.TaskDependency[] => {
					const dependency = TaskDependency.from(item, context);
					if (dependency) {
						dependencies.push(dependency);
					}
					return dependencies;
				}, []);
			} else {
				const dependsOnValue = TaskDependency.from(external.dependsOn, context);
				result.dependsOn = dependsOnValue ? [dependsOnValue] : undefined;
			}
		}
		if (includeCommandOptions && (external.presentation !== undefined || (external as LegacyCommandProperties).terminal !== undefined)) {
			result.presentation = CommandConfiguration.PresentationOptions.from(external, context);
		}
		if (includeCommandOptions && (external.options !== undefined)) {
			result.options = CommandOptions.from(external.options, context);
		}
		if (external.problemMatcher) {
			result.problemMatchers = ProblemMatcherConverter.from(external.problemMatcher, context);
		}
		return isEmpty(result) ? undefined : result;
	}
	export function isEmpty(this: void, value: Tasks.ConfigurationProperties): boolean {
		return _isEmpty(value, properties);
	}
}
// Parser for tasks that customize a contributed task (have a `type` matching
// a registered task definition, or a legacy `customize` key).
namespace ConfiguringTask {
	// Legacy `customize` key prefixes mapping onto known task providers.
	const grunt = 'grunt.';
	const jake = 'jake.';
	const gulp = 'gulp.';
	const npm = 'vscode.npm.';
	const typescript = 'vscode.typescript.';
	interface CustomizeShape {
		customize: string;
	}
	// Builds a Tasks.ConfiguringTask from the raw shape, resolving the task
	// identifier either from the legacy customize key or from the external
	// properties matching the registered task definition. Reports errors and
	// returns undefined when type/identifier cannot be resolved.
	export function from(this: void, external: ConfiguringTask, context: ParseContext, index: number): Tasks.ConfiguringTask | undefined {
		if (!external) {
			return undefined;
		}
		let type = external.type;
		let customize = (external as CustomizeShape).customize;
		if (!type && !customize) {
			context.problemReporter.error(nls.localize('ConfigurationParser.noTaskType', 'Error: tasks configuration must have a type property. The configuration will be ignored.\n{0}\n', JSON.stringify(external, null, 4)));
			return undefined;
		}
		let typeDeclaration = type ? TaskDefinitionRegistry.get(type) : undefined;
		if (!typeDeclaration) {
			let message = nls.localize('ConfigurationParser.noTypeDefinition', 'Error: there is no registered task type \'{0}\'. Did you miss to install an extension that provides a corresponding task provider?', type);
			context.problemReporter.error(message);
			return undefined;
		}
		let identifier: Tasks.TaskIdentifier | undefined;
		if (Types.isString(customize)) {
			if (customize.indexOf(grunt) === 0) {
				identifier = { type: 'grunt', task: customize.substring(grunt.length) };
			} else if (customize.indexOf(jake) === 0) {
				identifier = { type: 'jake', task: customize.substring(jake.length) };
			} else if (customize.indexOf(gulp) === 0) {
				identifier = { type: 'gulp', task: customize.substring(gulp.length) };
			} else if (customize.indexOf(npm) === 0) {
				// NOTE(review): the +4 presumably skips a 'run.' segment of the
				// legacy 'vscode.npm.run.<script>' key — TODO confirm.
				identifier = { type: 'npm', script: customize.substring(npm.length + 4) };
			} else if (customize.indexOf(typescript) === 0) {
				// NOTE(review): the +6 presumably skips a 'build.' segment of
				// the legacy typescript customize key — TODO confirm.
				identifier = { type: 'typescript', tsconfig: customize.substring(typescript.length + 6) };
			}
		} else {
			if (Types.isString(external.type)) {
				identifier = external as Tasks.TaskIdentifier;
			}
		}
		if (identifier === undefined) {
			context.problemReporter.error(nls.localize(
				'ConfigurationParser.missingType',
				'Error: the task configuration \'{0}\' is missing the required property \'type\'. The task configuration will be ignored.', JSON.stringify(external, undefined, 0)
			));
			return undefined;
		}
		let taskIdentifier: Tasks.KeyedTaskIdentifier | undefined = TaskDefinition.createTaskIdentifier(identifier, context.problemReporter);
		if (taskIdentifier === undefined) {
			context.problemReporter.error(nls.localize(
				'ConfigurationParser.incorrectType',
				'Error: the task configuration \'{0}\' is using an unknown type. The task configuration will be ignored.', JSON.stringify(external, undefined, 0)
			));
			return undefined;
		}
		let configElement: Tasks.TaskSourceConfigElement = {
			workspaceFolder: context.workspaceFolder,
			file: '.vscode\\tasks.json',
			index,
			element: external
		};
		let result: Tasks.ConfiguringTask = new Tasks.ConfiguringTask(
			`${typeDeclaration.extensionId}.${taskIdentifier._key}`,
			Objects.assign({} as Tasks.WorkspaceTaskSource, source, { config: configElement }),
			undefined,
			type,
			taskIdentifier,
			RunOptions.fromConfiguration(external.runOptions),
			{}
		);
		let configuration = ConfigurationProperties.from(external, context, true);
		if (configuration) {
			result.configurationProperties = Objects.assign(result.configurationProperties, configuration);
			if (result.configurationProperties.name) {
				result._label = result.configurationProperties.name;
			} else {
				// No explicit name: synthesize a label from the type plus the
				// first required definition property that has a value.
				let label = result.configures.type;
				if (typeDeclaration.required && typeDeclaration.required.length > 0) {
					for (let required of typeDeclaration.required) {
						let value = result.configures[required];
						if (value) {
							label = label + ' ' + value;
							break;
						}
					}
				}
				result._label = label;
			}
			if (!result.configurationProperties.identifier) {
				result.configurationProperties.identifier = taskIdentifier._key;
			}
		}
		return result;
	}
}
namespace CustomTask {
	// Builds a Tasks.CustomTask from the raw shape. Only custom/shell/process
	// types are accepted; in 2.0.0 schema `label` overrides `taskName`, and a
	// task must end up with a name. Legacy isWatching/isBuildCommand/
	// isTestCommand properties are honored when the modern ones are unset.
	export function from(this: void, external: CustomTask, context: ParseContext, index: number): Tasks.CustomTask | undefined {
		if (!external) {
			return undefined;
		}
		let type = external.type;
		if (type === undefined || type === null) {
			type = Tasks.CUSTOMIZED_TASK_TYPE;
		}
		if (type !== Tasks.CUSTOMIZED_TASK_TYPE && type !== 'shell' && type !== 'process') {
			context.problemReporter.error(nls.localize('ConfigurationParser.notCustom', 'Error: tasks is not declared as a custom task. The configuration will be ignored.\n{0}\n', JSON.stringify(external, null, 4)));
			return undefined;
		}
		let taskName = external.taskName;
		if (Types.isString(external.label) && context.schemaVersion === Tasks.JsonSchemaVersion.V2_0_0) {
			taskName = external.label;
		}
		if (!taskName) {
			context.problemReporter.error(nls.localize('ConfigurationParser.noTaskName', 'Error: a task must provide a label property. The task will be ignored.\n{0}\n', JSON.stringify(external, null, 4)));
			return undefined;
		}
		let result: Tasks.CustomTask = new Tasks.CustomTask(
			context.uuidMap.getUUID(taskName),
			Objects.assign({} as Tasks.WorkspaceTaskSource, source, { config: { index, element: external, file: '.vscode\\tasks.json', workspaceFolder: context.workspaceFolder } }),
			taskName,
			Tasks.CUSTOMIZED_TASK_TYPE,
			undefined,
			false,
			RunOptions.fromConfiguration(external.runOptions),
			{
				name: taskName,
				identifier: taskName,
			}
		);
		let configuration = ConfigurationProperties.from(external, context, false);
		if (configuration) {
			result.configurationProperties = Objects.assign(result.configurationProperties, configuration);
		}
		let supportLegacy: boolean = true; //context.schemaVersion === Tasks.JsonSchemaVersion.V2_0_0;
		if (supportLegacy) {
			let legacy: LegacyTaskProperties = external as LegacyTaskProperties;
			if (result.configurationProperties.isBackground === undefined && legacy.isWatching !== undefined) {
				result.configurationProperties.isBackground = !!legacy.isWatching;
			}
			if (result.configurationProperties.group === undefined) {
				if (legacy.isBuildCommand === true) {
					result.configurationProperties.group = Tasks.TaskGroup.Build;
				} else if (legacy.isTestCommand === true) {
					result.configurationProperties.group = Tasks.TaskGroup.Test;
				}
			}
		}
		let command: Tasks.CommandConfiguration = CommandConfiguration.from(external, context)!;
		if (command) {
			result.command = command;
		}
		if (external.command !== undefined) {
			// if the task has its own command then we suppress the
			// task name by default.
			command.suppressTaskName = true;
		}
		return result;
	}
	// Applies global configuration (command, problem matcher, promptOnClose)
	// to a custom task where the task does not define them itself.
	export function fillGlobals(task: Tasks.CustomTask, globals: Globals): void {
		// We only merge a command from a global definition if there is no dependsOn
		// or there is a dependsOn and a defined command.
		if (CommandConfiguration.hasCommand(task.command) || task.configurationProperties.dependsOn === undefined) {
			task.command = CommandConfiguration.fillGlobals(task.command, globals.command, task.configurationProperties.name);
		}
		if (task.configurationProperties.problemMatchers === undefined && globals.problemMatcher !== undefined) {
			task.configurationProperties.problemMatchers = Objects.deepClone(globals.problemMatcher);
			task.hasDefinedMatchers = true;
		}
		// promptOnClose is inferred from isBackground if available
		if (task.configurationProperties.promptOnClose === undefined && task.configurationProperties.isBackground === undefined && globals.promptOnClose !== undefined) {
			task.configurationProperties.promptOnClose = globals.promptOnClose;
		}
	}
	// Fills remaining task defaults: foreground tasks prompt on close, no
	// problem matchers, and groups without an explicit type become user groups.
	export function fillDefaults(task: Tasks.CustomTask, context: ParseContext): void {
		CommandConfiguration.fillDefaults(task.command, context);
		if (task.configurationProperties.promptOnClose === undefined) {
			task.configurationProperties.promptOnClose = task.configurationProperties.isBackground !== undefined ? !task.configurationProperties.isBackground : true;
		}
		if (task.configurationProperties.isBackground === undefined) {
			task.configurationProperties.isBackground = false;
		}
		if (task.configurationProperties.problemMatchers === undefined) {
			task.configurationProperties.problemMatchers = EMPTY_ARRAY;
		}
		if (task.configurationProperties.group !== undefined && task.configurationProperties.groupType === undefined) {
			task.configurationProperties.groupType = Tasks.GroupType.user;
		}
	}
	// Creates the effective custom task for a contributed task plus its
	// workspace customization: configured properties win (assignProperty),
	// then gaps are filled from the contributed task (fillProperty).
	export function createCustomTask(contributedTask: Tasks.ContributedTask, configuredProps: Tasks.ConfiguringTask | Tasks.CustomTask): Tasks.CustomTask {
		let result: Tasks.CustomTask = new Tasks.CustomTask(
			configuredProps._id,
			Objects.assign({}, configuredProps._source, { customizes: contributedTask.defines }),
			configuredProps.configurationProperties.name || contributedTask._label,
			Tasks.CUSTOMIZED_TASK_TYPE,
			contributedTask.command,
			false,
			contributedTask.runOptions,
			{
				name: configuredProps.configurationProperties.name || contributedTask.configurationProperties.name,
				identifier: configuredProps.configurationProperties.identifier || contributedTask.configurationProperties.identifier,
			}
		);
		result.addTaskLoadMessages(configuredProps.taskLoadMessages);
		let resultConfigProps: Tasks.ConfigurationProperties = result.configurationProperties;
		// Workspace customization overrides the contributed values.
		assignProperty(resultConfigProps, configuredProps.configurationProperties, 'group');
		assignProperty(resultConfigProps, configuredProps.configurationProperties, 'groupType');
		assignProperty(resultConfigProps, configuredProps.configurationProperties, 'isBackground');
		assignProperty(resultConfigProps, configuredProps.configurationProperties, 'dependsOn');
		assignProperty(resultConfigProps, configuredProps.configurationProperties, 'problemMatchers');
		assignProperty(resultConfigProps, configuredProps.configurationProperties, 'promptOnClose');
		result.command.presentation = CommandConfiguration.PresentationOptions.assignProperties(
			result.command.presentation!, configuredProps.configurationProperties.presentation)!;
		result.command.options = CommandOptions.assignProperties(result.command.options, configuredProps.configurationProperties.options);
		let contributedConfigProps: Tasks.ConfigurationProperties = contributedTask.configurationProperties;
		// Remaining gaps are filled from the contributed task.
		fillProperty(resultConfigProps, contributedConfigProps, 'group');
		fillProperty(resultConfigProps, contributedConfigProps, 'groupType');
		fillProperty(resultConfigProps, contributedConfigProps, 'isBackground');
		fillProperty(resultConfigProps, contributedConfigProps, 'dependsOn');
		fillProperty(resultConfigProps, contributedConfigProps, 'problemMatchers');
		fillProperty(resultConfigProps, contributedConfigProps, 'promptOnClose');
		result.command.presentation = CommandConfiguration.PresentationOptions.fillProperties(
			result.command.presentation!, contributedConfigProps.presentation)!;
		result.command.options = CommandOptions.fillProperties(result.command.options, contributedConfigProps.options);
		if (contributedTask.hasDefinedMatchers === true) {
			result.hasDefinedMatchers = true;
		}
		return result;
	}
}
interface TaskParseResult {
custom: Tasks.CustomTask[];
configured: Tasks.ConfiguringTask[];
}
namespace TaskParser {
// A task entry counts as "custom" when it carries no `customize` property
// and its type is absent or one of the built-in shell/process/customized kinds.
function isCustomTask(value: CustomTask | ConfiguringTask): value is CustomTask {
	if ((value as any).customize !== undefined) {
		return false;
	}
	const type = value.type;
	return type === undefined || type === null
		|| type === Tasks.CUSTOMIZED_TASK_TYPE
		|| type === 'shell' || type === 'process';
}
// Converts the raw `tasks` array of a tasks.json into parsed custom and
// configuring tasks. Also infers the default build/test task: an explicit
// group assignment (rank 2) beats a name match of 'build'/'test' (rank 1).
export function from(this: void, externals: Array<CustomTask | ConfiguringTask> | undefined, globals: Globals, context: ParseContext): TaskParseResult {
let result: TaskParseResult = { custom: [], configured: [] };
if (!externals) {
return result;
}
let defaultBuildTask: { task: Tasks.Task | undefined; rank: number; } = { task: undefined, rank: -1 };
let defaultTestTask: { task: Tasks.Task | undefined; rank: number; } = { task: undefined, rank: -1 };
let schema2_0_0: boolean = context.schemaVersion === Tasks.JsonSchemaVersion.V2_0_0;
// Snapshot load issues so each task only records issues raised while
// parsing it (context.taskLoadIssues is reset after every iteration).
const baseLoadIssues = Objects.deepClone(context.taskLoadIssues);
for (let index = 0; index < externals.length; index++) {
let external = externals[index];
if (isCustomTask(external)) {
let customTask = CustomTask.from(external, context, index);
if (customTask) {
CustomTask.fillGlobals(customTask, globals);
CustomTask.fillDefaults(customTask, context);
if (schema2_0_0) {
// 2.0.0: a task must have either a command or dependsOn.
if ((customTask.command === undefined || customTask.command.name === undefined) && (customTask.configurationProperties.dependsOn === undefined || customTask.configurationProperties.dependsOn.length === 0)) {
context.problemReporter.error(nls.localize(
'taskConfiguration.noCommandOrDependsOn', 'Error: the task \'{0}\' neither specifies a command nor a dependsOn property. The task will be ignored. Its definition is:\n{1}',
customTask.configurationProperties.name, JSON.stringify(external, undefined, 4)
));
continue;
}
} else {
// 0.1.0: only a command is accepted; a missing one is a warning.
if (customTask.command === undefined || customTask.command.name === undefined) {
context.problemReporter.warn(nls.localize(
'taskConfiguration.noCommand', 'Error: the task \'{0}\' doesn\'t define a command. The task will be ignored. Its definition is:\n{1}',
customTask.configurationProperties.name, JSON.stringify(external, undefined, 4)
));
continue;
}
}
// Track default build/test candidates by rank (explicit group > name).
if (customTask.configurationProperties.group === Tasks.TaskGroup.Build && defaultBuildTask.rank < 2) {
defaultBuildTask.task = customTask;
defaultBuildTask.rank = 2;
} else if (customTask.configurationProperties.group === Tasks.TaskGroup.Test && defaultTestTask.rank < 2) {
defaultTestTask.task = customTask;
defaultTestTask.rank = 2;
} else if (customTask.configurationProperties.name === 'build' && defaultBuildTask.rank < 1) {
defaultBuildTask.task = customTask;
defaultBuildTask.rank = 1;
} else if (customTask.configurationProperties.name === 'test' && defaultTestTask.rank < 1) {
defaultTestTask.task = customTask;
defaultTestTask.rank = 1;
}
customTask.addTaskLoadMessages(context.taskLoadIssues);
result.custom.push(customTask);
}
} else {
let configuredTask = ConfiguringTask.from(external, context, index);
if (configuredTask) {
configuredTask.addTaskLoadMessages(context.taskLoadIssues);
result.configured.push(configuredTask);
}
}
context.taskLoadIssues = Objects.deepClone(baseLoadIssues);
}
// Promote the name-matched candidates (rank 1) to their group; rank 2
// tasks already carry the group explicitly.
if ((defaultBuildTask.rank > -1) && (defaultBuildTask.rank < 2) && defaultBuildTask.task) {
defaultBuildTask.task.configurationProperties.group = Tasks.TaskGroup.Build;
defaultBuildTask.task.configurationProperties.groupType = Tasks.GroupType.user;
} else if ((defaultTestTask.rank > -1) && (defaultTestTask.rank < 2) && defaultTestTask.task) {
defaultTestTask.task.configurationProperties.group = Tasks.TaskGroup.Test;
defaultTestTask.task.configurationProperties.groupType = Tasks.GroupType.user;
}
return result;
}
// Merges `source` tasks into `target`, keyed by task name: a source task
// replaces a same-named target task in place; source-only tasks are
// appended. Removed the dead `if (source)` wrapper -- after the two early
// returns `source` is always a non-empty array.
export function assignTasks(target: Tasks.CustomTask[], source: Tasks.CustomTask[]): Tasks.CustomTask[] {
	if (source === undefined || source.length === 0) {
		return target;
	}
	if (target === undefined || target.length === 0) {
		return source;
	}
	// Tasks are keyed by ID but we need to merge by name
	let map: IStringDictionary<Tasks.CustomTask> = Object.create(null);
	target.forEach((task) => {
		map[task.configurationProperties.name!] = task;
	});
	source.forEach((task) => {
		map[task.configurationProperties.name!] = task;
	});
	// Keep the original ordering of `target`, then append source-only tasks.
	let newTarget: Tasks.CustomTask[] = [];
	target.forEach(task => {
		newTarget.push(map[task.configurationProperties.name!]);
		delete map[task.configurationProperties.name!];
	});
	Object.keys(map).forEach(key => newTarget.push(map[key]));
	return newTarget;
}
}
interface Globals {
command?: Tasks.CommandConfiguration;
problemMatcher?: ProblemMatcher[];
promptOnClose?: boolean;
suppressTaskName?: boolean;
}
namespace Globals {
// Builds the effective global settings for the current platform: base
// settings, overridden by the matching OS section, plus the global command.
// The result is frozen, so it must be fully populated here.
export function from(config: ExternalTaskRunnerConfiguration, context: ParseContext): Globals {
let result = fromBase(config, context);
let osGlobals: Globals | undefined = undefined;
if (config.windows && context.platform === Platform.Windows) {
osGlobals = fromBase(config.windows, context);
} else if (config.osx && context.platform === Platform.Mac) {
osGlobals = fromBase(config.osx, context);
} else if (config.linux && context.platform === Platform.Linux) {
osGlobals = fromBase(config.linux, context);
}
if (osGlobals) {
// OS-specific values overwrite the base values.
result = Globals.assignProperties(result, osGlobals);
}
let command = CommandConfiguration.from(config, context);
if (command) {
result.command = command;
}
Globals.fillDefaults(result, context);
Globals.freeze(result);
return result;
}
// Extracts the plain global settings from one configuration section,
// normalizing the boolean flags and converting problem matchers.
export function fromBase(this: void, config: BaseTaskRunnerConfiguration, context: ParseContext): Globals {
	const result: Globals = {};
	const suppress = config.suppressTaskName;
	const prompt = config.promptOnClose;
	if (suppress !== undefined) {
		result.suppressTaskName = !!suppress;
	}
	if (prompt !== undefined) {
		result.promptOnClose = !!prompt;
	}
	if (config.problemMatcher) {
		result.problemMatcher = ProblemMatcherConverter.from(config.problemMatcher, context);
	}
	return result;
}
// True when the Globals object carries no effective configuration.
// NOTE(review): `problemMatcher` is intentionally(?) not part of this
// check, so a Globals holding only problem matchers counts as empty and
// assignProperties will return the other side unchanged -- confirm intended.
export function isEmpty(value: Globals): boolean {
return !value || value.command === undefined && value.promptOnClose === undefined && value.suppressTaskName === undefined;
}
// Overwrites target's scalar settings with the ones defined on source.
// An empty source leaves target untouched; an empty target adopts source.
export function assignProperties(target: Globals, source: Globals): Globals {
	if (!isEmpty(source)) {
		if (isEmpty(target)) {
			return source;
		}
		assignProperty(target, source, 'promptOnClose');
		assignProperty(target, source, 'suppressTaskName');
	}
	return target;
}
export function fillDefaults(value: Globals, context: ParseContext): void {
if (!value) {
return;
}
CommandConfiguration.fillDefaults(value.command, context);
if (value.suppressTaskName === undefined) {
value.suppressTaskName = false;
}
if (value.promptOnClose === undefined) {
value.promptOnClose = true;
}
}
// Freezes the globals object (shallowly) and recursively freezes the
// command configuration, if one is present.
export function freeze(value: Globals): void {
	Object.freeze(value);
	const command = value.command;
	if (command) {
		CommandConfiguration.freeze(command);
	}
}
}
export namespace ExecutionEngine {
	// Determines the execution engine: schema 2.0.0 always uses the
	// terminal; 0.1.0 honors the configured runner, defaulting to process.
	export function from(config: ExternalTaskRunnerConfiguration): Tasks.ExecutionEngine {
		const runner = config.runner || config._runner;
		let runnerEngine: Tasks.ExecutionEngine | undefined;
		if (runner === 'terminal') {
			runnerEngine = Tasks.ExecutionEngine.Terminal;
		} else if (runner === 'process') {
			runnerEngine = Tasks.ExecutionEngine.Process;
		}
		const schemaVersion = JsonSchemaVersion.from(config);
		if (schemaVersion === Tasks.JsonSchemaVersion.V0_1_0) {
			return runnerEngine || Tasks.ExecutionEngine.Process;
		}
		if (schemaVersion === Tasks.JsonSchemaVersion.V2_0_0) {
			return Tasks.ExecutionEngine.Terminal;
		}
		throw new Error('Shouldn\'t happen.');
	}
}
export namespace JsonSchemaVersion {
	const _default: Tasks.JsonSchemaVersion = Tasks.JsonSchemaVersion.V2_0_0;
	// Maps the `version` string of a tasks.json to the schema enum;
	// missing or unknown versions fall back to 2.0.0.
	export function from(config: ExternalTaskRunnerConfiguration): Tasks.JsonSchemaVersion {
		const version = config.version;
		if (version === '0.1.0') {
			return Tasks.JsonSchemaVersion.V0_1_0;
		}
		if (version === '2.0.0') {
			return Tasks.JsonSchemaVersion.V2_0_0;
		}
		return _default;
	}
}
export interface ParseResult {
validationStatus: ValidationStatus;
custom: Tasks.CustomTask[];
configured: Tasks.ConfiguringTask[];
engine: Tasks.ExecutionEngine;
}
export interface IProblemReporter extends IProblemReporterBase {
}
// Keeps task UUIDs stable across re-parses of the same tasks.json. A parse
// cycle is bracketed by start()/finish(): start() moves the previous
// mapping to `last`, and getUUID() hands out the previous id for a given
// identifier when one is available, otherwise generates a fresh one.
// Duplicate identifiers within one cycle are kept as arrays, consumed FIFO.
class UUIDMap {
private last: IStringDictionary<string | string[]> | undefined;
private current: IStringDictionary<string | string[]>;
constructor(other?: UUIDMap) {
this.current = Object.create(null);
if (other) {
// Deep-copy array values so the two maps do not share state.
for (let key of Object.keys(other.current)) {
let value = other.current[key];
if (Array.isArray(value)) {
this.current[key] = value.slice();
} else {
this.current[key] = value;
}
}
}
}
public start(): void {
this.last = this.current;
this.current = Object.create(null);
}
public getUUID(identifier: string): string {
let lastValue = this.last ? this.last[identifier] : undefined;
let result: string | undefined = undefined;
if (lastValue !== undefined) {
if (Array.isArray(lastValue)) {
// Consume one reserved id; drop the entry when exhausted.
result = lastValue.shift();
if (lastValue.length === 0) {
delete this.last![identifier];
}
} else {
result = lastValue;
delete this.last![identifier];
}
}
if (result === undefined) {
result = UUID.generateUuid();
}
// Record the id under the identifier for the next cycle, widening a
// single value into an array when the identifier repeats.
let currentValue = this.current[identifier];
if (currentValue === undefined) {
this.current[identifier] = result;
} else {
if (Array.isArray(currentValue)) {
currentValue.push(result);
} else {
let arrayValue: string[] = [currentValue];
arrayValue.push(result);
this.current[identifier] = arrayValue;
}
}
return result;
}
public finish(): void {
this.last = undefined;
}
}
// Converts one tasks.json (plus matching OS-specific section) into parsed
// custom and configuring tasks for a single workspace folder. Stateless
// apart from the injected UUIDMap, which keeps task ids stable across
// re-parses.
class ConfigurationParser {
private workspaceFolder: IWorkspaceFolder;
private problemReporter: IProblemReporter;
private uuidMap: UUIDMap;
private platform: Platform;
constructor(workspaceFolder: IWorkspaceFolder, platform: Platform, problemReporter: IProblemReporter, uuidMap: UUIDMap) {
this.workspaceFolder = workspaceFolder;
this.platform = platform;
this.problemReporter = problemReporter;
this.uuidMap = uuidMap;
}
// Parses the raw configuration and returns the tasks together with the
// validation status and the execution engine to use.
public run(fileConfig: ExternalTaskRunnerConfiguration): ParseResult {
let engine = ExecutionEngine.from(fileConfig);
let schemaVersion = JsonSchemaVersion.from(fileConfig);
let context: ParseContext = {
workspaceFolder: this.workspaceFolder,
problemReporter: this.problemReporter,
uuidMap: this.uuidMap,
namedProblemMatchers: {},
engine,
schemaVersion,
platform: this.platform,
taskLoadIssues: []
};
let taskParseResult = this.createTaskRunnerConfiguration(fileConfig, context);
return {
validationStatus: this.problemReporter.status,
custom: taskParseResult.custom,
configured: taskParseResult.configured,
engine
};
}
private createTaskRunnerConfiguration(fileConfig: ExternalTaskRunnerConfiguration, context: ParseContext): TaskParseResult {
let globals = Globals.from(fileConfig, context);
if (this.problemReporter.status.isFatal()) {
return { custom: [], configured: [] };
}
context.namedProblemMatchers = ProblemMatcherConverter.namedFrom(fileConfig.declares, context);
let globalTasks: Tasks.CustomTask[] | undefined = undefined;
let externalGlobalTasks: Array<ConfiguringTask | CustomTask> | undefined = undefined;
// Pick up tasks declared in the OS-specific section for this platform.
if (fileConfig.windows && context.platform === Platform.Windows) {
globalTasks = TaskParser.from(fileConfig.windows.tasks, globals, context).custom;
externalGlobalTasks = fileConfig.windows.tasks;
} else if (fileConfig.osx && context.platform === Platform.Mac) {
globalTasks = TaskParser.from(fileConfig.osx.tasks, globals, context).custom;
externalGlobalTasks = fileConfig.osx.tasks;
} else if (fileConfig.linux && context.platform === Platform.Linux) {
globalTasks = TaskParser.from(fileConfig.linux.tasks, globals, context).custom;
externalGlobalTasks = fileConfig.linux.tasks;
}
// Schema 2.0.0 no longer supports OS-specific global tasks; report the
// offending definitions verbatim so the user can convert them.
if (context.schemaVersion === Tasks.JsonSchemaVersion.V2_0_0 && globalTasks && globalTasks.length > 0 && externalGlobalTasks && externalGlobalTasks.length > 0) {
let taskContent: string[] = [];
for (let task of externalGlobalTasks) {
taskContent.push(JSON.stringify(task, null, 4));
}
context.problemReporter.error(
nls.localize(
'TaskParse.noOsSpecificGlobalTasks',
'Task version 2.0.0 doesn\'t support global OS specific tasks. Convert them to a task with a OS specific command. Affected tasks are:\n{0}', taskContent.join('\n'))
);
}
let result: TaskParseResult = { custom: [], configured: [] };
if (fileConfig.tasks) {
result = TaskParser.from(fileConfig.tasks, globals, context);
}
if (globalTasks) {
result.custom = TaskParser.assignTasks(result.custom, globalTasks);
}
// Legacy style: no task entries but a global command -> synthesize a
// single build task from the global configuration.
if ((!result.custom || result.custom.length === 0) && (globals.command && globals.command.name)) {
let matchers: ProblemMatcher[] = ProblemMatcherConverter.from(fileConfig.problemMatcher, context);
let isBackground = fileConfig.isBackground ? !!fileConfig.isBackground : fileConfig.isWatching ? !!fileConfig.isWatching : undefined;
let name = Tasks.CommandString.value(globals.command.name);
let task: Tasks.CustomTask = new Tasks.CustomTask(
context.uuidMap.getUUID(name),
Objects.assign({} as Tasks.WorkspaceTaskSource, source, { config: { index: -1, element: fileConfig, workspaceFolder: context.workspaceFolder } }),
name,
Tasks.CUSTOMIZED_TASK_TYPE,
{
name: undefined,
runtime: undefined,
presentation: undefined,
suppressTaskName: true
},
false,
{ reevaluateOnRerun: true },
{
name: name,
identifier: name,
group: Tasks.TaskGroup.Build,
isBackground: isBackground,
problemMatchers: matchers,
}
);
let value = GroupKind.from(fileConfig.group);
if (value) {
task.configurationProperties.group = value[0];
task.configurationProperties.groupType = value[1];
} else if (fileConfig.group === 'none') {
task.configurationProperties.group = undefined;
}
CustomTask.fillGlobals(task, globals);
CustomTask.fillDefaults(task, context);
result.custom = [task];
}
result.custom = result.custom || [];
result.configured = result.configured || [];
return result;
}
}
let uuidMaps: Map<string, UUIDMap> = new Map();
// Parses a task configuration for a workspace folder, reusing a per-folder
// UUIDMap so task ids stay stable across successive parses. start()/finish()
// bracket one parse cycle even when the parser throws.
export function parse(workspaceFolder: IWorkspaceFolder, platform: Platform, configuration: ExternalTaskRunnerConfiguration, logger: IProblemReporter): ParseResult {
	const key = workspaceFolder.uri.toString();
	let uuidMap = uuidMaps.get(key);
	if (uuidMap === undefined) {
		uuidMap = new UUIDMap();
		uuidMaps.set(key, uuidMap);
	}
	try {
		uuidMap.start();
		const parser = new ConfigurationParser(workspaceFolder, platform, logger, uuidMap);
		return parser.run(configuration);
	} finally {
		uuidMap.finish();
	}
}
// Module-level entry point; delegates to CustomTask.createCustomTask.
export function createCustomTask(contributedTask: Tasks.ContributedTask, configuredProps: Tasks.ConfiguringTask | Tasks.CustomTask): Tasks.CustomTask {
return CustomTask.createCustomTask(contributedTask, configuredProps);
}
/*
class VersionConverter {
constructor(private problemReporter: IProblemReporter) {
}
public convert(fromConfig: ExternalTaskRunnerConfiguration): ExternalTaskRunnerConfiguration {
let result: ExternalTaskRunnerConfiguration;
result.version = '2.0.0';
if (Array.isArray(fromConfig.tasks)) {
} else {
result.tasks = [];
}
return result;
}
private convertGlobalTask(fromConfig: ExternalTaskRunnerConfiguration): TaskDescription {
let command: string = this.getGlobalCommand(fromConfig);
if (!command) {
this.problemReporter.error(nls.localize('Converter.noGlobalName', 'No global command specified. Can\'t convert to 2.0.0 version.'));
return undefined;
}
let result: TaskDescription = {
taskName: command
};
if (fromConfig.isShellCommand) {
result.type = 'shell';
} else {
result.type = 'process';
result.args = fromConfig.args;
}
if (fromConfig.)
return result;
}
private getGlobalCommand(fromConfig: ExternalTaskRunnerConfiguration): string {
if (fromConfig.command) {
return fromConfig.command;
} else if (fromConfig.windows && fromConfig.windows.command) {
return fromConfig.windows.command;
} else if (fromConfig.osx && fromConfig.osx.command) {
return fromConfig.osx.command;
} else if (fromConfig.linux && fromConfig.linux.command) {
return fromConfig.linux.command;
} else {
return undefined;
}
}
private createCommandLine(command: string, args: string[], isWindows: boolean): string {
let result: string[];
let commandHasSpace = false;
let argHasSpace = false;
if (TaskDescription.hasUnescapedSpaces(command)) {
result.push(`"${command}"`);
commandHasSpace = true;
} else {
result.push(command);
}
if (args) {
for (let arg of args) {
if (TaskDescription.hasUnescapedSpaces(arg)) {
result.push(`"${arg}"`);
argHasSpace= true;
} else {
result.push(arg);
}
}
}
return result.join(' ');
}
}
*/
/**
* interact.js v1.1.2
*
* Copyright (c) 2012, 2013, 2014 Taye Adeyemi <[email protected]>
* Open source under the MIT License.
* https://raw.github.com/taye/interact.js/master/LICENSE
*/
(function () {
'use strict';
var document = window.document,
SVGElement = window.SVGElement || blank,
SVGSVGElement = window.SVGSVGElement || blank,
SVGElementInstance = window.SVGElementInstance || blank,
HTMLElement = window.HTMLElement || window.Element,
PointerEvent = (window.PointerEvent || window.MSPointerEvent),
pEventTypes,
hypot = Math.hypot || function (x, y) { return Math.sqrt(x * x + y * y); },
tmpXY = {}, // reduce object creation in getXY()
documents = [], // all documents being listened to
interactables = [], // all set interactables
interactions = [], // all interactions
dynamicDrop = false,
// {
// type: {
// selectors: ['selector', ...],
// contexts : [document, ...],
// listeners: [[listener, useCapture], ...]
// }
// }
delegatedEvents = {},
defaultOptions = {
draggable : false,
dragAxis : 'xy',
dropzone : false,
accept : null,
dropOverlap : 'pointer',
resizable : false,
squareResize: false,
resizeAxis : 'xy',
gesturable : false,
// no more than this number of actions can target the Interactable
dragMax : 1,
resizeMax : 1,
gestureMax: 1,
// no more than this number of actions can target the same
// element of this Interactable simultaneously
dragMaxPerElement : 1,
resizeMaxPerElement : 1,
gestureMaxPerElement: 1,
pointerMoveTolerance: 1,
actionChecker: null,
styleCursor: true,
preventDefault: 'auto',
// aww snap
snap: {
mode : 'grid',
endOnly : false,
actions : ['drag'],
range : Infinity,
grid : { x: 100, y: 100 },
gridOffset : { x: 0, y: 0 },
anchors : [],
paths : [],
elementOrigin: null,
arrayTypes : /^anchors$|^paths$|^actions$/,
objectTypes : /^grid$|^gridOffset$|^elementOrigin$/,
stringTypes : /^mode$/,
numberTypes : /^range$/,
boolTypes : /^endOnly$/
},
snapEnabled: false,
restrict: {
drag: null,
resize: null,
gesture: null,
endOnly: false
},
restrictEnabled: false,
autoScroll: {
container : null, // the item that is scrolled (Window or HTMLElement)
margin : 60,
speed : 300, // the scroll speed in pixels per second
numberTypes : /^margin$|^speed$/
},
autoScrollEnabled: false,
inertia: {
resistance : 10, // the lambda in exponential decay
minSpeed : 100, // target speed must be above this for inertia to start
endSpeed : 10, // the speed at which inertia is slow enough to stop
allowResume : true, // allow resuming an action in inertia phase
zeroResumeDelta : false, // if an action is resumed after launch, set dx/dy to 0
smoothEndDuration: 300, // animate to snap/restrict endOnly if there's no inertia
actions : ['drag', 'resize'], // allow inertia on these actions. gesture might not work
numberTypes: /^resistance$|^minSpeed$|^endSpeed$|^smoothEndDuration$/,
arrayTypes : /^actions$/,
boolTypes : /^(allowResume|zeroResumeDelta)$/
},
inertiaEnabled: false,
origin : { x: 0, y: 0 },
deltaSource : 'page',
},
// Things related to autoScroll
autoScroll = {
interaction: null,
i: null, // the handle returned by window.setInterval
x: 0, y: 0, // Direction each pulse is to scroll in
// scroll the window by the values in scroll.x/y
scroll: function () {
var options = autoScroll.interaction.target.options.autoScroll,
container = options.container || getWindow(autoScroll.interaction.element),
now = new Date().getTime(),
// change in time in seconds
dt = (now - autoScroll.prevTime) / 1000,
// displacement
s = options.speed * dt;
if (s >= 1) {
if (isWindow(container)) {
container.scrollBy(autoScroll.x * s, autoScroll.y * s);
}
else if (container) {
container.scrollLeft += autoScroll.x * s;
container.scrollTop += autoScroll.y * s;
}
autoScroll.prevTime = now;
}
if (autoScroll.isScrolling) {
cancelFrame(autoScroll.i);
autoScroll.i = reqFrame(autoScroll.scroll);
}
},
edgeMove: function (event) {
var interaction,
target,
doAutoscroll = false;
for (var i = 0; i < interactions.length; i++) {
interaction = interactions[i];
target = interaction.target;
if (target && target.options.autoScrollEnabled
&& (interaction.dragging || interaction.resizing)) {
doAutoscroll = true;
break;
}
}
if (!doAutoscroll) { return; }
var top,
right,
bottom,
left,
options = target.options.autoScroll,
container = options.container || getWindow(interaction.element);
if (isWindow(container)) {
left = event.clientX < autoScroll.margin;
top = event.clientY < autoScroll.margin;
right = event.clientX > container.innerWidth - autoScroll.margin;
bottom = event.clientY > container.innerHeight - autoScroll.margin;
}
else {
var rect = getElementRect(container);
left = event.clientX < rect.left + autoScroll.margin;
top = event.clientY < rect.top + autoScroll.margin;
right = event.clientX > rect.right - autoScroll.margin;
bottom = event.clientY > rect.bottom - autoScroll.margin;
}
autoScroll.x = (right ? 1: left? -1: 0);
autoScroll.y = (bottom? 1: top? -1: 0);
if (!autoScroll.isScrolling) {
// set the autoScroll properties to those of the target
autoScroll.margin = options.margin;
autoScroll.speed = options.speed;
autoScroll.start(interaction);
}
},
isScrolling: false,
prevTime: 0,
start: function (interaction) {
autoScroll.isScrolling = true;
cancelFrame(autoScroll.i);
autoScroll.interaction = interaction;
autoScroll.prevTime = new Date().getTime();
autoScroll.i = reqFrame(autoScroll.scroll);
},
stop: function () {
autoScroll.isScrolling = false;
cancelFrame(autoScroll.i);
}
},
// Does the browser support touch input?
supportsTouch = (('ontouchstart' in window) || window.DocumentTouch && document instanceof window.DocumentTouch),
// Does the browser support PointerEvents
supportsPointerEvent = !!PointerEvent,
// Less Precision with touch input
margin = supportsTouch || supportsPointerEvent? 20: 10,
// for ignoring browser's simulated mouse events
prevTouchTime = 0,
// Allow this many interactions to happen simultaneously
maxInteractions = 1,
actionCursors = {
drag : 'move',
resizex : 'e-resize',
resizey : 's-resize',
resizexy: 'se-resize',
gesture : ''
},
actionIsEnabled = {
drag : true,
resize : true,
gesture: true
},
// because Webkit and Opera still use 'mousewheel' event type
wheelEvent = 'onmousewheel' in document? 'mousewheel': 'wheel',
eventTypes = [
'dragstart',
'dragmove',
'draginertiastart',
'dragend',
'dragenter',
'dragleave',
'dropactivate',
'dropdeactivate',
'dropmove',
'drop',
'resizestart',
'resizemove',
'resizeinertiastart',
'resizeend',
'gesturestart',
'gesturemove',
'gestureinertiastart',
'gestureend',
'down',
'move',
'up',
'cancel',
'tap',
'doubletap',
'hold'
],
globalEvents = {},
// Opera Mobile must be handled differently
isOperaMobile = navigator.appName == 'Opera' &&
supportsTouch &&
navigator.userAgent.match('Presto'),
// scrolling doesn't change the result of
// getBoundingClientRect/getClientRects on iOS <=7 but it does on iOS 8
isIOS7orLower = (/iP(hone|od|ad)/.test(navigator.platform)
&& /OS [1-7][^\d]/.test(navigator.appVersion)),
// prefix matchesSelector
prefixedMatchesSelector = 'matchesSelector' in Element.prototype?
'matchesSelector': 'webkitMatchesSelector' in Element.prototype?
'webkitMatchesSelector': 'mozMatchesSelector' in Element.prototype?
'mozMatchesSelector': 'oMatchesSelector' in Element.prototype?
'oMatchesSelector': 'msMatchesSelector',
// will be polyfill function if browser is IE8
ie8MatchesSelector,
// native requestAnimationFrame or polyfill
reqFrame = window.requestAnimationFrame,
cancelFrame = window.cancelAnimationFrame,
// Events wrapper
events = (function () {
var useAttachEvent = ('attachEvent' in window) && !('addEventListener' in window),
addEvent = useAttachEvent? 'attachEvent': 'addEventListener',
removeEvent = useAttachEvent? 'detachEvent': 'removeEventListener',
on = useAttachEvent? 'on': '',
elements = [],
targets = [],
attachedListeners = [];
// Register `listener` for `type` events on `element`, recording the
// (element, type, listener) triple in the module-level parallel arrays.
// On IE8 (attachEvent) the listener is wrapped in a shim that normalizes
// the event object (target/currentTarget/preventDefault/pageX/pageY).
// Returns the addEventListener/attachEvent result, or undefined when the
// listener was already registered for this element and type.
function add (element, type, listener, useCapture) {
    var elementIndex = indexOf(elements, element),
        target = targets[elementIndex];

    if (!target) {
        target = {
            events: {},
            typeCount: 0
        };

        elementIndex = elements.push(element) - 1;
        targets.push(target);

        // wrapped-listener bookkeeping is only needed for attachEvent
        attachedListeners.push((useAttachEvent ? {
            supplied: [],
            wrapped : [],
            useCount: []
        } : null));
    }

    if (!target.events[type]) {
        target.events[type] = [];
        target.typeCount++;
    }

    if (!contains(target.events[type], listener)) {
        var ret;

        if (useAttachEvent) {
            var listeners = attachedListeners[elementIndex],
                listenerIndex = indexOf(listeners.supplied, listener);

            // reuse the existing wrapper if this listener was seen before
            var wrapped = listeners.wrapped[listenerIndex] || function (event) {
                if (!event.immediatePropagationStopped) {
                    event.target = event.srcElement;
                    event.currentTarget = element;

                    event.preventDefault = event.preventDefault || preventDef;
                    event.stopPropagation = event.stopPropagation || stopProp;
                    event.stopImmediatePropagation = event.stopImmediatePropagation || stopImmProp;

                    if (/mouse|click/.test(event.type)) {
                        // BUG FIX: was `element.ownerDdocument` (typo),
                        // which threw when computing pageX/pageY here
                        event.pageX = event.clientX + element.ownerDocument.documentElement.scrollLeft;
                        event.pageY = event.clientY + element.ownerDocument.documentElement.scrollTop;
                    }

                    listener(event);
                }
            };

            ret = element[addEvent](on + type, wrapped, Boolean(useCapture));

            if (listenerIndex === -1) {
                listeners.supplied.push(listener);
                listeners.wrapped.push(wrapped);
                listeners.useCount.push(1);
            }
            else {
                listeners.useCount[listenerIndex]++;
            }
        }
        else {
            ret = element[addEvent](type, listener, useCapture || false);
        }
        target.events[type].push(listener);

        return ret;
    }
}
// Unregister listener(s) for `type` events on `element`. `type` may be
// 'all' to clear every type, and `listener` may be 'all' to clear every
// listener of one type. Keeps the module-level parallel arrays
// (elements / targets / attachedListeners) in sync.
function remove (element, type, listener, useCapture) {
    var i,
        elementIndex = indexOf(elements, element),
        target = targets[elementIndex],
        listeners,
        listenerIndex,
        wrapped = listener;

    if (!target || !target.events) {
        return;
    }

    if (useAttachEvent) {
        listeners = attachedListeners[elementIndex];
        listenerIndex = indexOf(listeners.supplied, listener);
        // the IE8 shim was registered instead of the raw listener
        wrapped = listeners.wrapped[listenerIndex];
    }

    if (type === 'all') {
        for (type in target.events) {
            if (target.events.hasOwnProperty(type)) {
                remove(element, type, 'all');
            }
        }
        return;
    }

    if (target.events[type]) {
        if (listener === 'all') {
            // BUG FIX: the original iterated forward with a stale length
            // while the recursive calls spliced the live array, skipping
            // every other listener. Removing from the front until the
            // list is empty (it is nulled on the last removal) is safe.
            while (target.events[type] && target.events[type].length) {
                remove(element, type, target.events[type][0], Boolean(useCapture));
            }
        } else {
            for (i = 0; i < target.events[type].length; i++) {
                if (target.events[type][i] === listener) {
                    element[removeEvent](on + type, wrapped, useCapture || false);
                    target.events[type].splice(i, 1);

                    if (useAttachEvent && listeners) {
                        listeners.useCount[listenerIndex]--;
                        if (listeners.useCount[listenerIndex] === 0) {
                            listeners.supplied.splice(listenerIndex, 1);
                            listeners.wrapped.splice(listenerIndex, 1);
                            listeners.useCount.splice(listenerIndex, 1);
                        }
                    }
                    break;
                }
            }
        }

        if (target.events[type] && target.events[type].length === 0) {
            target.events[type] = null;
            target.typeCount--;
        }
    }

    if (!target.typeCount && targets[elementIndex] === target) {
        // BUG FIX: splice with an explicit deleteCount of 1 -- the
        // original `splice(elementIndex)` dropped every later element's
        // bookkeeping too. The identity re-check avoids a second removal
        // when a recursive call already cleaned this entry up.
        targets.splice(elementIndex, 1);
        elements.splice(elementIndex, 1);
        attachedListeners.splice(elementIndex, 1);
    }
}
// IE8 shims installed on wrapped events by `add` above; attachEvent
// events lack preventDefault/stopPropagation/stopImmediatePropagation.
function preventDef () {
this.returnValue = false;
}
function stopProp () {
this.cancelBubble = true;
}
function stopImmProp () {
this.cancelBubble = true;
// checked by the wrapper in `add` before invoking later listeners
this.immediatePropagationStopped = true;
}
return {
add: add,
remove: remove,
useAttachEvent: useAttachEvent,
_elements: elements,
_targets: targets,
_attachedListeners: attachedListeners
};
}());
// No-op constructor used as a stand-in when a browser global
// (SVGElement, SVGSVGElement, ...) is missing.
function blank () {}
// Test whether `o` is a DOM Element, tolerating elements from other
// windows/frames (DOM2 instanceof against their own Element) and
// falling back to duck typing when no Element constructor exists.
function isElement (o) {
    if (!o || typeof o !== 'object') {
        return false;
    }

    var _window = getWindow(o) || window;

    if (/object|function/.test(typeof _window.Element)) {
        return o instanceof _window.Element;    // DOM2
    }

    return o.nodeType === 1 && typeof o.nodeName === "string";
}
// Cross-frame-safe window check: a window object exposes its own
// `Window` constructor and is an instance of it.
function isWindow (thing) {
    if (!thing || !thing.Window) {
        return false;
    }
    return thing instanceof thing.Window;
}
// Array-like check: a non-null object with a `length` property and a
// callable `splice` (matches real Arrays and array-likes such as jQuery
// collections). Inlined typeof tests keep the predicate self-contained.
// BUG FIX: the original compared `typeof thing.length !== undefined`;
// `typeof` always yields a string, so that guard was always true and any
// object with a `splice` function passed regardless of `length`.
function isArray (thing) {
    return !!thing
        && typeof thing === 'object'
        && typeof thing.length !== 'undefined'
        && typeof thing.splice === 'function';
}
// Basic typeof-based predicates shared across the module.
function isObject (value) { return typeof value === 'object' && value !== null; }
function isFunction (value) { return typeof value === 'function'; }
function isNumber (value) { return typeof value === 'number'; }
function isBool (value) { return typeof value === 'boolean'; }
function isString (value) { return typeof value === 'string'; }
// Returns true if `value` is a string usable as a CSS selector;
// querySelector throws a SyntaxError for invalid selector syntax, which
// is deliberately allowed to propagate to the caller.
function trySelector (value) {
if (!isString(value)) { return false; }

// an exception will be raised if it is invalid
document.querySelector(value);
return true;
}
// Shallow-copy every enumerable property of `source` onto `dest`
// (including inherited ones, matching for-in) and return `dest`.
function extend (dest, source) {
    var prop;
    for (prop in source) {
        dest[prop] = source[prop];
    }
    return dest;
}
// Copy the page/client x,y pairs and the timeStamp from `src` onto
// `dest`, creating the page/client holder objects only when missing.
function copyCoords (dest, src) {
    var spaces = ['page', 'client'];

    for (var i = 0; i < spaces.length; i++) {
        var space = spaces[i];

        dest[space] = dest[space] || {};
        dest[space].x = src[space].x;
        dest[space].y = src[space].y;
    }

    dest.timeStamp = src.timeStamp;
}
// Fill targetObj.page / targetObj.client with the coordinates of
// `pointer` (or, when no pointer is given, of the interaction's single
// pointer or the average of its multi-touch pointers) and stamp the time.
// Reuses the module-level tmpXY scratch object to avoid allocations.
function setEventXY (targetObj, pointer, interaction) {
if (!pointer) {
if (interaction.pointerIds.length > 1) {
pointer = touchAverage(interaction.pointers);
}
else {
pointer = interaction.pointers[0];
}
}

getPageXY(pointer, tmpXY, interaction);
targetObj.page.x = tmpXY.x;
targetObj.page.y = tmpXY.y;

getClientXY(pointer, tmpXY, interaction);
targetObj.client.x = tmpXY.x;
targetObj.client.y = tmpXY.y;

targetObj.timeStamp = new Date().getTime();
}
// Fill `targetObj` with the page/client displacement between the `prev`
// and `cur` coordinate snapshots, the elapsed time, and the derived
// pointer velocities (vx/vy/speed) for both coordinate spaces.
function setEventDeltas (targetObj, prev, cur) {
    targetObj.page.x    = cur.page.x   - prev.page.x;
    targetObj.page.y    = cur.page.y   - prev.page.y;
    targetObj.client.x  = cur.client.x - prev.client.x;
    targetObj.client.y  = cur.client.y - prev.client.y;
    targetObj.timeStamp = new Date().getTime() - prev.timeStamp;

    // set pointer velocity (clamp dt so a zero interval can't divide by zero)
    var dt = Math.max(targetObj.timeStamp / 1000, 0.001);

    targetObj.page.speed = hypot(targetObj.page.x, targetObj.page.y) / dt;
    targetObj.page.vx    = targetObj.page.x / dt;
    targetObj.page.vy    = targetObj.page.y / dt;

    // BUG FIX: client speed previously mixed in targetObj.page.y,
    // yielding a wrong magnitude whenever page and client y deltas differ
    targetObj.client.speed = hypot(targetObj.client.x, targetObj.client.y) / dt;
    targetObj.client.vx    = targetObj.client.x / dt;
    targetObj.client.vy    = targetObj.client.y / dt;
}
// Get specified X/Y coords for mouse or event.touches[0]
function getXY (type, pointer, xy) {
xy = xy || {};
type = type || 'page';
xy.x = pointer[type + 'X'];
xy.y = pointer[type + 'Y'];
return xy;
}
// Resolve the *page* coordinates of `pointer` into `page` (created when
// not supplied). Handles InteractEvents replayed for inertia, Opera
// Mobile's odd viewport behaviour, and plain pointer/touch objects.
function getPageXY (pointer, page, interaction) {
    page = page || {};

    if (pointer instanceof InteractEvent) {
        if (/inertiastart/.test(pointer.type)) {
            // inertia start: project from the pointer-up coords by the
            // inertia target offset (sx/sy)
            interaction = interaction || pointer.interaction;

            extend(page, interaction.inertiaStatus.upCoords.page);

            page.x += interaction.inertiaStatus.sx;
            page.y += interaction.inertiaStatus.sy;
        }
        else {
            page.x = pointer.pageX;
            page.y = pointer.pageY;
        }
    }
    // Opera Mobile handles the viewport and scrolling oddly
    else if (isOperaMobile) {
        getXY('screen', pointer, page);

        page.x += window.scrollX;
        page.y += window.scrollY;
    }
    else {
        getXY('page', pointer, page);
    }

    return page;
}
// Resolve the *client* (viewport) coordinates of `pointer` into
// `client` (created when not supplied).
// NOTE(review): unlike getPageXY there is no fallback to
// pointer.interaction here, so `interaction` must be supplied for
// inertiastart InteractEvents — confirm callers always pass it.
function getClientXY (pointer, client, interaction) {
    client = client || {};

    if (pointer instanceof InteractEvent) {
        if (/inertiastart/.test(pointer.type)) {
            // project from the pointer-up coords by the inertia offset
            extend(client, interaction.inertiaStatus.upCoords.client);

            client.x += interaction.inertiaStatus.sx;
            client.y += interaction.inertiaStatus.sy;
        }
        else {
            client.x = pointer.clientX;
            client.y = pointer.clientY;
        }
    }
    else {
        // Opera Mobile handles the viewport and scrolling oddly
        getXY(isOperaMobile? 'screen': 'client', pointer, client);
    }

    return client;
}
function getScrollXY (win) {
win = win || window;
return {
x: win.scrollX || win.document.documentElement.scrollLeft,
y: win.scrollY || win.document.documentElement.scrollTop
};
}
// Identifier of a pointer: the numeric pointerId for PointerEvents,
// otherwise the touch identifier.
function getPointerId (pointer) {
    var id = pointer.pointerId;

    if (typeof id === 'number') {
        return id;
    }

    return pointer.identifier;
}
// For SVG <use> shadow-tree instances, return the corresponding
// element in the document; otherwise return `element` unchanged.
function getActualElement (element) {
    return (element instanceof SVGElementInstance
        ? element.correspondingUseElement
        : element);
}
// Return the window that `node` belongs to: the node itself when it is
// already a window, otherwise the default view of its owner document
// (parentWindow for old IE).
function getWindow (node) {
    if (isWindow(node)) {
        return node;
    }

    var rootNode = (node.ownerDocument || node);

    return rootNode.defaultView || rootNode.parentWindow;
}
// Return the element's bounding rect in *page* coordinates (client
// rect offset by the window scroll), or undefined when the element
// has no client rect.
function getElementRect (element) {
    var scroll = isIOS7orLower
            // scroll coords are unreliable on iOS <= 7
            ? { x: 0, y: 0 }
            : getScrollXY(getWindow(element)),
        clientRect = (element instanceof SVGElement)?
            element.getBoundingClientRect():
            element.getClientRects()[0];

    return clientRect && {
        left  : clientRect.left   + scroll.x,
        right : clientRect.right  + scroll.x,
        top   : clientRect.top    + scroll.y,
        bottom: clientRect.bottom + scroll.y,
        width : clientRect.width  || clientRect.right  - clientRect.left,
        // BUGFIX: was `clientRect.heigh` (typo) — always undefined, so
        // the computed fallback was used even when height was available
        height: clientRect.height || clientRect.bottom - clientRect.top
    };
}
// Return the two touches of a gesture as a 2-element array. Accepts a
// pre-built array of touches, a touchend event (where one or both
// touches have moved to changedTouches), or any other touch event.
function getTouchPair (event) {
    var pair = [];

    // array of touches is supplied
    if (isArray(event)) {
        pair[0] = event[0];
        pair[1] = event[1];

        return pair;
    }

    // an event
    if (event.type === 'touchend') {
        var remaining = event.touches.length;

        if (remaining === 1) {
            pair[0] = event.touches[0];
            pair[1] = event.changedTouches[0];
        }
        else if (remaining === 0) {
            pair[0] = event.changedTouches[0];
            pair[1] = event.changedTouches[1];
        }
    }
    else {
        pair[0] = event.touches[0];
        pair[1] = event.touches[1];
    }

    return pair;
}
// Midpoint (page and client coords) of the two touches of `event`.
function touchAverage (event) {
    var pair = getTouchPair(event),
        a = pair[0],
        b = pair[1];

    return {
        pageX  : (a.pageX   + b.pageX)   / 2,
        pageY  : (a.pageY   + b.pageY)   / 2,
        clientX: (a.clientX + b.clientX) / 2,
        clientY: (a.clientY + b.clientY) / 2
    };
}
// Bounding box (page coords) of the two touches of `event`, or
// undefined when the event doesn't describe a touch pair.
function touchBBox (event) {
    var hasPair = event.length || (event.touches && event.touches.length > 1);

    if (!hasPair) { return; }

    var pair   = getTouchPair(event),
        left   = Math.min(pair[0].pageX, pair[1].pageX),
        top    = Math.min(pair[0].pageY, pair[1].pageY),
        right  = Math.max(pair[0].pageX, pair[1].pageX),
        bottom = Math.max(pair[0].pageY, pair[1].pageY);

    return {
        x: left,
        y: top,
        left: left,
        top: top,
        width: right - left,
        height: bottom - top
    };
}
// Distance between the two touches of `event`, measured in the coord
// space named by deltaSource ('page' or 'client').
function touchDistance (event, deltaSource) {
    deltaSource = deltaSource || defaultOptions.deltaSource;

    var xProp = deltaSource + 'X',
        yProp = deltaSource + 'Y',
        pair  = getTouchPair(event);

    var dx = pair[0][xProp] - pair[1][xProp],
        dy = pair[0][yProp] - pair[1][yProp];

    return hypot(dx, dy);
}
// Angle in degrees of the line joining the two touches of `event`.
// When `prevAngle` is given, the result is adjusted so rotation stays
// continuous with the previous value (no 180°/360° jumps).
function touchAngle (event, prevAngle, deltaSource) {
    deltaSource = deltaSource || defaultOptions.deltaSource;

    var sourceX = deltaSource + 'X',
        sourceY = deltaSource + 'Y',
        touches = getTouchPair(event),
        dx = touches[0][sourceX] - touches[1][sourceX],
        dy = touches[0][sourceY] - touches[1][sourceY],
        // Math.atan only yields (-90°, 90°]; the clamping below keeps
        // successive readings continuous
        angle = 180 * Math.atan(dy / dx) / Math.PI;

    if (isNumber(prevAngle)) {
        var dr = angle - prevAngle,
            drClamped = dr % 360;

        // NOTE(review): `*` binds tighter than `|`, so
        // `360 + (angle / 360)|0 * 360` evaluates as
        // `(360 + angle / 360) | 0` — the `* 360` factor is always 0.
        // This looks unintended; confirm against upstream before
        // changing, since the `| 0` truncation masks the difference
        // for small angles.
        if (drClamped > 315) {
            angle -= 360 + (angle / 360)|0 * 360;
        }
        else if (drClamped > 135) {
            angle -= 180 + (angle / 360)|0 * 360;
        }
        else if (drClamped < -315) {
            angle += 360 + (angle / 360)|0 * 360;
        }
        else if (drClamped < -135) {
            angle += 180 + (angle / 360)|0 * 360;
        }
    }

    return angle;
}
// Resolve an interactable's `origin` option to an {x, y} point for
// `element`. Accepts the keywords 'parent'/'self', a CSS selector,
// a function, an element, or a rect-like object.
function getOriginXY (interactable, element) {
    var origin = interactable
            ? interactable.options.origin
            : defaultOptions.origin;

    if (origin === 'parent') {
        origin = element.parentNode;
    }
    else if (origin === 'self') {
        origin = interactable.getRect(element);
    }
    else if (trySelector(origin)) {
        // use the closest matching ancestor, or no offset at all
        origin = closest(element, origin) || { x: 0, y: 0 };
    }

    if (isFunction(origin)) {
        // NOTE(review): `interactable && element` passes `element`
        // (or a falsy interactable) to the user's origin function —
        // verify this is the intended argument.
        origin = origin(interactable && element);
    }

    if (isElement(origin)) {
        origin = getElementRect(origin);
    }

    // accept either {x, y} or {left, top} shaped objects
    origin.x = ('x' in origin)? origin.x : origin.left;
    origin.y = ('y' in origin)? origin.y : origin.top;

    return origin;
}
// http://stackoverflow.com/a/5634528/2280888
function _getQBezierValue(t, p1, p2, p3) {
var iT = 1 - t;
return iT * iT * p1 + 2 * iT * t * p2 + t * t * p3;
}
// Point at `position` (0..1) along the quadratic Bézier curve defined
// by a start point, a control point and an end point.
function getQuadraticCurvePoint(startX, startY, cpX, cpY, endX, endY, position) {
    var point = {};

    point.x = _getQBezierValue(position, startX, cpX, endX);
    point.y = _getQBezierValue(position, startY, cpY, endY);

    return point;
}
// http://gizma.com/easing/
function easeOutQuad (t, b, c, d) {
t /= d;<|fim▁hole|> return -c * t*(t-2) + b;
}
function nodeContains (parent, child) {
while ((child = child.parentNode)) {
if (child === parent) {
return true;
}
}
return false;
}
// Nearest ancestor of `child` that matches `selector`, or null when
// none does (the child itself is not tested).
function closest (child, selector) {
    var node = child.parentNode;

    while (isElement(node)) {
        if (matchesSelector(node, selector)) {
            return node;
        }

        node = node.parentNode;
    }

    return null;
}
// True when `element` lives inside the interactable's context
// (its owner document or a containing context element).
function inContext (interactable, element) {
    var context = interactable._context;

    return context === element.ownerDocument || nodeContains(context, element);
}
// True when `element` should be ignored for interactions on
// `interactable` according to its ignoreFrom option (a selector
// string or an element).
function testIgnore (interactable, interactableElement, element) {
    var ignoreFrom = interactable.options.ignoreFrom;

    if (!ignoreFrom
        // limit test to the interactable's element and its children
        || !isElement(element) || element === interactableElement.parentNode) {
        return false;
    }

    if (isString(ignoreFrom)) {
        // BUGFIX: the recursive call previously dropped
        // interactableElement, so the upward walk bailed out after one
        // step; pass all three arguments through
        return matchesSelector(element, ignoreFrom)
            || testIgnore(interactable, interactableElement, element.parentNode);
    }
    else if (isElement(ignoreFrom)) {
        return element === ignoreFrom || nodeContains(ignoreFrom, element);
    }

    return false;
}
// True when interactions on `interactable` may start from `element`
// according to its allowFrom option (a selector string or an element).
// When allowFrom is unset, everything is allowed.
function testAllow (interactable, interactableElement, element) {
    var allowFrom = interactable.options.allowFrom;

    if (!allowFrom) { return true; }

    // limit test to the interactable's element and its children
    if (!isElement(element) || element === interactableElement.parentNode) {
        return false;
    }

    if (isString(allowFrom)) {
        // BUGFIX: the recursive call previously dropped
        // interactableElement, so the upward walk bailed out after one
        // step; pass all three arguments through
        return matchesSelector(element, allowFrom)
            || testAllow(interactable, interactableElement, element.parentNode);
    }
    else if (isElement(allowFrom)) {
        return element === allowFrom || nodeContains(allowFrom, element);
    }

    return false;
}
function checkAxis (axis, interactable) {
if (!interactable) { return false; }
var thisAxis = interactable.options.dragAxis;
return (axis === 'xy' || thisAxis === 'xy' || thisAxis === axis);
}
// True when snapping applies to `action` on this interactable
// (resize variants collapse to 'resize'; gestures never snap).
function checkSnap (interactable, action) {
    var options = interactable.options,
        kind = /^resize/.test(action)? 'resize' : action;

    return kind !== 'gesture'
        && options.snapEnabled
        && contains(options.snap.actions, kind);
}
function checkRestrict (interactable, action) {
var options = interactable.options;
if (/^resize/.test(action)) {
action = 'resize';
}
return options.restrictEnabled && options.restrict[action];
}
// True when starting `action` on `element` would not exceed the
// interactable's <action>Max / <action>MaxPerElement limits or the
// module-level maxInteractions count.
function withinInteractionLimit (interactable, element, action) {
    action = /resize/.test(action)? 'resize': action;

    var options = interactable.options,
        maxActions = options[action + 'Max'],
        maxPerElement = options[action + 'MaxPerElement'],
        activeInteractions = 0,
        targetCount = 0,
        targetElementCount = 0;

    for (var i = 0, len = interactions.length; i < len; i++) {
        var interaction = interactions[i],
            otherAction = /resize/.test(interaction.prepared)? 'resize': interaction.prepared,
            active = interaction.interacting();

        if (!active) { continue; }

        activeInteractions++;

        if (activeInteractions >= maxInteractions) {
            return false;
        }

        if (interaction.target !== interactable) { continue; }

        // `|0` converts the boolean comparison to 0/1
        targetCount += (otherAction === action)|0;

        if (targetCount >= maxActions) {
            return false;
        }

        if (interaction.element === element) {
            targetElementCount++;

            // NOTE(review): this also rejects when a *different* action
            // is already active on the same element, regardless of
            // maxPerElement — confirm this is intended.
            if (otherAction !== action || targetElementCount >= maxPerElement) {
                return false;
            }
        }
    }

    return maxInteractions > 0;
}
// Test for the element that's "above" all other qualifiers
// Test for the element that's "above" all other qualifiers.
// Given an array of candidate elements (entries may be null), return
// the index of the one deepest / painted on top in DOM order, or -1
// when the array is empty.
function indexOfDeepestElement (elements) {
    var dropzone,
        deepestZone = elements[0],
        index = deepestZone? 0: -1,
        parent,
        deepestZoneParents = [],
        dropzoneParents = [],
        child,
        i,
        n;

    for (i = 1; i < elements.length; i++) {
        dropzone = elements[i];

        // an element might belong to multiple selector dropzones
        if (!dropzone || dropzone === deepestZone) {
            continue;
        }

        if (!deepestZone) {
            deepestZone = dropzone;
            index = i;
            continue;
        }

        // check if the deepest or current are document.documentElement or document.rootElement
        // - if the current dropzone is, do nothing and continue
        if (dropzone.parentNode === dropzone.ownerDocument) {
            continue;
        }
        // - if deepest is, update with the current dropzone and continue to next
        else if (deepestZone.parentNode === dropzone.ownerDocument) {
            deepestZone = dropzone;
            index = i;
            continue;
        }

        // lazily cache the ancestor chain of the current deepest zone
        if (!deepestZoneParents.length) {
            parent = deepestZone;

            while (parent.parentNode && parent.parentNode !== parent.ownerDocument) {
                deepestZoneParents.unshift(parent);
                parent = parent.parentNode;
            }
        }

        // if this element is an svg element and the current deepest is
        // an HTMLElement
        if (deepestZone instanceof HTMLElement
            && dropzone instanceof SVGElement
            && !(dropzone instanceof SVGSVGElement)) {

            if (dropzone === deepestZone.parentNode) {
                continue;
            }

            // compare starting from the root <svg> of the candidate
            parent = dropzone.ownerSVGElement;
        }
        else {
            parent = dropzone;
        }

        dropzoneParents = [];

        while (parent.parentNode !== parent.ownerDocument) {
            dropzoneParents.unshift(parent);
            parent = parent.parentNode;
        }

        n = 0;

        // get (position of last common ancestor) + 1
        while (dropzoneParents[n] && dropzoneParents[n] === deepestZoneParents[n]) {
            n++;
        }

        // siblings just below the last common ancestor decide paint
        // order: the later sibling is on top
        var parents = [
            dropzoneParents[n - 1],
            dropzoneParents[n],
            deepestZoneParents[n]
        ];

        // scan siblings back-to-front; whichever branch we meet first
        // is the one painted on top
        child = parents[0].lastChild;

        while (child) {
            if (child === parents[1]) {
                deepestZone = dropzone;
                index = i;
                deepestZoneParents = [];

                break;
            }
            else if (child === parents[2]) {
                break;
            }

            child = child.previousSibling;
        }
    }

    return index;
}
function Interaction () {
this.target = null; // current interactable being interacted with
this.element = null; // the target element of the interactable
this.dropTarget = null; // the dropzone a drag target might be dropped into
this.dropElement = null; // the element at the time of checking
this.prevDropTarget = null; // the dropzone that was recently dragged away from
this.prevDropElement = null; // the element at the time of checking
this.prepared = null; // Action that's ready to be fired on next move event
this.matches = []; // all selectors that are matched by target element
this.matchElements = []; // corresponding elements
this.inertiaStatus = {
active : false,
smoothEnd : false,
startEvent: null,
upCoords: {},
xe: 0, ye: 0,
sx: 0, sy: 0,
t0: 0,
vx0: 0, vys: 0,
duration: 0,
resumeDx: 0,
resumeDy: 0,
lambda_v0: 0,
one_ve_v0: 0,
i : null
};
if (isFunction(Function.prototype.bind)) {
this.boundInertiaFrame = this.inertiaFrame.bind(this);
this.boundSmoothEndFrame = this.smoothEndFrame.bind(this);
}
else {
var that = this;
this.boundInertiaFrame = function () { return that.inertiaFrame(); };
this.boundSmoothEndFrame = function () { return that.smoothEndFrame(); };
}
this.activeDrops = {
dropzones: [], // the dropzones that are mentioned below
elements : [], // elements of dropzones that accept the target draggable
rects : [] // the rects of the elements mentioned above
};
// keep track of added pointers
this.pointers = [];
this.pointerIds = [];
this.downTargets = [];
this.downTimes = [];
this.holdTimers = [];
// Previous native pointer move event coordinates
this.prevCoords = {
page : { x: 0, y: 0 },
client : { x: 0, y: 0 },
timeStamp: 0
};
// current native pointer move event coordinates
this.curCoords = {
page : { x: 0, y: 0 },
client : { x: 0, y: 0 },
timeStamp: 0
};
// Starting InteractEvent pointer coordinates
this.startCoords = {
page : { x: 0, y: 0 },
client : { x: 0, y: 0 },
timeStamp: 0
};
// Change in coordinates and time of the pointer
this.pointerDelta = {
page : { x: 0, y: 0, vx: 0, vy: 0, speed: 0 },
client : { x: 0, y: 0, vx: 0, vy: 0, speed: 0 },
timeStamp: 0
};
this.downEvent = null; // pointerdown/mousedown/touchstart event
this.downPointer = {};
this.prevEvent = null; // previous action event
this.tapTime = 0; // time of the most recent tap event
this.prevTap = null;
this.startOffset = { left: 0, right: 0, top: 0, bottom: 0 };
this.restrictOffset = { left: 0, right: 0, top: 0, bottom: 0 };
this.snapOffset = { x: 0, y: 0};
this.gesture = {
start: { x: 0, y: 0 },
startDistance: 0, // distance between two touches of touchStart
prevDistance : 0,
distance : 0,
scale: 1, // gesture.distance / gesture.startDistance
startAngle: 0, // angle of line joining two touches
prevAngle : 0 // angle of the previous gesture event
};
this.snapStatus = {
x : 0, y : 0,
dx : 0, dy : 0,
realX : 0, realY : 0,
snappedX: 0, snappedY: 0,
anchors : [],
paths : [],
locked : false,
changed : false
};
this.restrictStatus = {
dx : 0, dy : 0,
restrictedX: 0, restrictedY: 0,
snap : null,
restricted : false,
changed : false
};
this.restrictStatus.snap = this.snapStatus;
this.pointerIsDown = false;
this.pointerWasMoved = false;
this.gesturing = false;
this.dragging = false;
this.resizing = false;
this.resizeAxes = 'xy';
this.mouse = false;
interactions.push(this);
}
Interaction.prototype = {
// thin wrappers around the module-level coordinate helpers, supplying
// this interaction as the context argument
getPageXY  : function (pointer, xy) { return getPageXY(pointer, xy, this); },
getClientXY: function (pointer, xy) { return getClientXY(pointer, xy, this); },
setEventXY : function (target, ptr) { return setEventXY(target, ptr, this); },
// Mouse-over handler: determines which interactable (element- or
// selector-based) would act on a subsequent mousedown and caches the
// selector matches. Only runs for mouse interactions with no action
// prepared.
pointerOver: function (pointer, event, eventTarget) {
    if (this.prepared || !this.mouse) { return; }

    var curMatches = [],
        curMatchElements = [],
        prevTargetElement = this.element;

    this.addPointer(pointer);

    if (this.target
        && (testIgnore(this.target, this.element, eventTarget)
            || !testAllow(this.target, this.element, eventTarget)
            || !withinInteractionLimit(this.target, this.element, this.prepared))) {

        // if the eventTarget should be ignored or shouldn't be allowed
        // clear the previous target
        this.target = null;
        this.element = null;
        this.matches = [];
        this.matchElements = [];
    }

    // a directly-registered interactable on the event target wins over
    // any selector match
    var elementInteractable = interactables.get(eventTarget),
        elementAction = (elementInteractable
            && !testIgnore(elementInteractable, eventTarget, eventTarget)
            && testAllow(elementInteractable, eventTarget, eventTarget)
            && validateAction(
                elementInteractable.getAction(pointer, this, eventTarget),
                elementInteractable));

    elementAction = elementInteractable && withinInteractionLimit(elementInteractable, eventTarget, elementAction)
        ? elementAction
        : null;

    // collect every selector interactable matching the event target
    function pushCurMatches (interactable, selector) {
        if (interactable
            && inContext(interactable, eventTarget)
            && !testIgnore(interactable, eventTarget, eventTarget)
            && testAllow(interactable, eventTarget, eventTarget)
            && matchesSelector(eventTarget, selector)) {

            curMatches.push(interactable);
            curMatchElements.push(eventTarget);
        }
    }

    if (elementAction) {
        this.target = elementInteractable;
        this.element = eventTarget;
        this.matches = [];
        this.matchElements = [];
    }
    else {
        interactables.forEachSelector(pushCurMatches);

        if (this.validateSelector(pointer, curMatches, curMatchElements)) {
            this.matches = curMatches;
            this.matchElements = curMatchElements;

            // NOTE(review): pointerHover is declared as (pointer,
            // event, eventTarget, curEventTarget, matches,
            // matchElements) but is called here with matches in the
            // eventTarget slot — confirm against upstream.
            this.pointerHover(pointer, event, this.matches, this.matchElements);
            events.add(eventTarget,
                PointerEvent? pEventTypes.move : 'mousemove',
                listeners.pointerHover);
        }
        else if (this.target) {
            if (nodeContains(prevTargetElement, eventTarget)) {
                this.pointerHover(pointer, event, this.matches, this.matchElements);
                events.add(this.element,
                    PointerEvent? pEventTypes.move : 'mousemove',
                    listeners.pointerHover);
            }
            else {
                this.target = null;
                this.element = null;
                this.matches = [];
                this.matchElements = [];
            }
        }
    }
},
// Check what action would be performed on pointerMove target if a mouse
// button were pressed and change the cursor accordingly.
// When an action is already prepared, only prevents the default
// browser behaviour.
pointerHover: function (pointer, event, eventTarget, curEventTarget, matches, matchElements) {
    var target = this.target;

    if (!this.prepared && this.mouse) {

        var action;

        // update pointer coords for defaultActionChecker to use
        this.setEventXY(this.curCoords, pointer);

        if (matches) {
            action = this.validateSelector(pointer, matches, matchElements);
        }
        else if (target) {
            action = validateAction(target.getAction(this.pointers[0], this, this.element), this.target);
        }

        if (target && target.options.styleCursor) {
            if (action) {
                target._doc.documentElement.style.cursor = actionCursors[action];
            }
            else {
                // no possible action: restore the default cursor
                target._doc.documentElement.style.cursor = '';
            }
        }
    }
    else if (this.prepared) {
        this.checkAndPreventDefault(event, target, this.element);
    }
},
// Mouse-out handler: removes the temporary hover listener added for
// selector matches and resets the cursor when not interacting.
pointerOut: function (pointer, event, eventTarget) {
    if (this.prepared) { return; }

    // Remove temporary event listeners for selector Interactables
    if (!interactables.get(eventTarget)) {
        events.remove(eventTarget,
            PointerEvent? pEventTypes.move : 'mousemove',
            listeners.pointerHover);
    }

    if (this.target && this.target.options.styleCursor && !this.interacting()) {
        this.target._doc.documentElement.style.cursor = '';
    }
},
// Pointer-down handler for selector-based interactables: collects
// 'down' event targets, starts a hold timer, resumes inertia when the
// down hits the current inertia target, and otherwise walks up the DOM
// looking for a selector interactable whose action can start.
selectorDown: function (pointer, event, eventTarget, curEventTarget) {
    var that = this,
        element = eventTarget,
        pointerIndex = this.addPointer(pointer),
        action;

    this.collectEventTargets(pointer, event, eventTarget, 'down');

    // fire a 'hold' after 600ms without a pointer-up
    this.holdTimers[pointerIndex] = window.setTimeout(function () {
        that.pointerHold(pointer, event, eventTarget, curEventTarget);
    }, 600);

    this.pointerIsDown = true;

    // Check if the down event hits the current inertia target
    if (this.inertiaStatus.active && this.target.selector) {
        // climb up the DOM tree from the event target
        while (element && element !== element.ownerDocument) {

            // if this element is the current inertia target element
            if (element === this.element
                // and the prospective action is the same as the ongoing one
                && validateAction(this.target.getAction(pointer, this, this.element), this.target) === this.prepared) {

                // stop inertia so that the next move will be a normal one
                cancelFrame(this.inertiaStatus.i);
                this.inertiaStatus.active = false;

                return;
            }
            element = element.parentNode;
        }
    }

    // do nothing if interacting
    if (this.interacting()) {
        return;
    }

    // accumulate the selector interactables that match `element`
    function pushMatches (interactable, selector, context) {
        var elements = ie8MatchesSelector
            ? context.querySelectorAll(selector)
            : undefined;

        if (inContext(interactable, element)
            && !testIgnore(interactable, element, eventTarget)
            && testAllow(interactable, element, eventTarget)
            && matchesSelector(element, selector, elements)) {

            that.matches.push(interactable);
            that.matchElements.push(element);
        }
    }

    // update pointer coords for defaultActionChecker to use
    this.setEventXY(this.curCoords, pointer);

    if (this.matches.length && this.mouse) {
        action = this.validateSelector(pointer, this.matches, this.matchElements);
    }
    else {
        // walk up the tree until some selector yields a valid action
        while (element && element !== element.ownerDocument && !action) {
            this.matches = [];
            this.matchElements = [];

            interactables.forEachSelector(pushMatches);

            action = this.validateSelector(pointer, this.matches, this.matchElements);
            element = element.parentNode;
        }
    }

    if (action) {
        this.prepared = action;

        return this.pointerDown(pointer, event, eventTarget, curEventTarget, action);
    }
    else {
        // do these now since pointerDown isn't being called from here
        this.downTimes[pointerIndex] = new Date().getTime();
        this.downTargets[pointerIndex] = eventTarget;
        this.downEvent = event;
        extend(this.downPointer, pointer);

        copyCoords(this.prevCoords, this.curCoords);
        this.pointerWasMoved = false;
    }
},
// Determine action to be performed on next pointerMove and add appropriate
// style and event Listeners
pointerDown: function (pointer, event, eventTarget, curEventTarget, forceAction) {
    // a repeat down while something is already prepared only needs
    // default-prevention
    if (!forceAction && !this.inertiaStatus.active && this.pointerWasMoved && this.prepared) {
        this.checkAndPreventDefault(event, this.target, this.element);
        return;
    }

    this.pointerIsDown = true;

    var pointerIndex = this.addPointer(pointer),
        action;

    // If it is the second touch of a multi-touch gesture, keep the target
    // the same if a target was set by the first touch
    // Otherwise, set the target if there is no action prepared
    if ((this.pointerIds.length < 2 && !this.target) || !this.prepared) {

        var interactable = interactables.get(curEventTarget);

        if (interactable
            && !testIgnore(interactable, curEventTarget, eventTarget)
            && testAllow(interactable, curEventTarget, eventTarget)
            && (action = validateAction(forceAction || interactable.getAction(pointer, this), interactable, eventTarget))
            && withinInteractionLimit(interactable, curEventTarget, action)) {
            this.target = interactable;
            this.element = curEventTarget;
        }
    }

    var target = this.target,
        options = target && target.options;

    if (target && !this.interacting()) {
        action = action || validateAction(forceAction || target.getAction(pointer, this), target, this.element);

        this.setEventXY(this.startCoords);

        if (!action) { return; }

        if (options.styleCursor) {
            target._doc.documentElement.style.cursor = actionCursors[action];
        }

        // map the resize variants onto the axes they resize along
        this.resizeAxes = action === 'resizexy'?
                'xy':
                action === 'resizex'?
                    'x':
                    action === 'resizey'?
                        'y':
                        '';

        // a gesture can only start once a second pointer arrives
        if (action === 'gesture' && this.pointerIds.length < 2) {
            action = null;
        }

        this.prepared = action;

        // invalidate the previous snap/restrict coordinates
        this.snapStatus.snappedX = this.snapStatus.snappedY =
            this.restrictStatus.restrictedX = this.restrictStatus.restrictedY = NaN;

        this.downTimes[pointerIndex] = new Date().getTime();
        this.downTargets[pointerIndex] = eventTarget;
        this.downEvent = event;
        extend(this.downPointer, pointer);

        this.setEventXY(this.prevCoords);
        this.pointerWasMoved = false;

        this.checkAndPreventDefault(event, target, this.element);
    }
    // if inertia is active try to resume action
    else if (this.inertiaStatus.active
        && curEventTarget === this.element
        && validateAction(target.getAction(pointer, this, this.element), target) === this.prepared) {

        cancelFrame(this.inertiaStatus.i);
        this.inertiaStatus.active = false;

        this.checkAndPreventDefault(event, target, this.element);
    }
},
// Core move handler: records the pointer, detects real movement,
// re-targets a drag that started along the wrong axis, computes
// snapping/restriction offsets on the first move, and fires the
// <action>Start / <action>Move events.
pointerMove: function (pointer, event, eventTarget, curEventTarget, preEnd) {
    this.recordPointer(pointer);

    this.setEventXY(this.curCoords, (pointer instanceof InteractEvent)
        ? this.inertiaStatus.startEvent
        : undefined);

    var duplicateMove = (this.curCoords.page.x === this.prevCoords.page.x
        && this.curCoords.page.y === this.prevCoords.page.y
        && this.curCoords.client.x === this.prevCoords.client.x
        && this.curCoords.client.y === this.prevCoords.client.y);

    var dx, dy,
        pointerIndex = this.mouse? 0 : indexOf(this.pointerIds, getPointerId(pointer));

    // register movement greater than pointerMoveTolerance
    if (this.pointerIsDown && !this.pointerWasMoved) {
        dx = this.curCoords.client.x - this.startCoords.client.x;
        dy = this.curCoords.client.y - this.startCoords.client.y;

        this.pointerWasMoved = hypot(dx, dy) > defaultOptions.pointerMoveTolerance;
    }

    if (!duplicateMove && (!this.pointerIsDown || this.pointerWasMoved)) {
        if (this.pointerIsDown) {
            // real movement cancels any pending 'hold'
            window.clearTimeout(this.holdTimers[pointerIndex]);
        }

        this.collectEventTargets(pointer, event, eventTarget, 'move');
    }

    if (!this.pointerIsDown) { return; }

    if (duplicateMove && this.pointerWasMoved && !preEnd) {
        this.checkAndPreventDefault(event, this.target, this.element);
        return;
    }

    // set pointer coordinate, time changes and speeds
    setEventDeltas(this.pointerDelta, this.prevCoords, this.curCoords);

    if (!this.prepared) { return; }

    if (this.pointerWasMoved
        // ignore movement while inertia is active
        && (!this.inertiaStatus.active || (pointer instanceof InteractEvent && /inertiastart/.test(pointer.type)))) {

        // if just starting an action, calculate the pointer speed now
        if (!this.interacting()) {
            setEventDeltas(this.pointerDelta, this.prevCoords, this.curCoords);

            // check if a drag is in the correct axis
            // NOTE(review): dx/dy are only assigned above when
            // pointerWasMoved was still false on entry; otherwise they
            // are undefined here and absX/absY become NaN — confirm
            // this branch can only be reached on the first real move.
            if (this.prepared === 'drag') {
                var absX = Math.abs(dx),
                    absY = Math.abs(dy),
                    targetAxis = this.target.options.dragAxis,
                    axis = (absX > absY ? 'x' : absX < absY ? 'y' : 'xy');

                // if the movement isn't in the axis of the interactable
                if (axis !== 'xy' && targetAxis !== 'xy' && targetAxis !== axis) {
                    // cancel the prepared action
                    this.prepared = null;

                    // then try to get a drag from another ineractable
                    var element = eventTarget;

                    // check element interactables
                    while (element && element !== element.ownerDocument) {
                        var elementInteractable = interactables.get(element);

                        if (elementInteractable
                            && elementInteractable !== this.target
                            && elementInteractable.getAction(this.downPointer, this, element) === 'drag'
                            && checkAxis(axis, elementInteractable)) {

                            this.prepared = 'drag';
                            this.target = elementInteractable;
                            this.element = element;
                            break;
                        }

                        element = element.parentNode;
                    }

                    // if there's no drag from element interactables,
                    // check the selector interactables
                    if (!this.prepared) {
                        var getDraggable = function (interactable, selector, context) {
                            var elements = ie8MatchesSelector
                                ? context.querySelectorAll(selector)
                                : undefined;

                            if (interactable === this.target) { return; }

                            if (inContext(interactable, eventTarget)
                                && !testIgnore(interactable, element, eventTarget)
                                && testAllow(interactable, element, eventTarget)
                                && matchesSelector(element, selector, elements)
                                && interactable.getAction(this.downPointer, this, element) === 'drag'
                                && checkAxis(axis, interactable)
                                && withinInteractionLimit(interactable, element, 'drag')) {

                                return interactable;
                            }
                        };

                        element = eventTarget;

                        while (element && element !== element.ownerDocument) {
                            var selectorInteractable = interactables.forEachSelector(getDraggable);

                            if (selectorInteractable) {
                                this.prepared = 'drag';
                                this.target = selectorInteractable;
                                this.element = element;
                                break;
                            }

                            element = element.parentNode;
                        }
                    }
                }
            }
        }

        var starting = !!this.prepared && !this.interacting();

        if (starting && !withinInteractionLimit(this.target, this.element, this.prepared)) {
            this.stop();
            return;
        }

        if (this.prepared && this.target) {
            var target = this.target,
                shouldMove = true,
                shouldSnap = checkSnap(target, this.prepared) && (!target.options.snap.endOnly || preEnd),
                shouldRestrict = checkRestrict(target, this.prepared) && (!target.options.restrict.endOnly || preEnd);

            if (starting) {
                // on the first move, record the offsets of the start
                // point within the element rect for snap/restrict
                var rect = target.getRect(this.element),
                    snap = target.options.snap,
                    restrict = target.options.restrict,
                    width, height;

                if (rect) {
                    this.startOffset.left = this.startCoords.page.x - rect.left;
                    this.startOffset.top = this.startCoords.page.y - rect.top;
                    this.startOffset.right = rect.right - this.startCoords.page.x;
                    this.startOffset.bottom = rect.bottom - this.startCoords.page.y;

                    if ('width' in rect) { width = rect.width; }
                    else { width = rect.right - rect.left; }
                    if ('height' in rect) { height = rect.height; }
                    else { height = rect.bottom - rect.top; }
                }
                else {
                    this.startOffset.left = this.startOffset.top = this.startOffset.right = this.startOffset.bottom = 0;
                }

                if (rect && snap.elementOrigin) {
                    this.snapOffset.x = this.startOffset.left - (width * snap.elementOrigin.x);
                    this.snapOffset.y = this.startOffset.top - (height * snap.elementOrigin.y);
                }
                else {
                    this.snapOffset.x = this.snapOffset.y = 0;
                }

                if (rect && restrict.elementRect) {
                    this.restrictOffset.left = this.startOffset.left - (width * restrict.elementRect.left);
                    this.restrictOffset.top = this.startOffset.top - (height * restrict.elementRect.top);
                    this.restrictOffset.right = this.startOffset.right - (width * (1 - restrict.elementRect.right));
                    this.restrictOffset.bottom = this.startOffset.bottom - (height * (1 - restrict.elementRect.bottom));
                }
                else {
                    this.restrictOffset.left = this.restrictOffset.top = this.restrictOffset.right = this.restrictOffset.bottom = 0;
                }
            }

            var snapCoords = starting? this.startCoords.page : this.curCoords.page;

            if (shouldSnap ) { this.setSnapping (snapCoords); } else { this.snapStatus .locked = false; }
            if (shouldRestrict) { this.setRestriction(snapCoords); } else { this.restrictStatus.restricted = false; }

            // suppress the move event when snap/restrict pinned the
            // pointer to an unchanged position
            if (shouldSnap && this.snapStatus.locked && !this.snapStatus.changed) {
                shouldMove = shouldRestrict && this.restrictStatus.restricted && this.restrictStatus.changed;
            }
            else if (shouldRestrict && this.restrictStatus.restricted && !this.restrictStatus.changed) {
                shouldMove = false;
            }

            // move if snapping or restriction doesn't prevent it
            if (shouldMove) {
                var action = /resize/.test(this.prepared)? 'resize': this.prepared;

                if (starting) {
                    var dragStartEvent = this[action + 'Start'](this.downEvent);
                    this.prevEvent = dragStartEvent;

                    // reset active dropzones
                    this.activeDrops.dropzones = [];
                    this.activeDrops.elements = [];
                    this.activeDrops.rects = [];

                    if (!this.dynamicDrop) {
                        this.setActiveDrops(this.element);
                    }

                    var dropEvents = this.getDropEvents(event, dragStartEvent);

                    if (dropEvents.activate) {
                        this.fireActiveDrops(dropEvents.activate);
                    }

                    snapCoords = this.curCoords.page;

                    // set snapping and restriction for the move event
                    if (shouldSnap ) { this.setSnapping (snapCoords); }
                    if (shouldRestrict) { this.setRestriction(snapCoords); }
                }

                this.prevEvent = this[action + 'Move'](event);
            }

            this.checkAndPreventDefault(event, this.target, this.element);
        }
    }

    copyCoords(this.prevCoords, this.curCoords);

    if (this.dragging || this.resizing) {
        autoScroll.edgeMove(event);
    }
},
dragStart: function (event) {
var dragEvent = new InteractEvent(this, event, 'drag', 'start', this.element);
this.dragging = true;
this.target.fire(dragEvent);
return dragEvent;
},
// Fire a 'dragmove' InteractEvent plus the dropzone
// leave/enter/move events it produces, then remember the current
// drop target for the next move.
dragMove: function (event) {
    var target = this.target,
        dragEvent = new InteractEvent(this, event, 'drag', 'move', this.element),
        draggableElement = this.element,
        drop = this.getDrop(dragEvent, draggableElement);

    this.dropTarget = drop.dropzone;
    this.dropElement = drop.element;

    var dropEvents = this.getDropEvents(event, dragEvent);

    target.fire(dragEvent);

    // order matters: leave the old dropzone before entering the new one
    if (dropEvents.leave) { this.prevDropTarget.fire(dropEvents.leave); }
    if (dropEvents.enter) { this.dropTarget.fire(dropEvents.enter); }
    if (dropEvents.move ) { this.dropTarget.fire(dropEvents.move ); }

    this.prevDropTarget = this.dropTarget;
    this.prevDropElement = this.dropElement;

    return dragEvent;
},
resizeStart: function (event) {
var resizeEvent = new InteractEvent(this, event, 'resize', 'start', this.element);
this.target.fire(resizeEvent);
this.resizing = true;
return resizeEvent;
},
resizeMove: function (event) {
var resizeEvent = new InteractEvent(this, event, 'resize', 'move', this.element);
this.target.fire(resizeEvent);
return resizeEvent;
},
gestureStart: function (event) {
var gestureEvent = new InteractEvent(this, event, 'gesture', 'start', this.element);
gestureEvent.ds = 0;
this.gesture.startDistance = this.gesture.prevDistance = gestureEvent.distance;
this.gesture.startAngle = this.gesture.prevAngle = gestureEvent.angle;
this.gesture.scale = 1;
this.gesturing = true;
this.target.fire(gestureEvent);
return gestureEvent;
},
gestureMove: function (event) {
if (!this.pointerIds.length) {
return this.prevEvent;
}
var gestureEvent;
gestureEvent = new InteractEvent(this, event, 'gesture', 'move', this.element);
gestureEvent.ds = gestureEvent.scale - this.gesture.scale;
this.target.fire(gestureEvent);
this.gesture.prevAngle = gestureEvent.angle;
this.gesture.prevDistance = gestureEvent.distance;
if (gestureEvent.scale !== Infinity &&
gestureEvent.scale !== null &&
gestureEvent.scale !== undefined &&
!isNaN(gestureEvent.scale)) {
this.gesture.scale = gestureEvent.scale;
}
return gestureEvent;
},
pointerHold: function (pointer, event, eventTarget) {
this.collectEventTargets(pointer, event, eventTarget, 'hold');
},
pointerUp: function (pointer, event, eventTarget, curEventTarget) {
var pointerIndex = this.mouse? 0 : indexOf(this.pointerIds, getPointerId(pointer));
window.clearTimeout(this.holdTimers[pointerIndex]);
this.collectEventTargets(pointer, event, eventTarget, 'up' );
this.collectEventTargets(pointer, event, eventTarget, 'tap');
this.pointerEnd(pointer, event, eventTarget, curEventTarget);
this.removePointer(pointer);
},
pointerCancel: function (pointer, event, eventTarget, curEventTarget) {
var pointerIndex = this.mouse? 0 : indexOf(this.pointerIds, getPointerId(pointer));
window.clearTimeout(this.holdTimers[pointerIndex]);
this.collectEventTargets(pointer, event, eventTarget, 'cancel');
this.pointerEnd(pointer, event, eventTarget, curEventTarget);
},
        // End interact move events and stop auto-scroll unless inertia is enabled.
        // If inertia or an end-only snap/restrict "smooth end" kicks in, this
        // returns early and lets inertiaFrame/smoothEndFrame call pointerEnd
        // again (with the synthesized startEvent) once the animation finishes.
        pointerEnd: function (pointer, event, eventTarget, curEventTarget) {
            var endEvent,
                target = this.target,
                options = target && target.options,
                inertiaOptions = options && options.inertia,
                inertiaStatus = this.inertiaStatus;
            if (this.interacting()) {
                // an inertia animation is already running; it ends the action itself
                if (inertiaStatus.active) { return; }
                var pointerSpeed,
                    now = new Date().getTime(),
                    inertiaPossible = false,
                    inertia = false,
                    smoothEnd = false,
                    // snap/restrict configured to apply only at the end of the action
                    endSnap = checkSnap(target, this.prepared) && options.snap.endOnly,
                    endRestrict = checkRestrict(target, this.prepared) && options.restrict.endOnly,
                    dx = 0,
                    dy = 0,
                    startEvent;
                if (this.dragging) {
                    // measure release speed only along the locked drag axis, if any
                    if (options.dragAxis === 'x' ) { pointerSpeed = Math.abs(this.pointerDelta.client.vx); }
                    else if (options.dragAxis === 'y' ) { pointerSpeed = Math.abs(this.pointerDelta.client.vy); }
                    else /*options.dragAxis === 'xy'*/{ pointerSpeed = this.pointerDelta.client.speed; }
                }
                // check if inertia should be started
                inertiaPossible = (options.inertiaEnabled
                                   && this.prepared !== 'gesture'
                                   && contains(inertiaOptions.actions, this.prepared)
                                   && event !== inertiaStatus.startEvent);
                // inertia requires a recent move (< 50ms old) and enough speed
                inertia = (inertiaPossible
                           && (now - this.curCoords.timeStamp) < 50
                           && pointerSpeed > inertiaOptions.minSpeed
                           && pointerSpeed > inertiaOptions.endSpeed);
                // no inertia, but an end-only snap/restrict may still need a
                // short "smooth end" animation to the corrected position
                if (inertiaPossible && !inertia && (endSnap || endRestrict)) {
                    var snapRestrict = {};
                    // self-referencing status object so snap and restrict share state
                    snapRestrict.snap = snapRestrict.restrict = snapRestrict;
                    if (endSnap) {
                        this.setSnapping(this.curCoords.page, snapRestrict);
                        if (snapRestrict.locked) {
                            dx += snapRestrict.dx;
                            dy += snapRestrict.dy;
                        }
                    }
                    if (endRestrict) {
                        this.setRestriction(this.curCoords.page, snapRestrict);
                        if (snapRestrict.restricted) {
                            dx += snapRestrict.dx;
                            dy += snapRestrict.dy;
                        }
                    }
                    if (dx || dy) {
                        smoothEnd = true;
                    }
                }
                if (inertia || smoothEnd) {
                    copyCoords(inertiaStatus.upCoords, this.curCoords);
                    // the synthesized inertiastart event doubles as pointers[0]
                    // for the duration of the animation
                    this.pointers[0] = inertiaStatus.startEvent = startEvent =
                        new InteractEvent(this, event, this.prepared, 'inertiastart', this.element);
                    inertiaStatus.t0 = now;
                    target.fire(inertiaStatus.startEvent);
                    if (inertia) {
                        inertiaStatus.vx0 = this.pointerDelta.client.vx;
                        inertiaStatus.vy0 = this.pointerDelta.client.vy;
                        inertiaStatus.v0 = pointerSpeed;
                        this.calcInertia(inertiaStatus);
                        // project the inertia end point and run snap/restrict
                        // against it so the animation can curve to the corrected
                        // position (modifiedXe/modifiedYe)
                        var page = extend({}, this.curCoords.page),
                            origin = getOriginXY(target, this.element),
                            statusObject;
                        page.x = page.x + inertiaStatus.xe - origin.x;
                        page.y = page.y + inertiaStatus.ye - origin.y;
                        statusObject = {
                            useStatusXY: true,
                            x: page.x,
                            y: page.y,
                            dx: 0,
                            dy: 0,
                            snap: null
                        };
                        statusObject.snap = statusObject;
                        dx = dy = 0;
                        if (endSnap) {
                            var snap = this.setSnapping(this.curCoords.page, statusObject);
                            if (snap.locked) {
                                dx += snap.dx;
                                dy += snap.dy;
                            }
                        }
                        if (endRestrict) {
                            var restrict = this.setRestriction(this.curCoords.page, statusObject);
                            if (restrict.restricted) {
                                dx += restrict.dx;
                                dy += restrict.dy;
                            }
                        }
                        inertiaStatus.modifiedXe += dx;
                        inertiaStatus.modifiedYe += dy;
                        inertiaStatus.i = reqFrame(this.boundInertiaFrame);
                    }
                    else {
                        // smooth end: animate only the snap/restrict correction
                        inertiaStatus.smoothEnd = true;
                        inertiaStatus.xe = dx;
                        inertiaStatus.ye = dy;
                        inertiaStatus.sx = inertiaStatus.sy = 0;
                        inertiaStatus.i = reqFrame(this.boundSmoothEndFrame);
                    }
                    inertiaStatus.active = true;
                    return;
                }
                if (endSnap || endRestrict) {
                    // fire a move event at the snapped coordinates
                    this.pointerMove(pointer, event, eventTarget, curEventTarget, true);
                }
            }
            if (this.dragging) {
                endEvent = new InteractEvent(this, event, 'drag', 'end', this.element);
                // resolve the final drop target and fire leave/enter/drop/
                // deactivate events before the dragend itself
                var draggableElement = this.element,
                    drop = this.getDrop(endEvent, draggableElement);
                this.dropTarget = drop.dropzone;
                this.dropElement = drop.element;
                var dropEvents = this.getDropEvents(event, endEvent);
                if (dropEvents.leave) { this.prevDropTarget.fire(dropEvents.leave); }
                if (dropEvents.enter) { this.dropTarget.fire(dropEvents.enter); }
                if (dropEvents.drop ) { this.dropTarget.fire(dropEvents.drop ); }
                if (dropEvents.deactivate) {
                    this.fireActiveDrops(dropEvents.deactivate);
                }
                target.fire(endEvent);
            }
            else if (this.resizing) {
                endEvent = new InteractEvent(this, event, 'resize', 'end', this.element);
                target.fire(endEvent);
            }
            else if (this.gesturing) {
                endEvent = new InteractEvent(this, event, 'gesture', 'end', this.element);
                target.fire(endEvent);
            }
            this.stop(event);
        },
collectDrops: function (element) {
var drops = [],
elements = [],
i;
element = element || this.element;
// collect all dropzones and their elements which qualify for a drop
for (i = 0; i < interactables.length; i++) {
if (!interactables[i].options.dropzone) { continue; }
var current = interactables[i];
// test the draggable element against the dropzone's accept setting
if ((isElement(current.options.accept) && current.options.accept !== element)
|| (isString(current.options.accept)
&& !matchesSelector(element, current.options.accept))) {
continue;
}
// query for new elements if necessary
var dropElements = current.selector? current._context.querySelectorAll(current.selector) : [current._element];
for (var j = 0, len = dropElements.length; j < len; j++) {
var currentElement = dropElements[j];
if (currentElement === element) {
continue;
}
drops.push(current);
elements.push(currentElement);
}
}
return {
dropzones: drops,
elements: elements
};
},
fireActiveDrops: function (event) {
var i,
current,
currentElement,
prevElement;
// loop through all active dropzones and trigger event
for (i = 0; i < this.activeDrops.dropzones.length; i++) {
current = this.activeDrops.dropzones[i];
currentElement = this.activeDrops.elements [i];
// prevent trigger of duplicate events on same element
if (currentElement !== prevElement) {
// set current element as event target
event.target = currentElement;
current.fire(event);
}
prevElement = currentElement;
}
},
// Collect a new set of possible drops and save them in activeDrops.
// setActiveDrops should always be called when a drag has just started or a
// drag event happens while dynamicDrop is true
setActiveDrops: function (dragElement) {
// get dropzones and their elements that could receive the draggable
var possibleDrops = this.collectDrops(dragElement, true);
this.activeDrops.dropzones = possibleDrops.dropzones;
this.activeDrops.elements = possibleDrops.elements;
this.activeDrops.rects = [];
for (var i = 0; i < this.activeDrops.dropzones.length; i++) {
this.activeDrops.rects[i] = this.activeDrops.dropzones[i].getRect(this.activeDrops.elements[i]);
}
},
getDrop: function (event, dragElement) {
var validDrops = [];
if (dynamicDrop) {
this.setActiveDrops(dragElement);
}
// collect all dropzones and their elements which qualify for a drop
for (var j = 0; j < this.activeDrops.dropzones.length; j++) {
var current = this.activeDrops.dropzones[j],
currentElement = this.activeDrops.elements [j],
rect = this.activeDrops.rects [j];
validDrops.push(current.dropCheck(this.pointers[0], this.target, dragElement, currentElement, rect)
? currentElement
: null);
}
// get the most appropriate dropzone based on DOM depth and order
var dropIndex = indexOfDeepestElement(validDrops),
dropzone = this.activeDrops.dropzones[dropIndex] || null,
element = this.activeDrops.elements [dropIndex] || null;
return {
dropzone: dropzone,
element: element
};
},
        // Derive the drop-related events implied by a drag event:
        //   enter/leave  — when the drop element changed since the last call
        //   activate     — on dragstart (fired on all active dropzones)
        //   deactivate   — on dragend   (fired on all active dropzones)
        //   move         — on dragmove while over a dropzone
        //   drop         — on dragend   while over a dropzone
        // Entries are null when not applicable; the caller fires them.
        getDropEvents: function (pointerEvent, dragEvent) {
            var dragLeaveEvent      = null,
                dragEnterEvent      = null,
                dropActivateEvent   = null,
                dropDeactivateEvent = null,
                dropMoveEvent       = null,
                dropEvent           = null;
            if (this.dropElement !== this.prevDropElement) {
                // if there was a prevDropTarget, create a dragleave event
                if (this.prevDropTarget) {
                    dragLeaveEvent = new InteractEvent(this, pointerEvent, 'drag', 'leave', this.prevDropElement, dragEvent.target);
                    dragLeaveEvent.draggable = dragEvent.interactable;
                    // annotate the drag event with what was left
                    dragEvent.dragLeave = this.prevDropElement;
                    dragEvent.prevDropzone = this.prevDropTarget;
                }
                // if the dropTarget is not null, create a dragenter event
                if (this.dropTarget) {
                    dragEnterEvent = new InteractEvent(this, pointerEvent, 'drag', 'enter', this.dropElement, dragEvent.target);
                    dragEnterEvent.draggable = dragEvent.interactable;
                    // annotate the drag event with what was entered
                    dragEvent.dragEnter = this.dropElement;
                    dragEvent.dropzone = this.dropTarget;
                }
            }
            if (dragEvent.type === 'dragend' && this.dropTarget) {
                dropEvent = new InteractEvent(this, pointerEvent, 'drop', null, this.dropElement, dragEvent.target);
                dropEvent.draggable = dragEvent.interactable;
                dragEvent.dropzone = this.dropTarget;
            }
            if (dragEvent.type === 'dragstart') {
                dropActivateEvent = new InteractEvent(this, pointerEvent, 'drop', 'activate', this.element, dragEvent.target);
                dropActivateEvent.draggable = dragEvent.interactable;
            }
            if (dragEvent.type === 'dragend') {
                dropDeactivateEvent = new InteractEvent(this, pointerEvent, 'drop', 'deactivate', this.element, dragEvent.target);
                dropDeactivateEvent.draggable = dragEvent.interactable;
            }
            if (dragEvent.type === 'dragmove' && this.dropTarget) {
                // dropmove is a plain object, not an InteractEvent
                dropMoveEvent = {
                    target       : this.dropElement,
                    relatedTarget: dragEvent.target,
                    draggable    : dragEvent.interactable,
                    dragmove     : dragEvent,
                    type         : 'dropmove',
                    timeStamp    : dragEvent.timeStamp
                };
                dragEvent.dropzone = this.dropTarget;
            }
            return {
                enter      : dragEnterEvent,
                leave      : dragLeaveEvent,
                activate   : dropActivateEvent,
                deactivate : dropDeactivateEvent,
                move       : dropMoveEvent,
                drop       : dropEvent
            };
        },
currentAction: function () {
return (this.dragging && 'drag') || (this.resizing && 'resize') || (this.gesturing && 'gesture') || null;
},
        // Whether a drag, resize or gesture is currently in progress.
        interacting: function () {
            return this.dragging || this.resizing || this.gesturing;
        },
clearTargets: function () {
if (this.target && !this.target.selector) {
this.target = this.element = null;
}
this.dropTarget = this.dropElement = this.prevDropTarget = this.prevDropElement = null;
},
        // Tear down the interaction: stop auto-scroll, restore the cursor,
        // optionally preventDefault on the final event, clear targets and
        // reset all per-interaction pointer/flag state.
        stop: function (event) {
            if (this.interacting()) {
                autoScroll.stop();
                this.matches = [];
                this.matchElements = [];
                var target = this.target;
                if (target.options.styleCursor) {
                    // undo the cursor set while interacting
                    target._doc.documentElement.style.cursor = '';
                }
                // prevent Default only if were previously interacting
                if (event && isFunction(event.preventDefault)) {
                    this.checkAndPreventDefault(event, target, this.element);
                }
                if (this.dragging) {
                    this.activeDrops.dropzones = this.activeDrops.elements = this.activeDrops.rects = null;
                }
                this.clearTargets();
            }
            // reset flags and pending-coordinate state
            this.pointerIsDown = this.snapStatus.locked = this.dragging = this.resizing = this.gesturing = false;
            this.prepared = this.prevEvent = null;
            this.inertiaStatus.resumeDx = this.inertiaStatus.resumeDy = 0;
            // empty all per-pointer arrays in place
            this.pointerIds .splice(0);
            this.pointers   .splice(0);
            this.downTargets.splice(0);
            this.downTimes  .splice(0);
            this.holdTimers .splice(0);
            // delete interaction if it's not the only one
            if (interactions.length > 1) {
                interactions.splice(indexOf(interactions, this), 1);
            }
        },
        // One animation frame of the inertia phase. Advances sx/sy along an
        // exponential-decay curve (see calcInertia) — or along a quadratic
        // curve towards the snap/restrict-corrected end point when
        // modifiedXe/Ye differ from xe/ye — then re-fires pointerMove with the
        // stored inertiastart event. When the duration te has elapsed, snaps
        // to the final offset and ends the interaction via pointerEnd.
        // NOTE: t0 here is in *seconds* (set by calcInertia from the start
        // event's timeStamp / 1000).
        inertiaFrame: function () {
            var inertiaStatus = this.inertiaStatus,
                options = this.target.options.inertia,
                lambda = options.resistance,
                t = new Date().getTime() / 1000 - inertiaStatus.t0;
            if (t < inertiaStatus.te) {
                // normalised progress of the decay, 0 → 1
                var progress =  1 - (Math.exp(-lambda * t) - inertiaStatus.lambda_v0) / inertiaStatus.one_ve_v0;
                if (inertiaStatus.modifiedXe === inertiaStatus.xe && inertiaStatus.modifiedYe === inertiaStatus.ye) {
                    // end point unmodified — straight-line decay
                    inertiaStatus.sx = inertiaStatus.xe * progress;
                    inertiaStatus.sy = inertiaStatus.ye * progress;
                }
                else {
                    // curve from the origin towards the corrected end point
                    var quadPoint = getQuadraticCurvePoint(
                            0, 0,
                            inertiaStatus.xe, inertiaStatus.ye,
                            inertiaStatus.modifiedXe, inertiaStatus.modifiedYe,
                            progress);
                    inertiaStatus.sx = quadPoint.x;
                    inertiaStatus.sy = quadPoint.y;
                }
                this.pointerMove(inertiaStatus.startEvent, inertiaStatus.startEvent);
                inertiaStatus.i = reqFrame(this.boundInertiaFrame);
            }
            else {
                // time's up — land exactly on the (possibly corrected) end point
                inertiaStatus.sx = inertiaStatus.modifiedXe;
                inertiaStatus.sy = inertiaStatus.modifiedYe;
                this.pointerMove(inertiaStatus.startEvent, inertiaStatus.startEvent);
                inertiaStatus.active = false;
                this.pointerEnd(inertiaStatus.startEvent, inertiaStatus.startEvent);
            }
        },
        // One animation frame of the "smooth end" phase: ease (easeOutQuad)
        // from the release point to the end-only snap/restrict correction
        // (xe/ye), re-firing pointerMove each frame, then end the interaction.
        // NOTE: unlike inertiaFrame, t0 here is in *milliseconds* (set in
        // pointerEnd), matching smoothEndDuration.
        smoothEndFrame: function () {
            var inertiaStatus = this.inertiaStatus,
                t = new Date().getTime() - inertiaStatus.t0,
                duration = this.target.options.inertia.smoothEndDuration;
            if (t < duration) {
                inertiaStatus.sx = easeOutQuad(t, 0, inertiaStatus.xe, duration);
                inertiaStatus.sy = easeOutQuad(t, 0, inertiaStatus.ye, duration);
                this.pointerMove(inertiaStatus.startEvent, inertiaStatus.startEvent);
                inertiaStatus.i = reqFrame(this.boundSmoothEndFrame);
            }
            else {
                // land exactly on the corrected end point and finish
                inertiaStatus.sx = inertiaStatus.xe;
                inertiaStatus.sy = inertiaStatus.ye;
                this.pointerMove(inertiaStatus.startEvent, inertiaStatus.startEvent);
                inertiaStatus.active = false;
                inertiaStatus.smoothEnd = false;
                this.pointerEnd(inertiaStatus.startEvent, inertiaStatus.startEvent);
            }
        },
addPointer: function (pointer) {
var id = getPointerId(pointer),
index = this.mouse? 0 : indexOf(this.pointerIds, id);
if (index === -1) {
index = this.pointerIds.length;
this.pointerIds.push(id);
}
this.pointers[index] = pointer;
return index;
},
removePointer: function (pointer) {
var id = getPointerId(pointer),
index = this.mouse? 0 : indexOf(this.pointerIds, id);
if (index === -1) { return; }
this.pointerIds .splice(index, 1);
this.pointers .splice(index, 1);
this.downTargets.splice(index, 1);
this.downTimes .splice(index, 1);
this.holdTimers .splice(index, 1);
},
recordPointer: function (pointer) {
// Do not update pointers while inertia is active.
// The inertia start event should be this.pointers[0]
if (this.inertiaStatus.active) { return; }
var index = this.mouse? 0: indexOf(this.pointerIds, getPointerId(pointer));
if (index === -1) { return; }
this.pointers[index] = pointer;
},
        // Walk up from eventTarget collecting every interactable (direct or
        // selector-matched) listening for eventType, then fire synthesized
        // pointer events on them via firePointers. 'tap' is suppressed when
        // the pointer moved or was lifted over a different element than it
        // went down on.
        collectEventTargets: function (pointer, event, eventTarget, eventType) {
            var pointerIndex = this.mouse? 0 : indexOf(this.pointerIds, getPointerId(pointer));
            // do not fire a tap event if the pointer was moved before being lifted
            if (eventType === 'tap' && (this.pointerWasMoved
                // or if the pointerup target is different to the pointerdown target
                || !(this.downTargets[pointerIndex] && this.downTargets[pointerIndex] === eventTarget))) {
                return;
            }
            var targets = [],
                elements = [],
                element = eventTarget;
            // NOTE: this closure reads `element`, which is mutated by the
            // while-loop below as it walks up the ancestor chain.
            function collectSelectors (interactable, selector, context) {
                // pre-queried element list is only needed for the IE8
                // matchesSelector fallback
                var els = ie8MatchesSelector
                        ? context.querySelectorAll(selector)
                        : undefined;
                if (interactable._iEvents[eventType]
                    && isElement(element)
                    && inContext(interactable, element)
                    && !testIgnore(interactable, element, eventTarget)
                    && testAllow(interactable, element, eventTarget)
                    && matchesSelector(element, selector, els)) {
                    targets.push(interactable);
                    elements.push(element);
                }
            }
            while (element) {
                // directly-bound interactable on this element
                if (interact.isSet(element) && interact(element)._iEvents[eventType]) {
                    targets.push(interact(element));
                    elements.push(element);
                }
                // selector-bound interactables matching this element
                interactables.forEachSelector(collectSelectors);
                element = element.parentNode;
            }
            if (targets.length) {
                this.firePointers(pointer, event, targets, elements, eventType);
            }
        },
firePointers: function (pointer, event, targets, elements, eventType) {
var pointerIndex = this.mouse? 0 : indexOf(getPointerId(pointer)),
pointerEvent = {},
i,
// for tap events
interval, dbl;
extend(pointerEvent, event);
if (event !== pointer) {
extend(pointerEvent, pointer);
}
pointerEvent.preventDefault = preventOriginalDefault;
pointerEvent.stopPropagation = InteractEvent.prototype.stopPropagation;
pointerEvent.stopImmediatePropagation = InteractEvent.prototype.stopImmediatePropagation;
pointerEvent.interaction = this;
pointerEvent.timeStamp = new Date().getTime();
pointerEvent.originalEvent = event;
pointerEvent.type = eventType;
pointerEvent.pointerId = getPointerId(pointer);
pointerEvent.pointerType = this.mouse? 'mouse' : !supportsPointerEvent? 'touch'
: isString(pointer.pointerType)
? pointer.pointerType
: [,,'touch', 'pen', 'mouse'][pointer.pointerType];
if (eventType === 'tap') {
pointerEvent.dt = pointerEvent.timeStamp - this.downTimes[pointerIndex];
interval = pointerEvent.timeStamp - this.tapTime;
dbl = (this.prevTap && this.prevTap.type !== 'doubletap'
&& this.prevTap.target === pointerEvent.target
&& interval < 500);
this.tapTime = pointerEvent.timeStamp;
}
for (i = 0; i < targets.length; i++) {
pointerEvent.currentTarget = elements[i];
pointerEvent.interactable = targets[i];
targets[i].fire(pointerEvent);
if (pointerEvent.immediatePropagationStopped
||(pointerEvent.propagationStopped && elements[i + 1] !== pointerEvent.currentTarget)) {
break;
}
}
if (dbl) {
var doubleTap = {};
extend(doubleTap, pointerEvent);
doubleTap.dt = interval;
doubleTap.type = 'doubletap';
for (i = 0; i < targets.length; i++) {
doubleTap.currentTarget = elements[i];
doubleTap.interactable = targets[i];
targets[i].fire(doubleTap);
if (doubleTap.immediatePropagationStopped
||(doubleTap.propagationStopped && elements[i + 1] !== doubleTap.currentTarget)) {
break;
}
}
this.prevTap = doubleTap;
}
else if (eventType === 'tap') {
this.prevTap = pointerEvent;
}
},
validateSelector: function (pointer, matches, matchElements) {
for (var i = 0, len = matches.length; i < len; i++) {
var match = matches[i],
matchElement = matchElements[i],
action = validateAction(match.getAction(pointer, this, matchElement), match);
if (action && withinInteractionLimit(match, matchElement, action)) {
this.target = match;
this.element = matchElement;
return action;
}
}
},
setSnapping: function (pageCoords, status) {
var snap = this.target.options.snap,
anchors = snap.anchors,
page,
closest,
range,
inRange,
snapChanged,
dx,
dy,
distance,
i, len;
status = status || this.snapStatus;
if (status.useStatusXY) {
page = { x: status.x, y: status.y };
}
else {
var origin = getOriginXY(this.target, this.element);
page = extend({}, pageCoords);
page.x -= origin.x;
page.y -= origin.y;
}
page.x -= this.inertiaStatus.resumeDx;
page.y -= this.inertiaStatus.resumeDy;
status.realX = page.x;
status.realY = page.y;
// change to infinite range when range is negative
if (snap.range < 0) { snap.range = Infinity; }
// create an anchor representative for each path's returned point
if (snap.mode === 'path') {
anchors = [];
for (i = 0, len = snap.paths.length; i < len; i++) {
var path = snap.paths[i];
if (isFunction(path)) {
path = path(page.x, page.y);
}
anchors.push({
x: isNumber(path.x) ? path.x : page.x,
y: isNumber(path.y) ? path.y : page.y,
range: isNumber(path.range)? path.range: snap.range
});
}
}
if ((snap.mode === 'anchor' || snap.mode === 'path') && anchors.length) {
closest = {
anchor: null,
distance: 0,
range: 0,
dx: 0,
dy: 0
};
for (i = 0, len = anchors.length; i < len; i++) {
var anchor = anchors[i];
range = isNumber(anchor.range)? anchor.range: snap.range;
dx = anchor.x - page.x + this.snapOffset.x;
dy = anchor.y - page.y + this.snapOffset.y;
distance = hypot(dx, dy);
inRange = distance < range;
// Infinite anchors count as being out of range
// compared to non infinite ones that are in range
if (range === Infinity && closest.inRange && closest.range !== Infinity) {
inRange = false;
}
if (!closest.anchor || (inRange?
// is the closest anchor in range?
(closest.inRange && range !== Infinity)?
// the pointer is relatively deeper in this anchor
distance / range < closest.distance / closest.range:
//the pointer is closer to this anchor
distance < closest.distance:
// The other is not in range and the pointer is closer to this anchor
(!closest.inRange && distance < closest.distance))) {
if (range === Infinity) {
inRange = true;
}
closest.anchor = anchor;
closest.distance = distance;
closest.range = range;
closest.inRange = inRange;
closest.dx = dx;
closest.dy = dy;
status.range = range;
}
}
inRange = closest.inRange;
snapChanged = (closest.anchor.x !== status.x || closest.anchor.y !== status.y);
status.snappedX = closest.anchor.x;
status.snappedY = closest.anchor.y;
status.dx = closest.dx;
status.dy = closest.dy;
}
else if (snap.mode === 'grid') {
var gridx = Math.round((page.x - snap.gridOffset.x - this.snapOffset.x) / snap.grid.x),
gridy = Math.round((page.y - snap.gridOffset.y - this.snapOffset.y) / snap.grid.y),
newX = gridx * snap.grid.x + snap.gridOffset.x + this.snapOffset.x,
newY = gridy * snap.grid.y + snap.gridOffset.y + this.snapOffset.y;
dx = newX - page.x;
dy = newY - page.y;
distance = hypot(dx, dy);
inRange = distance < snap.range;
snapChanged = (newX !== status.snappedX || newY !== status.snappedY);
status.snappedX = newX;
status.snappedY = newY;
status.dx = dx;
status.dy = dy;
status.range = snap.range;
}
status.changed = (snapChanged || (inRange && !status.locked));
status.locked = inRange;
return status;
},
        // Clamp the given page coordinates to the action's `restrict` area and
        // record the correction in status.dx/dy. The restriction may be the
        // keyword 'parent' or 'self', a CSS selector (nearest matching
        // ancestor), a function of (x, y, element), an element, or a rect-like
        // object. Updates and returns `status` (this.restrictStatus by
        // default).
        setRestriction: function (pageCoords, status) {
            var target = this.target,
                // all resize variants share the 'resize' restriction
                action = /resize/.test(this.prepared)? 'resize' : this.prepared,
                restrict = target && target.options.restrict,
                restriction = restrict && restrict[action],
                page;
            if (!restriction) {
                return status;
            }
            status = status || this.restrictStatus;
            // NOTE: the inner `page =` assignments are redundant — the outer
            // assignment receives the same value either way.
            page = status.useStatusXY
                    ? page = { x: status.x, y: status.y }
                    : page = extend({}, pageCoords);
            // restrict on top of an applied snap correction
            if (status.snap && status.snap.locked) {
                page.x += status.snap.dx || 0;
                page.y += status.snap.dy || 0;
            }
            page.x -= this.inertiaStatus.resumeDx;
            page.y -= this.inertiaStatus.resumeDy;
            status.dx = 0;
            status.dy = 0;
            status.restricted = false;
            var rect, restrictedX, restrictedY;
            if (isString(restriction)) {
                if (restriction === 'parent') {
                    restriction = this.element.parentNode;
                }
                else if (restriction === 'self') {
                    restriction = target.getRect(this.element);
                }
                else {
                    // presumably a CSS selector: nearest matching ancestor
                    restriction = closest(this.element, restriction);
                }
                if (!restriction) { return status; }
            }
            if (isFunction(restriction)) {
                restriction = restriction(page.x, page.y, this.element);
            }
            if (isElement(restriction)) {
                restriction = getElementRect(restriction);
            }
            rect = restriction;
            // object is assumed to have
            // x, y, width, height or
            // left, top, right, bottom
            if ('x' in restriction && 'y' in restriction) {
                restrictedX = Math.max(Math.min(rect.x + rect.width  - this.restrictOffset.right , page.x), rect.x + this.restrictOffset.left);
                restrictedY = Math.max(Math.min(rect.y + rect.height - this.restrictOffset.bottom, page.y), rect.y + this.restrictOffset.top );
            }
            else {
                restrictedX = Math.max(Math.min(rect.right  - this.restrictOffset.right , page.x), rect.left + this.restrictOffset.left);
                restrictedY = Math.max(Math.min(rect.bottom - this.restrictOffset.bottom, page.y), rect.top  + this.restrictOffset.top );
            }
            status.dx = restrictedX - page.x;
            status.dy = restrictedY - page.y;
            status.changed = status.restrictedX !== restrictedX || status.restrictedY !== restrictedY;
            status.restricted = !!(status.dx || status.dy);
            status.restrictedX = restrictedX;
            status.restrictedY = restrictedY;
            return status;
        },
checkAndPreventDefault: function (event, interactable, element) {
if (!(interactable = interactable || this.target)) { return; }
var options = interactable.options,
prevent = options.preventDefault;
if (prevent === 'auto' && element && !/^input$|^textarea$/i.test(element.nodeName)) {
// do not preventDefault on pointerdown if the prepared action is a drag
// and dragging can only start from a certain direction - this allows
// a touch to pan the viewport if a drag isn't in the right direction
if (/down|start/i.test(event.type)
&& this.prepared === 'drag' && options.dragAxis !== 'xy') {
return;
}
event.preventDefault();
return;
}
if (prevent === true) {
event.preventDefault();
return;
}
},
        // Compute the inertia-phase parameters on `status` from the release
        // velocity (status.v0, vx0, vy0) and the target's inertia options,
        // using an exponential-decay model with resistance `lambda`:
        // duration te = -ln(endSpeed / v0) / lambda, end offsets xe/ye.
        calcInertia: function (status) {
            var inertiaOptions = this.target.options.inertia,
                lambda = inertiaOptions.resistance,
                inertiaDur = -Math.log(inertiaOptions.endSpeed / status.v0) / lambda;
            // page coordinates at the start of the inertia phase
            status.x0 = this.prevEvent.pageX;
            status.y0 = this.prevEvent.pageY;
            // t0 in *seconds* — inertiaFrame measures elapsed time in seconds
            status.t0 = status.startEvent.timeStamp / 1000;
            status.sx = status.sy = 0;
            status.modifiedXe = status.xe = (status.vx0 - inertiaDur) / lambda;
            status.modifiedYe = status.ye = (status.vy0 - inertiaDur) / lambda;
            status.te = inertiaDur;
            // cached terms for the progress formula in inertiaFrame
            status.lambda_v0 = lambda / status.v0;
            status.one_ve_v0 = 1 - inertiaOptions.endSpeed / status.v0;
        }
};
function getInteractionFromPointer (pointer, eventType, eventTarget) {
var i = 0, len = interactions.length,
mouseEvent = (/mouse/i.test(pointer.pointerType || eventType)
// MSPointerEvent.MSPOINTER_TYPE_MOUSE
|| pointer.pointerType === 4),
interaction;
var id = getPointerId(pointer);
// try to resume inertia with a new pointer
if (/down|start/i.test(eventType)) {
for (i = 0; i < len; i++) {
interaction = interactions[i];
var element = eventTarget;
if (interaction.inertiaStatus.active && interaction.target.options.inertia.allowResume
&& (interaction.mouse === mouseEvent)) {
while (element) {
// if the element is the interaction element
if (element === interaction.element) {
// update the interaction's pointer
interaction.removePointer(interaction.pointers[0]);
interaction.addPointer(pointer);
return interaction;
}
element = element.parentNode;
}
}
}
}
// if it's a mouse interaction
if (mouseEvent || !(supportsTouch || supportsPointerEvent)) {
// find a mouse interaction that's not in inertia phase
for (i = 0; i < len; i++) {
if (interactions[i].mouse && !interactions[i].inertiaStatus.active) {
return interactions[i];
}
}
// find any interaction specifically for mouse.
// if the eventType is a mousedown, and inertia is active
// ignore the interaction
for (i = 0; i < len; i++) {
if (interactions[i].mouse && !(/down/.test(eventType) && interactions[i].inertiaStatus.active)) {
return interaction;
}
}
// create a new interaction for mouse
interaction = new Interaction();
interaction.mouse = true;
return interaction;
}
// get interaction that has this pointer
for (i = 0; i < len; i++) {
if (contains(interactions[i].pointerIds, id)) {
return interactions[i];
}
}
// at this stage, a pointerUp should not return an interaction
if (/up|end|out/i.test(eventType)) {
return null;
}
// get first idle interaction
for (i = 0; i < len; i++) {
interaction = interactions[i];
if ((!interaction.prepared || (interaction.target.gesturable()))
&& !interaction.interacting()
&& !(!mouseEvent && interaction.mouse)) {
interaction.addPointer(pointer);
return interaction;
}
}
return new Interaction();
}
function doOnInteractions (method) {
return (function (event) {
var interaction,
eventTarget = getActualElement(event.target),
curEventTarget = getActualElement(event.currentTarget),
i;
if (supportsTouch && /touch/.test(event.type)) {
prevTouchTime = new Date().getTime();
for (i = 0; i < event.changedTouches.length; i++) {
var pointer = event.changedTouches[i];
interaction = getInteractionFromPointer(pointer, event.type, eventTarget);
if (!interaction) { continue; }
interaction[method](pointer, event, eventTarget, curEventTarget);
}
}
else {
if (!supportsPointerEvent && /mouse/.test(event.type)) {
// ignore mouse events while touch interactions are active
for (i = 0; i < interactions.length; i++) {
if (!interactions[i].mouse && interactions[i].pointerIsDown) {
return;
}
}
// try to ignore mouse events that are simulated by the browser
// after a touch event
if (new Date().getTime() - prevTouchTime < 500) {
return;
}
}
interaction = getInteractionFromPointer(event, event.type, eventTarget);
if (!interaction) { return; }
interaction[method](event, event, eventTarget, curEventTarget);
}
});
}
function InteractEvent (interaction, event, action, phase, element, related) {
var client,
page,
target = interaction.target,
snapStatus = interaction.snapStatus,
restrictStatus = interaction.restrictStatus,
pointers = interaction.pointers,
deltaSource = (target && target.options || defaultOptions).deltaSource,
sourceX = deltaSource + 'X',
sourceY = deltaSource + 'Y',
options = target? target.options: defaultOptions,
origin = getOriginXY(target, element),
starting = phase === 'start',
ending = phase === 'end',
coords = starting? interaction.startCoords : interaction.curCoords;
element = element || interaction.element;
page = extend({}, coords.page);
client = extend({}, coords.client);
page.x -= origin.x;
page.y -= origin.y;
client.x -= origin.x;
client.y -= origin.y;
if (checkSnap(target, action) && !(starting && options.snap.elementOrigin)) {
this.snap = {
range : snapStatus.range,
locked : snapStatus.locked,
x : snapStatus.snappedX,
y : snapStatus.snappedY,
realX : snapStatus.realX,
realY : snapStatus.realY,
dx : snapStatus.dx,
dy : snapStatus.dy
};
if (snapStatus.locked) {
page.x += snapStatus.dx;
page.y += snapStatus.dy;
client.x += snapStatus.dx;
client.y += snapStatus.dy;
}
}
if (checkRestrict(target, action) && !(starting && options.restrict.elementRect) && restrictStatus.restricted) {
page.x += restrictStatus.dx;
page.y += restrictStatus.dy;
client.x += restrictStatus.dx;
client.y += restrictStatus.dy;
this.restrict = {
dx: restrictStatus.dx,
dy: restrictStatus.dy
};
}
this.pageX = page.x;
this.pageY = page.y;
this.clientX = client.x;
this.clientY = client.y;
this.x0 = interaction.startCoords.page.x;
this.y0 = interaction.startCoords.page.y;
this.clientX0 = interaction.startCoords.client.x;
this.clientY0 = interaction.startCoords.client.y;
this.ctrlKey = event.ctrlKey;
this.altKey = event.altKey;
this.shiftKey = event.shiftKey;
this.metaKey = event.metaKey;
this.button = event.button;
this.target = element;
this.t0 = interaction.downTimes[0];
this.type = action + (phase || '');
this.interaction = interaction;
this.interactable = target;
var inertiaStatus = interaction.inertiaStatus;
if (inertiaStatus.active) {
this.detail = 'inertia';
}
if (related) {
this.relatedTarget = related;
}
// end event dx, dy is difference between start and end points
if (ending || action === 'drop') {
if (deltaSource === 'client') {
this.dx = client.x - interaction.startCoords.client.x;
this.dy = client.y - interaction.startCoords.client.y;
}
else {
this.dx = page.x - interaction.startCoords.page.x;
this.dy = page.y - interaction.startCoords.page.y;
}
}
else if (starting) {
this.dx = 0;
this.dy = 0;
}
// copy properties from previousmove if starting inertia
else if (phase === 'inertiastart') {
this.dx = interaction.prevEvent.dx;
this.dy = interaction.prevEvent.dy;
}
else {
if (deltaSource === 'client') {
this.dx = client.x - interaction.prevEvent.clientX;
this.dy = client.y - interaction.prevEvent.clientY;
}
else {
this.dx = page.x - interaction.prevEvent.pageX;
this.dy = page.y - interaction.prevEvent.pageY;
}
}
if (interaction.prevEvent && interaction.prevEvent.detail === 'inertia'
&& !inertiaStatus.active && options.inertia.zeroResumeDelta) {
inertiaStatus.resumeDx += this.dx;
inertiaStatus.resumeDy += this.dy;
this.dx = this.dy = 0;
}
if (action === 'resize') {
if (options.squareResize || event.shiftKey) {
if (interaction.resizeAxes === 'y') {
this.dx = this.dy;
}
else {
this.dy = this.dx;
}
this.axes = 'xy';
}
else {
this.axes = interaction.resizeAxes;
if (interaction.resizeAxes === 'x') {
this.dy = 0;
}
else if (interaction.resizeAxes === 'y') {
this.dx = 0;
}
}
}
else if (action === 'gesture') {
this.touches = [pointers[0], pointers[1]];
if (starting) {
this.distance = touchDistance(pointers, deltaSource);
this.box = touchBBox(pointers);
this.scale = 1;
this.ds = 0;
this.angle = touchAngle(pointers, undefined, deltaSource);
this.da = 0;
}
else if (ending || event instanceof InteractEvent) {
this.distance = interaction.prevEvent.distance;
this.box = interaction.prevEvent.box;
this.scale = interaction.prevEvent.scale;
this.ds = this.scale - 1;
this.angle = interaction.prevEvent.angle;
this.da = this.angle - interaction.gesture.startAngle;
}
else {
this.distance = touchDistance(pointers, deltaSource);
this.box = touchBBox(pointers);
this.scale = this.distance / interaction.gesture.startDistance;
this.angle = touchAngle(pointers, interaction.gesture.prevAngle, deltaSource);
this.ds = this.scale - interaction.gesture.prevScale;
this.da = this.angle - interaction.gesture.prevAngle;
}
}
if (starting) {
this.timeStamp = interaction.downTimes[0];
this.dt = 0;
this.duration = 0;
this.speed = 0;
this.velocityX = 0;
this.velocityY = 0;
}
else if (phase === 'inertiastart') {
this.timeStamp = interaction.prevEvent.timeStamp;
this.dt = interaction.prevEvent.dt;
this.duration = interaction.prevEvent.duration;
this.speed = interaction.prevEvent.speed;
this.velocityX = interaction.prevEvent.velocityX;
this.velocityY = interaction.prevEvent.velocityY;
}
else {
this.timeStamp = new Date().getTime();
this.dt = this.timeStamp - interaction.prevEvent.timeStamp;
this.duration = this.timeStamp - interaction.downTimes[0];
if (event instanceof InteractEvent) {
var dx = this[sourceX] - interaction.prevEvent[sourceX],
dy = this[sourceY] - interaction.prevEvent[sourceY],
dt = this.dt / 1000;
this.speed = hypot(dx, dy) / dt;
this.velocityX = dx / dt;
this.velocityY = dy / dt;
}
// if normal move or end event, use previous user event coords
else {
// speed and velocity in pixels per second
this.speed = interaction.pointerDelta[deltaSource].speed;
this.velocityX = interaction.pointerDelta[deltaSource].vx;
this.velocityY = interaction.pointerDelta[deltaSource].vy;
}
}
if ((ending || phase === 'inertiastart')
&& interaction.prevEvent.speed > 600 && this.timeStamp - interaction.prevEvent.timeStamp < 150) {
var angle = 180 * Math.atan2(interaction.prevEvent.velocityY, interaction.prevEvent.velocityX) / Math.PI,
overlap = 22.5;
if (angle < 0) {
angle += 360;
}
var left = 135 - overlap <= angle && angle < 225 + overlap,
up = 225 - overlap <= angle && angle < 315 + overlap,
right = !left && (315 - overlap <= angle || angle < 45 + overlap),
down = !up && 45 - overlap <= angle && angle < 135 + overlap;
this.swipe = {
up : up,
down : down,
left : left,
right: right,
angle: angle,
speed: interaction.prevEvent.speed,
velocity: {
x: interaction.prevEvent.velocityX,
y: interaction.prevEvent.velocityY
}
};
}
}
// Shared methods for every InteractEvent instance.
InteractEvent.prototype = {
    // Default-prevention is a no-op here; events wrapping a real browser
    // event get preventOriginalDefault assigned instead.
    preventDefault: blank,
    stopPropagation: function () {
        // stop the event from firing on further targets
        this.propagationStopped = true;
    },
    stopImmediatePropagation: function () {
        // halt both the remaining listeners on this target and propagation
        this.propagationStopped = true;
        this.immediatePropagationStopped = true;
    }
};
// Forwards preventDefault to the real browser event wrapped by a
// delegated "fake" event (see delegateListener).
function preventOriginalDefault () {
    var browserEvent = this.originalEvent;
    browserEvent.preventDefault();
}
// Decide which action a pointer-down at the current coords should start on
// `element`: 'resizex'/'resizey'/'resizexy' near the right/bottom edges,
// otherwise 'drag', or 'gesture' for multi-touch. Returns null when the
// element has no rect or no action applies. Called with an Interactable
// as `this`. Side effect: records the chosen axes on interaction.resizeAxes.
function defaultActionChecker (pointer, interaction, element) {
    var rect = this.getRect(element),
        right,
        bottom,
        action = null,
        // shallow copy so the check can't be affected by later mutation
        page = extend({}, interaction.curCoords.page),
        options = this.options;

    if (!rect) { return null; }

    if (actionIsEnabled.resize && options.resizable) {
        // within `margin` px of the right/bottom edge counts as a resize grip;
        // a single-axis resizeAxis setting disables the other edge
        right = options.resizeAxis !== 'y' && page.x > (rect.right - margin);
        bottom = options.resizeAxis !== 'x' && page.y > (rect.bottom - margin);
    }

    // right/bottom stay undefined when resizing is disabled, yielding ''
    interaction.resizeAxes = (right?'x': '') + (bottom?'y': '');

    action = (interaction.resizeAxes)?
        'resize' + interaction.resizeAxes:
        actionIsEnabled.drag && options.draggable?
            'drag':
            null;

    // two or more pointers upgrade to a gesture unless a drag/resize
    // is already in progress
    if (actionIsEnabled.gesture
        && interaction.pointerIds.length >=2
        && !(interaction.dragging || interaction.resizing)) {
        action = 'gesture';
    }

    return action;
}
// Check if action is enabled globally and the current target supports it
// If so, return the validated action. Otherwise, return null
function validateAction (action, interactable) {
    if (!isString(action)) { return null; }

    // any 'resize*' variant is validated against the 'resize' flags
    var actionType = action.search('resize') !== -1? 'resize': action,
        // NOTE(review): named `options` but assigned the second argument
        // directly; callers presumably pass an options object here — verify
        // at the call sites
        options = interactable;

    if ((  (actionType  === 'resize'   && options.resizable )
        || (action      === 'drag'     && options.draggable  )
        || (action      === 'gesture'  && options.gesturable))
        && actionIsEnabled[actionType]) {

        // normalise the bare/reversed resize names to 'resizexy'
        if (action === 'resize' || action === 'resizeyx') {
            action = 'resizexy';
        }

        return action;
    }
    return null;
}
// Build the map of DOM-event dispatchers: each name is wrapped by
// doOnInteractions so the event is routed to the right Interaction(s).
var listeners = {},
    interactionListeners = [
        'dragStart', 'dragMove', 'resizeStart', 'resizeMove', 'gestureStart', 'gestureMove',
        'pointerOver', 'pointerOut', 'pointerHover', 'selectorDown',
        'pointerDown', 'pointerMove', 'pointerUp', 'pointerCancel', 'pointerEnd',
        'addPointer', 'removePointer', 'recordPointer',
    ];

for (var i = 0, len = interactionListeners.length; i < len; i++) {
    var name = interactionListeners[i];
    listeners[name] = doOnInteractions(name);
}
// bound to the interactable context when a DOM event
// listener is added to a selector interactable
// Dispatch a DOM event to the handlers registered for matching selector
// interactables, emulating event delegation: the event is cloned so that
// currentTarget can be set to each matching ancestor element in turn.
function delegateListener (event, useCapture) {
    var fakeEvent = {},
        delegated = delegatedEvents[event.type],
        element = event.target;

    // normalise to a strict boolean so it compares === to stored flags
    useCapture = useCapture? true: false;

    // duplicate the event so that currentTarget can be changed
    for (var prop in event) {
        fakeEvent[prop] = event[prop];
    }

    fakeEvent.originalEvent = event;
    fakeEvent.preventDefault = preventOriginalDefault;

    // climb up document tree looking for selector matches
    while (element && (element.ownerDocument && element !== element.ownerDocument)) {
        for (var i = 0; i < delegated.selectors.length; i++) {
            var selector = delegated.selectors[i],
                context = delegated.contexts[i];

            // fire only when the element matches the selector and both it
            // and the original target are inside the registered context
            if (matchesSelector(element, selector)
                && nodeContains(context, event.target)
                && nodeContains(context, element)) {

                // handlers for this selector/context pair; deliberately
                // shadows the module-level `listeners` map
                var listeners = delegated.listeners[i];

                fakeEvent.currentTarget = element;

                for (var j = 0; j < listeners.length; j++) {
                    // [0] is the handler, [1] the useCapture flag it was added with
                    if (listeners[j][1] === useCapture) {
                        listeners[j][0](fakeEvent);
                    }
                }
            }
        }

        element = element.parentNode;
    }
}
// Capture-phase entry point: same as delegateListener with useCapture set.
function delegateUseCapture (event) {
    return delegateListener.call(this, event, /* useCapture */ true);
}
// Find the index of the interactable registered for `element` (an Element
// or a selector string) within `context` (defaults to document).
// Returns -1 when nothing matches.
interactables.indexOfElement = function indexOfElement (element, context) {
    context = context || document;

    for (var i = 0; i < this.length; i++) {
        var candidate = this[i],
            selectorMatch = candidate.selector === element
                && candidate._context === context,
            elementMatch = !candidate.selector
                && candidate._element === element;

        if (selectorMatch || elementMatch) {
            return i;
        }
    }

    return -1;
};
// Return the Interactable registered for this element/selector (and
// optional options.context), or undefined when none exists.
interactables.get = function interactableGet (element, options) {
    var context = options && options.context;
    return this[this.indexOfElement(element, context)];
};
// Invoke `callback(interactable, selector, context, index, list)` for every
// selector-based interactable. Iteration stops early — and the value is
// returned — as soon as the callback returns anything other than undefined.
interactables.forEachSelector = function (callback) {
    for (var index = 0; index < this.length; index++) {
        var current = this[index];

        // skip element-bound (non-selector) interactables
        if (!current.selector) { continue; }

        var result = callback(current, current.selector, current._context, index, this);

        if (result !== undefined) {
            return result;
        }
    }
};
/*\
* interact
[ method ]
*
* The methods of this variable can be used to set elements as
* interactables and also to change various default settings.
*
* Calling it as a function and passing an element or a valid CSS selector
* string returns an Interactable object which has various methods to
* configure it.
*
- element (Element | string) The HTML or SVG Element to interact with or CSS selector
= (object) An @Interactable
*
> Usage
| interact(document.getElementById('draggable')).draggable(true);
|
| var rectables = interact('rect');
| rectables
| .gesturable(true)
| .on('gesturemove', function (event) {
| // something cool...
| })
| .autoScroll(true);
\*/
function interact (element, options) {
    // reuse the Interactable already registered for this target, if any
    var existing = interactables.get(element, options);
    return existing || new Interactable(element, options);
}
// A class for easy inheritance and setting of an Interactable's options
// Copies recognised options onto the instance. Only keys that exist in
// defaultOptions with a matching value type are taken; everything else
// falls back to defaultOptions through the prototype chain.
function IOptions (options) {
    for (var name in defaultOptions) {
        var isOwn = options.hasOwnProperty(name),
            sameType = typeof options[name] === typeof defaultOptions[name];

        if (isOwn && sameType) {
            this[name] = options[name];
        }
    }
}
IOptions.prototype = defaultOptions;
/*\
* Interactable
[ property ]
**
* Object type returned by @interact
\*/
function Interactable (element, options) {
    // an Element, or the selector string for selector interactables
    this._element = element;
    this._iEvents = this._iEvents || {};

    var _window;

    if (trySelector(element)) {
        this.selector = element;

        // an optional context Node narrows where the selector is matched
        var context = options && options.context;

        _window = context? getWindow(context) : window;

        // accept the context only if it is a Node (or, lacking window.Node,
        // an element or the document) belonging to its own window
        if (context && (_window.Node
                ? context instanceof _window.Node
                : (isElement(context) || context === _window.document))) {

            this._context = context;
        }
    }
    else {
        _window = getWindow(element);

        if (isElement(element, _window)) {
            // concrete elements get down/hover listeners attached directly;
            // selector interactables rely on document-level listeners instead
            if (PointerEvent) {
                events.add(this._element, pEventTypes.down, listeners.pointerDown );
                events.add(this._element, pEventTypes.move, listeners.pointerHover);
            }
            else {
                events.add(this._element, 'mousedown' , listeners.pointerDown );
                events.add(this._element, 'mousemove' , listeners.pointerHover);
                events.add(this._element, 'touchstart', listeners.pointerDown );
                events.add(this._element, 'touchmove' , listeners.pointerHover);
            }
        }
    }

    this._doc = _window.document;

    // make sure this interactable's document has the global listeners
    if (!contains(documents, this._doc)) {
        listenToDocument(this._doc);
    }

    interactables.push(this);

    this.set(options);
}
Interactable.prototype = {
    // Copy per-phase handlers from `phases` onto this interactable for the
    // given action. 'drop' has its own phase names (drop, dropactivate,
    // dropdeactivate, dragenter, dragleave, dropmove); drag/resize/gesture
    // use start/move/end/inertiastart. Each phase accepts several spellings,
    // e.g. ondragenter / onDropEnter / dragenter / onenter / onEnter / enter.
    setOnEvents: function (action, phases) {
        if (action === 'drop') {
            var drop = phases.ondrop || phases.onDrop || phases.drop,
                dropactivate = phases.ondropactivate || phases.onDropActivate || phases.dropactivate
                    || phases.onactivate || phases.onActivate || phases.activate,
                dropdeactivate = phases.ondropdeactivate || phases.onDropDeactivate || phases.dropdeactivate
                    || phases.ondeactivate || phases.onDeactivate || phases.deactivate,
                dragenter = phases.ondragenter || phases.onDropEnter || phases.dragenter
                    || phases.onenter || phases.onEnter || phases.enter,
                dragleave = phases.ondragleave || phases.onDropLeave || phases.dragleave
                    || phases.onleave || phases.onLeave || phases.leave,
                dropmove = phases.ondropmove || phases.onDropMove || phases.dropmove
                    || phases.onmove || phases.onMove || phases.move;

            // only functions are installed; anything else is ignored
            if (isFunction(drop) ) { this.ondrop = drop ; }
            if (isFunction(dropactivate) ) { this.ondropactivate = dropactivate ; }
            if (isFunction(dropdeactivate)) { this.ondropdeactivate = dropdeactivate; }
            if (isFunction(dragenter) ) { this.ondragenter = dragenter ; }
            if (isFunction(dragleave) ) { this.ondragleave = dragleave ; }
            if (isFunction(dropmove) ) { this.ondropmove = dropmove ; }
        }
        else {
            var start = phases.onstart || phases.onStart || phases.start,
                move = phases.onmove || phases.onMove || phases.move,
                end = phases.onend || phases.onEnd || phases.end,
                inertiastart = phases.oninertiastart || phases.onInertiaStart || phases.inertiastart;

            // handlers are stored as e.g. this.ondragstart, this.onresizemove
            action = 'on' + action;

            if (isFunction(start) ) { this[action + 'start' ] = start ; }
            if (isFunction(move) ) { this[action + 'move' ] = move ; }
            if (isFunction(end) ) { this[action + 'end' ] = end ; }
            if (isFunction(inertiastart)) { this[action + 'inertiastart' ] = inertiastart ; }
        }

        return this;
    },
/*\
* Interactable.draggable
[ method ]
*
* Gets or sets whether drag actions can be performed on the
* Interactable
*
= (boolean) Indicates if this can be the target of drag events
| var isDraggable = interact('ul li').draggable();
* or
- options (boolean | object) #optional true/false or An object with event listeners to be fired on drag events (object makes the Interactable draggable)
= (object) This Interactable
| interact(element).draggable({
| onstart: function (event) {},
| onmove : function (event) {},
| onend : function (event) {},
|
| // the axis in which the first movement must be
| // for the drag sequence to start
| // 'xy' by default - any direction
| axis: 'x' || 'y' || 'xy',
|
| // max number of drags that can happen concurrently
| // with elements of this Interactable. 1 by default
| max: Infinity,
|
| // max number of drags that can target the same element
| // 1 by default
| maxPerElement: 2
| });
\*/
    draggable: function (options) {
        // object: enable dragging and copy listeners/settings from options
        if (isObject(options)) {
            this.options.draggable = true;
            this.setOnEvents('drag', options);

            if (isNumber(options.max)) {
                this.options.dragMax = options.max;
            }
            if (isNumber(options.maxPerElement)) {
                this.options.dragMaxPerElement = options.maxPerElement;
            }
            if (/^x$|^y$|^xy$/.test(options.axis)) {
                this.options.dragAxis = options.axis;
            }
            else if (options.axis === null) {
                delete this.options.dragAxis;
            }

            return this;
        }

        // boolean: simple enable/disable
        if (isBool(options)) {
            this.options.draggable = options;

            return this;
        }

        // null: revert to the default setting
        if (options === null) {
            delete this.options.draggable;

            return this;
        }

        // no recognised argument: act as a getter
        return this.options.draggable;
    },
/*\
* Interactable.dropzone
[ method ]
*
* Returns or sets whether elements can be dropped onto this
* Interactable to trigger drop events
*
* Dropzones can receive the following events:
* - `dragactivate` and `dragdeactivate` when an acceptable drag starts and ends
* - `dragenter` and `dragleave` when a draggable enters and leaves the dropzone
* - `drop` when a draggable is dropped into this dropzone
*
* Use the `accept` option to allow only elements that match the given CSS selector or element.
*
* Use the `overlap` option to set how drops are checked for. The allowed values are:
* - `'pointer'`, the pointer must be over the dropzone (default)
* - `'center'`, the draggable element's center must be over the dropzone
* - a number from 0-1 which is the `(intersection area) / (draggable area)`.
* e.g. `0.5` for drop to happen when half of the area of the
* draggable is over the dropzone
*
- options (boolean | object | null) #optional The new value to be set.
| interact('.drop').dropzone({
| accept: '.can-drop' || document.getElementById('single-drop'),
| overlap: 'pointer' || 'center' || zeroToOne
| });
= (boolean | object) The current setting or this Interactable
\*/
    dropzone: function (options) {
        // object: enable the dropzone and copy listeners/settings
        if (isObject(options)) {
            this.options.dropzone = true;
            this.setOnEvents('drop', options);
            this.accept(options.accept);

            if (/^(pointer|center)$/.test(options.overlap)) {
                this.options.dropOverlap = options.overlap;
            }
            else if (isNumber(options.overlap)) {
                // numeric overlap is an area ratio, clamped to [0, 1]
                this.options.dropOverlap = Math.max(Math.min(1, options.overlap), 0);
            }

            return this;
        }

        // boolean: simple enable/disable
        if (isBool(options)) {
            this.options.dropzone = options;

            return this;
        }

        // null: revert to the default setting
        if (options === null) {
            delete this.options.dropzone;

            return this;
        }

        // no recognised argument: act as a getter
        return this.options.dropzone;
    },
/*\
* Interactable.dropCheck
[ method ]
*
* The default function to determine if a dragend event occured over
* this Interactable's element. Can be overridden using
* @Interactable.dropChecker.
*
- pointer (MouseEvent | PointerEvent | Touch) The event that ends a drag
- draggable (Interactable) The Interactable being dragged
- draggableElement (Element) The actual element that's being dragged
- dropElement (Element) The dropzone element
- rect (object) #optional The rect of dropElement
= (boolean) whether the pointer was over this Interactable
\*/
    dropCheck: function (pointer, draggable, draggableElement, dropElement, rect) {
        if (!(rect = rect || this.getRect(dropElement))) {
            return false;
        }

        var dropOverlap = this.options.dropOverlap;

        if (dropOverlap === 'pointer') {
            // the pointer itself (offset by the draggable's origin) must lie
            // strictly inside the dropzone rect
            var page = getPageXY(pointer),
                origin = getOriginXY(draggable, draggableElement),
                horizontal,
                vertical;

            page.x += origin.x;
            page.y += origin.y;

            horizontal = (page.x > rect.left) && (page.x < rect.right);
            vertical = (page.y > rect.top ) && (page.y < rect.bottom);

            return horizontal && vertical;
        }

        var dragRect = draggable.getRect(draggableElement);

        if (dropOverlap === 'center') {
            // the draggable's centre point must be inside the dropzone rect
            var cx = dragRect.left + dragRect.width / 2,
                cy = dragRect.top + dragRect.height / 2;

            return cx >= rect.left && cx <= rect.right && cy >= rect.top && cy <= rect.bottom;
        }

        if (isNumber(dropOverlap)) {
            // (intersection area) / (draggable area) must reach the
            // configured ratio in [0, 1]
            var overlapArea = (Math.max(0, Math.min(rect.right , dragRect.right ) - Math.max(rect.left, dragRect.left))
                * Math.max(0, Math.min(rect.bottom, dragRect.bottom) - Math.max(rect.top , dragRect.top ))),
                overlapRatio = overlapArea / (dragRect.width * dragRect.height);

            return overlapRatio >= dropOverlap;
        }
        // NOTE(review): an unrecognised dropOverlap value falls through and
        // returns undefined (falsy) — confirm that is intended
    },
/*\
* Interactable.dropChecker
[ method ]
*
* Gets or sets the function used to check if a dragged element is
* over this Interactable. See @Interactable.dropCheck.
*
- checker (function) #optional
* The checker is a function which takes a mouseUp/touchEnd event as a
* parameter and returns true or false to indicate if the the current
* draggable can be dropped into this Interactable
*
= (Function | Interactable) The checker function or this Interactable
\*/
dropChecker: function (checker) {
if (isFunction(checker)) {
this.dropCheck = checker;
return this;
}
return this.dropCheck;
},
/*\
* Interactable.accept
[ method ]
*
* Gets or sets the Element or CSS selector match that this
* Interactable accepts if it is a dropzone.
*
- newValue (Element | string | null) #optional
* If it is an Element, then only that element can be dropped into this dropzone.
* If it is a string, the element being dragged must match it as a selector.
* If it is null, the accept options is cleared - it accepts any element.
*
= (string | Element | null | Interactable) The current accept option if given `undefined` or this Interactable
\*/
accept: function (newValue) {
if (isElement(newValue)) {
this.options.accept = newValue;
return this;
}
// test if it is a valid CSS selector
if (trySelector(newValue)) {
this.options.accept = newValue;
return this;
}
if (newValue === null) {
delete this.options.accept;
return this;
}
return this.options.accept;
},
/*\
* Interactable.resizable
[ method ]
*
* Gets or sets whether resize actions can be performed on the
* Interactable
*
= (boolean) Indicates if this can be the target of resize elements
| var isResizeable = interact('input[type=text]').resizable();
* or
- options (boolean | object) #optional true/false or An object with event listeners to be fired on resize events (object makes the Interactable resizable)
= (object) This Interactable
| interact(element).resizable({
| onstart: function (event) {},
| onmove : function (event) {},
| onend : function (event) {},
|
| axis : 'x' || 'y' || 'xy' // default is 'xy',
|
| // limit multiple resizes.
| // See the explanation in @Interactable.draggable example
| max: 1,
| maxPerElement: 1,
| });
\*/
resizable: function (options) {
if (isObject(options)) {
this.options.resizable = true;
this.setOnEvents('resize', options);
if (isNumber(options.max)) {
this.options.resizeMax = options.max;
}
if (isNumber(options.maxPerElement)) {
this.options.resizeMaxPerElement = options.maxPerElement;
}
if (/^x$|^y$|^xy$/.test(options.axis)) {
this.options.resizeAxis = options.axis;
}
else if (options.axis === null) {
this.options.resizeAxis = defaultOptions.resizeAxis;
}
return this;
}
if (isBool(options)) {
this.options.resizable = options;
return this;
}
return this.options.resizable;
},
// misspelled alias
resizeable: blank,
/*\
* Interactable.squareResize
[ method ]
*
* Gets or sets whether resizing is forced 1:1 aspect
*
= (boolean) Current setting
*
* or
*
- newValue (boolean) #optional
= (object) this Interactable
\*/
squareResize: function (newValue) {
if (isBool(newValue)) {
this.options.squareResize = newValue;
return this;
}
if (newValue === null) {
delete this.options.squareResize;
return this;
}
return this.options.squareResize;
},
/*\
* Interactable.gesturable
[ method ]
*
* Gets or sets whether multitouch gestures can be performed on the
* Interactable's element
*
= (boolean) Indicates if this can be the target of gesture events
| var isGestureable = interact(element).gesturable();
* or
- options (boolean | object) #optional true/false or An object with event listeners to be fired on gesture events (makes the Interactable gesturable)
= (object) this Interactable
| interact(element).gesturable({
| onstart: function (event) {},
| onmove : function (event) {},
| onend : function (event) {},
|
| // limit multiple gestures.
| // See the explanation in @Interactable.draggable example
| max: 1,
| maxPerElement: 1,
| });
\*/
    gesturable: function (options) {
        // object: enable gestures and copy listeners/settings from options
        if (isObject(options)) {
            this.options.gesturable = true;
            this.setOnEvents('gesture', options);

            if (isNumber(options.max)) {
                this.options.gestureMax = options.max;
            }
            if (isNumber(options.maxPerElement)) {
                this.options.gestureMaxPerElement = options.maxPerElement;
            }

            return this;
        }

        // boolean: simple enable/disable
        if (isBool(options)) {
            this.options.gesturable = options;

            return this;
        }

        // null: revert to the default setting
        if (options === null) {
            delete this.options.gesturable;

            return this;
        }

        // no recognised argument: act as a getter
        return this.options.gesturable;
    },
    // misspelled alias
    gestureable: blank,
/*\
* Interactable.autoScroll
[ method ]
*
* Returns or sets whether or not any actions near the edges of the
* window/container trigger autoScroll for this Interactable
*
= (boolean | object)
* `false` if autoScroll is disabled; object with autoScroll properties
* if autoScroll is enabled
*
* or
*
- options (object | boolean | null) #optional
* options can be:
* - an object with margin, distance and interval properties,
* - true or false to enable or disable autoScroll or
* - null to use default settings
= (Interactable) this Interactable
\*/
    autoScroll: function (options) {
        var defaults = defaultOptions.autoScroll;

        if (isObject(options)) {
            var autoScroll = this.options.autoScroll;

            // copy-on-write: never mutate the shared defaults object
            if (autoScroll === defaults) {
                autoScroll = this.options.autoScroll = {
                    margin : defaults.margin,
                    distance : defaults.distance,
                    interval : defaults.interval,
                    container: defaults.container
                };
            }

            // each setting is validated and falls back to its default
            autoScroll.margin = this.validateSetting('autoScroll', 'margin', options.margin);
            autoScroll.speed = this.validateSetting('autoScroll', 'speed' , options.speed);

            // only an Element or a window is a valid scroll container
            autoScroll.container =
                (isElement(options.container) || isWindow(options.container)
                    ? options.container
                    : defaults.container);

            this.options.autoScrollEnabled = true;
            this.options.autoScroll = autoScroll;

            return this;
        }

        // boolean: simple enable/disable
        if (isBool(options)) {
            this.options.autoScrollEnabled = options;

            return this;
        }

        // null: revert to defaults entirely
        if (options === null) {
            delete this.options.autoScrollEnabled;
            delete this.options.autoScroll;

            return this;
        }

        // getter: the settings object when enabled, otherwise false
        return (this.options.autoScrollEnabled
            ? this.options.autoScroll
            : false);
    },
/*\
* Interactable.snap
[ method ]
**
* Returns or sets if and how action coordinates are snapped. By
* default, snapping is relative to the pointer coordinates. You can
* change this by setting the
* [`elementOrigin`](https://github.com/taye/interact.js/pull/72).
**
= (boolean | object) `false` if snap is disabled; object with snap properties if snap is enabled
**
* or
**
- options (object | boolean | null) #optional
= (Interactable) this Interactable
> Usage
| interact('.handle').snap({
| mode : 'grid', // event coords should snap to the corners of a grid
| range : Infinity, // the effective distance of snap points
| grid : { x: 100, y: 100 }, // the x and y spacing of the grid points
| gridOffset : { x: 0, y: 0 }, // the offset of the grid points
| });
|
| interact('.handle').snap({
| mode : 'anchor', // snap to specified points
| anchors : [
| { x: 100, y: 100, range: 25 }, // a point with x, y and a specific range
| { x: 200, y: 200 } // a point with x and y. it uses the default range
| ]
| });
|
| interact(document.querySelector('#thing')).snap({
| mode : 'path',
| paths: [
| { // snap to points on these x and y axes
| x: 100,
| y: 100,
| range: 25
| },
| // give this function the x and y page coords and snap to the object returned
| function (x, y) {
| return {
| x: x,
| y: (75 + 50 * Math.sin(x * 0.04)),
| range: 40
| };
| }]
| })
|
| interact(element).snap({
| // do not snap during normal movement.
| // Instead, trigger only one snapped move event
| // immediately before the end event.
| endOnly: true,
|
| // https://github.com/taye/interact.js/pull/72#issue-41813493
| elementOrigin: { x: 0, y: 0 }
| });
\*/
    snap: function (options) {
        var defaults = defaultOptions.snap;

        if (isObject(options)) {
            var snap = this.options.snap;

            // copy-on-write: never mutate the shared defaults object
            if (snap === defaults) {
                snap = {};
            }

            // each setting is validated and falls back to its default
            snap.mode = this.validateSetting('snap', 'mode' , options.mode);
            snap.endOnly = this.validateSetting('snap', 'endOnly' , options.endOnly);
            snap.actions = this.validateSetting('snap', 'actions' , options.actions);
            snap.range = this.validateSetting('snap', 'range' , options.range);
            snap.paths = this.validateSetting('snap', 'paths' , options.paths);
            snap.grid = this.validateSetting('snap', 'grid' , options.grid);
            snap.gridOffset = this.validateSetting('snap', 'gridOffset' , options.gridOffset);
            snap.anchors = this.validateSetting('snap', 'anchors' , options.anchors);
            snap.elementOrigin = this.validateSetting('snap', 'elementOrigin', options.elementOrigin);

            this.options.snapEnabled = true;
            this.options.snap = snap;

            return this;
        }

        // boolean: simple enable/disable
        if (isBool(options)) {
            this.options.snapEnabled = options;

            return this;
        }

        // null: revert to defaults entirely
        if (options === null) {
            delete this.options.snapEnabled;
            delete this.options.snap;

            return this;
        }

        // getter: the settings object when enabled, otherwise false
        return (this.options.snapEnabled
            ? this.options.snap
            : false);
    },
/*\
* Interactable.inertia
[ method ]
**
* Returns or sets if and how events continue to run after the pointer is released
**
= (boolean | object) `false` if inertia is disabled; `object` with inertia properties if inertia is enabled
**
* or
**
- options (object | boolean | null) #optional
= (Interactable) this Interactable
> Usage
| // enable and use default settings
| interact(element).inertia(true);
|
| // enable and use custom settings
| interact(element).inertia({
| // value greater than 0
| // high values slow the object down more quickly
| resistance : 16,
|
| // the minimum launch speed (pixels per second) that results in inertia start
| minSpeed : 200,
|
| // inertia will stop when the object slows down to this speed
| endSpeed : 20,
|
| // boolean; should actions be resumed when the pointer goes down during inertia
| allowResume : true,
|
| // boolean; should the jump when resuming from inertia be ignored in event.dx/dy
| zeroResumeDelta: false,
|
| // if snap/restrict are set to be endOnly and inertia is enabled, releasing
| // the pointer without triggering inertia will animate from the release
| // point to the snaped/restricted point in the given amount of time (ms)
| smoothEndDuration: 300,
|
| // an array of action types that can have inertia (no gesture)
| actions : ['drag', 'resize']
| });
|
| // reset custom settings and use all defaults
| interact(element).inertia(null);
\*/
    inertia: function (options) {
        var defaults = defaultOptions.inertia;

        if (isObject(options)) {
            var inertia = this.options.inertia;

            // copy-on-write: never mutate the shared defaults object
            if (inertia === defaults) {
                inertia = this.options.inertia = {
                    resistance : defaults.resistance,
                    minSpeed : defaults.minSpeed,
                    endSpeed : defaults.endSpeed,
                    actions : defaults.actions,
                    allowResume : defaults.allowResume,
                    zeroResumeDelta : defaults.zeroResumeDelta,
                    smoothEndDuration: defaults.smoothEndDuration
                };
            }

            // each setting is validated and falls back to its default
            inertia.resistance = this.validateSetting('inertia', 'resistance' , options.resistance);
            inertia.minSpeed = this.validateSetting('inertia', 'minSpeed' , options.minSpeed);
            inertia.endSpeed = this.validateSetting('inertia', 'endSpeed' , options.endSpeed);
            inertia.actions = this.validateSetting('inertia', 'actions' , options.actions);
            inertia.allowResume = this.validateSetting('inertia', 'allowResume' , options.allowResume);
            inertia.zeroResumeDelta = this.validateSetting('inertia', 'zeroResumeDelta' , options.zeroResumeDelta);
            inertia.smoothEndDuration = this.validateSetting('inertia', 'smoothEndDuration', options.smoothEndDuration);

            this.options.inertiaEnabled = true;
            this.options.inertia = inertia;

            return this;
        }

        // boolean: simple enable/disable
        if (isBool(options)) {
            this.options.inertiaEnabled = options;

            return this;
        }

        // null: revert to defaults entirely
        if (options === null) {
            delete this.options.inertiaEnabled;
            delete this.options.inertia;

            return this;
        }

        // getter: the settings object when enabled, otherwise false
        return (this.options.inertiaEnabled
            ? this.options.inertia
            : false);
    },
    // Resolve the action for a pointer-down: the built-in heuristic first,
    // then a user-supplied actionChecker (if any) may override the result.
    getAction: function (pointer, interaction, element) {
        var action = this.defaultActionChecker(pointer, interaction, element);

        if (this.options.actionChecker) {
            // called as a method of this.options, so `this` inside a custom
            // checker is the options object
            action = this.options.actionChecker(pointer, action, this, element, interaction);
        }

        return action;
    },

    defaultActionChecker: defaultActionChecker,
/*\
* Interactable.actionChecker
[ method ]
*
* Gets or sets the function used to check action to be performed on
* pointerDown
*
- checker (function | null) #optional A function which takes a pointer event, defaultAction string and an interactable as parameters and returns 'drag' 'resize[axes]' or 'gesture' or null.
= (Function | Interactable) The checker function or this Interactable
\*/
actionChecker: function (newValue) {
if (isFunction(newValue)) {
this.options.actionChecker = newValue;
return this;
}
if (newValue === null) {
delete this.options.actionChecker;
return this;
}
return this.options.actionChecker;
},
/*\
* Interactable.getRect
[ method ]
*
* The default function to get an Interactables bounding rect. Can be
* overridden using @Interactable.rectChecker.
*
- element (Element) #optional The element to measure. Meant to be used for selector Interactables which don't have a specific element.
= (object) The object's bounding rectangle.
o {
o top : 0,
o left : 0,
o bottom: 0,
o right : 0,
o width : 0,
o height: 0
o }
\*/
    getRect: function rectCheck (element) {
        element = element || this._element;

        // selector interactables measure the first match in their context
        // when no specific element is supplied
        if (this.selector && !(isElement(element))) {
            element = this._context.querySelector(this.selector);
        }

        return getElementRect(element);
    },
/*\
* Interactable.rectChecker
[ method ]
*
* Returns or sets the function used to calculate the interactable's
* element's rectangle
*
- checker (function) #optional A function which returns this Interactable's bounding rectangle. See @Interactable.getRect
= (function | object) The checker function or this Interactable
\*/
rectChecker: function (checker) {
if (isFunction(checker)) {
this.getRect = checker;
return this;
}
if (checker === null) {
delete this.options.getRect;
return this;
}
return this.getRect;
},
/*\
* Interactable.styleCursor
[ method ]
*
* Returns or sets whether the action that would be performed when the
* mouse on the element are checked on `mousemove` so that the cursor
* may be styled appropriately
*
- newValue (boolean) #optional
= (boolean | Interactable) The current setting or this Interactable
\*/
styleCursor: function (newValue) {
if (isBool(newValue)) {
this.options.styleCursor = newValue;
return this;
}
if (newValue === null) {
delete this.options.styleCursor;
return this;
}
return this.options.styleCursor;
},
/*\
* Interactable.preventDefault
[ method ]
*
* Returns or sets whether to prevent the browser's default behaviour
* in response to pointer events. Can be set to
* - `true` to always prevent
* - `false` to never prevent
* - `'auto'` to allow interact.js to try to guess what would be best
* - `null` to set to the default ('auto')
*
- newValue (boolean | string | null) #optional `true`, `false` or `'auto'`
= (boolean | string | Interactable) The current setting or this Interactable
\*/
preventDefault: function (newValue) {
if (isBool(newValue) || newValue === 'auto') {
this.options.preventDefault = newValue;
return this;
}
if (newValue === null) {
delete this.options.preventDefault;
return this;
}
return this.options.preventDefault;
},
/*\
* Interactable.origin
[ method ]
*
* Gets or sets the origin of the Interactable's element. The x and y
* of the origin will be subtracted from action event coordinates.
*
- origin (object | string) #optional An object eg. { x: 0, y: 0 } or string 'parent', 'self' or any CSS selector
* OR
- origin (Element) #optional An HTML or SVG Element whose rect will be used
**
= (object) The current origin or this Interactable
\*/
origin: function (newValue) {
if (trySelector(newValue)) {
this.options.origin = newValue;
return this;
}
else if (isObject(newValue)) {
this.options.origin = newValue;
return this;
}
if (newValue === null) {
delete this.options.origin;
return this;
}
return this.options.origin;
},
/*\
* Interactable.deltaSource
[ method ]
*
* Returns or sets the mouse coordinate types used to calculate the
* movement of the pointer.
*
- newValue (string) #optional Use 'client' if you will be scrolling while interacting; Use 'page' if you want autoScroll to work
= (string | object) The current deltaSource or this Interactable
\*/
deltaSource: function (newValue) {
if (newValue === 'page' || newValue === 'client') {
this.options.deltaSource = newValue;
return this;
}
if (newValue === null) {
delete this.options.deltaSource;
return this;
}
return this.options.deltaSource;
},
/*\
* Interactable.restrict
[ method ]
**
* Returns or sets the rectangles within which actions on this
* interactable (after snap calculations) are restricted. By default,
* restricting is relative to the pointer coordinates. You can change
* this by setting the
* [`elementRect`](https://github.com/taye/interact.js/pull/72).
**
- newValue (object) #optional an object with keys drag, resize, and/or gesture whose values are rects, Elements, CSS selectors, or 'parent' or 'self'
= (object) The current restrictions object or this Interactable
**
| interact(element).restrict({
| // the rect will be `interact.getElementRect(element.parentNode)`
| drag: element.parentNode,
|
| // x and y are relative to the the interactable's origin
| resize: { x: 100, y: 100, width: 200, height: 200 }
| })
|
| interact('.draggable').restrict({
| // the rect will be the selected element's parent
| drag: 'parent',
|
| // do not restrict during normal movement.
| // Instead, trigger only one restricted move event
| // immediately before the end event.
| endOnly: true,
|
| // https://github.com/taye/interact.js/pull/72#issue-41813493
| elementRect: { top: 0, left: 0, bottom: 1, right: 1 }
| });
\*/
    restrict: function (newValue) {
        // getter
        if (newValue === undefined) {
            return this.options.restrict;
        }

        if (isBool(newValue)) {
            // NOTE(review): this toggles the GLOBAL default
            // (defaultOptions.restrictEnabled), not this instance's
            // options — confirm that is intended
            defaultOptions.restrictEnabled = newValue;
        }
        else if (isObject(newValue)) {
            var newRestrictions = {};

            // only rects/objects, Elements, selectors or 'parent'/'self'
            // are kept for each action
            if (isObject(newValue.drag) || trySelector(newValue.drag)) {
                newRestrictions.drag = newValue.drag;
            }
            if (isObject(newValue.resize) || trySelector(newValue.resize)) {
                newRestrictions.resize = newValue.resize;
            }
            if (isObject(newValue.gesture) || trySelector(newValue.gesture)) {
                newRestrictions.gesture = newValue.gesture;
            }

            if (isBool(newValue.endOnly)) {
                newRestrictions.endOnly = newValue.endOnly;
            }

            if (isObject(newValue.elementRect)) {
                newRestrictions.elementRect = newValue.elementRect;
            }

            this.options.restrictEnabled = true;
            this.options.restrict = newRestrictions;
        }
        else if (newValue === null) {
            // revert this instance to the defaults
            delete this.options.restrict;
            delete this.options.restrictEnabled;
        }

        return this;
    },
/*\
* Interactable.context
[ method ]
*
* Gets the selector context Node of the Interactable. The default is `window.document`.
*
= (Node) The context Node of this Interactable
**
\*/
// Returns the Node within which this Interactable's selector is matched;
// falls back to the shared prototype default below.
context: function () {
    return this._context;
},
// default selector context shared by all Interactables
_context: document,
/*\
* Interactable.ignoreFrom
[ method ]
*
* If the target of the `mousedown`, `pointerdown` or `touchstart`
* event or any of its parents match the given CSS selector or
* Element, no drag/resize/gesture is started.
*
- newValue (string | Element | null) #optional a CSS selector string, an Element or `null` to not ignore any elements
= (string | Element | object) The current ignoreFrom value or this Interactable
**
| interact(element, { ignoreFrom: document.getElementById('no-action') });
| // or
| interact(element).ignoreFrom('input, textarea, a');
\*/
ignoreFrom: function (newValue) {
if (trySelector(newValue)) { // CSS selector to match event.target
this.options.ignoreFrom = newValue;
return this;
}
if (isElement(newValue)) { // specific element
this.options.ignoreFrom = newValue;
return this;
}
if (newValue === null) {
delete this.options.ignoreFrom;
return this;
}
return this.options.ignoreFrom;
},
/*\
* Interactable.allowFrom
[ method ]
*
* A drag/resize/gesture is started only If the target of the
* `mousedown`, `pointerdown` or `touchstart` event or any of its
* parents match the given CSS selector or Element.
*
- newValue (string | Element | null) #optional a CSS selector string, an Element or `null` to allow from any element
= (string | Element | object) The current allowFrom value or this Interactable
**
| interact(element, { allowFrom: document.getElementById('drag-handle') });
| // or
| interact(element).allowFrom('.handle');
\*/
allowFrom: function (newValue) {
if (trySelector(newValue)) { // CSS selector to match event.target
this.options.allowFrom = newValue;
return this;
}
if (isElement(newValue)) { // specific element
this.options.allowFrom = newValue;
return this;
}
if (newValue === null) {
delete this.options.allowFrom;
return this;
}
return this.options.allowFrom;
},
/*\
* Interactable.validateSetting
[ method ]
*
- context (string) eg. 'snap', 'autoScroll'
- option (string) The name of the value being set
- value (any type) The value being validated
*
= (typeof value) A valid value for the give context-option pair
* - null if defaultOptions[context][value] is undefined
* - value if it is the same type as defaultOptions[context][value],
* - this.options[context][value] if it is the same type as defaultOptions[context][value],
* - or defaultOptions[context][value]
\*/
validateSetting: function (context, option, value) {
var defaults = defaultOptions[context],
current = this.options[context];
if (defaults !== undefined && defaults[option] !== undefined) {
if ('objectTypes' in defaults && defaults.objectTypes.test(option)) {
if (isObject(value)) { return value; }
else {
return (option in current && isObject(current[option])
? current [option]
: defaults[option]);
}
}
if ('arrayTypes' in defaults && defaults.arrayTypes.test(option)) {
if (isArray(value)) { return value; }
else {
return (option in current && isArray(current[option])
? current[option]
: defaults[option]);
}
}
if ('stringTypes' in defaults && defaults.stringTypes.test(option)) {
if (isString(value)) { return value; }
else {
return (option in current && isString(current[option])
? current[option]
: defaults[option]);
}
}
if ('numberTypes' in defaults && defaults.numberTypes.test(option)) {
if (isNumber(value)) { return value; }
else {
return (option in current && isNumber(current[option])
? current[option]
: defaults[option]);
}
}
if ('boolTypes' in defaults && defaults.boolTypes.test(option)) {
if (isBool(value)) { return value; }
else {
return (option in current && isBool(current[option])
? current[option]
: defaults[option]);
}
}
if ('elementTypes' in defaults && defaults.elementTypes.test(option)) {
if (isElement(value)) { return value; }
else {
return (option in current && isElement(current[option])
? current[option]
: defaults[option]);
}
}
}
return null;
},
/*\
* Interactable.element
[ method ]
*
* If this is not a selector Interactable, it returns the element this
* interactable represents
*
= (Element) HTML / SVG Element
\*/
// Returns the element this (non-selector) Interactable represents
element: function () {
    return this._element;
},
/*\
* Interactable.fire
[ method ]
*
* Calls listeners for the given InteractEvent type bound globally
* and directly to this Interactable
*
- iEvent (InteractEvent) The InteractEvent object to be fired on this Interactable
= (Interactable) this Interactable
\*/
fire: function (iEvent) {
if (!(iEvent && iEvent.type) || !contains(eventTypes, iEvent.type)) {
return this;
}
var listeners,
i,
len,
onEvent = 'on' + iEvent.type,
funcName = '';
// Interactable#on() listeners
if (iEvent.type in this._iEvents) {
listeners = this._iEvents[iEvent.type];
for (i = 0, len = listeners.length; i < len && !iEvent.immediatePropagationStopped; i++) {
funcName = listeners[i].name;
listeners[i](iEvent);
}
}
// interactable.onevent listener
if (isFunction(this[onEvent])) {
funcName = this[onEvent].name;
this[onEvent](iEvent);
}
// interact.on() listeners
if (iEvent.type in globalEvents && (listeners = globalEvents[iEvent.type])) {
for (i = 0, len = listeners.length; i < len && !iEvent.immediatePropagationStopped; i++) {
funcName = listeners[i].name;
listeners[i](iEvent);
}
}
return this;
},
/*\
* Interactable.on
[ method ]
*
* Binds a listener for an InteractEvent or DOM event.
*
- eventType (string | array) The type of event or array of types to listen for
- listener (function) The function to be called on the given event(s)
- useCapture (boolean) #optional useCapture flag for addEventListener
= (object) This Interactable
\*/
// Registers `listener` for the given event type(s). InteractEvent types
// are stored in this._iEvents; for selector Interactables, DOM events are
// delegated through each known document; otherwise the listener is bound
// directly to this Interactable's element.
on: function (eventType, listener, useCapture) {
    var i;

    // an array of types: register each one recursively
    if (isArray(eventType)) {
        for (i = 0; i < eventType.length; i++) {
            this.on(eventType[i], listener, useCapture);
        }
        return this;
    }

    // normalise to the browser's actual wheel event name
    if (eventType === 'wheel') {
        eventType = wheelEvent;
    }

    // convert to boolean
    useCapture = useCapture? true: false;

    if (contains(eventTypes, eventType)) {
        // if this type of event was never bound to this Interactable
        if (!(eventType in this._iEvents)) {
            this._iEvents[eventType] = [listener];
        }
        else {
            this._iEvents[eventType].push(listener);
        }
    }
    // delegated event for selector
    else if (this.selector) {
        if (!delegatedEvents[eventType]) {
            delegatedEvents[eventType] = {
                selectors: [],
                contexts : [],
                listeners: []
            };

            // add delegate listener functions to every known document
            for (i = 0; i < documents.length; i++) {
                events.add(documents[i], eventType, delegateListener);
                events.add(documents[i], eventType, delegateUseCapture, true);
            }
        }

        var delegated = delegatedEvents[eventType],
            index;

        // find the entry matching this selector/context pair
        for (index = delegated.selectors.length - 1; index >= 0; index--) {
            if (delegated.selectors[index] === this.selector
                && delegated.contexts[index] === this._context) {
                break;
            }
        }

        // no entry yet: create one (index is -1 when the loop found nothing)
        if (index === -1) {
            index = delegated.selectors.length;

            delegated.selectors.push(this.selector);
            delegated.contexts .push(this._context);
            delegated.listeners.push([]);
        }

        // keep listener and useCapture flag
        delegated.listeners[index].push([listener, useCapture]);
    }
    else {
        events.add(this._element, eventType, listener, useCapture);
    }

    return this;
},
/*\
* Interactable.off
[ method ]
*
* Removes an InteractEvent or DOM event listener
*
- eventType (string | array) The type of event or array of types that were listened for
- listener (function) The listener function to be removed
- useCapture (boolean) #optional useCapture flag for removeEventListener
= (object) This Interactable
\*/
off: function (eventType, listener, useCapture) {
var i;
if (isArray(eventType)) {
for (i = 0; i < eventType.length; i++) {
this.off(eventType[i], listener, useCapture);
}
return this;
}
var eventList,
index = -1;
// convert to boolean
useCapture = useCapture? true: false;
if (eventType === 'wheel') {
eventType = wheelEvent;
}
// if it is an action event type
if (contains(eventTypes, eventType)) {
eventList = this._iEvents[eventType];
if (eventList && (index = indexOf(eventList, listener)) !== -1) {
this._iEvents[eventType].splice(index, 1);
}
}
// delegated event
else if (this.selector) {
var delegated = delegatedEvents[eventType],
matchFound = false;
if (!delegated) { return this; }
// count from last index of delegated to 0
for (index = delegated.selectors.length - 1; index >= 0; index--) {
// look for matching selector and context Node
if (delegated.selectors[index] === this.selector
&& delegated.contexts[index] === this._context) {
var listeners = delegated.listeners[index];
// each item of the listeners array is an array: [function, useCaptureFlag]
for (i = listeners.length - 1; i >= 0; i--) {
var fn = listeners[i][0],
useCap = listeners[i][1];
// check if the listener functions and useCapture flags match
if (fn === listener && useCap === useCapture) {
// remove the listener from the array of listeners
listeners.splice(i, 1);
// if all listeners for this interactable have been removed
// remove the interactable from the delegated arrays
if (!listeners.length) {
delegated.selectors.splice(index, 1);
delegated.contexts .splice(index, 1);
delegated.listeners.splice(index, 1);
// remove delegate function from context
events.remove(this._context, eventType, delegateListener);
events.remove(this._context, eventType, delegateUseCapture, true);
// remove the arrays if they are empty
if (!delegated.selectors.length) {
delegatedEvents[eventType] = null;
}
}
// only remove one listener
matchFound = true;
break;
}
}
if (matchFound) { break; }
}
}
}
// remove listener from this Interatable's element
else {
events.remove(this, listener, useCapture);
}
return this;
},
/*\
* Interactable.set
[ method ]
*
* Reset the options of this Interactable
- options (object) The new settings to apply
= (object) This Interactable
\*/
// Replaces this Interactable's options with a fresh IOptions built from
// `options`, then re-applies each action capability and recognised setting.
set: function (options) {
    if (!options || !isObject(options)) {
        options = {};
    }
    // replace the whole options object, falling back to IOptions defaults
    this.options = new IOptions(options);

    // re-apply action capabilities (an explicit value wins over the default)
    this.draggable ('draggable' in options? options.draggable : this.options.draggable );
    this.dropzone ('dropzone' in options? options.dropzone : this.options.dropzone );
    this.resizable ('resizable' in options? options.resizable : this.options.resizable );
    this.gesturable('gesturable' in options? options.gesturable: this.options.gesturable);

    // every other recognised setting is applied through the setter method
    // of the same name
    var settings = [
        'accept', 'actionChecker', 'allowFrom', 'autoScroll', 'deltaSource',
        'dropChecker', 'ignoreFrom', 'inertia', 'origin', 'preventDefault',
        'rectChecker', 'restrict', 'snap', 'styleCursor'
    ];

    for (var i = 0, len = settings.length; i < len; i++) {
        var setting = settings[i];

        if (setting in options) {
            this[setting](options[setting]);
        }
    }

    return this;
},
/*\
* Interactable.unset
[ method ]
*
* Remove this interactable from the list of interactables and remove
* it's drag, drop, resize and gesture capabilities
*
= (object) @interact
\*/
// Removes this Interactable's listeners and capabilities and takes it out
// of the global interactables list. Returns the interact function.
unset: function () {
    events.remove(this, 'all');

    if (!isString(this.selector)) {
        // NOTE(review): events.remove(this, 'all') was already called just
        // above, so this second call looks redundant — confirm against
        // events.remove's semantics before changing it
        events.remove(this, 'all');
        if (this.options.styleCursor) {
            this._element.style.cursor = '';
        }
    }
    else {
        // remove delegated events
        for (var type in delegatedEvents) {
            var delegated = delegatedEvents[type];

            for (var i = 0; i < delegated.selectors.length; i++) {
                if (delegated.selectors[i] === this.selector
                    && delegated.contexts[i] === this._context) {
                    delegated.selectors.splice(i, 1);
                    delegated.contexts .splice(i, 1);
                    delegated.listeners.splice(i, 1);

                    // remove the arrays if they are empty
                    if (!delegated.selectors.length) {
                        delegatedEvents[type] = null;
                    }
                }

                // remove the delegate listener functions from this context
                events.remove(this._context, type, delegateListener);
                events.remove(this._context, type, delegateUseCapture, true);

                // NOTE(review): this break exits the inner loop after its
                // first iteration, so only selectors[0] of each type is ever
                // examined — confirm whether it belongs outside the loop
                break;
            }
        }
    }

    this.dropzone(false);

    interactables.splice(indexOf(interactables, this), 1);

    return interact;
}
};
// alternative spellings kept for backwards compatibility
Interactable.prototype.gestureable = Interactable.prototype.gesturable;
Interactable.prototype.resizeable = Interactable.prototype.resizable;
/*\
* interact.isSet
[ method ]
*
* Check if an element has been set
- element (Element) The Element being searched for
= (boolean) Indicates if the element or CSS selector was previously passed to interact
\*/
// Whether `element` (or selector) was previously passed to interact,
// optionally within the context Node given in options.context.
interact.isSet = function(element, options) {
    var context = options && options.context;

    return interactables.indexOfElement(element, context) !== -1;
};
/*\
* interact.on
[ method ]
*
* Adds a global listener for an InteractEvent or adds a DOM event to
* `document`
*
- type (string | array) The type of event or array of types to listen for
- listener (function) The function to be called on the given event(s)
- useCapture (boolean) #optional useCapture flag for addEventListener
= (object) interact
\*/
// Adds a global InteractEvent listener, or a DOM event listener on the
// document for unrecognised types. Arrays of types are handled recursively.
interact.on = function (type, listener, useCapture) {
    if (isArray(type)) {
        for (var i = 0; i < type.length; i++) {
            interact.on(type[i], listener, useCapture);
        }
        return interact;
    }

    if (!contains(eventTypes, type)) {
        // plain DOM event: listen on the document
        events.add(document, type, listener, useCapture);
    }
    else {
        // InteractEvent type: keep the listener in globalEvents,
        // creating the array on first use
        (globalEvents[type] = globalEvents[type] || []).push(listener);
    }

    return interact;
};
/*\
* interact.off
[ method ]
*
* Removes a global InteractEvent listener or DOM event from `document`
*
- type (string | array) The type of event or array of types that were listened for
- listener (function) The listener function to be removed
- useCapture (boolean) #optional useCapture flag for removeEventListener
= (object) interact
\*/
// Removes a global InteractEvent listener, or a DOM event listener from
// the document for unrecognised types.
interact.off = function (type, listener, useCapture) {
    if (isArray(type)) {
        for (var i = 0; i < type.length; i++) {
            interact.off(type[i], listener, useCapture);
        }
        return interact;
    }

    if (contains(eventTypes, type)) {
        // InteractEvent listener: remove from globalEvents if registered
        var listeners = globalEvents[type],
            index = listeners? indexOf(listeners, listener): -1;

        if (index !== -1) {
            listeners.splice(index, 1);
        }
    }
    else {
        // DOM event listener bound to the document
        events.remove(document, type, listener, useCapture);
    }

    return interact;
};
/*\
* interact.simulate
[ method ]
*
* Simulate pointer down to begin to interact with an interactable element
- action (string) The action to be performed - drag, resize, etc.
- element (Element) The DOM Element to resize/drag
- pointerEvent (object) #optional Pointer event whose pageX/Y coordinates will be the starting point of the interact drag/resize
= (object) interact
\*/
// Synthesises a pointerdown-like event and feeds it to the normal
// pointerDown listener to start a drag/resize programmatically.
// (Removed a redundant nested `clientRect =` assignment inside the
// ternary's else arm.)
interact.simulate = function (action, element, pointerEvent) {
    var event = {},
        clientRect;

    if (action === 'resize') {
        action = 'resizexy';
    }
    // return if the action is not recognised
    if (!/^(drag|resizexy|resizex|resizey)$/.test(action)) {
        return interact;
    }

    if (pointerEvent) {
        extend(event, pointerEvent);
    }
    else {
        // derive starting coordinates from the element's rect
        clientRect = (element instanceof SVGElement)
            ? element.getBoundingClientRect()
            : element.getClientRects()[0];

        if (action === 'drag') {
            // drag starts from the centre of the element
            event.pageX = clientRect.left + clientRect.width / 2;
            event.pageY = clientRect.top + clientRect.height / 2;
        }
        else {
            // resize starts from the bottom-right corner
            event.pageX = clientRect.right;
            event.pageY = clientRect.bottom;
        }
    }

    event.target = event.currentTarget = element;
    event.preventDefault = event.stopPropagation = blank;

    listeners.pointerDown(event, action);

    return interact;
};
/*\
* interact.enableDragging
[ method ]
*
* Returns or sets whether dragging is enabled for any Interactables
*
- newValue (boolean) #optional `true` to allow the action; `false` to disable action for all Interactables
= (boolean | object) The current setting or interact
\*/
// Get or set whether dragging is enabled for all Interactables.
interact.enableDragging = function (newValue) {
    // `== null` matches both null and undefined (getter mode)
    if (newValue == null) {
        return actionIsEnabled.drag;
    }

    actionIsEnabled.drag = newValue;
    return interact;
};
/*\
* interact.enableResizing
[ method ]
*
* Returns or sets whether resizing is enabled for any Interactables
*
- newValue (boolean) #optional `true` to allow the action; `false` to disable action for all Interactables
= (boolean | object) The current setting or interact
\*/
// Get or set whether resizing is enabled for all Interactables.
interact.enableResizing = function (newValue) {
    // `== null` matches both null and undefined (getter mode)
    if (newValue == null) {
        return actionIsEnabled.resize;
    }

    actionIsEnabled.resize = newValue;
    return interact;
};
/*\
* interact.enableGesturing
[ method ]
*
* Returns or sets whether gesturing is enabled for any Interactables
*
- newValue (boolean) #optional `true` to allow the action; `false` to disable action for all Interactables
= (boolean | object) The current setting or interact
\*/
// Get or set whether gesturing is enabled for all Interactables.
interact.enableGesturing = function (newValue) {
    // `== null` matches both null and undefined (getter mode)
    if (newValue == null) {
        return actionIsEnabled.gesture;
    }

    actionIsEnabled.gesture = newValue;
    return interact;
};
// expose the list of recognised InteractEvent type names
interact.eventTypes = eventTypes;
/*\
* interact.debug
[ method ]
*
* Returns debugging data
= (object) An object with properties that outline the current state and expose internal functions and variables
\*/
// Returns an object exposing internal state, collections and listener
// functions to aid debugging.
interact.debug = function () {
    var interaction = interactions[0] || new Interaction();

    return {
        // state of the first (or a fresh) interaction
        interactions : interactions,
        target : interaction.target,
        dragging : interaction.dragging,
        resizing : interaction.resizing,
        gesturing : interaction.gesturing,
        prepared : interaction.prepared,
        matches : interaction.matches,
        matchElements : interaction.matchElements,
        prevCoords : interaction.prevCoords,
        startCoords : interaction.startCoords,
        pointerIds : interaction.pointerIds,
        pointers : interaction.pointers,
        addPointer : listeners.addPointer,
        removePointer : listeners.removePointer,
        recordPointer : listeners.recordPointer,
        snap : interaction.snapStatus,
        restrict : interaction.restrictStatus,
        inertia : interaction.inertiaStatus,
        downTime : interaction.downTimes[0],
        downEvent : interaction.downEvent,
        downPointer : interaction.downPointer,
        prevEvent : interaction.prevEvent,

        // internal constructors, collections and defaults
        Interactable : Interactable,
        IOptions : IOptions,
        interactables : interactables,
        pointerIsDown : interaction.pointerIsDown,
        defaultOptions : defaultOptions,
        defaultActionChecker : defaultActionChecker,
        actionCursors : actionCursors,

        // the raw pointer/event listener functions
        dragMove : listeners.dragMove,
        resizeMove : listeners.resizeMove,
        gestureMove : listeners.gestureMove,
        pointerUp : listeners.pointerUp,
        pointerDown : listeners.pointerDown,
        pointerMove : listeners.pointerMove,
        pointerHover : listeners.pointerHover,
        events : events,
        globalEvents : globalEvents,
        delegatedEvents : delegatedEvents
    };
};
// expose the functions used to calculate multi-touch properties
interact.getTouchAverage = touchAverage;
interact.getTouchBBox = touchBBox;
interact.getTouchDistance = touchDistance;
interact.getTouchAngle = touchAngle;
// and assorted DOM helpers
interact.getElementRect = getElementRect;
interact.matchesSelector = matchesSelector;
interact.closest = closest;
/*\
* interact.margin
[ method ]
*
* Returns or sets the margin for autocheck resizing used in
* @Interactable.getAction. That is the distance from the bottom and right
* edges of an element clicking in which will start resizing
*
- newValue (number) #optional
= (number | interact) The current margin value or interact
\*/
// Get or set the margin (px) from the bottom/right edges within which a
// pointer down auto-selects resizing.
interact.margin = function (newvalue) {
    // non-numeric arguments read the current margin
    if (!isNumber(newvalue)) {
        return margin;
    }

    margin = newvalue;
    return interact;
};
/*\
* interact.styleCursor
[ styleCursor ]
*
* Returns or sets whether the cursor style of the document is changed
* depending on what action is being performed
*
- newValue (boolean) #optional
= (boolean | interact) The current setting of interact
\*/
// Get or set whether the document cursor style changes with the action.
interact.styleCursor = function (newValue) {
    // non-boolean arguments read the current setting
    if (!isBool(newValue)) {
        return defaultOptions.styleCursor;
    }

    defaultOptions.styleCursor = newValue;
    return interact;
};
/*\
* interact.autoScroll
[ method ]
*
* Returns or sets whether or not actions near the edges of the window or
* specified container element trigger autoScroll by default
*
- options (boolean | object) true or false to simply enable or disable or an object with margin, distance, container and interval properties
= (object) interact
* or
= (boolean | object) `false` if autoscroll is disabled and the default autoScroll settings if it is enabled
\*/
// Get or set the default autoScroll settings. A boolean toggles the
// feature; an object updates margin/speed/container.
interact.autoScroll = function (options) {
    var defaults = defaultOptions.autoScroll;

    if (isBool(options)) {
        // simple enable/disable
        defaultOptions.autoScrollEnabled = options;
        return interact;
    }

    if (isObject(options)) {
        defaultOptions.autoScrollEnabled = true;

        if (isNumber(options.margin)) { defaults.margin = options.margin; }
        if (isNumber(options.speed) ) { defaults.speed  = options.speed ; }

        // only Elements and windows are accepted as scroll containers
        if (isElement(options.container) || isWindow(options.container)) {
            defaults.container = options.container;
        }

        return interact;
    }

    // getter: the settings object when enabled, false otherwise
    return defaultOptions.autoScrollEnabled? defaults: false;
};
/*\
* interact.snap
[ method ]
*
* Returns or sets whether actions are constrained to a grid or a
* collection of coordinates
*
- options (boolean | object) #optional New settings
* `true` or `false` to simply enable or disable
* or an object with some of the following properties
o {
o mode : 'grid', 'anchor' or 'path',
o range : the distance within which snapping to a point occurs,
o actions: ['drag', 'resizex', 'resizey', 'resizexy'], an array of action types that can snapped (['drag'] by default) (no gesture)
o grid : {
o x, y: the distances between the grid lines,
o },
o gridOffset: {
o x, y: the x/y-axis values of the grid origin
o },
o anchors: [
o {
o x: x coordinate to snap to,
o y: y coordinate to snap to,
o range: optional range for this anchor
o }
o {
o another anchor
o }
o ]
o }
*
= (object | interact) The default snap settings object or interact
\*/
// Get or set the default snap settings. A boolean toggles snapping; an
// object updates only the recognised, validly-typed fields.
interact.snap = function (options) {
    var snap = defaultOptions.snap;

    if (isBool(options)) {
        defaultOptions.snapEnabled = options;
        return interact;
    }

    if (isObject(options)) {
        defaultOptions.snapEnabled = true;

        // setting name -> predicate its new value must satisfy
        var fields = [
            ['mode'         , isString],
            ['endOnly'      , isBool  ],
            ['range'        , isNumber],
            ['actions'      , isArray ],
            ['anchors'      , isArray ],
            ['grid'         , isObject],
            ['gridOffset'   , isObject],
            ['elementOrigin', isObject]
        ];

        for (var i = 0; i < fields.length; i++) {
            var name = fields[i][0];

            if (fields[i][1](options[name])) {
                snap[name] = options[name];
            }
        }

        return interact;
    }

    // getter
    return defaultOptions.snapEnabled;
};
/*\
* interact.inertia
[ method ]
*
* Returns or sets inertia settings.
*
* See @Interactable.inertia
*
- options (boolean | object) #optional New settings
* `true` or `false` to simply enable or disable
* or an object of inertia options
= (object | interact) The default inertia settings object or interact
\*/
// Get or set the default inertia settings. A boolean toggles inertia; an
// object updates only the recognised, validly-typed fields.
interact.inertia = function (options) {
    var inertia = defaultOptions.inertia;

    if (isBool(options)) {
        defaultOptions.inertiaEnabled = options;
        return interact;
    }

    if (isObject(options)) {
        defaultOptions.inertiaEnabled = true;

        // setting name -> predicate its new value must satisfy
        var fields = [
            ['resistance'       , isNumber],
            ['minSpeed'         , isNumber],
            ['endSpeed'         , isNumber],
            ['smoothEndDuration', isNumber],
            ['allowResume'      , isBool  ],
            ['zeroResumeDelta'  , isBool  ],
            ['actions'          , isArray ]
        ];

        for (var i = 0; i < fields.length; i++) {
            var name = fields[i][0];

            if (fields[i][1](options[name])) {
                inertia[name] = options[name];
            }
        }

        return interact;
    }

    // getter: a snapshot of the default inertia settings.
    // NOTE(review): smoothEndDuration is settable above but absent from
    // this snapshot — confirm whether it should be included
    return {
        enabled: defaultOptions.inertiaEnabled,
        resistance: inertia.resistance,
        minSpeed: inertia.minSpeed,
        endSpeed: inertia.endSpeed,
        actions: inertia.actions,
        allowResume: inertia.allowResume,
        zeroResumeDelta: inertia.zeroResumeDelta
    };
};
/*\
* interact.supportsTouch
[ method ]
*
= (boolean) Whether or not the browser supports touch input
\*/
// Reports the precomputed supportsTouch flag
interact.supportsTouch = function () {
    return supportsTouch;
};
/*\
* interact.supportsPointerEvent
[ method ]
*
= (boolean) Whether or not the browser supports PointerEvents
\*/
// Reports the precomputed supportsPointerEvent flag
interact.supportsPointerEvent = function () {
    return supportsPointerEvent;
};
/*\
* interact.currentAction
[ method ]
*
= (string) What action is currently being performed
\*/
// Returns the first in-progress action among all interactions, or null.
interact.currentAction = function () {
    var i = 0,
        len = interactions.length,
        action;

    for (; i < len; i++) {
        action = interactions[i].currentAction();

        if (action) { return action; }
    }

    return null;
};
/*\
* interact.stop
[ method ]
*
* Cancels the current interaction
*
- event (Event) An event on which to call preventDefault()
= (object) interact
\*/
// Cancels every current interaction, optionally preventing the default of
// the given event. The loop must run down to index 0 inclusive — the
// previous `i > 0` condition skipped interactions[0], the primary
// interaction, so it was never stopped.
interact.stop = function (event) {
    // iterate backwards so an interaction removing itself does not shift
    // unvisited entries
    for (var i = interactions.length - 1; i >= 0; i--) {
        interactions[i].stop(event);
    }

    return interact;
};
/*\
* interact.dynamicDrop
[ method ]
*
* Returns or sets whether the dimensions of dropzone elements are
* calculated on every dragmove or only on dragstart for the default
* dropChecker
*
- newValue (boolean) #optional True to check on each move. False to check only before start
= (boolean | interact) The current setting or interact
\*/
// Get or set whether dropzone rects are recalculated on every dragmove
// (true) or only at dragstart (false) by the default dropChecker.
interact.dynamicDrop = function (newValue) {
    // non-boolean arguments read the current setting
    if (!isBool(newValue)) {
        return dynamicDrop;
    }

    dynamicDrop = newValue;
    return interact;
};
/*\
* interact.deltaSource
[ method ]
* Returns or sets whether pageX/Y or clientX/Y is used to calculate dx/dy.
*
* See @Interactable.deltaSource
*
- newValue (string) #optional 'page' or 'client'
= (string | Interactable) The current setting or interact
\*/
// Get or set the default coordinate source ('page' or 'client') used for
// delta calculations.
interact.deltaSource = function (newValue) {
    var settable = (newValue === 'client' || newValue === 'page');

    if (settable) {
        defaultOptions.deltaSource = newValue;
        return this;
    }

    return defaultOptions.deltaSource;
};
/*\
* interact.restrict
[ method ]
*
* Returns or sets the default rectangles within which actions (after snap
* calculations) are restricted.
*
* See @Interactable.restrict
*
- newValue (object) #optional an object with keys drag, resize, and/or gesture and rects or Elements as values
= (object) The current restrictions object or interact
\*/
// Get or set the default restriction rects. Booleans toggle restriction;
// an object updates per-action rects ('parent'/'self' keywords allowed);
// null resets all restrictions.
interact.restrict = function (newValue) {
    var defaults = defaultOptions.restrict;

    // no argument: act as a getter
    if (newValue === undefined) {
        return defaultOptions.restrict;
    }

    if (isBool(newValue)) {
        defaultOptions.restrictEnabled = newValue;
    }
    else if (isObject(newValue)) {
        var actions = ['drag', 'resize', 'gesture'];

        // accept rect objects or the 'parent'/'self' keywords per action
        for (var i = 0; i < actions.length; i++) {
            var action = actions[i],
                value = newValue[action];

            if (isObject(value) || /^parent$|^self$/.test(value)) {
                defaults[action] = value;
            }
        }

        if (isBool(newValue.endOnly)) {
            defaults.endOnly = newValue.endOnly;
        }
        if (isObject(newValue.elementRect)) {
            defaults.elementRect = newValue.elementRect;
        }

        defaultOptions.restrictEnabled = true;
    }
    else if (newValue === null) {
        // reset to no restrictions
        defaults.drag = defaults.resize = defaults.gesture = null;
        defaults.endOnly = false;
    }

    return this;
};
/*\
* interact.pointerMoveTolerance
[ method ]
* Returns or sets the distance the pointer must be moved before an action
* sequence occurs. This also affects tolerance for tap events.
*
- newValue (number) #optional The movement from the start position must be greater than this value
= (number | Interactable) The current setting or interact
\*/
// Get or set how far the pointer must move before an action sequence
// (or tap tolerance) is triggered.
interact.pointerMoveTolerance = function (newValue) {
    // non-numeric arguments read the current tolerance
    if (!isNumber(newValue)) {
        return defaultOptions.pointerMoveTolerance;
    }

    defaultOptions.pointerMoveTolerance = newValue;
    return this;
};
/*\
* interact.maxInteractions
[ method ]
**
* Returns or sets the maximum number of concurrent interactions allowed.
* By default only 1 interaction is allowed at a time (for backwards
* compatibility). To allow multiple interactions on the same Interactables
* and elements, you need to enable it in the draggable, resizable and
* gesturable `'max'` and `'maxPerElement'` options.
**
- newValue (number) #optional Any number. newValue <= 0 means no interactions.
\*/
// Get or set the maximum number of concurrent interactions allowed
// (values <= 0 disallow all interactions).
interact.maxInteractions = function (newValue) {
    // non-numeric arguments read the current maximum
    if (!isNumber(newValue)) {
        return maxInteractions;
    }

    maxInteractions = newValue;
    return this;
};
// Ends every interaction with the given event (e.g. on window blur)
function endAllInteractions (event) {
    var i = 0;

    while (i < interactions.length) {
        interactions[i].pointerEnd(event, event);
        i++;
    }
}
// Binds all of interact's pointer/touch/mouse listeners (plus any
// delegated event listeners) to `doc` and records it in `documents`.
// Safe to call more than once for the same document.
function listenToDocument (doc) {
    if (contains(documents, doc)) { return; }

    var win = doc.defaultView || doc.parentWindow;

    // add delegate event listener
    for (var eventType in delegatedEvents) {
        events.add(doc, eventType, delegateListener);
        events.add(doc, eventType, delegateUseCapture, true);
    }

    if (PointerEvent) {
        // use the MS-prefixed event names when PointerEvent is the
        // MSPointerEvent implementation, standard names otherwise
        if (PointerEvent === win.MSPointerEvent) {
            pEventTypes = {
                up: 'MSPointerUp', down: 'MSPointerDown', over: 'mouseover',
                out: 'mouseout', move: 'MSPointerMove', cancel: 'MSPointerCancel' };
        }
        else {
            pEventTypes = {
                up: 'pointerup', down: 'pointerdown', over: 'pointerover',
                out: 'pointerout', move: 'pointermove', cancel: 'pointercancel' };
        }

        events.add(doc, pEventTypes.down , listeners.selectorDown );
        events.add(doc, pEventTypes.move , listeners.pointerMove );
        events.add(doc, pEventTypes.over , listeners.pointerOver );
        events.add(doc, pEventTypes.out , listeners.pointerOut );
        events.add(doc, pEventTypes.up , listeners.pointerUp );
        events.add(doc, pEventTypes.cancel, listeners.pointerCancel);

        // autoscroll
        events.add(doc, pEventTypes.move, autoScroll.edgeMove);
    }
    else {
        // no PointerEvent support: bind both mouse and touch event sets
        events.add(doc, 'mousedown', listeners.selectorDown);
        events.add(doc, 'mousemove', listeners.pointerMove );
        events.add(doc, 'mouseup' , listeners.pointerUp );
        events.add(doc, 'mouseover', listeners.pointerOver );
        events.add(doc, 'mouseout' , listeners.pointerOut );

        events.add(doc, 'touchstart' , listeners.selectorDown );
        events.add(doc, 'touchmove' , listeners.pointerMove );
        events.add(doc, 'touchend' , listeners.pointerUp );
        events.add(doc, 'touchcancel', listeners.pointerCancel);

        // autoscroll
        events.add(doc, 'mousemove', autoScroll.edgeMove);
        events.add(doc, 'touchmove', autoScroll.edgeMove);
    }

    events.add(win, 'blur', endAllInteractions);

    // also end interactions when the pointer is released over a parent frame
    try {
        if (win.frameElement) {
            var parentDoc = win.frameElement.ownerDocument,
                parentWindow = parentDoc.defaultView;

            events.add(parentDoc , 'mouseup' , listeners.pointerEnd);
            events.add(parentDoc , 'touchend' , listeners.pointerEnd);
            events.add(parentDoc , 'touchcancel' , listeners.pointerEnd);
            events.add(parentDoc , 'pointerup' , listeners.pointerEnd);
            events.add(parentDoc , 'MSPointerUp' , listeners.pointerEnd);
            events.add(parentWindow, 'blur' , endAllInteractions );
        }
    }
    catch (error) {
        // frame access can throw (e.g. cross-origin); keep the error around
        interact.windowParentError = error;
    }

    // For IE's lack of Event#preventDefault
    if (events.useAttachEvent) {
        events.add(doc, 'selectstart', function (event) {
            var interaction = interactions[0];

            if (interaction.currentAction()) {
                interaction.checkAndPreventDefault(event);
            }
        });
    }

    documents.push(doc);
}
// start listening on the initial document
listenToDocument(document);
// Linear search: first index of `target` in `array` (strict equality), -1 if absent
function indexOf (array, target) {
    var len = array.length;

    for (var i = 0; i < len; i++) {
        if (array[i] === target) { return i; }
    }

    return -1;
}
// Whether `array` holds `target` (strict equality)
function contains (array, target) {
    var index = indexOf(array, target);

    return index !== -1;
}
// Cross-browser Element#matches: uses the IE8 fallback when one was
// installed below, otherwise the (possibly vendor-prefixed) native method
function matchesSelector (element, selector, nodeList) {
    if (ie8MatchesSelector) {
        return ie8MatchesSelector(element, selector, nodeList);
    }
    return element[prefixedMatchesSelector](selector);
}
// For IE8's lack of an Element#matchesSelector
// taken from http://tanalin.com/en/blog/2012/12/matches-selector-ie8/ and modified
// Install the shim only when the prefixed native method is absent or not a
// function.  The shim answers "does `element` match `selector`?" by checking
// whether `element` appears in the selector's query results.
if (!(prefixedMatchesSelector in Element.prototype) || !isFunction(Element.prototype[prefixedMatchesSelector])) {
    ie8MatchesSelector = function (element, selector, elems) {
        // `elems` lets callers supply an already-queried candidate list;
        // otherwise query the element's parent for all selector matches
        elems = elems || element.parentNode.querySelectorAll(selector);
        for (var i = 0, len = elems.length; i < len; i++) {
            if (elems[i] === element) {
                return true;
            }
        }
        return false;
    };
}
// requestAnimationFrame polyfill
// (based on Paul Irish's shim, adapted to assign the outer-scope
// `reqFrame`/`cancelFrame` variables instead of patching `window`)
(function() {
    var lastTime = 0,
        vendors = ['ms', 'moz', 'webkit', 'o'];

    // Prefer the unprefixed native implementations when present and not
    // already captured elsewhere in the file.
    reqFrame    = reqFrame    || window.requestAnimationFrame;
    cancelFrame = cancelFrame || window.cancelAnimationFrame;

    // Probe vendor prefixes, stopping at the first hit.  The previous loop
    // condition tested `!window.requestAnimationFrame`, which never changes
    // inside the loop (only the locals are assigned), so a prefixed
    // implementation found early was clobbered by the `undefined` lookups
    // of the remaining vendors.
    for (var x = 0; x < vendors.length && !reqFrame; ++x) {
        reqFrame    = window[vendors[x] + 'RequestAnimationFrame'];
        cancelFrame = window[vendors[x] + 'CancelAnimationFrame']
                   || window[vendors[x] + 'CancelRequestAnimationFrame'];
    }

    // Last resort: approximate 60fps with setTimeout.
    if (!reqFrame) {
        reqFrame = function(callback) {
            var currTime   = new Date().getTime(),
                timeToCall = Math.max(0, 16 - (currTime - lastTime)),
                id         = window.setTimeout(function() { callback(currTime + timeToCall); },
                                               timeToCall);
            lastTime = currTime + timeToCall;
            return id;
        };
    }
    if (!cancelFrame) {
        cancelFrame = function(id) {
            clearTimeout(id);
        };
    }
}());
/* global exports: true, module, define */
// http://documentcloud.github.io/underscore/docs/underscore.html#section-11
// Export `interact` the same way underscore.js does, covering the three
// common module environments:
//  - CommonJS (Node / Browserify): assign to module.exports and exports
if (typeof exports !== 'undefined') {
    if (typeof module !== 'undefined' && module.exports) {
        exports = module.exports = interact;
    }
    exports.interact = interact;
}
// AMD (RequireJS): register a named 'interact' module
else if (typeof define === 'function' && define.amd) {
    define('interact', function() {
        return interact;
    });
}
// plain <script> tag: attach to the global window object
else {
    window.interact = interact;
}
} ());
<|file_name|>test_signals.py<|end_file_name|><|fim▁begin|>import asyncio
from unittest import mock
import pytest
from multidict import CIMultiDict
from aiohttp.signals import Signal
from aiohttp.test_utils import make_mocked_request
from aiohttp.web import Application, Response
@pytest.fixture
def app():
return Application()
@pytest.fixture
def debug_app():
return Application(debug=True)
def make_request(app, method, path, headers=None):
    """Build a mocked aiohttp request bound to *app*.

    ``headers`` now defaults to a fresh, empty ``CIMultiDict`` per call.
    The previous signature used ``headers=CIMultiDict()``, i.e. a single
    module-level instance shared (and mutable) across every call -- a test
    that mutated the default headers would leak state into later tests.
    """
    if headers is None:
        headers = CIMultiDict()
    return make_mocked_request(method, path, headers, app=app)
async def test_add_signal_handler_not_a_callable(app):
    """Appending a non-callable is accepted lazily but fails when sent."""
    not_a_callable = True
    app.on_response_prepare.append(not_a_callable)
    with pytest.raises(TypeError):
        await app.on_response_prepare(None, None)
async def test_function_signal_dispatch(app):
    """A coroutine handler appended to a Signal receives the sent kwargs."""
    signal = Signal(app)
    kwargs = {'foo': 1, 'bar': 2}
    callback_mock = mock.Mock()

    # ``@asyncio.coroutine`` is deprecated (removed in Python 3.11); a
    # native coroutine function is dispatched identically by Signal.send.
    async def callback(**kwargs):
        callback_mock(**kwargs)

    signal.append(callback)
    await signal.send(**kwargs)
    callback_mock.assert_called_once_with(**kwargs)
async def test_function_signal_dispatch2(app):
    """Positional and keyword arguments are both forwarded to handlers."""
    signal = Signal(app)
    args = {'a', 'b'}
    kwargs = {'foo': 1, 'bar': 2}
    callback_mock = mock.Mock()

    # ``@asyncio.coroutine`` is deprecated (removed in Python 3.11); a
    # native coroutine function is dispatched identically by Signal.send.
    async def callback(*args, **kwargs):
        callback_mock(*args, **kwargs)

    signal.append(callback)
    await signal.send(*args, **kwargs)
    callback_mock.assert_called_once_with(*args, **kwargs)
async def test_response_prepare(app):<|fim▁hole|>
@asyncio.coroutine
def cb(*args, **kwargs):
callback(*args, **kwargs)
app.on_response_prepare.append(cb)
request = make_request(app, 'GET', '/')
response = Response(body=b'')
await response.prepare(request)
callback.assert_called_once_with(request, response)
async def test_non_coroutine(app):
    """A plain (non-coroutine) callable is also dispatched by Signal.send."""
    handler = mock.Mock()
    signal = Signal(app)
    signal.append(handler)
    await signal.send(foo=1, bar=2)
    handler.assert_called_once_with(foo=1, bar=2)
async def test_debug_signal(debug_app):
assert debug_app.debug, "Should be True"
signal = Signal(debug_app)
callback = mock.Mock()
pre = mock.Mock()
post = mock.Mock()
signal.append(callback)
debug_app.on_pre_signal.append(pre)
debug_app.on_post_signal.append(post)
await signal.send(1, a=2)
callback.assert_called_once_with(1, a=2)
pre.assert_called_once_with(1, 'aiohttp.signals:Signal', 1, a=2)
post.assert_called_once_with(1, 'aiohttp.signals:Signal', 1, a=2)
def test_setitem(app):
    """Signal supports index assignment while it is still unfrozen."""
    signal = Signal(app)
    first, second = mock.Mock(), mock.Mock()

    signal.append(first)
    assert signal[0] is first

    signal[0] = second
    assert signal[0] is second
def test_delitem(app):
    """Handlers can be removed by index while the signal is unfrozen."""
    signal = Signal(app)
    handler = mock.Mock()

    signal.append(handler)
    assert len(signal) == 1

    del signal[0]
    assert len(signal) == 0
def test_cannot_append_to_frozen_signal(app):
    # A frozen Signal must reject new handlers with RuntimeError and keep
    # its existing handler list intact.
    signal = Signal(app)
    m1 = mock.Mock()
    m2 = mock.Mock()
    signal.append(m1)
    signal.freeze()
    with pytest.raises(RuntimeError):
        signal.append(m2)
    # the rejected handler must not have been partially added
    assert list(signal) == [m1]
def test_cannot_setitem_in_frozen_signal(app):
signal = Signal(app)
m1 = mock.Mock()
m2 = mock.Mock()
signal.append(m1)
signal.freeze()
with pytest.raises(RuntimeError):
signal[0] = m2
assert list(signal) == [m1]
def test_cannot_delitem_in_frozen_signal(app):
signal = Signal(app)
m1 = mock.Mock()
signal.append(m1)
signal.freeze()
with pytest.raises(RuntimeError):
del signal[0]
assert list(signal) == [m1]<|fim▁end|> | callback = mock.Mock() |
<|file_name|>Util.TypeChecker.test.js<|end_file_name|><|fim▁begin|>module.exports = (function () {
var TypeChecker = Cactus.Util.TypeChecker;
var JSON = Cactus.Util.JSON;
var stringify = JSON.stringify;
var object = Cactus.Addon.Object;
var collection = Cactus.Data.Collection;
var gettype = TypeChecker.gettype.bind(TypeChecker);
return {
"null and undefined" : function () {
var o = new TypeChecker({
type : "number"
});
exception(/Expected "number", but got undefined \(type "undefined"\)/,
o.parse.bind(o, undefined));
exception(/Expected "number", but got null \(type "null"\)/,
o.parse.bind(o, null));
},
"required values" : function () {
var o = new TypeChecker({
required : false,
type : "boolean"
});
equal(true, o.parse(true));
o.parse(null);
},
"default value" : function () {
var o = new TypeChecker({
type : "number",
defaultValue : 3
});
assert.eql(3, o.parse(null));
assert.eql(4, o.parse(4));
o = new TypeChecker({
type : {
a : {
type : "number",
defaultValue : 1
}
}
});
var h = o.parse({ a : 2 });
assert.eql(2, o.parse({ a : 2}).a);
assert.eql(1, o.parse({ a : null }).a);
assert.eql(1, o.parse({}).a);
o = new TypeChecker({
type : {
x : {
type : "boolean",
defaultValue : false
}
}
});
eql({ x : true }, o.parse({ x : true }));
eql({ x : false }, o.parse({ x : false }));
eql({ x : false }, o.parse({}));
// When not passing bool properties.
o = new TypeChecker({
type : {
b : { type : "boolean", defaultValue : false }
}
});
eql({ b : false }, o.parse({}));
// Default values with incorrect types should have special error message (always throw error)
exception(/Expected "boolean", but got 1/, function () {
return new TypeChecker({
type : "boolean",
defaultValue : 1
});
});
},
defaultValueFunc : function () {
var o = new TypeChecker({
defaultValueFunc : function () { return 1; },
type : "number"
});
assert.strictEqual(1, o.parse(null));
o = new TypeChecker({
type : {<|fim▁hole|> a : {
defaultValueFunc : function () { return 2; },
type : "number"
}
}
});
assert.strictEqual(2, o.parse({ a : null }).a);
assert.strictEqual(2, o.parse({}).a);
// defaultValueFunc return value must match type.
exception(/expected "boolean", but got 1/i,
function () {
return new TypeChecker({
defaultValueFunc : function () { return 1; },
type : "boolean"
}).parse(undefined);
});
exception(/expected "boolean", but got 1/i,
function () {
return new TypeChecker({
type : {
a : {
defaultValueFunc : function () { return 1; },
type : "boolean"
}
}
}).parse({});
});
},
validators : function () {
var o = new TypeChecker({
type : "number",
validators : [{
func : function (v) {
return v > 0;
}
}]
});
o.parse(1);
o.parse(0, false);
eql({
"" : ["Validation failed: got 0."]
}, o.getErrors());
// Validation error message.
o = new TypeChecker({
type : "number",
validators : [{
func : function (v) {
return v > 0;
},
message : "Expected positive number."
}]
});
o.parse(1);
exception(/TypeChecker: Error: Expected positive number./,
o.parse.bind(o, 0));
eql({
"" : ["Expected positive number."]
}, o.getErrors());
// Multiple ordered validators.
o = new TypeChecker({
type : "number",
validators : [{
func : function (v) {
return v > -1;
},
message : "Expected number bigger than -1."
}, {
func : function (v) {
return v < 1;
},
message : "Expected number smaller than 1."
}]
});
o.parse(0);
exception(/Expected number bigger than -1/i, o.parse.bind(o, -1));
exception(/Expected number smaller than 1/i, o.parse.bind(o, 1));
o = new TypeChecker({
type : "number",
validators : [{
func : function (v) {
return v > 0;
},
message : "Expected number bigger than 0."
}, {
func : function (v) {
return v > 1;
},
message : "Expected number bigger than 1."
}]
});
o.parse(2);
exception(/bigger than 0.+ bigger than 1./i, o.parse.bind(o, 0));
},
"simple interface" : function () {
var o = TypeChecker.simple("number");
o.parse(1);
o = TypeChecker.simple(["number"]);
o.parse([1]);
o = TypeChecker.simple({
a : "number",
b : "boolean"
});
o.parse({
a : 1,
b : true
});
o = TypeChecker.simple({
a : ["number"]
});
o.parse({
a : [1]
});
o = TypeChecker.simple({
a : {
b : "boolean"
}
});
o.parse({
a : {
b : true
}
});
// Classes.
Class("X");
o = TypeChecker.simple({ _type : X });
o.parse(new X());
o = TypeChecker.simple({
a : { _type : X }
});
o.parse({
a : new X()
});
},
errorHash : function () {
var o = TypeChecker.simple("string");
exception(/Nothing parsed/i, o.hasErrors.bind(o));
exception(/Nothing parsed/i, o.getErrors.bind(o));
o.parse("x", false);
exception(/No errors exist/, o.getErrors.bind(o));
o.parse(1, false);
ok(o.hasErrors());
var errors = o.getErrors();
ok(o.hasErrorsFor(""));
not(o.hasErrorsFor("foo"));
eql({ "" : ['Expected "string", but got 1 (type "number")'] }, o.getErrors());
o = TypeChecker.simple({
a : "number",
b : "number"
});
o.parse({
a : "x",
b : true
}, false);
ok(o.hasErrors());
eql({
a : ['Expected "number", but got "x" (type "string")'],
b : ['Expected "number", but got true (type "boolean")']
}, o.getErrors());
ok(o.hasErrorsFor("a"));
ok(o.hasErrorsFor("b"));
eql(['Expected "number", but got "x" (type "string")'], o.getErrorsFor("a"));
eql(['Expected "number", but got true (type "boolean")'], o.getErrorsFor("b"));
o = new TypeChecker({
type : "number",
validators : [{
func : Function.returning(false),
message : "false"
}]
});
o.parse(1, false);
eql({
"" : ["false"]
}, o.getErrors());
// Errors for array validators.
o = new TypeChecker({
type : {
p : {
type : "string",
validators : [{
func : Function.returning(false),
message : "Error #1."
}, {
func : Function.returning(false),
message : "Error #2."
}]
}
}
});
o.parse({ p : "" }, false);
eql({ p : ["Error #1.", "Error #2."] }, o.getErrors());
// When Error is thrown, there should be a hash property with the error
// messages as well.
o = new TypeChecker({
type : "string"
});
o.parse(1, false);
assert.throws(o.parse.bind(o, 1), function (e) {
assert.ok(/expected "string".+got 1 \(type "number"\)/i.test(e.message));
assert.ok("hash" in e, "Missing hash property");
eql({
"" : ['Expected "string", but got 1 (type "number")']
}, e.hash);
return true;
});
},
validators2 : function () {
// Validators should run only if all other validations pass.
var ran = false;
var o = new TypeChecker({
type : "number",
defaultValue : 0,
validators : [{
func : function (v) {
ran = true;
return v === 0;
},
message : "Only way to validate is to send null or 0."
}]
});
o.parse(0);
o.parse("x", false);
eql({
"" : [
'Expected "number", but got "x" (type "string")',
'Only way to validate is to send null or 0.'
]
}, o.getErrors());
// Do not run validators if constraints fail.
o.parse("x", false);
assert.strictEqual(1, object.count(o.getErrors()));
o.parse(-1, false);
eql({
"" : ["Only way to validate is to send null or 0."]
}, o.getErrors());
// Default value should be applied before validation as well.
ran = false;
assert.strictEqual(0, o.parse(null));
assert.ok(ran, "Validation did not run.");
},
"predefined validations" : function () {
var o = new TypeChecker({
type : "number",
validators : ["natural"]
});
o.parse(1);
o.parse(0);
o.parse(-1, false);
eql({ "" : ["Expected natural number."] }, o.getErrors());
o = new TypeChecker({
type : "number",
validators : ["positive"]
});
o.parse(1);
o.parse(0, false);
eql({ "" : ["Expected positive number."] }, o.getErrors());
o = new TypeChecker({
type : "number",
validators : ["negative"]
});
o.parse(-1);
o.parse(0, false);
eql({ "" : ["Expected negative number."] }, o.getErrors());
o = new TypeChecker({
type : "number",
validators : ["x"]
});
exception(/Undefined built in validator "x"/i, o.parse.bind(o, 1));
o = new TypeChecker({
type : {
a : {
type : "number",
validators : ["x"]
}
}
});
exception(/Undefined built in validator "x"/i, o.parse.bind(o, { a : 1 }));
o = new TypeChecker({
type : "string",
validators : ["non empty string"]
});
o.parse("x");
o.parse("", false);
eql({ "" : ["Expected non-empty string."] }, o.getErrors());
},
T_Array : function () {
eql([{ type : "string" }], gettype(new TypeChecker.types.T_Array({ type : "string" })));
var o = new TypeChecker({
type : [{ type : "number" }]
});
eql([1, 2], o.parse([1, 2]));
eql([], o.parse([]));
exception(/Expected \[\{"type":"number"\}\], but got "a" \(type "string"\)/i, o.parse.bind(o, "a"));
exception(/error in property "0": expected "number", but got "a" \(type "string"\)/i, o.parse.bind(o, ["a"]));
exception(/error in property "1": expected "number", but got true \(type "boolean"\)/i, o.parse.bind(o, [1, true]));
exception(/error in property "0": expected "number", but got "a"[\s\S]+error in property "1": expected "number", but got true/i, o.parse.bind(o, ["a", true]));
// Nesting of arrays.
var o = new TypeChecker({
type : [{ type : [{ type : "number" }] }]
});
eql([[1, 2]], o.parse([[1, 2]]));
exception(/^TypeChecker: Error in property "0": expected \[\{"type":"number"\}\], but got 1/i,
o.parse.bind(o, [1, [2, 3]]));
exception(/^TypeChecker: Error in property "1.1": expected "number", but got true/i,
o.parse.bind(o, [[1], [2, true]]));
eql([[]], o.parse([[]]));
eql([], o.parse([]));
// Optional arrays.
o = new TypeChecker({
type : [{
type : "mixed"
}],
defaultValue : []
});
o.parse(null);
// Optional array in hash.
o = new TypeChecker({
type : {
a : {
type : [{
type : "number"
}],
defaultValue : []
}
}
});
o.parse({});
},
T_Primitive : function () {
var o = new TypeChecker({
type : "string"
});
assert.eql("aoeu", o.parse("aoeu"));
exception(/expected "string", but got 1 \(type "number"\)/i, o.parse.bind(o, 1));
exception(/expected "string", but got true \(type "boolean"\)/i, o.parse.bind(o, true));
o = new TypeChecker({
type : "number"
});
assert.eql(100, o.parse(100));
exception(/^TypeChecker: Error: Expected "number", but got "1" \(type "string"\)$/, o.parse.bind(o, "1"));
// Default values
o = new TypeChecker({
type : "boolean",
defaultValue : false
});
o.parse(true);
o.parse(false);
not(o.parse(null));
},
T_Enumerable : function () {
var o = new TypeChecker({
enumerable : [1,2,3]
});
eql(1, o.parse(1));
o.parse(2);
o.parse(3);
exception(/^TypeChecker: Error: Expected a value in \[1,2,3\], but got 0$/, o.parse.bind(o, 0));
exception(/^TypeChecker: Error: Expected a value in \[1,2,3\], but got 4$/, o.parse.bind(o, 4));
eql({ enumerable : [1,2,3] }, gettype(new TypeChecker.types.T_Enumerable([1, 2, 3])));
},
"T_Union" : function () {
var o = new TypeChecker({
union : ["string", "number"]
});
eql(1, o.parse(1));
eql("x", o.parse("x"));
exception(/Expected a Union/, o.parse.bind(o, true));
eql({ union : [
{ type : "string"},
{ type : "number" }
]}, gettype(new TypeChecker.types.T_Union([
{ type : "string" },
{ type : "number" }
])));
},
"T_Instance" : function () {
var Foo2 = Class("Foo2", {});
Class("Bar", {
isa : Foo2
});
var o = new TypeChecker({
type : Foo2
});
var foo2 = new Foo2();
equal(foo2, o.parse(foo2));
o.parse(new Bar());
exception(/Expected an instance of "Foo2", but got value <1> \(type "number"\)/,
o.parse.bind(o, 1));
Class("Baz");
exception(/Expected an instance of "Foo2", but got value <a Baz> \(type "Baz"\)$/,
o.parse.bind(o, new Baz()));
// Non-Joose classes.
function Bax() {
}
function Qux() {
}
Qux.extend(Bax);
o = new TypeChecker({
type : Bax
});
o.parse(new Bax());
o.parse(new Qux());
function Qax() {
}
Qax.prototype.toString = function () {
return "my Qax";
};
exception(/Expected an instance of "Bax", but got value <1> \(type "number"\)/,
o.parse.bind(o, 1));
exception(/Expected an instance of "Bax", but got value <my Qax> \(type "Qax"\)$/,
o.parse.bind(o, new Qax()));
// Anonymous classes.
var F = function () {};
var G = function () {};
o = new TypeChecker({
type : F
});
G.extend(F);
o.parse(new F());
o.parse(new G());
var H = function () {};
H.prototype.toString = Function.returning("my H");
exception(/Expected an instance of "anonymous type", but got value <1>/,
o.parse.bind(o, 1));
exception(/Expected an instance of "anonymous type", but got value <my H> \(type "anonymous type"\)/i,
o.parse.bind(o, new H()));
function I() {}
equal(I, gettype(new TypeChecker.types.T_Instance(I)).type);
},
T_Hash : function () {
var o = new TypeChecker({ x : { type : "boolean" } });
eql({ x : { type : "boolean" } }, gettype(new TypeChecker.types.T_Hash({ x : { type : "boolean" } })));
o = new TypeChecker({
type : {
a : { type : "number" },
b : { type : "boolean" }
}
});
eql({ a : 1, b : true }, o.parse({ a : 1, b : true }));
o = new TypeChecker({
type : {
a : { type : "number" },
b : { type : "boolean" }
}
});
exception(/Expected \{"a":\{"type":"number"\},"b":\{"type":"boolean"\}\}, but got 1/i,
o.parse.bind(o, 1));
exception(/Error in property "a": expected "number", but got "2"/i,
o.parse.bind(o, { a : "2", b : true }));
exception(/Error in property "a": expected "number", but got "2"[\s\S]+Error in property "b": expected "boolean", but got "2"/i,
o.parse.bind(o, { a : "2", b : "2" }));
exception(/Error in property "b": Missing property/,
o.parse.bind(o, { a : 1 }));
exception(/Error in property "c": Property lacks definition/i,
o.parse.bind(o, { a : 1, b : true, c : "1" }));
// With required specified.
o = new TypeChecker({
type : {
name : { type : "string", required : true }
}
});
assert.throws(o.parse.bind(o, {}), function (e) {
assert.ok(/"name": Missing property/.test(e.message));
return true;
});
// Non-required properties.
o = new TypeChecker({
type : {
a : { type : "number", required : false },
b : { type : "boolean", required : false }
}
});
eql({ a : 1, b : false },
o.parse({ a : 1, b : false }));
var h = o.parse({ a : 1, b : undefined });
ok(!("b" in h));
h = o.parse({ a : 1, b : null });
equal(null, h.b);
h = o.parse({ a : undefined, b : undefined });
ok(object.isEmpty(h));
h = o.parse({});
ok(object.isEmpty(h));
// Skip properties not in definition.
o = new TypeChecker({
allowUndefined : true,
type : {
a : { type : "number" }
}
});
o.parse({}, false);
eql({
a : ["Missing property"]
}, o.getErrors());
o.parse({ a : 1 });
o.parse({ a : 1, b : 2 });
o.parse({ b : 2 }, false);
eql({
a : ["Missing property"]
}, o.getErrors());
// Remove skipped props that are undefined.
eql({ a : 1 }, o.parse({ a : 1 }));
},
T_Map : function () {
var o = new TypeChecker({
map : true,
type : "number"
});
eql({ a : 1, b : 1 }, o.parse({ a : 1, b : 1 }));
o.parse({ a : 1, b : false }, false);
eql({ b : ['Expected "number", but got false (type "boolean")'] }, o.getErrors());
},
"T_Mixed" : function () {
var o = new TypeChecker({
type : "mixed"
});
equal(true, o.parse(true));
o.parse("");
o.parse({});
eql([], o.parse([]));
ok(null === o.parse(null));
ok(undefined === o.parse(undefined));
equal("mixed", gettype(new TypeChecker.types.T_Mixed()));
},
"typeof" : function () {
var t = TypeChecker.typeof.bind(TypeChecker);
equal("number", t(1));
equal("boolean", t(true));
equal("undefined", t(undefined));
equal("null", t(null));
equal("Function", t(function () {}));
equal("Object", t({}));
equal("Array", t([]));
Class("JooseClass");
equal("JooseClass", t(new JooseClass()));
function MyClass() {}
equal("MyClass", t(new MyClass()));
var AnonymousClass = function () {};
equal("anonymous type", t(new AnonymousClass));
},
"BUILD errors" : function () {
exception(/Must be a hash/i,
function () { return new TypeChecker(); });
exception(/May only specify one of required, defaultValue and defaultValueFunc/i,
function () { return new TypeChecker({ required : true, defaultValue : 1 }); });
// required or defaultValue or defaultValueFunc
},
helpers : function () {
//var tc = new TypeChecker({
// type : "number",
// validators : [{
// func : function (o, helpers) {
// return !!helpers;
// },
// message : "helpers == false"
// }]
//});
//tc.parse(1, true, true);
//tc.parse(1, false, true);
//eql({
// "" : ["helpers == false"]
//}, tc.getErrors());
},
"recursive definition" : function () {
var o = new TypeChecker({
type : {
// type
type : {
required : false,
type : "mixed",
validators : [{
func : function (v) {
if (typeof v === "string") {
return collection.hasValue(["string", "number", "object", "function", "boolean", "mixed"], v);
} else if (v instanceof Array) {
// Flat check only.
return v.length === 1;
} else if (v instanceof Object) {
// Flat check only.
return true;
}
return false;
}
}]
},
required : {
type : "boolean",
defaultValue : true
},
defaultValue : {
required : false,
type : "mixed"
},
defaultValueFunc : {
required : false,
type : Function
},
validators : {
type : [{
type : {
func : { type : Function },
message : { type : "string" }
}
}],
defaultValue : []
},
enumerable : {
required : false,
type : Array
},
allowUndefined : {
defaultValue : false,
type : "boolean"
}
}
});
o.parse({
type : "string"
});
o.parse({
type : "number"
});
o.parse({
required : false,
type : "number"
});
o.parse({
defaultValue : 3,
type : "number"
});
o.parse({
defaultValue : true,
type : "boolean"
});
o.parse({
defaultValue : 4,
type : "mixed"
});
o.parse({
defaultValue : {},
type : "mixed"
});
o.parse({
defaultValueFunc : function () { return 1; },
type : "number"
});
o.parse({
type : [{ type : "string" }]
});
o.parse({
type : "mixed",
validators : [{
func : Function.empty,
message : "msg"
}]
});
o.parse({
type : "number",
enumerable : [1,2,3]
});
o.parse({
type : {}
});
o.parse({
allowUndefined : true,
type : {}
});
}
};
})();<|fim▁end|> | |
<|file_name|>test_cloud_trace_exporter.py<|end_file_name|><|fim▁begin|># Copyright 2021 The OpenTelemetry Authors
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import re
import unittest
from unittest import mock
import pkg_resources
from google.cloud.trace_v2.types import AttributeValue, BatchWriteSpansRequest
from google.cloud.trace_v2.types import Span as ProtoSpan
from google.cloud.trace_v2.types import TruncatableString
from google.rpc import code_pb2
from google.rpc.status_pb2 import Status
from opentelemetry.exporter.cloud_trace import (
MAX_EVENT_ATTRS,
MAX_LINK_ATTRS,
MAX_NUM_EVENTS,
MAX_NUM_LINKS,
CloudTraceSpanExporter,
_extract_attributes,
_extract_events,
_extract_links,
_extract_resources,
_extract_span_kind,
_extract_status,
_format_attribute_value,
_get_time_from_ns,
_strip_characters,
_truncate_str,
)
from opentelemetry.exporter.cloud_trace.version import __version__
from opentelemetry.sdk.resources import Resource
from opentelemetry.sdk.trace import Event
from opentelemetry.sdk.trace import _Span as Span
from opentelemetry.trace import Link, SpanContext, SpanKind
from opentelemetry.trace.status import Status as SpanStatus
from opentelemetry.trace.status import StatusCode
# pylint: disable=too-many-public-methods
class TestCloudTraceSpanExporter(unittest.TestCase):
    def setUp(self):
        # Patch the Cloud Trace client class for every test so that no
        # exporter constructed without an explicit ``client=`` performs
        # real network/credential lookups; tests that need to inspect
        # calls pass their own mock client instead.
        self.client_patcher = mock.patch(
            "opentelemetry.exporter.cloud_trace.TraceServiceClient"
        )
        self.client_patcher.start()

    def tearDown(self):
        # undo the patch started in setUp (must not be stopped twice)
        self.client_patcher.stop()
@classmethod
def setUpClass(cls):
cls.project_id = "PROJECT"
cls.attributes_variety_pack = {
"str_key": "str_value",
"bool_key": False,
"double_key": 1.421,
"int_key": 123,
}
cls.extracted_attributes_variety_pack = ProtoSpan.Attributes(
attribute_map={
"str_key": AttributeValue(
string_value=TruncatableString(
value="str_value", truncated_byte_count=0
)
),
"bool_key": AttributeValue(bool_value=False),
"double_key": AttributeValue(
string_value=TruncatableString(
value="1.4210", truncated_byte_count=0
)
),
"int_key": AttributeValue(int_value=123),
}
)
cls.agent_code = _format_attribute_value(
"opentelemetry-python {}; google-cloud-trace-exporter {}".format(
_strip_characters(
pkg_resources.get_distribution("opentelemetry-sdk").version
),
_strip_characters(__version__),
)
)
cls.example_trace_id = "6e0c63257de34c92bf9efcd03927272e"
cls.example_span_id = "95bb5edabd45950f"
cls.example_time_in_ns = 1589919268850900051
cls.example_time_stamp = _get_time_from_ns(cls.example_time_in_ns)
cls.str_20kb = "a" * 20 * 1024
cls.str_16kb = "a" * 16 * 1024
cls.str_300 = "a" * 300
cls.str_256 = "a" * 256
cls.str_128 = "a" * 128
def test_constructor_default(self):
exporter = CloudTraceSpanExporter(self.project_id)
self.assertEqual(exporter.project_id, self.project_id)
def test_constructor_explicit(self):
client = mock.Mock()
exporter = CloudTraceSpanExporter(self.project_id, client=client)
self.assertIs(exporter.client, client)
self.assertEqual(exporter.project_id, self.project_id)
def test_export(self):
resource_info = Resource(
{
"cloud.account.id": 123,
"host.id": "host",
"cloud.zone": "US",
"cloud.provider": "gcp",
"gcp.resource_type": "gce_instance",
}
)
span_datas = [
Span(
name="span_name",
context=SpanContext(
trace_id=int(self.example_trace_id, 16),
span_id=int(self.example_span_id, 16),
is_remote=False,
),
parent=None,
kind=SpanKind.INTERNAL,
resource=resource_info,
attributes={"attr_key": "attr_value"},
)
]
cloud_trace_spans = {
"name": "projects/{}/traces/{}/spans/{}".format(
self.project_id, self.example_trace_id, self.example_span_id
),
"span_id": self.example_span_id,
"parent_span_id": None,
"display_name": TruncatableString(
value="span_name", truncated_byte_count=0
),
"attributes": ProtoSpan.Attributes(
attribute_map={
"g.co/r/gce_instance/zone": _format_attribute_value("US"),
"g.co/r/gce_instance/instance_id": _format_attribute_value(
"host"
),
"g.co/r/gce_instance/project_id": _format_attribute_value(
"123"
),
"g.co/agent": self.agent_code,
"attr_key": _format_attribute_value("attr_value"),
}
),
"links": None,
"status": None,
"time_events": None,
"start_time": None,
"end_time": None,
# pylint: disable=no-member
"span_kind": ProtoSpan.SpanKind.INTERNAL,
}
client = mock.Mock()
exporter = CloudTraceSpanExporter(self.project_id, client=client)
exporter.export(span_datas)
self.assertTrue(client.batch_write_spans.called)
client.batch_write_spans.assert_called_with(
request=BatchWriteSpansRequest(
name="projects/{}".format(self.project_id),
spans=[cloud_trace_spans],
)
)
<|fim▁hole|> )
def test_extract_status_code_ok(self):
self.assertEqual(
_extract_status(SpanStatus(status_code=StatusCode.OK)),
Status(code=code_pb2.OK),
)
def test_extract_status_code_error(self):
self.assertEqual(
_extract_status(
SpanStatus(
status_code=StatusCode.ERROR,
description="error_desc",
)
),
Status(code=code_pb2.UNKNOWN, message="error_desc"),
)
def test_extract_status_code_future_added(self):
self.assertEqual(
_extract_status(
SpanStatus(
status_code=mock.Mock(),
)
),
Status(code=code_pb2.UNKNOWN),
)
def test_extract_empty_attributes(self):
self.assertEqual(
_extract_attributes({}, num_attrs_limit=4),
ProtoSpan.Attributes(attribute_map={}),
)
def test_extract_variety_of_attributes(self):
self.assertEqual(
_extract_attributes(
self.attributes_variety_pack, num_attrs_limit=4
),
self.extracted_attributes_variety_pack,
)
def test_extract_label_mapping_attributes(self):
attributes_labels_mapping = {
"http.scheme": "http",
"http.host": "172.19.0.4:8000",
"http.method": "POST",
"http.request_content_length": 321,
"http.response_content_length": 123,
"http.route": "/fuzzy/search",
"http.status_code": 200,
"http.url": "http://172.19.0.4:8000/fuzzy/search",
"http.user_agent": "Mozilla/5.0 (X11; Linux x86_64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/83.0.4103.116 Safari/537.36",
}
extracted_attributes_labels_mapping = ProtoSpan.Attributes(
attribute_map={
"/http/client_protocol": AttributeValue(
string_value=TruncatableString(
value="http", truncated_byte_count=0
)
),
"/http/host": AttributeValue(
string_value=TruncatableString(
value="172.19.0.4:8000", truncated_byte_count=0
)
),
"/http/method": AttributeValue(
string_value=TruncatableString(
value="POST", truncated_byte_count=0
)
),
"/http/request/size": AttributeValue(int_value=321),
"/http/response/size": AttributeValue(int_value=123),
"/http/route": AttributeValue(
string_value=TruncatableString(
value="/fuzzy/search", truncated_byte_count=0
)
),
"/http/status_code": AttributeValue(int_value=200),
"/http/url": AttributeValue(
string_value=TruncatableString(
value="http://172.19.0.4:8000/fuzzy/search",
truncated_byte_count=0,
)
),
"/http/user_agent": AttributeValue(
string_value=TruncatableString(
value="Mozilla/5.0 (X11; Linux x86_64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/83.0.4103.116 Safari/537.36",
truncated_byte_count=0,
)
),
}
)
self.assertEqual(
_extract_attributes(attributes_labels_mapping, num_attrs_limit=9),
extracted_attributes_labels_mapping,
)
def test_ignore_invalid_attributes(self):
self.assertEqual(
_extract_attributes(
{"illegal_attribute_value": {}, "legal_attribute": 3},
num_attrs_limit=4,
),
ProtoSpan.Attributes(
attribute_map={"legal_attribute": AttributeValue(int_value=3)},
dropped_attributes_count=1,
),
)
def test_too_many_attributes(self):
too_many_attrs = {}
for attr_key in range(5):
too_many_attrs[str(attr_key)] = 0
proto_attrs = _extract_attributes(too_many_attrs, num_attrs_limit=4)
self.assertEqual(proto_attrs.dropped_attributes_count, 1)
def test_add_agent_attribute(self):
self.assertEqual(
_extract_attributes({}, num_attrs_limit=4, add_agent_attr=True),
ProtoSpan.Attributes(
attribute_map={"g.co/agent": self.agent_code},
dropped_attributes_count=0,
),
)
def test_agent_attribute_priority(self):
# Drop existing attributes in favor of the agent attribute
self.assertEqual(
_extract_attributes(
{"attribute_key": "attr_value"},
num_attrs_limit=1,
add_agent_attr=True,
),
ProtoSpan.Attributes(
attribute_map={"g.co/agent": self.agent_code},
dropped_attributes_count=1,
),
)
def test_attribute_value_truncation(self):
# shouldn't truncate
self.assertEqual(
_format_attribute_value(self.str_300),
AttributeValue(
string_value=TruncatableString(
value=self.str_300,
truncated_byte_count=0,
)
),
)
# huge string should truncate
self.assertEqual(
_format_attribute_value(self.str_20kb),
AttributeValue(
string_value=TruncatableString(
value=self.str_16kb,
truncated_byte_count=(20 - 16) * 1024,
)
),
)
def test_list_attribute_value(self):
self.assertEqual(
_format_attribute_value(("one", "two")),
AttributeValue(
string_value=TruncatableString(
value="one,two", truncated_byte_count=0
)
),
)
self.assertEqual(
_format_attribute_value([True]),
AttributeValue(
string_value=TruncatableString(
value="True", truncated_byte_count=0
)
),
)
self.assertEqual(
_format_attribute_value((2, 5)),
AttributeValue(
string_value=TruncatableString(
value="2,5", truncated_byte_count=0
)
),
)
self.assertEqual(
_format_attribute_value([2.0, 0.5, 4.55]),
AttributeValue(
string_value=TruncatableString(
value="2.0,0.5,4.55", truncated_byte_count=0
)
),
)
def test_attribute_key_truncation(self):
self.assertEqual(
_extract_attributes(
{self.str_300: "attr_value"}, num_attrs_limit=4
),
ProtoSpan.Attributes(
attribute_map={
self.str_128: AttributeValue(
string_value=TruncatableString(
value="attr_value", truncated_byte_count=0
)
)
}
),
)
def test_extract_empty_events(self):
self.assertIsNone(_extract_events([]))
def test_too_many_events(self):
event = Event(
name="event", timestamp=self.example_time_in_ns, attributes={}
)
too_many_events = [event] * (MAX_NUM_EVENTS + 5)
self.assertEqual(
_extract_events(too_many_events),
ProtoSpan.TimeEvents(
time_event=[
{
"time": self.example_time_stamp,
"annotation": {
"description": TruncatableString(
value="event",
),
"attributes": {},
},
},
]
* MAX_NUM_EVENTS,
dropped_annotations_count=len(too_many_events)
- MAX_NUM_EVENTS,
),
)
def test_too_many_event_attributes(self):
event_attrs = {}
for attr_key in range(MAX_EVENT_ATTRS + 5):
event_attrs[str(attr_key)] = 0
proto_events = _extract_events(
[
Event(
name="a",
attributes=event_attrs,
timestamp=self.example_time_in_ns,
)
]
)
self.assertEqual(
len(
proto_events.time_event[0].annotation.attributes.attribute_map
),
MAX_EVENT_ATTRS,
)
self.assertEqual(
proto_events.time_event[
0
].annotation.attributes.dropped_attributes_count,
len(event_attrs) - MAX_EVENT_ATTRS,
)
def test_extract_multiple_events(self):
event1 = Event(
name="event1",
attributes=self.attributes_variety_pack,
timestamp=self.example_time_in_ns,
)
event2_nanos = 1589919438550020326
event2 = Event(
name="event2",
attributes={"illegal_attr_value": dict()},
timestamp=event2_nanos,
)
self.assertEqual(
_extract_events([event1, event2]),
ProtoSpan.TimeEvents(
time_event=[
{
"time": self.example_time_stamp,
"annotation": {
"description": TruncatableString(
value="event1", truncated_byte_count=0
),
"attributes": self.extracted_attributes_variety_pack,
},
},
{
"time": _get_time_from_ns(event2_nanos),
"annotation": {
"description": TruncatableString(
value="event2", truncated_byte_count=0
),
"attributes": ProtoSpan.Attributes(
attribute_map={}, dropped_attributes_count=1
),
},
},
]
),
)
def test_event_name_truncation(self):
event1 = Event(
name=self.str_300, attributes={}, timestamp=self.example_time_in_ns
)
self.assertEqual(
_extract_events([event1]),
ProtoSpan.TimeEvents(
time_event=[
{
"time": self.example_time_stamp,
"annotation": {
"description": TruncatableString(
value=self.str_256,
truncated_byte_count=300 - 256,
),
"attributes": {},
},
},
]
),
)
def test_extract_empty_links(self):
self.assertIsNone(_extract_links([]))
def test_extract_multiple_links(self):
span_id1 = "95bb5edabd45950f"
span_id2 = "b6b86ad2915c9ddc"
link1 = Link(
context=SpanContext(
trace_id=int(self.example_trace_id, 16),
span_id=int(span_id1, 16),
is_remote=False,
),
attributes={},
)
link2 = Link(
context=SpanContext(
trace_id=int(self.example_trace_id, 16),
span_id=int(span_id1, 16),
is_remote=False,
),
attributes=self.attributes_variety_pack,
)
link3 = Link(
context=SpanContext(
trace_id=int(self.example_trace_id, 16),
span_id=int(span_id2, 16),
is_remote=False,
),
attributes={"illegal_attr_value": dict(), "int_attr_value": 123},
)
self.assertEqual(
_extract_links([link1, link2, link3]),
ProtoSpan.Links(
link=[
{
"trace_id": self.example_trace_id,
"span_id": span_id1,
"type": "TYPE_UNSPECIFIED",
"attributes": ProtoSpan.Attributes(attribute_map={}),
},
{
"trace_id": self.example_trace_id,
"span_id": span_id1,
"type": "TYPE_UNSPECIFIED",
"attributes": self.extracted_attributes_variety_pack,
},
{
"trace_id": self.example_trace_id,
"span_id": span_id2,
"type": "TYPE_UNSPECIFIED",
"attributes": {
"attribute_map": {
"int_attr_value": AttributeValue(int_value=123)
},
},
},
]
),
)
def test_extract_link_with_none_attribute(self):
link = Link(
context=SpanContext(
trace_id=int(self.example_trace_id, 16),
span_id=int(self.example_span_id, 16),
is_remote=False,
),
attributes=None,
)
self.assertEqual(
_extract_links([link]),
ProtoSpan.Links(
link=[
{
"trace_id": self.example_trace_id,
"span_id": self.example_span_id,
"type": "TYPE_UNSPECIFIED",
"attributes": ProtoSpan.Attributes(attribute_map={}),
},
]
),
)
def test_too_many_links(self):
link = Link(
context=SpanContext(
trace_id=int(self.example_trace_id, 16),
span_id=int(self.example_span_id, 16),
is_remote=False,
),
attributes={},
)
too_many_links = [link] * (MAX_NUM_LINKS + 5)
self.assertEqual(
_extract_links(too_many_links),
ProtoSpan.Links(
link=[
{
"trace_id": self.example_trace_id,
"span_id": self.example_span_id,
"type": "TYPE_UNSPECIFIED",
"attributes": {},
}
]
* MAX_NUM_LINKS,
dropped_links_count=len(too_many_links) - MAX_NUM_LINKS,
),
)
def test_too_many_link_attributes(self):
link_attrs = {}
for attr_key in range(MAX_LINK_ATTRS + 1):
link_attrs[str(attr_key)] = 0
attr_link = Link(
context=SpanContext(
trace_id=int(self.example_trace_id, 16),
span_id=int(self.example_span_id, 16),
is_remote=False,
),
attributes=link_attrs,
)
proto_link = _extract_links([attr_link])
self.assertEqual(
len(proto_link.link[0].attributes.attribute_map), MAX_LINK_ATTRS
)
def test_extract_empty_resources(self):
self.assertEqual(_extract_resources(Resource.get_empty()), {})
def test_extract_resource_attributes_with_regex(self):
resource_regex = re.compile(r"service\..*")
resource = Resource(
attributes={
"cloud.account.id": 123,
"host.id": "host",
"cloud.zone": "US",
"cloud.provider": "gcp",
"extra_info": "extra",
"gcp.resource_type": "gce_instance",
"not_gcp_resource": "value",
"service.name": "my-app",
"service.version": "1",
}
)
expected_extract = {
"g.co/r/gce_instance/project_id": "123",
"g.co/r/gce_instance/instance_id": "host",
"g.co/r/gce_instance/zone": "US",
"service.name": "my-app",
"service.version": "1",
}
self.assertEqual(
_extract_resources(resource, resource_regex), expected_extract
)
def test_non_matching_regex(self):
resource_regex = re.compile(r"this-regex-matches-nothing")
resource = Resource(
attributes={
"cloud.account.id": 123,
"host.id": "host",
"cloud.zone": "US",
"cloud.provider": "gcp",
"extra_info": "extra",
"gcp.resource_type": "gce_instance",
"not_gcp_resource": "value",
}
)
expected_extract = {
"g.co/r/gce_instance/project_id": "123",
"g.co/r/gce_instance/instance_id": "host",
"g.co/r/gce_instance/zone": "US",
}
self.assertEqual(
_extract_resources(resource, resource_regex), expected_extract
)
def test_extract_well_formed_resources(self):
resource = Resource(
attributes={
"cloud.account.id": 123,
"host.id": "host",
"cloud.zone": "US",
"cloud.provider": "gcp",
"extra_info": "extra",
"gcp.resource_type": "gce_instance",
"not_gcp_resource": "value",
}
)
expected_extract = {
"g.co/r/gce_instance/project_id": "123",
"g.co/r/gce_instance/instance_id": "host",
"g.co/r/gce_instance/zone": "US",
}
self.assertEqual(_extract_resources(resource), expected_extract)
def test_extract_malformed_resources(self):
# This resource doesn't have all the fields required for a gce_instance
# Specifically its missing "host.id", "cloud.zone", "cloud.account.id"
resource = Resource(
attributes={
"gcp.resource_type": "gce_instance",
"cloud.provider": "gcp",
}
)
# Should throw when passed a malformed GCP resource dict
self.assertRaises(KeyError, _extract_resources, resource)
def test_extract_unsupported_gcp_resources(self):
# Unsupported gcp resources will be ignored
resource = Resource(
attributes={
"cloud.account.id": "123",
"host.id": "host",
"extra_info": "extra",
"not_gcp_resource": "value",
"gcp.resource_type": "unsupported_gcp_resource",
"cloud.provider": "gcp",
}
)
self.assertEqual(_extract_resources(resource), {})
def test_extract_unsupported_provider_resources(self):
# Resources with currently unsupported providers will be ignored
resource = Resource(
attributes={
"cloud.account.id": "123",
"host.id": "host",
"extra_info": "extra",
"not_gcp_resource": "value",
"cloud.provider": "aws",
}
)
self.assertEqual(_extract_resources(resource), {})
    def test_truncate_string(self):
        """Cloud Trace API imposes limits on the length of many things,
        e.g. strings, number of events, number of attributes. We truncate
        these things before sending it to the API as an optimization.
        """
        # _truncate_str returns (truncated_value, truncated_byte_count).
        self.assertEqual(_truncate_str("aaaa", limit=1), ("a", 3))
        self.assertEqual(_truncate_str("aaaa", limit=5), ("aaaa", 0))
        # Exactly at the limit: nothing is dropped.
        self.assertEqual(_truncate_str("aaaa", limit=4), ("aaaa", 0))
        # The limit is in bytes, not characters: with limit=4 only one
        # 3-byte CJK character fits; the remaining 9 bytes are dropped.
        self.assertEqual(_truncate_str("中文翻译", limit=4), ("中", 9))
    def test_strip_characters(self):
        # _strip_characters normalizes version strings by removing
        # alphabetic pre-release markers, per the cases below.
        self.assertEqual("0.10.0", _strip_characters("0.10.0b"))
        self.assertEqual("1.20.5", _strip_characters("1.20.5"))
        self.assertEqual("3.1.0", _strip_characters("3.1.0beta"))
        self.assertEqual("4.2.0", _strip_characters("4b.2rc.0a"))
        self.assertEqual("6.20.15", _strip_characters("b6.20.15"))
    # pylint: disable=no-member
    def test_extract_span_kind(self):
        # Each OpenTelemetry SpanKind maps to the matching proto SpanKind;
        # an unrecognized value (-1) falls back to SPAN_KIND_UNSPECIFIED.
        self.assertEqual(
            _extract_span_kind(SpanKind.INTERNAL), ProtoSpan.SpanKind.INTERNAL
        )
        self.assertEqual(
            _extract_span_kind(SpanKind.CLIENT), ProtoSpan.SpanKind.CLIENT
        )
        self.assertEqual(
            _extract_span_kind(SpanKind.SERVER), ProtoSpan.SpanKind.SERVER
        )
        self.assertEqual(
            _extract_span_kind(SpanKind.CONSUMER), ProtoSpan.SpanKind.CONSUMER
        )
        self.assertEqual(
            _extract_span_kind(SpanKind.PRODUCER), ProtoSpan.SpanKind.PRODUCER
        )
        self.assertEqual(
            _extract_span_kind(-1), ProtoSpan.SpanKind.SPAN_KIND_UNSPECIFIED
        )
self.assertIsNone(
_extract_status(SpanStatus(status_code=StatusCode.UNSET)) |
<|file_name|>btree.rs<|end_file_name|><|fim▁begin|>// Copyright 2013 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
//
// btree.rs
//
//! Starting implementation of a btree for rust.
//! Structure inspired by github user davidhalperin's gist.
///A B-tree contains a root node (which contains a vector of elements),
///a length (the height of the tree), and lower and upper bounds on the
///number of elements that a given node can contain.
use std::fmt;
use std::fmt::Show;
#[allow(missing_doc)]
pub struct BTree<K, V> {
    //The root of the tree; a Leaf until a split promotes a Branch.
    root: Node<K, V>,
    //Per the module comment this is the height of the tree.
    //NOTE(review): insert() never updates it -- confirm before relying on it.
    len: uint,
    //Minimum elements per node (applies to every node except the root).
    lower_bound: uint,
    //Maximum elements per node; both constructors fix this at 2 * lower_bound.
    upper_bound: uint
}
impl<K: TotalOrd, V> BTree<K, V> {
    ///Returns new BTree with root node (leaf) and user-supplied lower bound
    ///The lower bound applies to every node except the root node.
    ///The upper bound is always fixed at twice the lower bound.
    pub fn new(k: K, v: V, lb: uint) -> BTree<K, V> {
        BTree {
            root: Node::new_leaf(vec!(LeafElt::new(k, v))),
            len: 1,
            lower_bound: lb,
            upper_bound: 2 * lb
        }
    }
    ///Helper function for clone: returns new BTree with supplied root node,
    ///length, and lower bound. For use when the length is known already.
    ///(Also called directly by the test module to build trees from a
    ///prepared node.)
    fn new_with_node_len(n: Node<K, V>,
                         length: uint,
                         lb: uint) -> BTree<K, V> {
        BTree {
            root: n,
            len: length,
            lower_bound: lb,
            upper_bound: 2 * lb
        }
    }
}
//We would probably want to remove the dependence on the Clone trait in the future.
//It is here as a crutch to ensure values can be passed around through the tree's nodes
//especially during insertions and deletions.
impl<K: Clone + TotalOrd, V: Clone> BTree<K, V> {
///Returns the value of a given key, which may not exist in the tree.
///Calls the root node's get method.
pub fn get(self, k: K) -> Option<V> {
return self.root.get(k);
}
///An insert method that uses the clone() feature for support.
pub fn insert(mut self, k: K, v: V) -> BTree<K, V> {
let (a, b) = self.root.clone().insert(k, v, self.upper_bound.clone());
if b {
match a.clone() {
LeafNode(leaf) => {
self.root = Node::new_leaf(leaf.clone().elts);
}
BranchNode(branch) => {
self.root = Node::new_branch(branch.clone().elts,
branch.clone().rightmost_child);
}
}
}
self
}
}
impl<K: Clone + TotalOrd, V: Clone> Clone for BTree<K, V> {
///Implements the Clone trait for the BTree.
///Uses a helper function/constructor to produce a new BTree.
fn clone(&self) -> BTree<K, V> {
BTree::new_with_node_len(self.root.clone(), self.len, self.lower_bound)
}
}
impl<K: TotalOrd, V: TotalEq> Eq for BTree<K, V> {
    ///Two BTrees are considered equal exactly when their root nodes
    ///compare as Equal under Node's total ordering.
    fn eq(&self, other: &BTree<K, V>) -> bool {
        match self.root.cmp(&other.root) {
            Equal => true,
            _ => false
        }
    }
}
impl<K: TotalOrd, V: TotalEq> TotalEq for BTree<K, V> {}
impl<K: TotalOrd, V: TotalEq> Ord for BTree<K, V> {
fn lt(&self, other: &BTree<K, V>) -> bool {
self.cmp(other) == Less
}
}
impl<K: TotalOrd, V: TotalEq> TotalOrd for BTree<K, V> {
///Returns an ordering based on the root nodes of each BTree.
fn cmp(&self, other: &BTree<K, V>) -> Ordering {
self.root.cmp(&other.root)
}
}
impl<K: fmt::Show + TotalOrd, V: fmt::Show> fmt::Show for BTree<K, V> {
///Returns a string representation of the BTree
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
self.root.fmt(f)
}
}
//Node types
//A node is either a LeafNode or a BranchNode, which contain either a Leaf or a Branch.
//Branches contain BranchElts, which contain a left child (another node) and a key-value
//pair. Branches also contain the rightmost child of the elements in the array.
//Leaves contain LeafElts, which do not have children.
enum Node<K, V> {
LeafNode(Leaf<K, V>),
BranchNode(Branch<K, V>)
}
//Node functions/methods
impl<K: TotalOrd, V> Node<K, V> {
///Creates a new leaf node given a vector of elements.
fn new_leaf(vec: Vec<LeafElt<K, V>>) -> Node<K,V> {
LeafNode(Leaf::new(vec))
}
///Creates a new branch node given a vector of an elements and a pointer to a rightmost child.
fn new_branch(vec: Vec<BranchElt<K, V>>, right: Box<Node<K, V>>)
-> Node<K, V> {
BranchNode(Branch::new(vec, right))
}
///Determines whether the given Node contains a Branch or a Leaf.
///Used in testing.
fn is_leaf(&self) -> bool {
match self {
&LeafNode(..) => true,
&BranchNode(..) => false
}
}
///A binary search function for Nodes.
///Calls either the Branch's or the Leaf's bsearch function.
fn bsearch_node(&self, k: K) -> Option<uint> {
match self {
&LeafNode(ref leaf) => leaf.bsearch_leaf(k),
&BranchNode(ref branch) => branch.bsearch_branch(k)
}
}
}
impl<K: Clone + TotalOrd, V: Clone> Node<K, V> {
///Returns the corresponding value to the provided key.
///get() is called in different ways on a branch or a leaf.
fn get(&self, k: K) -> Option<V> {
match *self {
LeafNode(ref leaf) => return leaf.get(k),
BranchNode(ref branch) => return branch.get(k)
}
}
///Matches on the Node, then performs and returns the appropriate insert method.
fn insert(self, k: K, v: V, ub: uint) -> (Node<K, V>, bool) {
match self {
LeafNode(leaf) => leaf.insert(k, v, ub),
BranchNode(branch) => branch.insert(k, v, ub)
}
}
}
impl<K: Clone + TotalOrd, V: Clone> Clone for Node<K, V> {
///Returns a new node based on whether or not it is a branch or a leaf.
fn clone(&self) -> Node<K, V> {
match *self {
LeafNode(ref leaf) => {
Node::new_leaf(leaf.elts.clone())
}
BranchNode(ref branch) => {
Node::new_branch(branch.elts.clone(),
branch.rightmost_child.clone())
}
}
}
}
impl<K: TotalOrd, V: TotalEq> Eq for Node<K, V> {
fn eq(&self, other: &Node<K, V>) -> bool {
match *self{
BranchNode(ref branch) => {
if other.is_leaf() {
return false;
}
match *other {
BranchNode(ref branch2) => branch.cmp(branch2) == Equal,
LeafNode(..) => false
}
}
LeafNode(ref leaf) => {
match *other {
LeafNode(ref leaf2) => leaf.cmp(leaf2) == Equal,
BranchNode(..) => false
}
}
}
}
}
impl<K: TotalOrd, V: TotalEq> TotalEq for Node<K, V> {}
impl<K: TotalOrd, V: TotalEq> Ord for Node<K, V> {
fn lt(&self, other: &Node<K, V>) -> bool {
self.cmp(other) == Less
}
}
impl<K: TotalOrd, V: TotalEq> TotalOrd for Node<K, V> {
///Implementation of TotalOrd for Nodes.
fn cmp(&self, other: &Node<K, V>) -> Ordering {
match *self {
LeafNode(ref leaf) => {
match *other {
LeafNode(ref leaf2) => leaf.cmp(leaf2),
BranchNode(_) => Less
}
}
BranchNode(ref branch) => {
match *other {
BranchNode(ref branch2) => branch.cmp(branch2),
LeafNode(_) => Greater
}
}
}
}
}
impl<K: fmt::Show + TotalOrd, V: fmt::Show> fmt::Show for Node<K, V> {
///Returns a string representation of a Node.
///Will iterate over the Node and show "Key: x, value: y, child: () // "
///for all elements in the Node. "Child" only exists if the Node contains
///a branch.
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
match *self {
LeafNode(ref leaf) => leaf.fmt(f),
BranchNode(ref branch) => branch.fmt(f),
}
}
}
//A leaf is a vector with elements that contain no children. A leaf also
//does not contain a rightmost child.
struct Leaf<K, V> {
elts: Vec<LeafElt<K, V>>
}
//Vector of values with children, plus a rightmost child (greater than all)
struct Branch<K, V> {
elts: Vec<BranchElt<K,V>>,
rightmost_child: Box<Node<K, V>>,
}
impl<K: TotalOrd, V> Leaf<K, V> {
    ///Creates a new Leaf from a vector of LeafElts.
    fn new(vec: Vec<LeafElt<K, V>>) -> Leaf<K, V> {
        Leaf {
            elts: vec
        }
    }
    ///Searches a leaf for a spot for a new element using a binary search.
    ///Returns None if the element is already in the vector.
    ///Otherwise returns Some(index) such that inserting at `index` keeps
    ///`elts` sorted; Some(elts.len()) means "append at the end".
    ///NOTE(review): indexes `elts` unconditionally, so an empty leaf would
    ///fail here -- confirm leaves are always non-empty before calling.
    fn bsearch_leaf(&self, k: K) -> Option<uint> {
        let mut high: uint = self.elts.len();
        let mut low: uint = 0;
        let mut midpoint: uint = (high - low) / 2 ;
        if midpoint == high {
            midpoint = 0;
        }
        loop {
            let order = self.elts.get(midpoint).key.cmp(&k);
            match order {
                //Key already present: no insertion point.
                Equal => {
                    return None;
                }
                //elts[midpoint] > k: the slot is at or below midpoint.
                Greater => {
                    if midpoint > 0 {
                        if self.elts.get(midpoint - 1).key.cmp(&k) == Less {
                            //Predecessor is smaller, so midpoint is the slot.
                            return Some(midpoint);
                        }
                        else {
                            //Narrow the search to the lower half.
                            //NOTE(review): this halving ignores `low`; flagged,
                            //not changed.
                            let tmp = midpoint;
                            midpoint = midpoint / 2;
                            high = tmp;
                            continue;
                        }
                    }
                    else {
                        //k is smaller than every element: insert at the front.
                        return Some(0);
                    }
                }
                //elts[midpoint] < k: the slot is above midpoint.
                Less => {
                    if midpoint + 1 < self.elts.len() {
                        if self.elts.get(midpoint + 1).key.cmp(&k) == Greater {
                            //BUG FIX: here elts[midpoint] < k < elts[midpoint + 1],
                            //so the correct slot is midpoint + 1. The old code
                            //returned midpoint, which made Leaf::insert place the
                            //new element *before* a smaller key and broke the sort
                            //order for interior insertions reached from below.
                            return Some(midpoint + 1);
                        }
                        else {
                            //Narrow the search to the upper half.
                            let tmp = midpoint;
                            midpoint = (high + low) / 2;
                            low = tmp;
                        }
                    }
                    else {
                        //k is larger than every element: append at the end.
                        return Some(self.elts.len());
                    }
                }
            }
        }
    }
}
impl<K: Clone + TotalOrd, V: Clone> Leaf<K, V> {
///Returns the corresponding value to the supplied key.
fn get(&self, k: K) -> Option<V> {
for s in self.elts.iter() {
let order = s.key.cmp(&k);
match order {
Equal => return Some(s.value.clone()),
_ => {}
}
}
return None;
}
///Uses clone() to facilitate inserting new elements into a tree.
fn insert(mut self, k: K, v: V, ub: uint) -> (Node<K, V>, bool) {
let to_insert = LeafElt::new(k, v);
let index: Option<uint> = self.bsearch_leaf(to_insert.clone().key);
//Check index to see whether we actually inserted the element into the vector.
match index {
//If the index is None, the new element already exists in the vector.
None => {
return (Node::new_leaf(self.clone().elts), false);
}
//If there is an index, insert at that index.
_ => {
if index.unwrap() >= self.elts.len() {
self.elts.push(to_insert.clone());
}
else {
self.elts.insert(index.unwrap(), to_insert.clone());
}
}
}
//If we have overfilled the vector (by making its size greater than the
//upper bound), we return a new Branch with one element and two children.
if self.elts.len() > ub {
let midpoint_opt = self.elts.remove(ub / 2);
let midpoint = midpoint_opt.unwrap();
let (left_leaf, right_leaf) = self.elts.partition(|le|
le.key.cmp(&midpoint.key.clone())
== Less);
let branch_return = Node::new_branch(vec!(BranchElt::new(midpoint.key.clone(),
midpoint.value.clone(),
box Node::new_leaf(left_leaf))),
box Node::new_leaf(right_leaf));
return (branch_return, true);
}
(Node::new_leaf(self.elts.clone()), true)
}
}
impl<K: Clone + TotalOrd, V: Clone> Clone for Leaf<K, V> {
///Returns a new Leaf with the same elts.
fn clone(&self) -> Leaf<K, V> {
Leaf::new(self.elts.clone())
}
}
impl<K: TotalOrd, V: TotalEq> Eq for Leaf<K, V> {
fn eq(&self, other: &Leaf<K, V>) -> bool {
self.elts == other.elts
}
}
impl<K: TotalOrd, V: TotalEq> TotalEq for Leaf<K, V> {}
impl<K: TotalOrd, V: TotalEq> Ord for Leaf<K, V> {
fn lt(&self, other: &Leaf<K, V>) -> bool {
self.cmp(other) == Less
}
}
impl<K: TotalOrd, V: TotalEq> TotalOrd for Leaf<K, V> {
///Returns an ordering based on the first element of each Leaf.
fn cmp(&self, other: &Leaf<K, V>) -> Ordering {
if self.elts.len() > other.elts.len() {
return Greater;
}
if self.elts.len() < other.elts.len() {
return Less;
}
self.elts.get(0).cmp(other.elts.get(0))
}
}
impl<K: fmt::Show + TotalOrd, V: fmt::Show> fmt::Show for Leaf<K, V> {
///Returns a string representation of a Leaf.
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
for (i, s) in self.elts.iter().enumerate() {
if i != 0 { try!(write!(f, " // ")) }
try!(write!(f, "{}", *s))
}
Ok(())
}
}
impl<K: TotalOrd, V> Branch<K, V> {
    ///Creates a new Branch from a vector of BranchElts and a rightmost child (a node).
    fn new(vec: Vec<BranchElt<K, V>>, right: Box<Node<K, V>>)
           -> Branch<K, V> {
        Branch {
            elts: vec,
            rightmost_child: right
        }
    }
    ///Binary search for the slot of `k` among this branch's elements.
    ///Returns None when `k` is already present; otherwise Some(index) such
    ///that inserting at `index` keeps `elts` sorted. Some(elts.len()) sends
    ///callers (see Branch::insert) to the rightmost child.
    fn bsearch_branch(&self, k: K) -> Option<uint> {
        let mut midpoint: uint = self.elts.len() / 2;
        let mut high: uint = self.elts.len();
        let mut low: uint = 0u;
        if midpoint == high {
            midpoint = 0u;
        }
        loop {
            let order = self.elts.get(midpoint).key.cmp(&k);
            match order {
                //Key already present: no insertion point.
                Equal => {
                    return None;
                }
                //elts[midpoint] > k: the slot is at or below midpoint.
                Greater => {
                    if midpoint > 0 {
                        if self.elts.get(midpoint - 1).key.cmp(&k) == Less {
                            return Some(midpoint);
                        }
                        else {
                            //Narrow to the lower half.
                            let tmp = midpoint;
                            midpoint = (midpoint - low) / 2;
                            high = tmp;
                            continue;
                        }
                    }
                    else {
                        return Some(0);
                    }
                }
                //elts[midpoint] < k: the slot is above midpoint.
                Less => {
                    if midpoint + 1 < self.elts.len() {
                        if self.elts.get(midpoint + 1).key.cmp(&k) == Greater {
                            //BUG FIX: here elts[midpoint] < k < elts[midpoint + 1],
                            //so the correct slot is midpoint + 1. The old code
                            //returned midpoint, which routed insertions into the
                            //subtree *left of a smaller key* in Branch::insert.
                            return Some(midpoint + 1);
                        }
                        else {
                            //Narrow to the upper half.
                            //NOTE(review): (high - midpoint) / 2 can move
                            //midpoint below `low`; looks inconsistent with the
                            //other halving updates -- flagged, not changed.
                            let tmp = midpoint;
                            midpoint = (high - midpoint) / 2;
                            low = tmp;
                        }
                    }
                    else {
                        return Some(self.elts.len());
                    }
                }
            }
        }
    }
}
impl<K: Clone + TotalOrd, V: Clone> Branch<K, V> {
///Returns the corresponding value to the supplied key.
///If the key is not there, find the child that might hold it.
fn get(&self, k: K) -> Option<V> {
for s in self.elts.iter() {
let order = s.key.cmp(&k);
match order {
Less => return s.left.get(k),
Equal => return Some(s.value.clone()),
_ => {}
}
}
self.rightmost_child.get(k)
}
///An insert method that uses .clone() for support.
fn insert(mut self, k: K, v: V, ub: uint) -> (Node<K, V>, bool) {
let mut new_branch = Node::new_branch(self.clone().elts, self.clone().rightmost_child);
let mut outcome = false;
let index: Option<uint> = new_branch.bsearch_node(k.clone());
//First, find which path down the tree will lead to the appropriate leaf
//for the key-value pair.
match index.clone() {
None => {
return (Node::new_branch(self.clone().elts,
self.clone().rightmost_child),
outcome);
}
_ => {
if index.unwrap() == self.elts.len() {
let new_outcome = self.clone().rightmost_child.insert(k.clone(),
v.clone(),
ub.clone());
new_branch = new_outcome.clone().val0();
outcome = new_outcome.val1();
}
else {
let new_outcome = self.elts.get(index.unwrap()).left.clone().insert(k.clone(),
v.clone(),
ub.clone());
new_branch = new_outcome.clone().val0();
outcome = new_outcome.val1();
}
//Check to see whether a branch or a leaf was returned from the
//tree traversal.
match new_branch.clone() {
//If we have a leaf, we do not need to resize the tree,
//so we can return false.
LeafNode(..) => {
if index.unwrap() == self.elts.len() {
self.rightmost_child = box new_branch.clone();
}
else {
self.elts.get_mut(index.unwrap()).left = box new_branch.clone();
}
return (Node::new_branch(self.clone().elts,
self.clone().rightmost_child),
true);
}
//If we have a branch, we might need to refactor the tree.
BranchNode(..) => {}
}
}
}
//If we inserted something into the tree, do the following:
if outcome {
match new_branch.clone() {
//If we have a new leaf node, integrate it into the current branch
//and return it, saying we have inserted a new element.
LeafNode(..) => {
if index.unwrap() == self.elts.len() {
self.rightmost_child = box new_branch;
}
else {
self.elts.get_mut(index.unwrap()).left = box new_branch;
}
return (Node::new_branch(self.clone().elts,
self.clone().rightmost_child),
true);
}
//If we have a new branch node, attempt to insert it into the tree
//as with the key-value pair, then check to see if the node is overfull.
BranchNode(branch) => {
let new_elt = branch.elts.get(0).clone();
let new_elt_index = self.bsearch_branch(new_elt.clone().key);
match new_elt_index {
None => {
return (Node::new_branch(self.clone().elts,
self.clone().rightmost_child),
false);
}
_ => {
self.elts.insert(new_elt_index.unwrap(), new_elt);
if new_elt_index.unwrap() + 1 >= self.elts.len() {
self.rightmost_child = branch.clone().rightmost_child;
}
else {
self.elts.get_mut(new_elt_index.unwrap() + 1).left =
branch.clone().rightmost_child;
}
}
}
}
}
//If the current node is overfilled, create a new branch with one element
//and two children.
if self.elts.len() > ub {
let midpoint = self.elts.remove(ub / 2).unwrap();
let (new_left, new_right) = self.clone().elts.partition(|le|
midpoint.key.cmp(&le.key)
== Greater);
new_branch = Node::new_branch(
vec!(BranchElt::new(midpoint.clone().key,
midpoint.clone().value,
box Node::new_branch(new_left,
midpoint.clone().left))),
box Node::new_branch(new_right, self.clone().rightmost_child));
return (new_branch, true);
}
}
(Node::new_branch(self.elts.clone(), self.rightmost_child.clone()), outcome)
}
}
impl<K: Clone + TotalOrd, V: Clone> Clone for Branch<K, V> {
///Returns a new branch using the clone methods of the Branch's internal variables.
fn clone(&self) -> Branch<K, V> {
Branch::new(self.elts.clone(), self.rightmost_child.clone())
}
}
impl<K: TotalOrd, V: TotalEq> Eq for Branch<K, V> {
fn eq(&self, other: &Branch<K, V>) -> bool {
self.elts == other.elts
}
}
impl<K: TotalOrd, V: TotalEq> TotalEq for Branch<K, V> {}
impl<K: TotalOrd, V: TotalEq> Ord for Branch<K, V> {
fn lt(&self, other: &Branch<K, V>) -> bool {
self.cmp(other) == Less
}
}
impl<K: TotalOrd, V: TotalEq> TotalOrd for Branch<K, V> {
///Compares the first elements of two branches to determine an ordering
fn cmp(&self, other: &Branch<K, V>) -> Ordering {
if self.elts.len() > other.elts.len() {
return Greater;
}
if self.elts.len() < other.elts.len() {
return Less;
}
self.elts.get(0).cmp(other.elts.get(0))
}
}
impl<K: fmt::Show + TotalOrd, V: fmt::Show> fmt::Show for Branch<K, V> {
///Returns a string representation of a Branch.
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
for (i, s) in self.elts.iter().enumerate() {
if i != 0 { try!(write!(f, " // ")) }
try!(write!(f, "{}", *s))
}
write!(f, " // rightmost child: ({}) ", *self.rightmost_child)
}
}
//A LeafElt holds a key-value pair and has no left child.
struct LeafElt<K, V> {
key: K,
value: V
}
//A BranchElt has a left child in addition to a key-value pair.
struct BranchElt<K, V> {
left: Box<Node<K, V>>,
key: K,
value: V
}
impl<K: TotalOrd, V> LeafElt<K, V> {
    ///Builds a LeafElt holding the given key and value.
    fn new(key: K, value: V) -> LeafElt<K, V> {
        LeafElt {
            key: key,
            value: value
        }
    }
}
impl<K: Clone + TotalOrd, V: Clone> Clone for LeafElt<K, V> {
///Returns a new LeafElt by cloning the key and value.
fn clone(&self) -> LeafElt<K, V> {
LeafElt::new(self.key.clone(), self.value.clone())
}
}
impl<K: TotalOrd, V: TotalEq> Eq for LeafElt<K, V> {
    ///Elements are equal only when both key and value match; the key is
    ///checked first so an unequal key short-circuits the value comparison.
    fn eq(&self, other: &LeafElt<K, V>) -> bool {
        if self.key == other.key {
            self.value == other.value
        } else {
            false
        }
    }
}
impl<K: TotalOrd, V: TotalEq> TotalEq for LeafElt<K, V> {}
impl<K: TotalOrd, V: TotalEq> Ord for LeafElt<K, V> {
    ///Delegates to cmp(): an element is "less" when cmp yields Less.
    fn lt(&self, other: &LeafElt<K, V>) -> bool {
        match self.cmp(other) {
            Less => true,
            _ => false
        }
    }
}
impl<K: TotalOrd, V: TotalEq> TotalOrd for LeafElt<K, V> {
    ///Orders LeafElts by key alone; values do not take part in the ordering.
    fn cmp(&self, other: &LeafElt<K, V>) -> Ordering {
        self.key.cmp(&other.key)
    }
}
impl<K: fmt::Show + TotalOrd, V: fmt::Show> fmt::Show for LeafElt<K, V> {
    ///Returns a string representation of a LeafElt.
    ///Format is exactly "Key: {k}, value: {v};" -- the trailing semicolon is
    ///part of the output the test module's btree_tostr_test asserts on.
    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
        write!(f, "Key: {}, value: {};", self.key, self.value)
    }
}
impl<K: TotalOrd, V> BranchElt<K, V> {
    ///Builds a BranchElt from a key, a value, and the child node that sits
    ///to the left of this element in the branch.
    fn new(key: K, value: V, left: Box<Node<K, V>>) -> BranchElt<K, V> {
        BranchElt {
            left: left,
            key: key,
            value: value
        }
    }
}
impl<K: Clone + TotalOrd, V: Clone> Clone for BranchElt<K, V> {
///Returns a new BranchElt by cloning the key, value, and left child.
fn clone(&self) -> BranchElt<K, V> {
BranchElt::new(self.key.clone(),
self.value.clone(),
self.left.clone())
}
}
impl<K: TotalOrd, V: TotalEq> Eq for BranchElt<K, V>{
fn eq(&self, other: &BranchElt<K, V>) -> bool {
self.key == other.key && self.value == other.value
}
}
impl<K: TotalOrd, V: TotalEq> TotalEq for BranchElt<K, V>{}
impl<K: TotalOrd, V: TotalEq> Ord for BranchElt<K, V> {
fn lt(&self, other: &BranchElt<K, V>) -> bool {
self.cmp(other) == Less
}
}
impl<K: TotalOrd, V: TotalEq> TotalOrd for BranchElt<K, V> {
///Fulfills TotalOrd for BranchElts
fn cmp(&self, other: &BranchElt<K, V>) -> Ordering {
self.key.cmp(&other.key)
}
}
impl<K: fmt::Show + TotalOrd, V: fmt::Show> fmt::Show for BranchElt<K, V> {
/// Returns string containing key, value, and child (which should recur to a
/// leaf) Consider changing in future to be more readable.
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
write!(f, "Key: {}, value: {}, (child: {})",
self.key, self.value, *self.left)
}
}
#[cfg(test)]
mod test_btree {
use super::{BTree, Node, LeafElt};
//Tests the functionality of the insert methods (which are unfinished).
#[test]
fn insert_test_one() {
let b = BTree::new(1, "abc".to_owned(), 2);
let is_insert = b.insert(2, "xyz".to_owned());
//println!("{}", is_insert.clone().to_str());
assert!(is_insert.root.is_leaf());
}
#[test]
fn insert_test_two() {
let leaf_elt_1 = LeafElt::new(1, "aaa".to_owned());
let leaf_elt_2 = LeafElt::new(2, "bbb".to_owned());
let leaf_elt_3 = LeafElt::new(3, "ccc".to_owned());
let n = Node::new_leaf(vec!(leaf_elt_1, leaf_elt_2, leaf_elt_3));
let b = BTree::new_with_node_len(n, 3, 2);
//println!("{}", b.clone().insert(4, "ddd".to_owned()).to_str());
assert!(b.insert(4, "ddd".to_owned()).root.is_leaf());
}
#[test]
fn insert_test_three() {
let leaf_elt_1 = LeafElt::new(1, "aaa".to_owned());
let leaf_elt_2 = LeafElt::new(2, "bbb".to_owned());
let leaf_elt_3 = LeafElt::new(3, "ccc".to_owned());
let leaf_elt_4 = LeafElt::new(4, "ddd".to_owned());
let n = Node::new_leaf(vec!(leaf_elt_1, leaf_elt_2, leaf_elt_3, leaf_elt_4));
let b = BTree::new_with_node_len(n, 3, 2);
//println!("{}", b.clone().insert(5, "eee".to_owned()).to_str());
assert!(!b.insert(5, "eee".to_owned()).root.is_leaf());
}
#[test]
fn insert_test_four() {
let leaf_elt_1 = LeafElt::new(1, "aaa".to_owned());
let leaf_elt_2 = LeafElt::new(2, "bbb".to_owned());
let leaf_elt_3 = LeafElt::new(3, "ccc".to_owned());
let leaf_elt_4 = LeafElt::new(4, "ddd".to_owned());
let n = Node::new_leaf(vec!(leaf_elt_1, leaf_elt_2, leaf_elt_3, leaf_elt_4));
let mut b = BTree::new_with_node_len(n, 3, 2);
b = b.clone().insert(5, "eee".to_owned());
b = b.clone().insert(6, "fff".to_owned());
b = b.clone().insert(7, "ggg".to_owned());
b = b.clone().insert(8, "hhh".to_owned());
b = b.clone().insert(0, "omg".to_owned());
//println!("{}", b.clone().to_str());
assert!(!b.root.is_leaf());
}
#[test]
fn bsearch_test_one() {
let b = BTree::new(1, "abc".to_owned(), 2);
assert_eq!(Some(1), b.root.bsearch_node(2));
}
#[test]
fn bsearch_test_two() {
let b = BTree::new(1, "abc".to_owned(), 2);
assert_eq!(Some(0), b.root.bsearch_node(0));
}
#[test]
fn bsearch_test_three() {
let leaf_elt_1 = LeafElt::new(1, "aaa".to_owned());
let leaf_elt_2 = LeafElt::new(2, "bbb".to_owned());
let leaf_elt_3 = LeafElt::new(4, "ccc".to_owned());
let leaf_elt_4 = LeafElt::new(5, "ddd".to_owned());
let n = Node::new_leaf(vec!(leaf_elt_1, leaf_elt_2, leaf_elt_3, leaf_elt_4));
let b = BTree::new_with_node_len(n, 3, 2);
assert_eq!(Some(2), b.root.bsearch_node(3));<|fim▁hole|> fn bsearch_test_four() {
let leaf_elt_1 = LeafElt::new(1, "aaa".to_owned());
let leaf_elt_2 = LeafElt::new(2, "bbb".to_owned());
let leaf_elt_3 = LeafElt::new(4, "ccc".to_owned());
let leaf_elt_4 = LeafElt::new(5, "ddd".to_owned());
let n = Node::new_leaf(vec!(leaf_elt_1, leaf_elt_2, leaf_elt_3, leaf_elt_4));
let b = BTree::new_with_node_len(n, 3, 2);
assert_eq!(Some(4), b.root.bsearch_node(800));
}
//Tests the functionality of the get method.
#[test]
fn get_test() {
let b = BTree::new(1, "abc".to_owned(), 2);
let val = b.get(1);
assert_eq!(val, Some("abc".to_owned()));
}
//Tests the BTree's clone() method.
#[test]
fn btree_clone_test() {
let b = BTree::new(1, "abc".to_owned(), 2);
let b2 = b.clone();
assert!(b.root == b2.root)
}
//Tests the BTree's cmp() method when one node is "less than" another.
#[test]
fn btree_cmp_test_less() {
let b = BTree::new(1, "abc".to_owned(), 2);
let b2 = BTree::new(2, "bcd".to_owned(), 2);
assert!(&b.cmp(&b2) == &Less)
}
//Tests the BTree's cmp() method when two nodes are equal.
#[test]
fn btree_cmp_test_eq() {
let b = BTree::new(1, "abc".to_owned(), 2);
let b2 = BTree::new(1, "bcd".to_owned(), 2);
assert!(&b.cmp(&b2) == &Equal)
}
//Tests the BTree's cmp() method when one node is "greater than" another.
#[test]
fn btree_cmp_test_greater() {
let b = BTree::new(1, "abc".to_owned(), 2);
let b2 = BTree::new(2, "bcd".to_owned(), 2);
assert!(&b2.cmp(&b) == &Greater)
}
//Tests the BTree's to_str() method.
#[test]
fn btree_tostr_test() {
let b = BTree::new(1, "abc".to_owned(), 2);
assert_eq!(b.to_str(), "Key: 1, value: abc;".to_owned())
}
}<|fim▁end|> | }
#[test] |
<|file_name|>scatter_with_labels.py<|end_file_name|><|fim▁begin|>"""
Simple Scatter Plot with Labels
===============================
This example shows a basic scatter plot with labels created with Altair.
"""
# category: scatter plots
import altair as alt
import pandas as pd
data = pd.DataFrame({
'x': [1, 3, 5, 7, 9],
'y': [1, 3, 5, 7, 9],
'label': ['A', 'B', 'C', 'D', 'E']
})
bars = alt.Chart(data).mark_point().encode(
x='x:Q',
y='y:Q'
)
text = bars.mark_text(
align='left',
baseline='middle',
dx=7<|fim▁hole|> text='label'
)
bars + text<|fim▁end|> | ).encode( |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.