prompt
large_stringlengths 70
991k
| completion
large_stringlengths 0
1.02k
|
---|---|
<|file_name|>tests.py<|end_file_name|><|fim▁begin|>#! /usr/bin/env python
from __future__ import print_function
import StringIO
import os
import os.path
import errno
import sqlite3<|fim▁hole|>import smadata2.db.mock
from smadata2 import check
def removef(filename):
try:
os.remove(filename)
except OSError as e:
if e.errno != errno.ENOENT:
raise
class BaseDBChecker(object):
def setUp(self):
self.db = self.opendb()
self.sample_data()
def tearDown(self):
pass
def sample_data(self):
pass
class MockDBChecker(BaseDBChecker):
def opendb(self):
return smadata2.db.mock.MockDatabase()
class BaseSQLite(object):
def prepare_sqlite(self):
self.dbname = "__testdb__smadata2_%s_.sqlite" % self.__class__.__name__
self.bakname = self.dbname + ".bak"
# Start with a blank slate
removef(self.dbname)
removef(self.bakname)
self.prepopulate()
if os.path.exists(self.dbname):
self.original = open(self.dbname).read()
else:
self.original = None
def prepopulate(self):
pass
class SQLiteDBChecker(BaseSQLite, BaseDBChecker):
def opendb(self):
self.prepare_sqlite()
return smadata2.db.sqlite.create_or_update(self.dbname)
def tearDown(self):
removef(self.dbname)
removef(self.bakname)
super(SQLiteDBChecker, self).tearDown()
class SimpleChecks(BaseDBChecker):
def test_trivial(self):
assert isinstance(self.db, smadata2.db.base.BaseDatabase)
def test_add_get_historic(self):
# Serial is defined as INTEGER, but we abuse the fact that
# sqlite doesn't actually make a distinction
serial = "__TEST__"
self.db.add_historic(serial, 0, 0)
self.db.add_historic(serial, 300, 10)
self.db.add_historic(serial, 3600, 20)
v0 = self.db.get_one_historic(serial, 0)
assert_equals(v0, 0)
v300 = self.db.get_one_historic(serial, 300)
assert_equals(v300, 10)
v3600 = self.db.get_one_historic(serial, 3600)
assert_equals(v3600, 20)
vmissing = self.db.get_one_historic(serial, 9999)
assert vmissing is None
def test_get_last_historic_missing(self):
serial = "__TEST__"
last = self.db.get_last_historic(serial)
assert last is None
def test_get_last_historic(self):
serial = "__TEST__"
self.db.add_historic(serial, 0, 0)
assert_equals(self.db.get_last_historic(serial), 0)
self.db.add_historic(serial, 300, 0)
assert_equals(self.db.get_last_historic(serial), 300)
self.db.add_historic(serial, 3600, 0)
assert_equals(self.db.get_last_historic(serial), 3600)
self.db.add_historic(serial, 2000, 0)
assert_equals(self.db.get_last_historic(serial), 3600)
class AggregateChecks(BaseDBChecker):
def sample_data(self):
super(AggregateChecks, self).sample_data()
self.serial1 = "__TEST__1"
self.serial2 = "__TEST__2"
self.dawn = 8*3600
self.dusk = 20*3600
sampledata = check.generate_linear(0, self.dawn, self.dusk, 24*3600,
0, 1)
for ts, y in sampledata:
self.db.add_historic(self.serial1, ts, y)
self.db.add_historic(self.serial2, ts, 2*y)
def test_basic(self):
for ts in range(0, self.dawn, 300):
y1 = self.db.get_one_historic(self.serial1, ts)
y2 = self.db.get_one_historic(self.serial2, ts)
assert_equals(y1, 0)
assert_equals(y2, 0)
for i, ts in enumerate(range(self.dawn, self.dusk, 300)):
y1 = self.db.get_one_historic(self.serial1, ts)
y2 = self.db.get_one_historic(self.serial2, ts)
assert_equals(y1, i)
assert_equals(y2, 2*i)
val = (self.dusk - self.dawn - 1) / 300
for ts in range(self.dusk, 24*3600, 300):
y1 = self.db.get_one_historic(self.serial1, ts)
y2 = self.db.get_one_historic(self.serial2, ts)
assert_equals(y1, val)
assert_equals(y2, 2*val)
def test_aggregate_one(self):
val = self.db.get_aggregate_one_historic(self.dusk,
(self.serial1, self.serial2))
assert_equals(val, 3*((self.dusk - self.dawn - 2) / 300))
def check_aggregate_range(self, from_, to_):
results = self.db.get_aggregate_historic(from_, to_,
(self.serial1, self.serial2))
first = results[0][0]
last = results[-1][0]
assert_equals(first, from_)
assert_equals(last, to_ - 300)
for ts, y in results:
if ts < self.dawn:
assert_equals(y, 0)
elif ts < self.dusk:
assert_equals(y, 3*((ts - self.dawn) / 300))
else:
assert_equals(y, 3*((self.dusk - self.dawn - 1) / 300))
def test_aggregate(self):
yield self.check_aggregate_range, 0, 24*3600
yield self.check_aggregate_range, 8*3600, 20*3600
yield self.check_aggregate_range, 13*3600, 14*3600
#
# Construct the basic tests as a cross-product
#
for cset in (SimpleChecks, AggregateChecks):
for db in (MockDBChecker, SQLiteDBChecker):
name = "_".join(("Test", cset.__name__, db.__name__))
globals()[name] = type(name, (cset, db), {})
#
# Tests for sqlite schema updating
#
class UpdateSQLiteChecker(Test_SimpleChecks_SQLiteDBChecker):
PRESERVE_RECORD = ("PRESERVE", 0, 31415)
def test_backup(self):
assert os.path.exists(self.bakname)
backup = open(self.bakname).read()
assert_equals(self.original, backup)
def test_preserved(self):
serial, timestamp, tyield = self.PRESERVE_RECORD
assert_equals(self.db.get_last_historic(serial), timestamp)
assert_equals(self.db.get_one_historic(serial, timestamp), tyield)
class TestUpdateNoPVO(UpdateSQLiteChecker):
def prepopulate(self):
DB_MAGIC = 0x71534d41
DB_VERSION = 0
conn = sqlite3.connect(self.dbname)
conn.executescript("""
CREATE TABLE generation (inverter_serial INTEGER,
timestamp INTEGER,
total_yield INTEGER,
PRIMARY KEY (inverter_serial, timestamp));
CREATE TABLE schema (magic INTEGER, version INTEGER);""")
conn.execute("INSERT INTO schema (magic, version) VALUES (?, ?)",
(DB_MAGIC, DB_VERSION))
conn.commit()
conn.execute("""INSERT INTO generation (inverter_serial, timestamp,
total_yield)
VALUES (?, ?, ?)""", self.PRESERVE_RECORD)
conn.commit()
del conn
class TestUpdateV0(UpdateSQLiteChecker):
def prepopulate(self):
DB_MAGIC = 0x71534d41
DB_VERSION = 0
conn = sqlite3.connect(self.dbname)
conn.executescript("""
CREATE TABLE generation (inverter_serial INTEGER,
timestamp INTEGER,
total_yield INTEGER,
PRIMARY KEY (inverter_serial, timestamp));
CREATE TABLE schema (magic INTEGER, version INTEGER);
CREATE TABLE pvoutput (sid STRING,
last_datetime_uploaded INTEGER);""")
conn.execute("INSERT INTO schema (magic, version) VALUES (?, ?)",
(DB_MAGIC, DB_VERSION))
conn.commit()
conn.execute("""INSERT INTO generation (inverter_serial, timestamp,
total_yield)
VALUES (?, ?, ?)""", self.PRESERVE_RECORD)
conn.commit()
del conn
class BadSchemaSQLiteChecker(BaseSQLite):
def setUp(self):
self.prepare_sqlite()
@raises(smadata2.db.WrongSchema)
def test_open(self):
self.db = smadata2.db.SQLiteDatabase(self.dbname)
class TestEmptySQLiteDB(BadSchemaSQLiteChecker):
"""Check that we correctly fail on an empty DB"""
def test_is_empty(self):
assert not os.path.exists(self.dbname)
class TestBadSQLite(BadSchemaSQLiteChecker):
"""Check that we correctly fail attempting to update an unknwon format"""
def prepopulate(self):
conn = sqlite3.connect(self.dbname)
conn.execute("CREATE TABLE unrelated (random STRING, data INTEGER)")
conn.commit()
del conn
@raises(smadata2.db.WrongSchema)
def test_update(self):
db = smadata2.db.sqlite.create_or_update(self.dbname)<|fim▁end|> |
from nose.tools import *
import smadata2.db |
<|file_name|>Condition.java<|end_file_name|><|fim▁begin|>/**
* Copyright (c) 2005-2012 https://github.com/zhuruiboqq
*
* Licensed under the Apache License, Version 2.0 (the "License");
*/
package com.sishuok.es.common.entity.search.filter;
import com.sishuok.es.common.entity.search.SearchOperator;
import com.sishuok.es.common.entity.search.exception.InvlidSearchOperatorException;
import com.sishuok.es.common.entity.search.exception.SearchException;
import org.apache.commons.lang3.StringUtils;
import org.springframework.util.Assert;
import java.util.List;
/**
* <p>查询过滤条件</p>
* <p>User: Zhang Kaitao
* <p>Date: 13-1-15 上午7:12
* <p>Version: 1.0
*/
public final class Condition implements SearchFilter {
//查询参数分隔符
public static final String separator = "_";
private String key;
private String searchProperty;
private SearchOperator operator;
private Object value;
/**
* 根据查询key和值生成Condition
*
* @param key 如 name_like
* @param value
* @return
*/
static Condition newCondition(final String key, final Object value) throws SearchException {
Assert.notNull(key, "Condition key must not null");
String[] searchs = StringUtils.split(key, separator);
if (searchs.length == 0) {
throw new SearchException("Condition key format must be : property or property_op");
}
String searchProperty = searchs[0];
SearchOperator operator = null;
if (searchs.length == 1) {
operator = SearchOperator.custom;
} else {
try {
operator = SearchOperator.valueOf(searchs[1]);
} catch (IllegalArgumentException e) {
throw new InvlidSearchOperatorException(searchProperty, searchs[1]);
}
}
boolean allowBlankValue = SearchOperator.isAllowBlankValue(operator);
boolean isValueBlank = (value == null);
isValueBlank = isValueBlank || (value instanceof String && StringUtils.isBlank((String) value));
isValueBlank = isValueBlank || (value instanceof List && ((List) value).size() == 0);
//过滤掉空值,即不参与查询
if (!allowBlankValue && isValueBlank) {
return null;
}
Condition searchFilter = newCondition(searchProperty, operator, value);
return searchFilter;
}
/**
* 根据查询属性、操作符和值生成Condition
*
* @param searchProperty
* @param operator
* @param value
* @return
*/
static Condition newCondition(final String searchProperty, final SearchOperator operator, final Object value) {
return new Condition(searchProperty, operator, value);
}
/**
* @param searchProperty 属性名
* @param operator 操作
* @param value 值
*/
private Condition(final String searchProperty, final SearchOperator operator, final Object value) {
this.searchProperty = searchProperty;
this.operator = operator;
this.value = value;
this.key = this.searchProperty + separator + this.operator;
}
public String getKey() {
return key;
}
public String getSearchProperty() {
return searchProperty;
}
/**
* 获取 操作符
*
* @return
*/
public SearchOperator getOperator() throws InvlidSearchOperatorException {
return operator;
}
/**
* 获取自定义查询使用的操作符
* 1、首先获取前台传的
* 2、返回空
*
* @return
*/
public String getOperatorStr() {
if (operator != null) {
return operator.getSymbol();
}
return "";
}
public Object getValue() {
return value;
}
public void setValue(final Object value) {
this.value = value;
}
public void setOperator(final SearchOperator operator) {
this.operator = operator;
}
public void setSearchProperty(final String searchProperty) {
this.searchProperty = searchProperty;
}
/**
* 得到实体属性名
*
* @return
*/
public String getEntityProperty() {
return searchProperty;
}
/**
* 是否是一元过滤 如is null is not null
*
* @return
*/
public boolean isUnaryFilter() {
String operatorStr = getOperator().getSymbol();
return StringUtils.isNotEmpty(operatorStr) && operatorStr.startsWith("is");
}
<|fim▁hole|> if (this == o) return true;
if (o == null || getClass() != o.getClass()) return false;
Condition that = (Condition) o;
if (key != null ? !key.equals(that.key) : that.key != null) return false;
return true;
}
@Override
public int hashCode() {
return key != null ? key.hashCode() : 0;
}
@Override
public String toString() {
return "Condition{" +
"searchProperty='" + searchProperty + '\'' +
", operator=" + operator +
", value=" + value +
'}';
}
}<|fim▁end|> |
@Override
public boolean equals(Object o) { |
<|file_name|>banner.py<|end_file_name|><|fim▁begin|>#!/usr/bin/env python
# -*- coding: utf-8 -*-
import time<|fim▁hole|>timeformat='%H:%M:%S'
def begin_banner():
print ''
print '[*] swarm starting at '+time.strftime(timeformat,time.localtime())
print ''
def end_banner():
print ''
print '[*] swarm shutting down at '+time.strftime(timeformat,time.localtime())
print ''<|fim▁end|> | |
<|file_name|>kubenet_linux.go<|end_file_name|><|fim▁begin|>// +build linux
/*
Copyright 2014 The Kubernetes Authors.
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*/
package kubenet
import (
"fmt"
"net"
"path/filepath"
"strings"
"sync"
"syscall"
"time"
"io/ioutil"
"github.com/containernetworking/cni/libcni"
cnitypes "github.com/containernetworking/cni/pkg/types"
"github.com/golang/glog"
"github.com/vishvananda/netlink"
"github.com/vishvananda/netlink/nl"
"k8s.io/kubernetes/pkg/api/v1"
"k8s.io/kubernetes/pkg/apis/componentconfig"
kubecontainer "k8s.io/kubernetes/pkg/kubelet/container"
"k8s.io/kubernetes/pkg/kubelet/network"
"k8s.io/kubernetes/pkg/util/bandwidth"
utildbus "k8s.io/kubernetes/pkg/util/dbus"
utilebtables "k8s.io/kubernetes/pkg/util/ebtables"
utilerrors "k8s.io/kubernetes/pkg/util/errors"
utilexec "k8s.io/kubernetes/pkg/util/exec"
utiliptables "k8s.io/kubernetes/pkg/util/iptables"
utilnet "k8s.io/kubernetes/pkg/util/net"
utilsets "k8s.io/kubernetes/pkg/util/sets"
utilsysctl "k8s.io/kubernetes/pkg/util/sysctl"
"strconv"
"k8s.io/kubernetes/pkg/kubelet/network/hostport"
)
const (
KubenetPluginName = "kubenet"
BridgeName = "cbr0"
DefaultCNIDir = "/opt/cni/bin"
sysctlBridgeCallIPTables = "net/bridge/bridge-nf-call-iptables"
// fallbackMTU is used if an MTU is not specified, and we cannot determine the MTU
fallbackMTU = 1460
// private mac prefix safe to use
// Universally administered and locally administered addresses are distinguished by setting the second-least-significant
// bit of the first octet of the address. If it is 1, the address is locally administered. For example, for address 0a:00:00:00:00:00,
// the first cotet is 0a(hex), the binary form of which is 00001010, where the second-least-significant bit is 1.
privateMACPrefix = "0a:58"
// ebtables Chain to store dedup rules
dedupChain = utilebtables.Chain("KUBE-DEDUP")
// defaultIPAMDir is the default location for the checkpoint files stored by host-local ipam
// https://github.com/containernetworking/cni/tree/master/plugins/ipam/host-local#backends
defaultIPAMDir = "/var/lib/cni/networks"
)
// CNI plugins required by kubenet in /opt/cni/bin or vendor directory
var requiredCNIPlugins = [...]string{"bridge", "host-local", "loopback"}
type kubenetNetworkPlugin struct {
network.NoopNetworkPlugin
host network.Host
netConfig *libcni.NetworkConfig
loConfig *libcni.NetworkConfig
cniConfig libcni.CNI
bandwidthShaper bandwidth.BandwidthShaper
mu sync.Mutex //Mutex for protecting podIPs map, netConfig, and shaper initialization
podIPs map[kubecontainer.ContainerID]string
mtu int
execer utilexec.Interface
nsenterPath string
hairpinMode componentconfig.HairpinMode
hostportHandler hostport.HostportHandler
iptables utiliptables.Interface
sysctl utilsysctl.Interface
ebtables utilebtables.Interface
// vendorDir is passed by kubelet network-plugin-dir parameter.
// kubenet will search for cni binaries in DefaultCNIDir first, then continue to vendorDir.
vendorDir string
nonMasqueradeCIDR string
podCidr string
gateway net.IP
}
func NewPlugin(networkPluginDir string) network.NetworkPlugin {
protocol := utiliptables.ProtocolIpv4
execer := utilexec.New()
dbus := utildbus.New()
sysctl := utilsysctl.New()
iptInterface := utiliptables.New(execer, dbus, protocol)
return &kubenetNetworkPlugin{
podIPs: make(map[kubecontainer.ContainerID]string),
execer: utilexec.New(),
iptables: iptInterface,
sysctl: sysctl,
vendorDir: networkPluginDir,
hostportHandler: hostport.NewHostportHandler(),
nonMasqueradeCIDR: "10.0.0.0/8",
}
}
func (plugin *kubenetNetworkPlugin) Init(host network.Host, hairpinMode componentconfig.HairpinMode, nonMasqueradeCIDR string, mtu int) error {
plugin.host = host
plugin.hairpinMode = hairpinMode
plugin.nonMasqueradeCIDR = nonMasqueradeCIDR
plugin.cniConfig = &libcni.CNIConfig{
Path: []string{DefaultCNIDir, plugin.vendorDir},
}
if mtu == network.UseDefaultMTU {
if link, err := findMinMTU(); err == nil {
plugin.mtu = link.MTU
glog.V(5).Infof("Using interface %s MTU %d as bridge MTU", link.Name, link.MTU)
} else {
plugin.mtu = fallbackMTU
glog.Warningf("Failed to find default bridge MTU, using %d: %v", fallbackMTU, err)
}
} else {
plugin.mtu = mtu
}
// Since this plugin uses a Linux bridge, set bridge-nf-call-iptables=1
// is necessary to ensure kube-proxy functions correctly.
//
// This will return an error on older kernel version (< 3.18) as the module
// was built-in, we simply ignore the error here. A better thing to do is
// to check the kernel version in the future.
plugin.execer.Command("modprobe", "br-netfilter").CombinedOutput()
err := plugin.sysctl.SetSysctl(sysctlBridgeCallIPTables, 1)
if err != nil {
glog.Warningf("can't set sysctl %s: %v", sysctlBridgeCallIPTables, err)
}
plugin.loConfig, err = libcni.ConfFromBytes([]byte(`{
"cniVersion": "0.1.0",
"name": "kubenet-loopback",
"type": "loopback"
}`))
if err != nil {
return fmt.Errorf("Failed to generate loopback config: %v", err)
}
plugin.nsenterPath, err = plugin.execer.LookPath("nsenter")
if err != nil {
return fmt.Errorf("Failed to find nsenter binary: %v", err)
}
// Need to SNAT outbound traffic from cluster
if err = plugin.ensureMasqRule(); err != nil {
return err
}
return nil
}
// TODO: move thic logic into cni bridge plugin and remove this from kubenet
func (plugin *kubenetNetworkPlugin) ensureMasqRule() error {
if _, err := plugin.iptables.EnsureRule(utiliptables.Append, utiliptables.TableNAT, utiliptables.ChainPostrouting,
"-m", "comment", "--comment", "kubenet: SNAT for outbound traffic from cluster",
"-m", "addrtype", "!", "--dst-type", "LOCAL",
"!", "-d", plugin.nonMasqueradeCIDR,
"-j", "MASQUERADE"); err != nil {
return fmt.Errorf("Failed to ensure that %s chain %s jumps to MASQUERADE: %v", utiliptables.TableNAT, utiliptables.ChainPostrouting, err)
}
return nil
}
func findMinMTU() (*net.Interface, error) {
intfs, err := net.Interfaces()
if err != nil {
return nil, err
}
mtu := 999999
defIntfIndex := -1
for i, intf := range intfs {
if ((intf.Flags & net.FlagUp) != 0) && (intf.Flags&(net.FlagLoopback|net.FlagPointToPoint) == 0) {
if intf.MTU < mtu {
mtu = intf.MTU
defIntfIndex = i
}
}
}
if mtu >= 999999 || mtu < 576 || defIntfIndex < 0 {
return nil, fmt.Errorf("no suitable interface: %v", BridgeName)
}
return &intfs[defIntfIndex], nil
}
const NET_CONFIG_TEMPLATE = `{
"cniVersion": "0.1.0",
"name": "kubenet",
"type": "bridge",
"bridge": "%s",
"mtu": %d,
"addIf": "%s",
"isGateway": true,
"ipMasq": false,
"hairpinMode": %t,
"ipam": {
"type": "host-local",<|fim▁hole|> { "dst": "0.0.0.0/0" }
]
}
}`
func (plugin *kubenetNetworkPlugin) Event(name string, details map[string]interface{}) {
if name != network.NET_PLUGIN_EVENT_POD_CIDR_CHANGE {
return
}
plugin.mu.Lock()
defer plugin.mu.Unlock()
podCIDR, ok := details[network.NET_PLUGIN_EVENT_POD_CIDR_CHANGE_DETAIL_CIDR].(string)
if !ok {
glog.Warningf("%s event didn't contain pod CIDR", network.NET_PLUGIN_EVENT_POD_CIDR_CHANGE)
return
}
if plugin.netConfig != nil {
glog.Warningf("Ignoring subsequent pod CIDR update to %s", podCIDR)
return
}
glog.V(5).Infof("PodCIDR is set to %q", podCIDR)
_, cidr, err := net.ParseCIDR(podCIDR)
if err == nil {
setHairpin := plugin.hairpinMode == componentconfig.HairpinVeth
// Set bridge address to first address in IPNet
cidr.IP.To4()[3] += 1
json := fmt.Sprintf(NET_CONFIG_TEMPLATE, BridgeName, plugin.mtu, network.DefaultInterfaceName, setHairpin, podCIDR, cidr.IP.String())
glog.V(2).Infof("CNI network config set to %v", json)
plugin.netConfig, err = libcni.ConfFromBytes([]byte(json))
if err == nil {
glog.V(5).Infof("CNI network config:\n%s", json)
// Ensure cbr0 has no conflicting addresses; CNI's 'bridge'
// plugin will bail out if the bridge has an unexpected one
plugin.clearBridgeAddressesExcept(cidr)
}
plugin.podCidr = podCIDR
plugin.gateway = cidr.IP
}
if err != nil {
glog.Warningf("Failed to generate CNI network config: %v", err)
}
}
func (plugin *kubenetNetworkPlugin) clearBridgeAddressesExcept(keep *net.IPNet) {
bridge, err := netlink.LinkByName(BridgeName)
if err != nil {
return
}
addrs, err := netlink.AddrList(bridge, syscall.AF_INET)
if err != nil {
return
}
for _, addr := range addrs {
if !utilnet.IPNetEqual(addr.IPNet, keep) {
glog.V(2).Infof("Removing old address %s from %s", addr.IPNet.String(), BridgeName)
netlink.AddrDel(bridge, &addr)
}
}
}
// ensureBridgeTxQueueLen() ensures that the bridge interface's TX queue
// length is greater than zero. Due to a CNI <= 0.3.0 'bridge' plugin bug,
// the bridge is initially created with a TX queue length of 0, which gets
// used as the packet limit for FIFO traffic shapers, which drops packets.
// TODO: remove when we can depend on a fixed CNI
func (plugin *kubenetNetworkPlugin) ensureBridgeTxQueueLen() {
bridge, err := netlink.LinkByName(BridgeName)
if err != nil {
return
}
if bridge.Attrs().TxQLen > 0 {
return
}
req := nl.NewNetlinkRequest(syscall.RTM_NEWLINK, syscall.NLM_F_ACK)
msg := nl.NewIfInfomsg(syscall.AF_UNSPEC)
req.AddData(msg)
nameData := nl.NewRtAttr(syscall.IFLA_IFNAME, nl.ZeroTerminated(BridgeName))
req.AddData(nameData)
qlen := nl.NewRtAttr(syscall.IFLA_TXQLEN, nl.Uint32Attr(1000))
req.AddData(qlen)
_, err = req.Execute(syscall.NETLINK_ROUTE, 0)
if err != nil {
glog.V(5).Infof("Failed to set bridge tx queue length: %v", err)
}
}
func (plugin *kubenetNetworkPlugin) Name() string {
return KubenetPluginName
}
func (plugin *kubenetNetworkPlugin) Capabilities() utilsets.Int {
return utilsets.NewInt(network.NET_PLUGIN_CAPABILITY_SHAPING)
}
// setup sets up networking through CNI using the given ns/name and sandbox ID.
// TODO: Don't pass the pod to this method, it only needs it for bandwidth
// shaping and hostport management.
func (plugin *kubenetNetworkPlugin) setup(namespace string, name string, id kubecontainer.ContainerID, pod *v1.Pod) error {
// Bring up container loopback interface
if _, err := plugin.addContainerToNetwork(plugin.loConfig, "lo", namespace, name, id); err != nil {
return err
}
// Hook container up with our bridge
res, err := plugin.addContainerToNetwork(plugin.netConfig, network.DefaultInterfaceName, namespace, name, id)
if err != nil {
return err
}
if res.IP4 == nil {
return fmt.Errorf("CNI plugin reported no IPv4 address for container %v.", id)
}
ip4 := res.IP4.IP.IP.To4()
if ip4 == nil {
return fmt.Errorf("CNI plugin reported an invalid IPv4 address for container %v: %+v.", id, res.IP4)
}
// Explicitly assign mac address to cbr0. If bridge mac address is not explicitly set will adopt the lowest MAC address of the attached veths.
// TODO: Remove this once upstream cni bridge plugin handles this
link, err := netlink.LinkByName(BridgeName)
if err != nil {
return fmt.Errorf("failed to lookup %q: %v", BridgeName, err)
}
macAddr, err := generateHardwareAddr(plugin.gateway)
if err != nil {
return err
}
glog.V(3).Infof("Configure %q mac address to %v", BridgeName, macAddr)
err = netlink.LinkSetHardwareAddr(link, macAddr)
if err != nil {
return fmt.Errorf("Failed to configure %q mac address to %q: %v", BridgeName, macAddr, err)
}
// Put the container bridge into promiscuous mode to force it to accept hairpin packets.
// TODO: Remove this once the kernel bug (#20096) is fixed.
// TODO: check and set promiscuous mode with netlink once vishvananda/netlink supports it
if plugin.hairpinMode == componentconfig.PromiscuousBridge {
output, err := plugin.execer.Command("ip", "link", "show", "dev", BridgeName).CombinedOutput()
if err != nil || strings.Index(string(output), "PROMISC") < 0 {
_, err := plugin.execer.Command("ip", "link", "set", BridgeName, "promisc", "on").CombinedOutput()
if err != nil {
return fmt.Errorf("Error setting promiscuous mode on %s: %v", BridgeName, err)
}
}
// configure the ebtables rules to eliminate duplicate packets by best effort
plugin.syncEbtablesDedupRules(macAddr)
}
plugin.podIPs[id] = ip4.String()
// The host can choose to not support "legacy" features. The remote
// shim doesn't support it (#35457), but the kubelet does.
if !plugin.host.SupportsLegacyFeatures() {
return nil
}
// The first SetUpPod call creates the bridge; get a shaper for the sake of
// initialization
shaper := plugin.shaper()
ingress, egress, err := bandwidth.ExtractPodBandwidthResources(pod.Annotations)
if err != nil {
return fmt.Errorf("Error reading pod bandwidth annotations: %v", err)
}
if egress != nil || ingress != nil {
if err := shaper.ReconcileCIDR(fmt.Sprintf("%s/32", ip4.String()), egress, ingress); err != nil {
return fmt.Errorf("Failed to add pod to shaper: %v", err)
}
}
// Open any hostports the pod's containers want
activePods, err := plugin.getActivePods()
if err != nil {
return err
}
newPod := &hostport.ActivePod{Pod: pod, IP: ip4}
if err := plugin.hostportHandler.OpenPodHostportsAndSync(newPod, BridgeName, activePods); err != nil {
return err
}
return nil
}
func (plugin *kubenetNetworkPlugin) SetUpPod(namespace string, name string, id kubecontainer.ContainerID) error {
plugin.mu.Lock()
defer plugin.mu.Unlock()
start := time.Now()
defer func() {
glog.V(4).Infof("SetUpPod took %v for %s/%s", time.Since(start), namespace, name)
}()
// TODO: Entire pod object only required for bw shaping and hostport.
pod, ok := plugin.host.GetPodByName(namespace, name)
if !ok {
return fmt.Errorf("pod %q cannot be found", name)
}
if err := plugin.Status(); err != nil {
return fmt.Errorf("Kubenet cannot SetUpPod: %v", err)
}
if err := plugin.setup(namespace, name, id, pod); err != nil {
// Make sure everything gets cleaned up on errors
podIP, _ := plugin.podIPs[id]
if err := plugin.teardown(namespace, name, id, podIP); err != nil {
// Not a hard error or warning
glog.V(4).Infof("Failed to clean up %s/%s after SetUpPod failure: %v", namespace, name, err)
}
// TODO(#34278): Figure out if we need IP GC through the cri.
// The cri should always send us teardown events for stale sandboxes,
// this obviates the need for GC in the common case, for kubenet.
if plugin.host.SupportsLegacyFeatures() {
// TODO: Remove this hack once we've figured out how to retrieve the netns
// of an exited container. Currently, restarting docker will leak a bunch of
// ips. This will exhaust available ip space unless we cleanup old ips. At the
// same time we don't want to try GC'ing them periodically as that could lead
// to a performance regression in starting pods. So on each setup failure, try
// GC on the assumption that the kubelet is going to retry pod creation, and
// when it does, there will be ips.
plugin.ipamGarbageCollection()
}
return err
}
// Need to SNAT outbound traffic from cluster
if err := plugin.ensureMasqRule(); err != nil {
glog.Errorf("Failed to ensure MASQ rule: %v", err)
}
return nil
}
// Tears down as much of a pod's network as it can even if errors occur. Returns
// an aggregate error composed of all errors encountered during the teardown.
func (plugin *kubenetNetworkPlugin) teardown(namespace string, name string, id kubecontainer.ContainerID, podIP string) error {
errList := []error{}
if podIP != "" {
glog.V(5).Infof("Removing pod IP %s from shaper", podIP)
// shaper wants /32
if err := plugin.shaper().Reset(fmt.Sprintf("%s/32", podIP)); err != nil {
// Possible bandwidth shaping wasn't enabled for this pod anyways
glog.V(4).Infof("Failed to remove pod IP %s from shaper: %v", podIP, err)
}
delete(plugin.podIPs, id)
}
if err := plugin.delContainerFromNetwork(plugin.netConfig, network.DefaultInterfaceName, namespace, name, id); err != nil {
// This is to prevent returning error when TearDownPod is called twice on the same pod. This helps to reduce event pollution.
if podIP != "" {
glog.Warningf("Failed to delete container from kubenet: %v", err)
} else {
errList = append(errList, err)
}
}
// The host can choose to not support "legacy" features. The remote
// shim doesn't support it (#35457), but the kubelet does.
if !plugin.host.SupportsLegacyFeatures() {
return utilerrors.NewAggregate(errList)
}
activePods, err := plugin.getActivePods()
if err == nil {
err = plugin.hostportHandler.SyncHostports(BridgeName, activePods)
}
if err != nil {
errList = append(errList, err)
}
return utilerrors.NewAggregate(errList)
}
func (plugin *kubenetNetworkPlugin) TearDownPod(namespace string, name string, id kubecontainer.ContainerID) error {
plugin.mu.Lock()
defer plugin.mu.Unlock()
start := time.Now()
defer func() {
glog.V(4).Infof("TearDownPod took %v for %s/%s", time.Since(start), namespace, name)
}()
if plugin.netConfig == nil {
return fmt.Errorf("Kubenet needs a PodCIDR to tear down pods")
}
// no cached IP is Ok during teardown
podIP, _ := plugin.podIPs[id]
if err := plugin.teardown(namespace, name, id, podIP); err != nil {
return err
}
// Need to SNAT outbound traffic from cluster
if err := plugin.ensureMasqRule(); err != nil {
glog.Errorf("Failed to ensure MASQ rule: %v", err)
}
return nil
}
// TODO: Use the addToNetwork function to obtain the IP of the Pod. That will assume idempotent ADD call to the plugin.
// Also fix the runtime's call to Status function to be done only in the case that the IP is lost, no need to do periodic calls
func (plugin *kubenetNetworkPlugin) GetPodNetworkStatus(namespace string, name string, id kubecontainer.ContainerID) (*network.PodNetworkStatus, error) {
plugin.mu.Lock()
defer plugin.mu.Unlock()
// Assuming the ip of pod does not change. Try to retrieve ip from kubenet map first.
if podIP, ok := plugin.podIPs[id]; ok {
return &network.PodNetworkStatus{IP: net.ParseIP(podIP)}, nil
}
netnsPath, err := plugin.host.GetNetNS(id.ID)
if err != nil {
return nil, fmt.Errorf("Kubenet failed to retrieve network namespace path: %v", err)
}
ip, err := network.GetPodIP(plugin.execer, plugin.nsenterPath, netnsPath, network.DefaultInterfaceName)
if err != nil {
return nil, err
}
plugin.podIPs[id] = ip.String()
return &network.PodNetworkStatus{IP: ip}, nil
}
func (plugin *kubenetNetworkPlugin) Status() error {
// Can't set up pods if we don't have a PodCIDR yet
if plugin.netConfig == nil {
return fmt.Errorf("Kubenet does not have netConfig. This is most likely due to lack of PodCIDR")
}
if !plugin.checkCNIPlugin() {
return fmt.Errorf("could not locate kubenet required CNI plugins %v at %q or %q", requiredCNIPlugins, DefaultCNIDir, plugin.vendorDir)
}
return nil
}
// checkCNIPlugin returns if all kubenet required cni plugins can be found at /opt/cni/bin or user specifed NetworkPluginDir.
func (plugin *kubenetNetworkPlugin) checkCNIPlugin() bool {
if plugin.checkCNIPluginInDir(DefaultCNIDir) || plugin.checkCNIPluginInDir(plugin.vendorDir) {
return true
}
return false
}
// checkCNIPluginInDir returns if all required cni plugins are placed in dir
func (plugin *kubenetNetworkPlugin) checkCNIPluginInDir(dir string) bool {
files, err := ioutil.ReadDir(dir)
if err != nil {
return false
}
for _, cniPlugin := range requiredCNIPlugins {
found := false
for _, file := range files {
if strings.TrimSpace(file.Name()) == cniPlugin {
found = true
break
}
}
if !found {
return false
}
}
return true
}
// getNonExitedPods returns a list of pods that have at least one running container.
func (plugin *kubenetNetworkPlugin) getNonExitedPods() ([]*kubecontainer.Pod, error) {
ret := []*kubecontainer.Pod{}
pods, err := plugin.host.GetRuntime().GetPods(true)
if err != nil {
return nil, fmt.Errorf("Failed to retrieve pods from runtime: %v", err)
}
for _, p := range pods {
if podIsExited(p) {
continue
}
ret = append(ret, p)
}
return ret, nil
}
// Returns a list of pods running or ready to run on this node and each pod's IP address.
// Assumes PodSpecs retrieved from the runtime include the name and ID of containers in
// each pod.
func (plugin *kubenetNetworkPlugin) getActivePods() ([]*hostport.ActivePod, error) {
pods, err := plugin.getNonExitedPods()
if err != nil {
return nil, err
}
activePods := make([]*hostport.ActivePod, 0)
for _, p := range pods {
containerID, err := plugin.host.GetRuntime().GetPodContainerID(p)
if err != nil {
continue
}
ipString, ok := plugin.podIPs[containerID]
if !ok {
continue
}
podIP := net.ParseIP(ipString)
if podIP == nil {
continue
}
if pod, ok := plugin.host.GetPodByName(p.Namespace, p.Name); ok {
activePods = append(activePods, &hostport.ActivePod{
Pod: pod,
IP: podIP,
})
}
}
return activePods, nil
}
// ipamGarbageCollection will release unused IP.
// kubenet uses the CNI bridge plugin, which stores allocated ips on file. Each
// file created under defaultIPAMDir has the format: ip/container-hash. So this
// routine looks for hashes that are not reported by the currently running docker,
// and invokes DelNetwork on each one. Note that this will only work for the
// current CNI bridge plugin, because we have no way of finding the NetNs.
func (plugin *kubenetNetworkPlugin) ipamGarbageCollection() {
glog.V(2).Infof("Starting IP garbage collection")
ipamDir := filepath.Join(defaultIPAMDir, KubenetPluginName)
files, err := ioutil.ReadDir(ipamDir)
if err != nil {
glog.Errorf("Failed to list files in %q: %v", ipamDir, err)
return
}
// gather containerIDs for allocated ips
ipContainerIdMap := make(map[string]string)
for _, file := range files {
// skip non checkpoint file
if ip := net.ParseIP(file.Name()); ip == nil {
continue
}
content, err := ioutil.ReadFile(filepath.Join(ipamDir, file.Name()))
if err != nil {
glog.Errorf("Failed to read file %v: %v", file, err)
}
ipContainerIdMap[file.Name()] = strings.TrimSpace(string(content))
}
// gather infra container IDs of current running Pods
runningContainerIDs := utilsets.String{}
pods, err := plugin.getNonExitedPods()
if err != nil {
glog.Errorf("Failed to get pods: %v", err)
return
}
for _, pod := range pods {
containerID, err := plugin.host.GetRuntime().GetPodContainerID(pod)
if err != nil {
glog.Warningf("Failed to get infra containerID of %q/%q: %v", pod.Namespace, pod.Name, err)
continue
}
runningContainerIDs.Insert(strings.TrimSpace(containerID.ID))
}
// release leaked ips
for ip, containerID := range ipContainerIdMap {
// if the container is not running, release IP
if runningContainerIDs.Has(containerID) {
continue
}
// CNI requires all config to be presented, although only containerID is needed in this case
rt := &libcni.RuntimeConf{
ContainerID: containerID,
IfName: network.DefaultInterfaceName,
// TODO: How do we find the NetNs of an exited container? docker inspect
// doesn't show us the pid, so we probably need to checkpoint
NetNS: "",
}
glog.V(2).Infof("Releasing IP %q allocated to %q.", ip, containerID)
// CNI bridge plugin should try to release IP and then return
if err := plugin.cniConfig.DelNetwork(plugin.netConfig, rt); err != nil {
glog.Errorf("Error while releasing IP: %v", err)
}
}
}
// podIsExited returns true if the pod is exited (all containers inside are exited).
func podIsExited(p *kubecontainer.Pod) bool {
for _, c := range p.Containers {
if c.State != kubecontainer.ContainerStateExited {
return false
}
}
for _, c := range p.Sandboxes {
if c.State != kubecontainer.ContainerStateExited {
return false
}
}
return true
}
func (plugin *kubenetNetworkPlugin) buildCNIRuntimeConf(ifName string, id kubecontainer.ContainerID) (*libcni.RuntimeConf, error) {
netnsPath, err := plugin.host.GetNetNS(id.ID)
if err != nil {
return nil, fmt.Errorf("Kubenet failed to retrieve network namespace path: %v", err)
}
return &libcni.RuntimeConf{
ContainerID: id.ID,
NetNS: netnsPath,
IfName: ifName,
}, nil
}
func (plugin *kubenetNetworkPlugin) addContainerToNetwork(config *libcni.NetworkConfig, ifName, namespace, name string, id kubecontainer.ContainerID) (*cnitypes.Result, error) {
rt, err := plugin.buildCNIRuntimeConf(ifName, id)
if err != nil {
return nil, fmt.Errorf("Error building CNI config: %v", err)
}
glog.V(3).Infof("Adding %s/%s to '%s' with CNI '%s' plugin and runtime: %+v", namespace, name, config.Network.Name, config.Network.Type, rt)
res, err := plugin.cniConfig.AddNetwork(config, rt)
if err != nil {
return nil, fmt.Errorf("Error adding container to network: %v", err)
}
return res, nil
}
func (plugin *kubenetNetworkPlugin) delContainerFromNetwork(config *libcni.NetworkConfig, ifName, namespace, name string, id kubecontainer.ContainerID) error {
rt, err := plugin.buildCNIRuntimeConf(ifName, id)
if err != nil {
return fmt.Errorf("Error building CNI config: %v", err)
}
glog.V(3).Infof("Removing %s/%s from '%s' with CNI '%s' plugin and runtime: %+v", namespace, name, config.Network.Name, config.Network.Type, rt)
if err := plugin.cniConfig.DelNetwork(config, rt); err != nil {
return fmt.Errorf("Error removing container from network: %v", err)
}
return nil
}
// shaper retrieves the bandwidth shaper and, if it hasn't been fetched before,
// initializes it and ensures the bridge is appropriately configured
// This function should only be called while holding the `plugin.mu` lock
func (plugin *kubenetNetworkPlugin) shaper() bandwidth.BandwidthShaper {
if plugin.bandwidthShaper == nil {
plugin.bandwidthShaper = bandwidth.NewTCShaper(BridgeName)
plugin.ensureBridgeTxQueueLen()
plugin.bandwidthShaper.ReconcileInterface()
}
return plugin.bandwidthShaper
}
//TODO: make this into a goroutine and rectify the dedup rules periodically
func (plugin *kubenetNetworkPlugin) syncEbtablesDedupRules(macAddr net.HardwareAddr) {
if plugin.ebtables == nil {
plugin.ebtables = utilebtables.New(plugin.execer)
glog.V(3).Infof("Flushing dedup chain")
if err := plugin.ebtables.FlushChain(utilebtables.TableFilter, dedupChain); err != nil {
glog.Errorf("Failed to flush dedup chain: %v", err)
}
}
_, err := plugin.ebtables.GetVersion()
if err != nil {
glog.Warningf("Failed to get ebtables version. Skip syncing ebtables dedup rules: %v", err)
return
}
glog.V(3).Infof("Filtering packets with ebtables on mac address: %v, gateway: %v, pod CIDR: %v", macAddr.String(), plugin.gateway.String(), plugin.podCidr)
_, err = plugin.ebtables.EnsureChain(utilebtables.TableFilter, dedupChain)
if err != nil {
glog.Errorf("Failed to ensure %v chain %v", utilebtables.TableFilter, dedupChain)
return
}
_, err = plugin.ebtables.EnsureRule(utilebtables.Append, utilebtables.TableFilter, utilebtables.ChainOutput, "-j", string(dedupChain))
if err != nil {
glog.Errorf("Failed to ensure %v chain %v jump to %v chain: %v", utilebtables.TableFilter, utilebtables.ChainOutput, dedupChain, err)
return
}
commonArgs := []string{"-p", "IPv4", "-s", macAddr.String(), "-o", "veth+"}
_, err = plugin.ebtables.EnsureRule(utilebtables.Prepend, utilebtables.TableFilter, dedupChain, append(commonArgs, "--ip-src", plugin.gateway.String(), "-j", "ACCEPT")...)
if err != nil {
glog.Errorf("Failed to ensure packets from cbr0 gateway to be accepted")
return
}
_, err = plugin.ebtables.EnsureRule(utilebtables.Append, utilebtables.TableFilter, dedupChain, append(commonArgs, "--ip-src", plugin.podCidr, "-j", "DROP")...)
if err != nil {
glog.Errorf("Failed to ensure packets from podCidr but has mac address of cbr0 to get dropped.")
return
}
}
// generateHardwareAddr generates 48 bit virtual mac addresses based on the IP input.
func generateHardwareAddr(ip net.IP) (net.HardwareAddr, error) {
if ip.To4() == nil {
return nil, fmt.Errorf("generateHardwareAddr only support valid ipv4 address as input")
}
mac := privateMACPrefix
sections := strings.Split(ip.String(), ".")
for _, s := range sections {
i, _ := strconv.Atoi(s)
mac = mac + ":" + fmt.Sprintf("%02x", i)
}
hwAddr, err := net.ParseMAC(mac)
if err != nil {
return nil, fmt.Errorf("Failed to parse mac address %s generated based on ip %s due to: %v", mac, ip, err)
}
return hwAddr, nil
}<|fim▁end|> | "subnet": "%s",
"gateway": "%s",
"routes": [ |
<|file_name|>categoryslider.js<|end_file_name|><|fim▁begin|>function createCategorySlider(selector)
{
$(document).ready(function(){
var checkforloadedcats = [];
var firstImage = $(selector).find('img').filter(':first');
if(firstImage.length > 0){
checkforloadedcats[selector] = setInterval(function() {
var image = firstImage.get(0);
if (image.complete || image.readyState == 'complete' || image.readyState == 4) {
clearInterval(checkforloadedcats[selector]);
$(selector).flexslider({
namespace: "",
animation: "slide",
easing: "easeInQuart",
slideshow: false,
animationLoop: false,
animationSpeed: 700,
pauseOnHover: true,
controlNav: false,
itemWidth: 238,
minItems: flexmin,
maxItems: flexmax,
<|fim▁hole|> }, 20);
}
$(window).resize(function() {
try {
$(selector).flexslider(0);
if($('#center_column').width()<=280){ $(selector).data('flexslider').setOpts({minItems: 1, maxItems: 1});
}
else if($('#center_column').width()<=440){ $(selector).data('flexslider').setOpts({minItems: grid_size_ms, maxItems: grid_size_ms});}
else if($('#center_column').width()<963){ $(selector).data('flexslider').setOpts({minItems: grid_size_sm, maxItems: grid_size_sm});}
else if($('#center_column').width()>=1240){ $(selector).data('flexslider').setOpts({minItems: grid_size_lg, maxItems: grid_size_lg});}
else if($('#center_column').width()>=963){ $(selector).data('flexslider').setOpts({minItems: grid_size_md, maxItems: grid_size_md});}
} catch(e) {
// handle all your exceptions here
}
});
});
}<|fim▁end|> | move: 0 });
}
|
<|file_name|>app-utils.test.js<|end_file_name|><|fim▁begin|>const test = require('ava');
const {replaceUrls, toInaboxDocument} = require('../app-utils');
test('replaceUrls("minified", ...)', async (t) => {
const mode = 'minified';
t.is(
replaceUrls(
mode,
'<script src="https://cdn.ampproject.org/v0.js"></script>'
),
'<script src="/dist/v0.js"></script>'
);
t.is(
replaceUrls(
mode,
'<script src="https://cdn.ampproject.org/shadow-v0.js"></script>'
),
'<script src="/dist/shadow-v0.js"></script>'
);
t.is(
replaceUrls(
mode,
'<script src="https://cdn.ampproject.org/amp4ads-v0.js"></script>'
),
'<script src="/dist/amp4ads-v0.js"></script>'
);
t.is(
replaceUrls(
mode,
'<script src="https://cdn.ampproject.org/video-iframe-integration-v0.js"></script>'
),
'<script src="/dist/video-iframe-integration-v0.js"></script>'
);
t.is(
replaceUrls(
mode,
'<link rel="stylesheet" href="https://cdn.ampproject.org/v0/amp-whatever-1.0.css" />'
),
'<link rel="stylesheet" href="/dist/v0/amp-whatever-1.0.css" />'
);
t.is(
replaceUrls(
mode,
`
<head>
<script src="https://cdn.ampproject.org/v0.js"></script>
<script src="https://cdn.ampproject.org/v0/amp-foo-0.1.js"></script>
<link rel="stylesheet" href="https://cdn.ampproject.org/v0/amp-foo-1.0.css" />
</head>
`
),
`
<head>
<script src="/dist/v0.js"></script>
<script src="/dist/v0/amp-foo-0.1.js"></script>
<link rel="stylesheet" href="/dist/v0/amp-foo-1.0.css" />
</head>
`
);
});
test('replaceUrls("minified", ..., hostName)', async (t) => {
const mode = 'minified';
const hostName = 'https://foo.bar';
t.is(
replaceUrls(
mode,
'<script src="https://cdn.ampproject.org/v0.js"></script>',
hostName
),
'<script src="https://foo.bar/dist/v0.js"></script>'
);
t.is(
replaceUrls(
mode,
'<script src="https://cdn.ampproject.org/shadow-v0.js"></script>',
hostName
),
'<script src="https://foo.bar/dist/shadow-v0.js"></script>'
);
t.is(
replaceUrls(
mode,
'<script src="https://cdn.ampproject.org/amp4ads-v0.js"></script>',
hostName
),
'<script src="https://foo.bar/dist/amp4ads-v0.js"></script>'
);
t.is(
replaceUrls(
mode,
'<script src="https://cdn.ampproject.org/video-iframe-integration-v0.js"></script>',
hostName
),
'<script src="https://foo.bar/dist/video-iframe-integration-v0.js"></script>'
);
t.is(
replaceUrls(
mode,
'<link rel="stylesheet" href="https://cdn.ampproject.org/v0/amp-whatever-1.0.css" />',
hostName
),
'<link rel="stylesheet" href="https://foo.bar/dist/v0/amp-whatever-1.0.css" />'
);
t.is(
replaceUrls(
mode,
`
<head>
<script src="https://cdn.ampproject.org/v0.js"></script>
<script src="https://cdn.ampproject.org/v0/amp-foo-0.1.js"></script>
<link rel="stylesheet" href="https://cdn.ampproject.org/v0/amp-foo-1.0.css" />
</head>
`,
hostName
),
`
<head>
<script src="https://foo.bar/dist/v0.js"></script>
<script src="https://foo.bar/dist/v0/amp-foo-0.1.js"></script>
<link rel="stylesheet" href="https://foo.bar/dist/v0/amp-foo-1.0.css" />
</head>
`
);
});
test('replaceUrls("default", ...)', async (t) => {
const mode = 'default';
t.is(
replaceUrls(
mode,
'<script src="https://cdn.ampproject.org/v0.js"></script>'
),
'<script src="/dist/amp.js"></script>'
);
t.is(
replaceUrls(
mode,
'<script src="https://cdn.ampproject.org/shadow-v0.js"></script>'
),
'<script src="/dist/amp-shadow.js"></script>'
);
t.is(
replaceUrls(
mode,
'<script src="https://cdn.ampproject.org/amp4ads-v0.js"></script>'
),
'<script src="/dist/amp-inabox.js"></script>'
);
t.is(
replaceUrls(
mode,
'<script src="https://cdn.ampproject.org/video-iframe-integration-v0.js"></script>'
),
'<script src="/dist/video-iframe-integration.js"></script>'
);
t.is(
replaceUrls(
mode,
'<link rel="stylesheet" href="https://cdn.ampproject.org/v0/amp-whatever-1.0.css" />'
),
'<link rel="stylesheet" href="/dist/v0/amp-whatever-1.0.css" />'
);
t.is(
replaceUrls(
mode,
`
<head>
<script src="https://cdn.ampproject.org/v0.js"></script>
<script src="https://cdn.ampproject.org/v0/amp-foo-0.1.js"></script>
<link rel="stylesheet" href="https://cdn.ampproject.org/v0/amp-foo-1.0.css" />
</head>
`
),
`
<head>
<script src="/dist/amp.js"></script>
<script src="/dist/v0/amp-foo-0.1.max.js"></script>
<link rel="stylesheet" href="/dist/v0/amp-foo-1.0.css" />
</head>
`
);
});
test('replaceUrls("default", ..., hostName)', async (t) => {
const mode = 'default';
const hostName = 'https://foo.bar';
t.is(
replaceUrls(
mode,
'<script src="https://cdn.ampproject.org/v0.js"></script>',
hostName
),
'<script src="https://foo.bar/dist/amp.js"></script>'
);
t.is(
replaceUrls(
mode,
'<script src="https://cdn.ampproject.org/shadow-v0.js"></script>',
hostName
),
'<script src="https://foo.bar/dist/amp-shadow.js"></script>'
);
t.is(
replaceUrls(
mode,
'<script src="https://cdn.ampproject.org/amp4ads-v0.js"></script>',
hostName
),
'<script src="https://foo.bar/dist/amp-inabox.js"></script>'
);
t.is(
replaceUrls(
mode,
'<script src="https://cdn.ampproject.org/video-iframe-integration-v0.js"></script>',
hostName
),
'<script src="https://foo.bar/dist/video-iframe-integration.js"></script>'
);
t.is(
replaceUrls(
mode,
'<link rel="stylesheet" href="https://cdn.ampproject.org/v0/amp-whatever-1.0.css" />',
hostName
),
'<link rel="stylesheet" href="https://foo.bar/dist/v0/amp-whatever-1.0.css" />'
);
t.is(
replaceUrls(
mode,
`
<head>
<script src="https://cdn.ampproject.org/v0.js"></script>
<script src="https://cdn.ampproject.org/v0/amp-foo-0.1.js"></script>
<link rel="stylesheet" href="https://cdn.ampproject.org/v0/amp-foo-1.0.css" />
</head>
`,
hostName
),
`
<head>
<script src="https://foo.bar/dist/amp.js"></script>
<script src="https://foo.bar/dist/v0/amp-foo-0.1.max.js"></script>
<link rel="stylesheet" href="https://foo.bar/dist/v0/amp-foo-1.0.css" />
</head>
`
);
});
test('replaceUrls(rtv, ...)', async (t) => {
const mode = '123456789012345';
t.is(
replaceUrls(
mode,
'<script src="https://cdn.ampproject.org/v0.js"></script>'
),
'<script src="https://cdn.ampproject.org/rtv/123456789012345/v0.js"></script>'
);
t.is(
replaceUrls(
mode,
'<script src="https://cdn.ampproject.org/shadow-v0.js"></script>'
),
'<script src="https://cdn.ampproject.org/rtv/123456789012345/shadow-v0.js"></script>'
);
t.is(
replaceUrls(
mode,
'<script src="https://cdn.ampproject.org/amp4ads-v0.js"></script>'
),
'<script src="https://cdn.ampproject.org/rtv/123456789012345/amp4ads-v0.js"></script>'
);
t.is(
replaceUrls(
mode,<|fim▁hole|> );
t.is(
replaceUrls(
mode,
'<link rel="stylesheet" href="https://cdn.ampproject.org/v0/amp-whatever-1.0.css" />'
),
'<link rel="stylesheet" href="https://cdn.ampproject.org/rtv/123456789012345/v0/amp-whatever-1.0.css" />'
);
t.is(
replaceUrls(
mode,
`
<head>
<script src="https://cdn.ampproject.org/v0.js"></script>
<script src="https://cdn.ampproject.org/v0/amp-foo-0.1.js"></script>
<link rel="stylesheet" href="https://cdn.ampproject.org/v0/amp-foo-1.0.css" />
</head>
`
),
`
<head>
<script src="https://cdn.ampproject.org/rtv/123456789012345/v0.js"></script>
<script src="https://cdn.ampproject.org/rtv/123456789012345/v0/amp-foo-0.1.js"></script>
<link rel="stylesheet" href="https://cdn.ampproject.org/rtv/123456789012345/v0/amp-foo-1.0.css" />
</head>
`
);
});
test('toInaboxDocument(...)', async (t) => {
t.is(
toInaboxDocument(
`<html amp>
<head>
<script src="https://cdn.ampproject.org/v0.js"></script>
<script src="https://cdn.ampproject.org/v0/amp-video-0.1.js"></script>
</head>
</html>`
),
`<html amp4ads>
<head>
<script src="https://cdn.ampproject.org/amp4ads-v0.js"></script>
<script src="https://cdn.ampproject.org/v0/amp-video-0.1.js"></script>
</head>
</html>`
);
});
test('replaceUrls("minified", toInaboxDocument(...))', async (t) => {
const mode = 'minified';
const hostName = '';
t.is(
replaceUrls(
mode,
toInaboxDocument(
'<script src="https://cdn.ampproject.org/v0.js"></script>'
),
hostName
),
'<script src="/dist/amp4ads-v0.js"></script>'
);
});
test('replaceUrls("default", toInaboxDocument(...))', async (t) => {
const mode = 'default';
const hostName = '';
t.is(
replaceUrls(
mode,
toInaboxDocument(
'<script src="https://cdn.ampproject.org/v0.js"></script>'
),
hostName
),
'<script src="/dist/amp-inabox.js"></script>'
);
});
test('replaceUrls(rtv, toInaboxDocument(...))', async (t) => {
const mode = '123456789012345';
const hostName = '';
t.is(
replaceUrls(
mode,
toInaboxDocument(
'<script src="https://cdn.ampproject.org/v0.js"></script>'
),
hostName
),
'<script src="https://cdn.ampproject.org/rtv/123456789012345/amp4ads-v0.js"></script>'
);
});<|fim▁end|> | '<script src="https://cdn.ampproject.org/video-iframe-integration-v0.js"></script>'
),
'<script src="https://cdn.ampproject.org/rtv/123456789012345/video-iframe-integration-v0.js"></script>' |
<|file_name|>playlist_test.go<|end_file_name|><|fim▁begin|>package handlers_test
import (
"bytes"
"encoding/json"
"fmt"
"net/http"
"net/http/httptest"
. "github.com/onsi/ginkgo"
. "github.com/onsi/gomega"
"github.com/zefer/gompd/mpd"
"github.com/zefer/mothership/handlers"
)
type mockPlClient struct{}
var mockStatus map[string]string = map[string]string{}
func (c mockPlClient) Status() (mpd.Attrs, error) {
return mockStatus, nil
}
var requestedRange [2]int
func (c mockPlClient) PlaylistInfo(start, end int) ([]mpd.Attrs, error) {
requestedRange = [2]int{start, end}
pls := []mpd.Attrs{
{
"file": "Led Zeppelin - Houses Of The Holy/08 - Led Zeppelin - The Ocean.mp3",
"Artist": "Led Zeppelin",
"Title": "The Ocean",
"Album": "Houses of the Holy",
"Last-Modified": "2010-12-09T21:32:02Z",
"Pos": "0",
},
{
"file": "Johnny Cash – Unchained/Johnny Cash – Sea Of Heartbreak.mp3",
"Last-Modified": "2011-10-09T11:45:11Z",
"Pos": "1",
},
{
"file": "http://somestream",
"Name": "HTTP stream from pls",
"Last-Modified": "2011-10-09T11:45:11Z",
"Pos": "2",
},
}
return pls, nil
}
var clearCalled bool = false
func (c mockPlClient) Clear() error {
clearCalled = true
return nil
}
var loadedURI string = ""
func (c mockPlClient) PlaylistLoad(uri string, start, end int) error {
loadedURI = uri
return nil
}
var addedURI string = ""
func (c mockPlClient) Add(uri string) error {
addedURI = uri
return nil
}
var playCalled bool = false
var playedPos int = 0
func (c mockPlClient) Play(pos int) error {
playCalled = true
playedPos = pos
return nil
}
var _ = Describe("PlayListHandler", func() {
var handler http.Handler
var w *httptest.ResponseRecorder
BeforeEach(func() {
called = false
w = httptest.NewRecorder()
})
Context("with disallowed HTTP methods", func() {
var client *mockPlClient
BeforeEach(func() {
client = &mockPlClient{}
handler = handlers.PlayListHandler(client)
})
It("responds with 405 method not allowed", func() {
for _, method := range []string{"PUT", "PATCH", "DELETE"} {
req, _ := http.NewRequest(method, "/playlist", nil)
handler.ServeHTTP(w, req)
Expect(w.Code).To(Equal(http.StatusMethodNotAllowed))
Expect(w.Body.String()).To(Equal(""))
}
})
})
Context("with a GET request (list the current playlist)", func() {
var client *mockPlClient
BeforeEach(func() {
client = &mockPlClient{}
handler = handlers.PlayListHandler(client)
})
Describe("the MPD query", func() {
Context("when there are less than 500 items on the playlist", func() {
BeforeEach(func() {
mockStatus = map[string]string{"playlistlength": "12"}
req, _ := http.NewRequest("GET", "/playlist", nil)
handler.ServeHTTP(w, req)
})
It("requests the full playlist from MPD", func() {
Expect(requestedRange[0]).To(Equal(-1))
Expect(requestedRange[1]).To(Equal(-1))
})
})
Context("when there are more than 500 items on the playlist", func() {
Context("when the current playlist position isn't the first song", func() {
BeforeEach(func() {
mockStatus = map[string]string{"playlistlength": "501", "song": "123"}
req, _ := http.NewRequest("GET", "/playlist", nil)
handler.ServeHTTP(w, req)
})
It("requests a slice of the playlist from MPD. Current pos -1 to +500", func() {
Expect(requestedRange[0]).To(Equal(122))
Expect(requestedRange[1]).To(Equal(623))
})
})
Context("when the current playlist position is the first song", func() {
BeforeEach(func() {
mockStatus = map[string]string{"playlistlength": "501", "song": "0"}
req, _ := http.NewRequest("GET", "/playlist", nil)
handler.ServeHTTP(w, req)
})
// Checking we don't query with a negative start index.
It("requests a slice of the playlist from MPD. 0 to 500", func() {
Expect(requestedRange[0]).To(Equal(0))
Expect(requestedRange[1]).To(Equal(500))
})
})
})
})
Describe("the response", func() {
It("responds with 200 OK", func() {
req, _ := http.NewRequest("GET", "/playlist", nil)
handler.ServeHTTP(w, req)
Expect(w.Code).To(Equal(http.StatusOK))
})
It("responds with the JSON content-type", func() {
req, _ := http.NewRequest("GET", "/playlist", nil)
handler.ServeHTTP(w, req)
Expect(w.HeaderMap["Content-Type"][0]).To(Equal("application/json"))
})
It("responds with a JSON array of playlist items", func() {
req, _ := http.NewRequest("GET", "/playlist", nil)
handler.ServeHTTP(w, req)
var pls []map[string]interface{}
if err := json.NewDecoder(w.Body).Decode(&pls); err != nil {
Fail(fmt.Sprintf("Could not parse JSON %v", err))
}
Expect(len(pls)).To(Equal(3))
// Item 1 has artist & track parts, so we expect "artist - track".
Expect(len(pls[0])).To(Equal(2))
Expect(pls[0]["pos"]).To(BeEquivalentTo(1))
Expect(pls[0]["name"]).To(Equal("Led Zeppelin - The Ocean"))
// Item 2 doesn't have artist & track parts, so we expect "file.mp3".
Expect(len(pls[1])).To(Equal(2))
Expect(pls[1]["pos"]).To(BeEquivalentTo(2))
Expect(pls[1]["name"]).To(Equal("Johnny Cash – Sea Of Heartbreak.mp3"))
// Item 3 has a 'name' field, such as from a loaded pls playlist.
Expect(len(pls[2])).To(Equal(2))
Expect(pls[2]["pos"]).To(BeEquivalentTo(3))
Expect(pls[2]["name"]).To(Equal("HTTP stream from pls"))
})
})
})
Context("with a POST request (update the current playlist)", func() {
var validParams map[string]interface{}
BeforeEach(func() {
validParams = map[string]interface{}{
"uri": "gorilla.mp3", "type": "file", "replace": true, "play": true,
}
})
Describe("POST data validation", func() {
Context("with valid params", func() {
It("responds 204 no content", func() {
json, _ := json.Marshal(validParams)
req, _ := http.NewRequest("POST", "/playlist", bytes.NewBuffer(json))
handler.ServeHTTP(w, req)
Expect(w.Code).To(Equal(http.StatusNoContent))
})
})
Context("with un-parseable JSON", func() {
It("responds 400 bad request", func() {
var json = []byte(`{not-json`)
req, _ := http.NewRequest("POST", "/playlist", bytes.NewBuffer(json))
handler.ServeHTTP(w, req)
Expect(w.Code).To(Equal(http.StatusBadRequest))
})
})
Context("with missing required fields", func() {
It("responds 400 bad request", func() {
for _, f := range []string{"uri", "type", "replace", "play"} {<|fim▁hole|> for k, v := range validParams {
params[k] = v
}
delete(params, f)
json, _ := json.Marshal(params)
req, _ := http.NewRequest("POST", "/playlist", bytes.NewBuffer(json))
handler.ServeHTTP(w, req)
Expect(w.Code).To(Equal(http.StatusBadRequest))
}
})
})
// Without URI, we don't know what to add to the playlist.
Context("with an empty 'uri' field", func() {
It("responds 400 bad request", func() {
validParams["uri"] = ""
json, _ := json.Marshal(validParams)
req, _ := http.NewRequest("POST", "/playlist", bytes.NewBuffer(json))
handler.ServeHTTP(w, req)
Expect(w.Code).To(Equal(http.StatusBadRequest))
})
})
})
Context("with replace=true", func() {
It("clears the playlist", func() {
clearCalled = false
validParams["replace"] = true
json, _ := json.Marshal(validParams)
req, _ := http.NewRequest("POST", "/playlist", bytes.NewBuffer(json))
handler.ServeHTTP(w, req)
Expect(clearCalled).To(BeTrue())
})
})
Context("with replace=false", func() {
It("does not clear the playlist", func() {
clearCalled = false
validParams["replace"] = false
json, _ := json.Marshal(validParams)
req, _ := http.NewRequest("POST", "/playlist", bytes.NewBuffer(json))
handler.ServeHTTP(w, req)
Expect(clearCalled).To(BeFalse())
})
})
Context("when type='playlist'", func() {
It("loads the given URI", func() {
loadedURI = ""
addedURI = ""
validParams["type"] = "playlist"
validParams["uri"] = "http://gorillas"
json, _ := json.Marshal(validParams)
req, _ := http.NewRequest("POST", "/playlist", bytes.NewBuffer(json))
handler.ServeHTTP(w, req)
Expect(loadedURI).To(Equal("http://gorillas"))
Expect(addedURI).To(Equal(""))
})
})
Context("when type='directory' or type='file'", func() {
It("adds the given URI", func() {
for _, t := range []string{"directory", "file"} {
loadedURI = ""
addedURI = ""
validParams["type"] = t
validParams["uri"] = "http://gorillas"
json, _ := json.Marshal(validParams)
req, _ := http.NewRequest("POST", "/playlist", bytes.NewBuffer(json))
handler.ServeHTTP(w, req)
Expect(addedURI).To(Equal("http://gorillas"))
Expect(loadedURI).To(Equal(""))
}
})
})
Context("when play=true", func() {
BeforeEach(func() {
validParams["play"] = true
mockStatus = map[string]string{"playlistlength": "66"}
playedPos = 123
playCalled = false
})
Context("and replace=true", func() {
It("it tells MPD to play from position 0", func() {
validParams["replace"] = true
json, _ := json.Marshal(validParams)
req, _ := http.NewRequest("POST", "/playlist", bytes.NewBuffer(json))
handler.ServeHTTP(w, req)
Expect(playCalled).To(BeTrue())
Expect(playedPos).To(Equal(0))
})
})
Context("and replace=false", func() {
It("it tells MPD to play from the start of the new added items", func() {
validParams["replace"] = false
json, _ := json.Marshal(validParams)
req, _ := http.NewRequest("POST", "/playlist", bytes.NewBuffer(json))
handler.ServeHTTP(w, req)
Expect(playCalled).To(BeTrue())
Expect(playedPos).To(Equal(66))
})
})
})
Context("when play=false", func() {
It("it does not tell MPD to play", func() {
playCalled = false
validParams["play"] = false
json, _ := json.Marshal(validParams)
req, _ := http.NewRequest("POST", "/playlist", bytes.NewBuffer(json))
handler.ServeHTTP(w, req)
Expect(playCalled).To(BeFalse())
})
})
})
})<|fim▁end|> | // d = map[string]string{"uri": "", "type": "", "replace": "", "play": ""}
params := make(map[string]interface{}) |
<|file_name|>coverage.py<|end_file_name|><|fim▁begin|>#!/usr/bin/env python3
import os, sys, glob, pickle, subprocess
sys.path.insert(0, os.path.dirname(__file__))
from clang import cindex
sys.path = sys.path[1:]
def configure_libclang():
llvm_libdirs = ['/usr/lib/llvm-3.2/lib', '/usr/lib64/llvm']
try:
libdir = subprocess.check_output(['llvm-config', '--libdir']).decode('utf-8').strip()
llvm_libdirs.insert(0, libdir)
except OSError:
pass
for d in llvm_libdirs:
if not os.path.exists(d):
continue
files = glob.glob(os.path.join(d, 'libclang.so*'))
if len(files) != 0:
cindex.Config.set_library_file(files[0])
return
class Call:
def __init__(self, cursor, decl):
self.ident = cursor.displayname.decode('utf-8')
self.filename = cursor.location.file.name.decode('utf-8')
ex = cursor.extent
self.start_line = ex.start.line
self.start_column = ex.start.column
self.end_line = ex.end.line
self.end_column = ex.end.column
self.decl_filename = decl.location.file.name.decode('utf-8')
class Definition:
def __init__(self, cursor):
self.ident = cursor.spelling.decode('utf-8')
self.display = cursor.displayname.decode('utf-8')
self.filename = cursor.location.file.name.decode('utf-8')
ex = cursor.extent
self.start_line = ex.start.line
self.start_column = ex.start.column
self.end_line = ex.end.line
self.end_column = ex.end.column
def process_diagnostics(tu):
diagnostics = tu.diagnostics
haserr = False
for d in diagnostics:
sys.stderr.write('{0}\n'.format(d.format.decode('utf-8')))
if d.severity > cindex.Diagnostic.Warning:
haserr = True
if haserr:
sys.exit(1)
def walk_cursors(tu, files):
proc = list(tu.cursor.get_children())
while len(proc) > 0:
cursor = proc[0]
proc = proc[1:]
if cursor.location.file is None:
continue
fname = cursor.location.file.name.decode('utf-8')
if fname in files:
yield cursor
proc += list(cursor.get_children())
def newer(a, b):
try:
return os.stat(a).st_mtime > os.stat(b).st_mtime
except:
return True
def scan_libgit2_glib(cflags, files, git2dir):
files = [os.path.abspath(f) for f in files]
dname = os.path.dirname(__file__)
allcalls = {}
l = 0
if not os.getenv('SILENT'):
sys.stderr.write('\n')
i = 0
for f in files:
if not os.getenv('SILENT'):
name = os.path.basename(f)
if len(name) > l:
l = len(name)
perc = int((i / len(files)) * 100)
sys.stderr.write('[{0: >3}%] Processing ... {1}{2}\r'.format(perc, name, ' ' * (l - len(name))))
i += 1
astf = os.path.join(dname, '.' + os.path.basename(f) + '.cache')
if not newer(f, astf):
with open(astf, 'rb') as fo:
calls = pickle.load(fo)
else:
tu = cindex.TranslationUnit.from_source(f, cflags)
process_diagnostics(tu)
calls = {}
for cursor in walk_cursors(tu, files):
if cursor.kind == cindex.CursorKind.CALL_EXPR or \
cursor.kind == cindex.CursorKind.DECL_REF_EXPR:
cdecl = cursor.get_referenced()
if cdecl.kind != cindex.CursorKind.FUNCTION_DECL:
continue<|fim▁hole|> fdefname = cdecl.location.file.name.decode('utf-8')
if fdefname.startswith(git2dir):
call = Call(cursor, cdecl)
if call.ident in calls:
calls[call.ident].append(call)
else:
calls[call.ident] = [call]
with open(astf, 'wb') as fo:
pickle.dump(calls, fo)
for k in calls:
if k in allcalls:
allcalls[k] += calls[k]
else:
allcalls[k] = list(calls[k])
if not os.getenv('SILENT'):
sys.stderr.write('\r[100%] Processing ... done{0}\n'.format(' ' * (l - 4)))
return allcalls
def scan_libgit2(cflags, git2dir):
tu = cindex.TranslationUnit.from_source(git2dir + '.h', cflags)
process_diagnostics(tu)
headers = glob.glob(os.path.join(git2dir, '*.h'))
defs = {}
objapi = ['lookup', 'lookup_prefix', 'free', 'id', 'owner']
objderiv = ['commit', 'tree', 'tag', 'blob']
ignore = set()
for deriv in objderiv:
for api in objapi:
ignore.add('git_' + deriv + '_' + api)
for cursor in walk_cursors(tu, headers):
if cursor.kind == cindex.CursorKind.FUNCTION_DECL:
deff = Definition(cursor)
if not deff.ident in ignore:
defs[deff.ident] = deff
return defs
configure_libclang()
pos = sys.argv.index('--')
cflags = sys.argv[1:pos]
files = sys.argv[pos+1:]
incdir = os.getenv('LIBGIT2_INCLUDE_DIR')
defs = scan_libgit2(cflags, incdir)
calls = scan_libgit2_glib(cflags, files, incdir)
notused = {}
perfile = {}
nperfile = {}
for d in defs:
o = defs[d]
if not d in calls:
notused[d] = defs[d]
if not o.filename in nperfile:
nperfile[o.filename] = [o]
else:
nperfile[o.filename].append(o)
if not o.filename in perfile:
perfile[o.filename] = [o]
else:
perfile[o.filename].append(o)
ss = [notused[f] for f in notused]
ss.sort(key=lambda x: '{0} {1}'.format(os.path.basename(x.filename), x.ident))
lastf = None
keys = list(perfile.keys())
keys.sort()
for filename in keys:
b = os.path.basename(filename)
f = perfile[filename]
n_perfile = len(f)
if filename in nperfile:
n_nperfile = len(nperfile[filename])
else:
n_nperfile = 0
perc = int(((n_perfile - n_nperfile) / n_perfile) * 100)
print('\n File {0}, coverage {1}% ({2} out of {3}):'.format(b, perc, n_perfile - n_nperfile, n_perfile))
cp = list(f)
cp.sort(key=lambda x: "{0} {1}".format(not x.ident in calls, x.ident))
for d in cp:
if d.ident in calls:
print(' \033[32m✓ {0}\033[0m'.format(d.display))
else:
print(' \033[31m✗ {0}\033[0m'.format(d.display))
perc = int(((len(defs) - len(notused)) / len(defs)) * 100)
print('\nTotal coverage: {0}% ({1} functions out of {2} are being called)\n'.format(perc, len(defs) - len(notused), len(defs)))
# vi:ts=4:et<|fim▁end|> |
if (not cdecl is None) and (not cdecl.location.file is None): |
<|file_name|>index.d.ts<|end_file_name|><|fim▁begin|>export interface BrocfileOptions {
/**
* Environment setting. Default: development
* This option is set by the --environment/--prod/--dev CLI argument.
* This option can be used to conditionally affect a build pipeline in order to load different plugins for
* development/production/testing
*/
env: string;<|fim▁hole|><|fim▁end|> | } |
<|file_name|>UIUtils.java<|end_file_name|><|fim▁begin|>/*
* This program and the accompanying materials are made available
* under the terms of the Eclipse Public License v1.0 which
* accompanies this distribution, and is available at
* http://www.eclipse.org/legal/epl-v10.html
*
* Contributors:
* SEARCH Group, Incorporated - initial API and implementation
*
*/
package org.search.niem.uml.ui.acceptance_tests;
import static org.search.niem.uml.ui.util.UIExt.select;
import org.eclipse.jface.window.IShellProvider;
import org.eclipse.ui.IEditorPart;
import org.eclipse.ui.IViewPart;
import org.eclipse.ui.IWorkbench;
import org.eclipse.ui.IWorkbenchPage;
import org.eclipse.ui.IWorkbenchWindow;
import org.eclipse.ui.PartInitException;
import org.eclipse.ui.PlatformUI;
public class UIUtils {
public static IWorkbench get_the_workbench() {
return PlatformUI.getWorkbench();
}
public static IWorkbenchWindow get_the_active_workbench_window() {
return get_the_workbench().getActiveWorkbenchWindow();
}
public static IWorkbenchPage get_the_active_workbench_page() {
return get_the_active_workbench_window().getActivePage();
}
public static IEditorPart get_the_active_editor() {
return get_the_active_workbench_page().getActiveEditor();
}
@SuppressWarnings("unchecked")
private static <V extends IViewPart> V find_the_view(final String id) {
for (final IWorkbenchPage p : get_the_active_workbench_window().getPages()) {
final IViewPart view = p.findView(id);
if (view != null) {
return (V) view;
}
}
return null;<|fim▁hole|> public static <V extends IViewPart> V activate_the_view(final String id) throws PartInitException {
final IViewPart theView = find_the_view(id);
if (theView == null) {
return (V) get_the_active_workbench_page().showView(id);
}
theView.setFocus();
return (V) theView;
}
public static void close_all_open_editors() {
for (final IWorkbenchWindow w : get_the_workbench().getWorkbenchWindows()) {
for (final IWorkbenchPage p : w.getPages()) {
p.closeAllEditors(false);
}
}
}
public static void select_the_default_button(final IShellProvider provider) {
select(provider.getShell().getDefaultButton());
}
}<|fim▁end|> | }
@SuppressWarnings("unchecked") |
<|file_name|>EventhubInner.java<|end_file_name|><|fim▁begin|>/**
* Copyright (c) Microsoft Corporation. All rights reserved.
* Licensed under the MIT License. See License.txt in the project root for
* license information.
*
* Code generated by Microsoft (R) AutoRest Code Generator.
*/
package com.microsoft.azure.management.eventhubs.v2017_04_01.implementation;
import java.util.List;
import org.joda.time.DateTime;
import com.microsoft.azure.management.eventhubs.v2017_04_01.EntityStatus;
import com.microsoft.azure.management.eventhubs.v2017_04_01.CaptureDescription;
import com.fasterxml.jackson.annotation.JsonProperty;
import com.microsoft.rest.serializer.JsonFlatten;
import com.microsoft.azure.ProxyResource;
/**
* Single item in List or Get Event Hub operation.
*/
@JsonFlatten
public class EventhubInner extends ProxyResource {
/**
* Current number of shards on the Event Hub.
*/
@JsonProperty(value = "properties.partitionIds", access = JsonProperty.Access.WRITE_ONLY)
private List<String> partitionIds;
/**
* Exact time the Event Hub was created.
*/
@JsonProperty(value = "properties.createdAt", access = JsonProperty.Access.WRITE_ONLY)
private DateTime createdAt;
/**
* The exact time the message was updated.
*/
@JsonProperty(value = "properties.updatedAt", access = JsonProperty.Access.WRITE_ONLY)
private DateTime updatedAt;
/**
* Number of days to retain the events for this Event Hub, value should be
* 1 to 7 days.
*/
@JsonProperty(value = "properties.messageRetentionInDays")
private Long messageRetentionInDays;
/**
* Number of partitions created for the Event Hub, allowed values are from
* 1 to 32 partitions.
*/
@JsonProperty(value = "properties.partitionCount")
private Long partitionCount;
/**
* Enumerates the possible values for the status of the Event Hub. Possible
* values include: 'Active', 'Disabled', 'Restoring', 'SendDisabled',
* 'ReceiveDisabled', 'Creating', 'Deleting', 'Renaming', 'Unknown'.
*/
@JsonProperty(value = "properties.status")
private EntityStatus status;
/**
* Properties of capture description.
*/<|fim▁hole|> @JsonProperty(value = "properties.captureDescription")
private CaptureDescription captureDescription;
/**
* Get current number of shards on the Event Hub.
*
* @return the partitionIds value
*/
public List<String> partitionIds() {
return this.partitionIds;
}
/**
* Get exact time the Event Hub was created.
*
* @return the createdAt value
*/
public DateTime createdAt() {
return this.createdAt;
}
/**
* Get the exact time the message was updated.
*
* @return the updatedAt value
*/
public DateTime updatedAt() {
return this.updatedAt;
}
/**
* Get number of days to retain the events for this Event Hub, value should be 1 to 7 days.
*
* @return the messageRetentionInDays value
*/
public Long messageRetentionInDays() {
return this.messageRetentionInDays;
}
/**
* Set number of days to retain the events for this Event Hub, value should be 1 to 7 days.
*
* @param messageRetentionInDays the messageRetentionInDays value to set
* @return the EventhubInner object itself.
*/
public EventhubInner withMessageRetentionInDays(Long messageRetentionInDays) {
this.messageRetentionInDays = messageRetentionInDays;
return this;
}
/**
* Get number of partitions created for the Event Hub, allowed values are from 1 to 32 partitions.
*
* @return the partitionCount value
*/
public Long partitionCount() {
return this.partitionCount;
}
/**
* Set number of partitions created for the Event Hub, allowed values are from 1 to 32 partitions.
*
* @param partitionCount the partitionCount value to set
* @return the EventhubInner object itself.
*/
public EventhubInner withPartitionCount(Long partitionCount) {
this.partitionCount = partitionCount;
return this;
}
/**
* Get enumerates the possible values for the status of the Event Hub. Possible values include: 'Active', 'Disabled', 'Restoring', 'SendDisabled', 'ReceiveDisabled', 'Creating', 'Deleting', 'Renaming', 'Unknown'.
*
* @return the status value
*/
public EntityStatus status() {
return this.status;
}
/**
* Set enumerates the possible values for the status of the Event Hub. Possible values include: 'Active', 'Disabled', 'Restoring', 'SendDisabled', 'ReceiveDisabled', 'Creating', 'Deleting', 'Renaming', 'Unknown'.
*
* @param status the status value to set
* @return the EventhubInner object itself.
*/
public EventhubInner withStatus(EntityStatus status) {
this.status = status;
return this;
}
/**
* Get properties of capture description.
*
* @return the captureDescription value
*/
public CaptureDescription captureDescription() {
return this.captureDescription;
}
/**
* Set properties of capture description.
*
* @param captureDescription the captureDescription value to set
* @return the EventhubInner object itself.
*/
public EventhubInner withCaptureDescription(CaptureDescription captureDescription) {
this.captureDescription = captureDescription;
return this;
}
}<|fim▁end|> | |
<|file_name|>__init__.py<|end_file_name|><|fim▁begin|>#!/usr/bin/env python
"""tvnamer - Automagical TV episode renamer<|fim▁hole|>Uses data from www.thetvdb.com (via tvdb_api) to rename TV episode files from
"some.show.name.s01e01.blah.avi" to "Some Show Name - [01x01] - The First.avi"
"""
__version__ = "3.0.0"
__author__ = "dbr/Ben"<|fim▁end|> | |
<|file_name|>testConvert.py<|end_file_name|><|fim▁begin|>import unittest
from convert import convert
<|fim▁hole|>class TestConvert(unittest.TestCase):
def testEmptyJsonParse(self):
generated = convert.parse(convert._load_json_files("./jsonSamples/minimal.json")[0])
def testGlossaryJsonParse(self):
generated = convert.parse(convert._load_json_files("./jsonSamples/Glossary.json")[0])
generated = convert.generate("Test", ["cs"], generated)
for f in generated:
print "".join(f["content"])<|fim▁end|> | |
<|file_name|>eqpt_paddler.py<|end_file_name|><|fim▁begin|># -*- coding: utf-8 -*-
from odoo import models, fields, api
from ./eqpt_equipment import EQPT_TYPES
class Paddler(models.Model):
_name = 'eqpt.paddler'
_description = "Paddler Cycle Equipment"
_description = "Cycle paddler equipment"<|fim▁hole|>
eqpt_type = fields.Selection(selection=EQPT_TYPES, string="")
eqpt_id = fields.Reference(selection='_get_eqpt_models', string="Equipment")
cycle_id = fields.Many2one(comodel_name='pac.cycle', string="Cycle")
member_id = fields.Many2one(comodel_name='adm.asso.member', string="Member")<|fim▁end|> | |
<|file_name|>inter_vn_stats.cc<|end_file_name|><|fim▁begin|>/*
* Copyright (c) 2013 Juniper Networks, Inc. All rights reserved.
*/
#include "inter_vn_stats.h"
#include <oper/interface.h>
#include <oper/mirror_table.h>
using namespace std;
InterVnStatsCollector::VnStatsSet *InterVnStatsCollector::Find(string vn) {
VnStatsMap::iterator it = inter_vn_stats_.find(vn);
if (it != inter_vn_stats_.end()) {
return it->second;
}
return NULL;
}
void InterVnStatsCollector::PrintAll() {
VnStatsMap::iterator it = inter_vn_stats_.begin();
while(it != inter_vn_stats_.end()) {
PrintVn(it->first);
it++;
}
}
void InterVnStatsCollector::PrintVn(string vn) {
VnStatsSet *stats_set;
VnStats *stats;
LOG(DEBUG, "...........Stats for Vn " << vn);
VnStatsMap::iterator it = inter_vn_stats_.find(vn);
if (it != inter_vn_stats_.end()) {
stats_set = it->second;
/* Remove all the elements of map entry value which is a set */
VnStatsSet::iterator stats_it = stats_set->begin();
while(stats_it != stats_set->end()) {
stats = *stats_it;
stats_it++;
LOG(DEBUG, " Other-VN " << stats->dst_vn);
LOG(DEBUG, " in_pkts " << stats->in_pkts << " in_bytes " << stats->in_bytes);
LOG(DEBUG, " out_pkts " << stats->out_pkts << " out_bytes " << stats->out_bytes);
}
}
}
void InterVnStatsCollector::Remove(string vn) {
VnStatsSet *stats_set;
VnStats *stats;
VnStatsMap::iterator it = inter_vn_stats_.find(vn);
if (it != inter_vn_stats_.end()) {
stats_set = it->second;
/* Remove the entry from the inter_vn_stats_ map */
inter_vn_stats_.erase(it);
/* Remove all the elements of map entry value which is a set */
VnStatsSet::iterator stats_it = stats_set->begin();
VnStatsSet::iterator del_it;
while(stats_it != stats_set->end()) {
stats = *stats_it;
delete stats;
del_it = stats_it;
stats_it++;
stats_set->erase(del_it);
}
delete stats_set;
}
}
void InterVnStatsCollector::UpdateVnStats(FlowEntry *fe, uint64_t bytes,
uint64_t pkts) {
string src_vn = fe->data.source_vn, dst_vn = fe->data.dest_vn;
if (!fe->data.source_vn.length())
src_vn = *FlowHandler::UnknownVn();
if (!fe->data.dest_vn.length())
dst_vn = *FlowHandler::UnknownVn();
if (fe->local_flow) {
VnStatsUpdateInternal(src_vn, dst_vn, bytes, pkts, true);
VnStatsUpdateInternal(dst_vn, src_vn, bytes, pkts, false);
} else {
if (fe->data.ingress) {
VnStatsUpdateInternal(src_vn, dst_vn, bytes, pkts, true);
} else {
VnStatsUpdateInternal(dst_vn, src_vn, bytes, pkts, false);
}
}
//PrintAll();
}
void InterVnStatsCollector::VnStatsUpdateInternal(string src_vn, string dst_vn,
uint64_t bytes, uint64_t pkts,
bool outgoing) {
VnStatsSet *stats_set;
VnStats *stats;
VnStatsMap::iterator it = inter_vn_stats_.find(src_vn);<|fim▁hole|>
if (it == inter_vn_stats_.end()) {
stats = new VnStats(dst_vn, bytes, pkts, outgoing);
stats_set = new VnStatsSet;
stats_set->insert(stats);
inter_vn_stats_.insert(make_pair(src_vn, stats_set));
} else {
stats_set = it->second;
VnStats key(dst_vn, 0, 0, false);
VnStatsSet::iterator stats_it = stats_set->find(&key);
if (stats_it == stats_set->end()) {
stats = new VnStats(dst_vn, bytes, pkts, outgoing);
stats_set->insert(stats);
} else {
stats = *stats_it;
if (outgoing) {
stats->out_bytes += bytes;
stats->out_pkts += pkts;
} else {
stats->in_bytes += bytes;
stats->in_pkts += pkts;
}
}
}
}<|fim▁end|> | |
<|file_name|>import_util.py<|end_file_name|><|fim▁begin|>import random
import requests
import shutil
import logging
import os
import traceback
import ujson
from typing import List, Dict, Any, Optional, Set, Callable, Iterable, Tuple, TypeVar
from django.forms.models import model_to_dict
from zerver.models import Realm, RealmEmoji, Subscription, Recipient, \
Attachment, Stream, Message, UserProfile
from zerver.data_import.sequencer import NEXT_ID
from zerver.lib.actions import STREAM_ASSIGNMENT_COLORS as stream_colors
from zerver.lib.avatar_hash import user_avatar_path_from_ids
from zerver.lib.parallel import run_parallel, JobData
# stubs
ZerverFieldsT = Dict[str, Any]
def build_zerver_realm(realm_id: int, realm_subdomain: str, time: float,
other_product: str) -> List[ZerverFieldsT]:
realm = Realm(id=realm_id, date_created=time,
name=realm_subdomain, string_id=realm_subdomain,
description=("Organization imported from %s!" % (other_product)))
auth_methods = [[flag[0], flag[1]] for flag in realm.authentication_methods]
realm_dict = model_to_dict(realm, exclude='authentication_methods')
realm_dict['authentication_methods'] = auth_methods
return[realm_dict]
def build_user_profile(avatar_source: str,
date_joined: Any,
delivery_email: str,
email: str,
full_name: str,
id: int,
is_active: bool,
is_realm_admin: bool,
is_guest: bool,
is_mirror_dummy: bool,
realm_id: int,
short_name: str,
timezone: Optional[str]) -> ZerverFieldsT:
pointer = -1
obj = UserProfile(
avatar_source=avatar_source,
date_joined=date_joined,
delivery_email=delivery_email,
email=email,
full_name=full_name,
id=id,
is_active=is_active,
is_realm_admin=is_realm_admin,
is_guest=is_guest,
pointer=pointer,
realm_id=realm_id,
short_name=short_name,
timezone=timezone,
)
dct = model_to_dict(obj)
return dct
def build_avatar(zulip_user_id: int, realm_id: int, email: str, avatar_url: str,
timestamp: Any, avatar_list: List[ZerverFieldsT]) -> None:
avatar = dict(
path=avatar_url, # Save original avatar url here, which is downloaded later
realm_id=realm_id,
content_type=None,
user_profile_id=zulip_user_id,
last_modified=timestamp,
user_profile_email=email,
s3_path="",
size="")
avatar_list.append(avatar)
def make_subscriber_map(zerver_subscription: List[ZerverFieldsT]) -> Dict[int, Set[int]]:
'''
This can be convenient for building up UserMessage
rows.
'''
subscriber_map = dict() # type: Dict[int, Set[int]]
for sub in zerver_subscription:
user_id = sub['user_profile']
recipient_id = sub['recipient']
if recipient_id not in subscriber_map:
subscriber_map[recipient_id] = set()
subscriber_map[recipient_id].add(user_id)
return subscriber_map
def build_subscription(recipient_id: int, user_id: int,
subscription_id: int) -> ZerverFieldsT:
subscription = Subscription(
color=random.choice(stream_colors),
id=subscription_id)
subscription_dict = model_to_dict(subscription, exclude=['user_profile', 'recipient_id'])
subscription_dict['user_profile'] = user_id
subscription_dict['recipient'] = recipient_id
return subscription_dict
def build_public_stream_subscriptions(
zerver_userprofile: List[ZerverFieldsT],
zerver_recipient: List[ZerverFieldsT],
zerver_stream: List[ZerverFieldsT]) -> List[ZerverFieldsT]:
'''
This function is only used for Hipchat now, but it may apply to
future conversions. We often don't get full subscriber data in
the Hipchat export, so this function just autosubscribes all
users to every public stream. This returns a list of Subscription
dicts.
'''
subscriptions = [] # type: List[ZerverFieldsT]
public_stream_ids = {
stream['id']
for stream in zerver_stream
if not stream['invite_only']
}
public_stream_recipient_ids = {
recipient['id']
for recipient in zerver_recipient
if recipient['type'] == Recipient.STREAM
and recipient['type_id'] in public_stream_ids
}
user_ids = [
user['id']
for user in zerver_userprofile
]
for recipient_id in public_stream_recipient_ids:
for user_id in user_ids:
subscription = build_subscription(
recipient_id=recipient_id,
user_id=user_id,
subscription_id=NEXT_ID('subscription'),
)
subscriptions.append(subscription)
return subscriptions
def build_private_stream_subscriptions(
get_users: Callable[..., Set[int]],
zerver_recipient: List[ZerverFieldsT],
zerver_stream: List[ZerverFieldsT]) -> List[ZerverFieldsT]:
subscriptions = [] # type: List[ZerverFieldsT]
stream_ids = {
stream['id']
for stream in zerver_stream
if stream['invite_only']
}
recipient_map = {
recipient['id']: recipient['type_id'] # recipient_id -> stream_id
for recipient in zerver_recipient
if recipient['type'] == Recipient.STREAM
and recipient['type_id'] in stream_ids
}
for recipient_id, stream_id in recipient_map.items():
user_ids = get_users(stream_id=stream_id)
for user_id in user_ids:
subscription = build_subscription(
recipient_id=recipient_id,
user_id=user_id,
subscription_id=NEXT_ID('subscription'),
)
subscriptions.append(subscription)
return subscriptions
def build_personal_subscriptions(zerver_recipient: List[ZerverFieldsT]) -> List[ZerverFieldsT]:
subscriptions = [] # type: List[ZerverFieldsT]
personal_recipients = [
recipient
for recipient in zerver_recipient
if recipient['type'] == Recipient.PERSONAL
]
for recipient in personal_recipients:
recipient_id = recipient['id']
user_id = recipient['type_id']
subscription = build_subscription(
recipient_id=recipient_id,
user_id=user_id,
subscription_id=NEXT_ID('subscription'),
)
subscriptions.append(subscription)
return subscriptions
def build_recipient(type_id: int, recipient_id: int, type: int) -> ZerverFieldsT:
recipient = Recipient(
type_id=type_id, # stream id
id=recipient_id,
type=type)
recipient_dict = model_to_dict(recipient)
return recipient_dict
def build_recipients(zerver_userprofile: List[ZerverFieldsT],
zerver_stream: List[ZerverFieldsT]) -> List[ZerverFieldsT]:
'''
As of this writing, we only use this in the HipChat
conversion. The Slack and Gitter conversions do it more
tightly integrated with creating other objects.
'''
recipients = []
for user in zerver_userprofile:
type_id = user['id']
type = Recipient.PERSONAL
recipient = Recipient(
type_id=type_id,
id=NEXT_ID('recipient'),
type=type,
)
recipient_dict = model_to_dict(recipient)
recipients.append(recipient_dict)
for stream in zerver_stream:
type_id = stream['id']
type = Recipient.STREAM
recipient = Recipient(
type_id=type_id,
id=NEXT_ID('recipient'),
type=type,
)
recipient_dict = model_to_dict(recipient)
recipients.append(recipient_dict)
return recipients
def build_realm(zerver_realm: List[ZerverFieldsT], realm_id: int,
domain_name: str) -> ZerverFieldsT:
realm = dict(zerver_client=[{"name": "populate_db", "id": 1},
{"name": "website", "id": 2},
{"name": "API", "id": 3}],
zerver_customprofilefield=[],
zerver_customprofilefieldvalue=[],
zerver_userpresence=[], # shows last logged in data, which is not available
zerver_userprofile_mirrordummy=[],
zerver_realmdomain=[{"realm": realm_id,
"allow_subdomains": False,
"domain": domain_name,
"id": realm_id}],
zerver_useractivity=[],
zerver_realm=zerver_realm,
zerver_huddle=[],<|fim▁hole|> zerver_reaction=[],
zerver_realmemoji=[],
zerver_realmfilter=[])
return realm
def build_usermessages(zerver_usermessage: List[ZerverFieldsT],
subscriber_map: Dict[int, Set[int]],
recipient_id: int,
mentioned_user_ids: List[int],
message_id: int) -> None:
user_ids = subscriber_map.get(recipient_id, set())
if user_ids:
for user_id in sorted(user_ids):
is_mentioned = user_id in mentioned_user_ids
# Slack and Gitter don't yet triage private messages.
# It's possible we don't even get PMs from them.
is_private = False
usermessage = build_user_message(
user_id=user_id,
message_id=message_id,
is_private=is_private,
is_mentioned=is_mentioned,
)
zerver_usermessage.append(usermessage)
def build_user_message(user_id: int,
message_id: int,
is_private: bool,
is_mentioned: bool) -> ZerverFieldsT:
flags_mask = 1 # For read
if is_mentioned:
flags_mask += 8 # For mentioned
if is_private:
flags_mask += 2048 # For is_private
id = NEXT_ID('user_message')
usermessage = dict(
id=id,
user_profile=user_id,
message=message_id,
flags_mask=flags_mask,
)
return usermessage
def build_defaultstream(realm_id: int, stream_id: int,
defaultstream_id: int) -> ZerverFieldsT:
defaultstream = dict(
stream=stream_id,
realm=realm_id,
id=defaultstream_id)
return defaultstream
def build_stream(date_created: Any, realm_id: int, name: str,
description: str, stream_id: int, deactivated: bool=False,
invite_only: bool=False) -> ZerverFieldsT:
stream = Stream(
name=name,
deactivated=deactivated,
description=description,
date_created=date_created,
invite_only=invite_only,
id=stream_id)
stream_dict = model_to_dict(stream,
exclude=['realm'])
stream_dict['realm'] = realm_id
return stream_dict
def build_message(topic_name: str, pub_date: float, message_id: int, content: str,
rendered_content: Optional[str], user_id: int, recipient_id: int,
has_image: bool=False, has_link: bool=False,
has_attachment: bool=True) -> ZerverFieldsT:
zulip_message = Message(
rendered_content_version=1, # this is Zulip specific
pub_date=pub_date,
id=message_id,
content=content,
rendered_content=rendered_content,
has_image=has_image,
has_attachment=has_attachment,
has_link=has_link)
zulip_message.set_topic_name(topic_name)
zulip_message_dict = model_to_dict(zulip_message,
exclude=['recipient', 'sender', 'sending_client'])
zulip_message_dict['sender'] = user_id
zulip_message_dict['sending_client'] = 1
zulip_message_dict['recipient'] = recipient_id
return zulip_message_dict
def build_attachment(realm_id: int, message_ids: Set[int],
user_id: int, fileinfo: ZerverFieldsT, s3_path: str,
zerver_attachment: List[ZerverFieldsT]) -> None:
"""
This function should be passed a 'fileinfo' dictionary, which contains
information about 'size', 'created' (created time) and ['name'] (filename).
"""
attachment_id = NEXT_ID('attachment')
attachment = Attachment(
id=attachment_id,
size=fileinfo['size'],
create_time=fileinfo['created'],
is_realm_public=True,
path_id=s3_path,
file_name=fileinfo['name'])
attachment_dict = model_to_dict(attachment,
exclude=['owner', 'messages', 'realm'])
attachment_dict['owner'] = user_id
attachment_dict['messages'] = list(message_ids)
attachment_dict['realm'] = realm_id
zerver_attachment.append(attachment_dict)
def process_avatars(avatar_list: List[ZerverFieldsT], avatar_dir: str, realm_id: int,
threads: int, size_url_suffix: str='') -> List[ZerverFieldsT]:
"""
This function gets the avatar of the user and saves it in the
user's avatar directory with both the extensions '.png' and '.original'
Required parameters:
1. avatar_list: List of avatars to be mapped in avatars records.json file
2. avatar_dir: Folder where the downloaded avatars are saved
3. realm_id: Realm ID.
We use this for Slack and Gitter conversions, where avatars need to be
downloaded. For simpler conversions see write_avatar_png.
"""
def get_avatar(avatar_upload_item: List[str]) -> None:
avatar_url = avatar_upload_item[0]
image_path = os.path.join(avatar_dir, avatar_upload_item[1])
original_image_path = os.path.join(avatar_dir, avatar_upload_item[2])
response = requests.get(avatar_url + size_url_suffix, stream=True)
with open(image_path, 'wb') as image_file:
shutil.copyfileobj(response.raw, image_file)
shutil.copy(image_path, original_image_path)
logging.info('######### GETTING AVATARS #########\n')
logging.info('DOWNLOADING AVATARS .......\n')
avatar_original_list = []
avatar_upload_list = []
for avatar in avatar_list:
avatar_hash = user_avatar_path_from_ids(avatar['user_profile_id'], realm_id)
avatar_url = avatar['path']
avatar_original = dict(avatar)
image_path = ('%s.png' % (avatar_hash))
original_image_path = ('%s.original' % (avatar_hash))
avatar_upload_list.append([avatar_url, image_path, original_image_path])
# We don't add the size field here in avatar's records.json,
# since the metadata is not needed on the import end, and we
# don't have it until we've downloaded the files anyway.
avatar['path'] = image_path
avatar['s3_path'] = image_path
avatar_original['path'] = original_image_path
avatar_original['s3_path'] = original_image_path
avatar_original_list.append(avatar_original)
# Run downloads parallely
output = []
for (status, job) in run_parallel_wrapper(get_avatar, avatar_upload_list, threads=threads):
output.append(job)
logging.info('######### GETTING AVATARS FINISHED #########\n')
return avatar_list + avatar_original_list
def write_avatar_png(avatar_folder: str,
realm_id: int,
user_id: int,
bits: bytes) -> ZerverFieldsT:
'''
Use this function for conversions like Hipchat where
the bits for the .png file come in something like
a users.json file, and where we don't have to
fetch avatar images externally.
'''
avatar_hash = user_avatar_path_from_ids(
user_profile_id=user_id,
realm_id=realm_id,
)
image_fn = avatar_hash + '.original'
image_path = os.path.join(avatar_folder, image_fn)
with open(image_path, 'wb') as image_file:
image_file.write(bits)
# Return metadata that eventually goes in records.json.
metadata = dict(
path=image_path,
s3_path=image_path,
realm_id=realm_id,
user_profile_id=user_id,
)
return metadata
ListJobData = TypeVar('ListJobData')
def run_parallel_wrapper(f: Callable[[ListJobData], None], full_items: List[ListJobData],
threads: int=6) -> Iterable[Tuple[int, List[ListJobData]]]:
logging.info("Distributing %s items across %s threads" % (len(full_items), threads))
def wrapping_function(items: List[ListJobData]) -> int:
count = 0
for item in items:
try:
f(item)
except Exception:
logging.info("Error processing item: %s" % (item,))
traceback.print_exc()
count += 1
if count % 1000 == 0:
logging.info("A download thread finished %s items" % (count,))
return 0
job_lists = [full_items[i::threads] for i in range(threads)] # type: List[List[ListJobData]]
return run_parallel(wrapping_function, job_lists, threads=threads)
def process_uploads(upload_list: List[ZerverFieldsT], upload_dir: str,
threads: int) -> List[ZerverFieldsT]:
"""
This function downloads the uploads and saves it in the realm's upload directory.
Required parameters:
1. upload_list: List of uploads to be mapped in uploads records.json file
2. upload_dir: Folder where the downloaded uploads are saved
"""
def get_uploads(upload: List[str]) -> None:
upload_url = upload[0]
upload_path = upload[1]
upload_path = os.path.join(upload_dir, upload_path)
response = requests.get(upload_url, stream=True)
os.makedirs(os.path.dirname(upload_path), exist_ok=True)
with open(upload_path, 'wb') as upload_file:
shutil.copyfileobj(response.raw, upload_file)
logging.info('######### GETTING ATTACHMENTS #########\n')
logging.info('DOWNLOADING ATTACHMENTS .......\n')
upload_url_list = []
for upload in upload_list:
upload_url = upload['path']
upload_s3_path = upload['s3_path']
upload_url_list.append([upload_url, upload_s3_path])
upload['path'] = upload_s3_path
# Run downloads parallely
output = []
for (status, job) in run_parallel_wrapper(get_uploads, upload_url_list, threads=threads):
output.append(job)
logging.info('######### GETTING ATTACHMENTS FINISHED #########\n')
return upload_list
def build_realm_emoji(realm_id: int,
name: str,
id: int,
file_name: str) -> ZerverFieldsT:
return model_to_dict(
RealmEmoji(
realm_id=realm_id,
name=name,
id=id,
file_name=file_name,
)
)
def process_emojis(zerver_realmemoji: List[ZerverFieldsT], emoji_dir: str,
emoji_url_map: ZerverFieldsT, threads: int) -> List[ZerverFieldsT]:
"""
This function downloads the custom emojis and saves in the output emoji folder.
Required parameters:
1. zerver_realmemoji: List of all RealmEmoji objects to be imported
2. emoji_dir: Folder where the downloaded emojis are saved
3. emoji_url_map: Maps emoji name to its url
"""
def get_emojis(upload: List[str]) -> None:
emoji_url = upload[0]
emoji_path = upload[1]
upload_emoji_path = os.path.join(emoji_dir, emoji_path)
response = requests.get(emoji_url, stream=True)
os.makedirs(os.path.dirname(upload_emoji_path), exist_ok=True)
with open(upload_emoji_path, 'wb') as emoji_file:
shutil.copyfileobj(response.raw, emoji_file)
emoji_records = []
upload_emoji_list = []
logging.info('######### GETTING EMOJIS #########\n')
logging.info('DOWNLOADING EMOJIS .......\n')
for emoji in zerver_realmemoji:
emoji_url = emoji_url_map[emoji['name']]
emoji_path = RealmEmoji.PATH_ID_TEMPLATE.format(
realm_id=emoji['realm'],
emoji_file_name=emoji['name'])
upload_emoji_list.append([emoji_url, emoji_path])
emoji_record = dict(emoji)
emoji_record['path'] = emoji_path
emoji_record['s3_path'] = emoji_path
emoji_record['realm_id'] = emoji_record['realm']
emoji_record.pop('realm')
emoji_records.append(emoji_record)
# Run downloads parallely
output = []
for (status, job) in run_parallel_wrapper(get_emojis, upload_emoji_list, threads=threads):
output.append(job)
logging.info('######### GETTING EMOJIS FINISHED #########\n')
return emoji_records
def create_converted_data_files(data: Any, output_dir: str, file_path: str) -> None:
output_file = output_dir + file_path
os.makedirs(os.path.dirname(output_file), exist_ok=True)
with open(output_file, 'w') as fp:
ujson.dump(data, fp, indent=4)<|fim▁end|> | zerver_userprofile_crossrealm=[],
zerver_useractivityinterval=[], |
<|file_name|>test_header.py<|end_file_name|><|fim▁begin|>from fooster.web import web
import pytest
test_key = 'Magical'
test_value = 'header'
test_header = test_key + ': ' + test_value + '\r\n'
poor_key = 'not'
poor_value = 'good'
poor_header = poor_key + ':' + poor_value + '\r\n'
good_header = poor_key + ': ' + poor_value + '\r\n'
case_key = 'wEIrd'
case_key_title = case_key.title()
case_value = 'cAse'
case_header = case_key + ': ' + case_value + '\r\n'
case_header_test = case_key + ': ' + test_value + '\r\n'
nonstr_key = 6
nonstr_value = None
def test_add_get():
headers = web.HTTPHeaders()
headers.add(test_header)
assert headers.get(test_key) == test_value
def test_add_getlist():
headers = web.HTTPHeaders()
headers.add(test_header)
assert headers.getlist(test_key) == [test_value]
def test_add_getitem():
headers = web.HTTPHeaders()
headers.add(test_header)
assert headers[test_key] == test_value
def test_getitem_empty():
headers = web.HTTPHeaders()
with pytest.raises(KeyError):
headers[test_key]
def test_getlist_empty():
headers = web.HTTPHeaders()
with pytest.raises(KeyError):
headers.getlist(test_key)
def test_getlist_default():
headers = web.HTTPHeaders()
assert headers.getlist(test_key, []) == []
def test_set_remove():
headers = web.HTTPHeaders()
headers.set(test_key, test_value)
assert headers.get(test_key) == test_value
headers.remove(test_key)
def test_set_multiple():
headers = web.HTTPHeaders()
headers.set(test_key, test_value)
headers.set(test_key, test_value)
assert headers.get(test_key) == test_value
assert headers.getlist(test_key) == [test_value] * 2
def test_set_overwrite():
headers = web.HTTPHeaders()
headers.set(test_key, test_value, True)
headers.set(test_key, test_value, True)
assert headers.get(test_key) == test_value
assert headers.getlist(test_key) == [test_value]
def test_setitem_delitem():
headers = web.HTTPHeaders()
headers[test_key] = test_value
assert headers[test_key] == test_value
del headers[test_key]
def test_remove_empty():
headers = web.HTTPHeaders()
with pytest.raises(KeyError):
headers.remove(test_key)
def test_delitem_empty():
headers = web.HTTPHeaders()
with pytest.raises(KeyError):
del headers[test_key]
def test_retrieve():
headers = web.HTTPHeaders()
headers.set(test_key, test_value)
assert headers.retrieve(test_key) == test_header
def test_len():
headers = web.HTTPHeaders()
headers.set(test_key, test_value)
assert len(headers) == 1
headers.set(poor_key, poor_value)
assert len(headers) == 2
def test_multiple_add_get_len_retrieve():
headers = web.HTTPHeaders()
headers.add(case_header)
assert len(headers) == 1
assert headers.get(case_key) == case_value
assert headers.getlist(case_key) == [case_value]
assert headers.retrieve(case_key) == case_header
headers.add(case_header)
assert len(headers) == 1
assert headers.get(case_key) == case_value
assert headers.getlist(case_key) == [case_value] * 2
assert headers.retrieve(case_key) == case_header + case_header
headers.add(case_header_test)
assert len(headers) == 1
assert headers.get(case_key) == test_value
assert headers.getlist(case_key) == [case_value] * 2 + [test_value]
assert headers.retrieve(case_key) == case_header + case_header + case_header_test
def test_multiple_set_get_len_retrieve():
headers = web.HTTPHeaders()
headers.set(case_key, case_value)
assert len(headers) == 1
assert headers.get(case_key) == case_value
assert headers.getlist(case_key) == [case_value]
assert headers.retrieve(case_key) == case_header
headers.set(case_key, case_value)
assert len(headers) == 1
assert headers.get(case_key) == case_value
assert headers.getlist(case_key) == [case_value] * 2
assert headers.retrieve(case_key) == case_header + case_header
headers.set(case_key, test_value)
assert len(headers) == 1
assert headers.get(case_key) == test_value
assert headers.getlist(case_key) == [case_value] * 2 + [test_value]
assert headers.retrieve(case_key) == case_header + case_header + case_header_test
def test_clear():
headers = web.HTTPHeaders()
headers.set(test_key, test_value)
headers.set(poor_key, poor_value)
headers.clear()
assert len(headers) == 0
def test_case():
headers = web.HTTPHeaders()
headers.set(case_key, case_value)
assert headers.get(case_key_title) == case_value
assert headers.retrieve(case_key_title) == case_header
def test_iter():
headers = web.HTTPHeaders()
headers.set(test_key, test_value)
headers.set(poor_key, poor_value)
headers.set(case_key, case_value)
header_list = []
for header in headers:
header_list.append(header)
assert test_header in header_list
assert good_header in header_list
assert case_header in header_list
<|fim▁hole|>
headers.set(test_key, test_value)
headers.set(poor_key, poor_value)
headers.set(case_key, case_value)
assert test_key in headers
assert poor_key in headers
assert case_key in headers
assert test_key.upper() in headers
assert poor_key.upper() in headers
assert case_key.upper() in headers
assert test_key.lower() in headers
assert poor_key.lower() in headers
assert case_key.lower() in headers
def test_poor_header():
headers = web.HTTPHeaders()
headers.add(poor_header)
assert headers.get(poor_key) == poor_value
def test_set_key_nonstr():
headers = web.HTTPHeaders()
with pytest.raises(TypeError):
headers.set(nonstr_key, test_value)
def test_set_value_nonstr():
headers = web.HTTPHeaders()
with pytest.raises(TypeError):
headers.set(test_key, nonstr_value)<|fim▁end|> | def test_contains():
headers = web.HTTPHeaders() |
<|file_name|>model.computed.constant.py<|end_file_name|><|fim▁begin|># The content of this file was generated using the Python profile of libCellML 0.2.0.
from enum import Enum
from math import *
__version__ = "0.3.0"
LIBCELLML_VERSION = "0.2.0"
STATE_COUNT = 4
VARIABLE_COUNT = 18
class VariableType(Enum):
VARIABLE_OF_INTEGRATION = 1
STATE = 2<|fim▁hole|> CONSTANT = 3
COMPUTED_CONSTANT = 4
ALGEBRAIC = 5
EXTERNAL = 6
VOI_INFO = {"name": "time", "units": "millisecond", "component": "environment", "type": VariableType.VARIABLE_OF_INTEGRATION}
STATE_INFO = [
{"name": "m", "units": "dimensionless", "component": "sodium_channel_m_gate", "type": VariableType.STATE},
{"name": "h", "units": "dimensionless", "component": "sodium_channel_h_gate", "type": VariableType.STATE},
{"name": "n", "units": "dimensionless", "component": "potassium_channel_n_gate", "type": VariableType.STATE},
{"name": "V", "units": "millivolt", "component": "membrane", "type": VariableType.STATE}
]
VARIABLE_INFO = [
{"name": "g_L", "units": "milliS_per_cm2", "component": "leakage_current", "type": VariableType.CONSTANT},
{"name": "Cm", "units": "microF_per_cm2", "component": "membrane", "type": VariableType.CONSTANT},
{"name": "E_R", "units": "millivolt", "component": "membrane", "type": VariableType.CONSTANT},
{"name": "g_K", "units": "milliS_per_cm2", "component": "potassium_channel", "type": VariableType.CONSTANT},
{"name": "g_Na", "units": "milliS_per_cm2", "component": "sodium_channel", "type": VariableType.CONSTANT},
{"name": "i_Stim", "units": "microA_per_cm2", "component": "membrane", "type": VariableType.ALGEBRAIC},
{"name": "E_L", "units": "millivolt", "component": "leakage_current", "type": VariableType.EXTERNAL},
{"name": "i_L", "units": "microA_per_cm2", "component": "leakage_current", "type": VariableType.ALGEBRAIC},
{"name": "E_Na", "units": "millivolt", "component": "sodium_channel", "type": VariableType.COMPUTED_CONSTANT},
{"name": "i_Na", "units": "microA_per_cm2", "component": "sodium_channel", "type": VariableType.ALGEBRAIC},
{"name": "alpha_m", "units": "per_millisecond", "component": "sodium_channel_m_gate", "type": VariableType.ALGEBRAIC},
{"name": "beta_m", "units": "per_millisecond", "component": "sodium_channel_m_gate", "type": VariableType.ALGEBRAIC},
{"name": "alpha_h", "units": "per_millisecond", "component": "sodium_channel_h_gate", "type": VariableType.ALGEBRAIC},
{"name": "beta_h", "units": "per_millisecond", "component": "sodium_channel_h_gate", "type": VariableType.ALGEBRAIC},
{"name": "E_K", "units": "millivolt", "component": "potassium_channel", "type": VariableType.COMPUTED_CONSTANT},
{"name": "i_K", "units": "microA_per_cm2", "component": "potassium_channel", "type": VariableType.ALGEBRAIC},
{"name": "alpha_n", "units": "per_millisecond", "component": "potassium_channel_n_gate", "type": VariableType.ALGEBRAIC},
{"name": "beta_n", "units": "per_millisecond", "component": "potassium_channel_n_gate", "type": VariableType.ALGEBRAIC}
]
def leq_func(x, y):
return 1.0 if x <= y else 0.0
def geq_func(x, y):
return 1.0 if x >= y else 0.0
def and_func(x, y):
return 1.0 if bool(x) & bool(y) else 0.0
def create_states_array():
return [nan]*STATE_COUNT
def create_variables_array():
return [nan]*VARIABLE_COUNT
def initialise_states_and_constants(states, variables):
variables[0] = 0.3
variables[1] = 1.0
variables[2] = 0.0
variables[3] = 36.0
variables[4] = 120.0
states[0] = 0.05
states[1] = 0.6
states[2] = 0.325
states[3] = 0.0
def compute_computed_constants(variables):
variables[8] = variables[2]-115.0
variables[14] = variables[2]+12.0
def compute_rates(voi, states, rates, variables, external_variable):
variables[10] = 0.1*(states[3]+25.0)/(exp((states[3]+25.0)/10.0)-1.0)
variables[11] = 4.0*exp(states[3]/18.0)
rates[0] = variables[10]*(1.0-states[0])-variables[11]*states[0]
variables[12] = 0.07*exp(states[3]/20.0)
variables[13] = 1.0/(exp((states[3]+30.0)/10.0)+1.0)
rates[1] = variables[12]*(1.0-states[1])-variables[13]*states[1]
variables[16] = 0.01*(states[3]+10.0)/(exp((states[3]+10.0)/10.0)-1.0)
variables[17] = 0.125*exp(states[3]/80.0)
rates[2] = variables[16]*(1.0-states[2])-variables[17]*states[2]
variables[5] = -20.0 if and_func(geq_func(voi, 10.0), leq_func(voi, 10.5)) else 0.0
variables[6] = external_variable(voi, states, rates, variables, 6)
variables[7] = variables[0]*(states[3]-variables[6])
variables[15] = variables[3]*pow(states[2], 4.0)*(states[3]-variables[14])
variables[9] = variables[4]*pow(states[0], 3.0)*states[1]*(states[3]-variables[8])
rates[3] = -(-variables[5]+variables[9]+variables[15]+variables[7])/variables[1]
def compute_variables(voi, states, rates, variables, external_variable):
variables[7] = variables[0]*(states[3]-variables[6])
variables[9] = variables[4]*pow(states[0], 3.0)*states[1]*(states[3]-variables[8])
variables[10] = 0.1*(states[3]+25.0)/(exp((states[3]+25.0)/10.0)-1.0)
variables[11] = 4.0*exp(states[3]/18.0)
variables[12] = 0.07*exp(states[3]/20.0)
variables[13] = 1.0/(exp((states[3]+30.0)/10.0)+1.0)
variables[15] = variables[3]*pow(states[2], 4.0)*(states[3]-variables[14])
variables[16] = 0.01*(states[3]+10.0)/(exp((states[3]+10.0)/10.0)-1.0)
variables[17] = 0.125*exp(states[3]/80.0)<|fim▁end|> | |
<|file_name|>Bank.py<|end_file_name|><|fim▁begin|>from random import uniform as randfloat
class BankAccount:
'A simple class to store money.'
money = 0
owner = ""
def __init__(self, owner, money):
self.owner = owner
self.money = round(money, 2)
def getOwner(self):
return self.owner
def getMoney(self):
return self.money
def deposit(self, amount):
self.money = round(self.money + amount, 2)
return True
def withdraw(self, amount):
if amount > self.money:
print("Sorry, you do not have enough money to complete this transaction.")
return False
else:
self.money -= amount
return True
class ATM:
'A simple class to distribute money.'
def use(self, bankaccount):
if not bankaccount.__class__.__name__ == "BankAccount":
print("Not a BankAcount!")
print(" Welcome %s" % bankaccount.getOwner())
choice = ""
while not (choice == "1" or choice == "2" or choice == "3"):
choice = raw_input('''
Choose an Option:
1: Get Amount of Money
2: Deposit Money
3: Withdraw
''')
if choice == "1":
print("You have $%2.2f." % bankaccount.getMoney())
else:
while True:
try:
amount = float(raw_input("How much money? $"))
break;
except ValueError:
print
if choice == "2":
bankaccount.deposit(amount)
else:<|fim▁hole|> bankaccount.withdraw(amount)
print("Done!")
print("Have a nice day!")
account = BankAccount(raw_input("What is your name? => "), randfloat(0.00, 50.00))
while True:
ATM().use(account)<|fim▁end|> | |
<|file_name|>vec-push.rs<|end_file_name|><|fim▁begin|>// Copyright 2012 The Rust Project Developers. See the COPYRIGHT<|fim▁hole|>// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
pub fn main() { let mut v = ~[1, 2, 3]; v.push(1); }<|fim▁end|> | |
<|file_name|>item_26_multiple_inheritance_for_mixin_only.py<|end_file_name|><|fim▁begin|>import json
from collections import abc
# item 26: use muptiple inheritance for mixin only
# a mixin that transforms a python object to a dictionary that's ready for seralization
class ToDictMixin(object):
def to_dict(self):
"""Return a dictionary representation of this object"""<|fim▁hole|>
def _traverse(self, key, obj):
"""Return a dictionary representation of this obj"""
if isinstance(obj, ToDictMixin):
return obj.to_dict()
if isinstance(obj, dict):
return {k: self._traverse(k, v) for k, v in obj.items()}
if isinstance(obj, tuple) or isinstance(obj, list):
return [self._traverse(key, item) for item in obj]
# if it's any other object with __dict__ attr, use it!
if hasattr(obj, '__dict__'):
return self._traverse(key, obj.__dict__)
return obj
class BinaryTreeNode(ToDictMixin):
def __init__(self, value, left=None, right=None):
self.value = value
self.left = left
self.right = right
class BinaryTreeWithParent(BinaryTreeNode):
def __init__(self, value, left=None, right=None, parent=None):
super().__init__(value, left, right)
self.parent = parent
# override so the backref to parent does not cause infinite recursion
def _traverse(self, key, obj):
# if the key is parent, stop the recursion and return parent's value instead
if key == 'parent' and isinstance(obj, BinaryTreeNode):
return obj.value
return super()._traverse(key, obj)
class NamedSubTree(ToDictMixin):
def __init__(self, name, tree):
self.name = name
self.tree = tree
# Mixins can also play together
class ToJsonMixin(object):
@classmethod
def from_json(cls, kwargs):
"""given kwargs in json format, get it into dictionary format"""
kwargs = json.loads(kwargs)
return cls(**kwargs)
def to_json(self):
d = self.to_dict()
return json.dumps(d)
class BinaryTreeWithJson(BinaryTreeNode, ToJsonMixin):
pass
class EqualityMixin(object):
def __eq__(self, other):
return self.__dict__ == other.__dict__
class Switch(EqualityMixin):
def __init__(self, ports, speed):
self.ports = ports
self.speed = speed
class Machine(EqualityMixin):
def __init__(self, ram, cpu, disk):
self.ram = ram
self.cpu = cpu
self.disk = disk
class DatacenterRack(ToJsonMixin, ToDictMixin, EqualityMixin):
def __init__(self, switch, machines):
self.switch = Switch(**switch)
self.machines = [Machine(**kwargs) for kwargs in machines]<|fim▁end|> | return self._traverse('none', self.__dict__) |
<|file_name|>graphics.js<|end_file_name|><|fim▁begin|>/**
* Created by Dennis Schwartz on 16/12/15.
*/
var THREE = require('three');
var TrackballControls = require('three.trackball');
var OrthographicTrackballControls = require('three.orthographictrackball');
var Layouts = require('./layouts');
var Fixed = Layouts.connectedMultilayer;
var ForceDirectedLayered = Layouts.independentMultilayer;
var Manual = Layouts.manual;
var R = require('ramda');
var Defaults = require('./defaults');
function Graphics ( state ) {
var stable = false;
var maxWeight = state.elements.maxWeight;
// Attach the current three instance to the state
state.THREE = THREE;
/*
Create the three.js canvas/WebGL renderer
*/
state.renderer = createRenderer( state.visEl );
state.scene = new THREE.Scene();
createCamera( state );
/*
Create Layout with elements in state
*/
var layout = createLayout( state );
/*
Layouts specify renderers and UI builders
*/
var nodeRenderer = layout.nodeRenderer;
//var linkRenderer = layout.linkRenderer;
/**
* This sets the default node rendering function
*/
//var linkRenderer = function ( link ) {
// console.log(link);
// console.log(state.elements.elements[link.from]);
//
// var from = nodeUI[link.from.substring(2)];
// var to = nodeUI[link.to.substring(2)];
// link.geometry.vertices[0].set(from.position.x,
// from.position.y,
// from.position.z);
// link.geometry.vertices[1].set(to.position.x,
// to.position.y,
// to.position.z);
// link.geometry.verticesNeedUpdate = true;
//};
var nodeUIBuilder = layout.nodeUIBuilder;
var linkUIBuilder = layout.linkUIBuilder;
/*
Create ui (look) of every element and add it to the element object
*/
var nodes = state.elements.nodelayers; // TODO: Save only IDs in these lists
var edges = state.elements.edges;
nodes.forEach(function (n) {
createNodeUI(state.elements.elements['nl' + n.data.id]);
});
edges.forEach(function (e) {
var toID = e.data.target.substring(2);
var fromID = e.data.source.substring(2);
var link = state.elements.elements[ 'e' + fromID + toID ];
createLinkUI(link);
});
console.log(state);
/*
Create controls if set
*/
/**
* Create the UI for each node-layer in the network and add them to the scene
* @param node
*/
function createNodeUI(node) {
if (!node.ui) {
node.ui = nodeUIBuilder(node);
console.log('hello!');
node.position = layout.getNodePosition(node);
var layers = R.map(function (i) { return i.data['id'] }, state.elements.layers);
node.position.z = layers.indexOf('l' + node.data['layer']) * state.interLayerDistance;
}
state.scene.add(node.ui);
//console.log("added");
//console.log(node.ui);
}
/**
* Create the UI for each link and add it to the scene
* @param link
*/
function createLinkUI(link) {
if (!link.ui) {
var from = link.data['source'];
var to = link.data['target'];
link.ui = linkUIBuilder(link);
link.ui.from = from;
link.ui.to = to;
}
state.scene.add(link.ui);
}
/**
* This is the main Animation loop calling requestAnimationFrame on window
* which in turn calls back to this function
*/
function run () {
//if ( stop ) return;
window.requestAnimationFrame( run );
if (!stable) {
stable = layout.step();
}
renderFrame ();
state.controls.update ();
}
/**
* Create three.js state
* @param state
* @returns {THREE.PerspectiveCamera}
*/
function createCamera ( state ) {
var container = state.renderer.domElement;
var camera;
var controls;
if ( state.cameraType === 'orthographic' ) {
// Create camera
camera = new THREE.OrthographicCamera( container.width / 2,
container.width / -2,
container.height / 2,
container.height / -2, 1, 1000 );
camera.position.x = 200;
camera.position.y = 100;
camera.position.z = 300;
camera.lookAt(state.scene.position);
// Create corresponding controls if necessary
if ( state.zoomingEnabled ) controls = new OrthographicTrackballControls(camera, state.renderer.domElement);
} else { // Default case
camera = new THREE.PerspectiveCamera(45, container.clientWidth / container.clientHeight, 0.1, 30000);
if ( state.zoomingEnabled ) controls = new TrackballControls(camera, state.renderer.domElement);
}
camera.position.z = 400;
state.camera = camera;
if (state.zoomingEnabled) {
controls.panSpeed = 0.8;
controls.staticMoving = true;
controls.dynamicDampingFactor = 0.3;
controls.addEventListener('change', renderFrame);
state.controls = controls;
}
}
/**
* This function calculates and sets
* the current position of each ui-element each frame.
*/
function renderFrame() {
//Alternative version
nodes.forEach(function ( node ) {
var n = state.elements.elements[ 'nl' + node.data.id ];
nodeRenderer( n );
});
if ( state.directed ) {
var arrowScale = 0.25;
edges.forEach(function ( edge ) {
var toID = edge.data.target.substring(2);
var fromID = edge.data.source.substring(2);
var link = state.elements.elements[ 'e' + fromID + toID ];
var from = state.elements.elements[ edge.data.source ];
var to = state.elements.elements[ edge.data.target ];
var newSourcePos = new THREE.Vector3(from.ui.position.x,
from.ui.position.y,
from.ui.position.z);
var newTargetPos = new THREE.Vector3(to.ui.position.x,
to.ui.position.y,
to.ui.position.z);
var arrowVec = newTargetPos.clone().sub(newSourcePos);
// targetPos + norm(neg(arrowVec)) * (nodesize / 2)
var nodeRadVec = arrowVec.clone().negate().normalize().multiplyScalar(state.nodesize || 6);
var cursor = newTargetPos.clone().add(nodeRadVec); // point
link.ui.geometry.vertices[0].set(cursor.x, cursor.y, cursor.z);
link.ui.geometry.vertices[3].set(cursor.x, cursor.y, cursor.z);
cursor.add(nodeRadVec.multiplyScalar(1.5)); //arrowHeadBase
var arrowHeadBase = cursor.clone();
var flanker = nodeRadVec.clone().cross(new THREE.Vector3(0,0,1)).multiplyScalar(arrowScale);
var w = link.data.weight || 1;
var factor = 1;
if ( maxWeight === 0 ) {
factor = .6 - (.6 / (w + .1));
} else {
if ( state.normalisation === 'log' ) {
factor = 0.6 * ( Math.log(w) / Math.log(maxWeight) );
} else {
factor = 0.6 * ( w / maxWeight );
}
}
var ribboner = flanker.clone().multiplyScalar(factor);
var flank1 = cursor.clone().add(flanker); //flank 1
link.ui.geometry.vertices[1].set(flank1.x, flank1.y, flank1.z);
flanker.add(flanker.negate().multiplyScalar(arrowScale * 2));
cursor.add(flanker); //flank 2
link.ui.geometry.vertices[2].set(cursor.x, cursor.y, cursor.z);
// Move to Ribbon 1
cursor = arrowHeadBase.clone().add(ribboner);
link.ui.geometry.vertices[4].set(cursor.x, cursor.y, cursor.z);
// Move to Ribbon 2
cursor = arrowHeadBase.clone().add(ribboner.negate());
link.ui.geometry.vertices[5].set(cursor.x, cursor.y, cursor.z);
var temp = newTargetPos.clone().add(newSourcePos).divideScalar(2);
// Move to source
// RibbonSrc1
cursor = newSourcePos.clone().add(ribboner).add(nodeRadVec.negate().multiplyScalar(1.3));
link.ui.geometry.vertices[6].set(cursor.x, cursor.y, cursor.z);
// RibbonSrc2
cursor = newSourcePos.clone().add(ribboner.negate()).add(nodeRadVec);
link.ui.geometry.vertices[7].set(cursor.x, cursor.y, cursor.z);
link.ui.material.color.set(0x000000);
//link.ui.material.transparent = true;
//link.ui.material.opacity = 0.4;
link.ui.geometry.verticesNeedUpdate = true;
//link.ui.geometry.elementsNeedUpdate = true;
//var distance = newSourcePos.distanceTo(newTargetPos);
//var position = newTargetPos.clone().add(newSourcePos).divideScalar(2);
//var orientation = new THREE.Matrix4();//a new orientation matrix to offset pivot
//var offsetRotation = new THREE.Matrix4();//a matrix to fix pivot rotation
//var offsetPosition = new THREE.Matrix4();//a matrix to fix pivot position
//orientation.lookAt(newSourcePos, newTargetPos, new THREE.Vector3(0,1,0));
//offsetRotation.makeRotationX(HALF_PI);//rotate 90 degs on X
//orientation.multiply(offsetRotation);//combine orientation with rotation transformations
//var cylinder = link.ui.geometry;//new THREE.CylinderGeometry(1.2,1.2,distance,1,1,false);
////cylinder.applyMatrix(orientation);
//link.ui.scale.y = distance;
//link.ui.geometry = cylinder;
//link.ui.position.set(position.x, position.y, position.z);
//console.log("After");
//console.log(link.ui);
});
} else {
edges.forEach(function ( edge ) {
var toID = edge.data.target.substring(2);
var fromID = edge.data.source.substring(2);
var link = state.elements.elements[ 'e' + fromID + toID ];
var from = state.elements.elements[ edge.data.source ];
var to = state.elements.elements[ edge.data.target ];
link.ui.geometry.vertices[0].set(from.ui.position.x,
from.ui.position.y,
from.ui.position.z);
link.ui.geometry.vertices[1].set(to.ui.position.x,
to.ui.position.y,
to.ui.position.z);
link.ui.geometry.verticesNeedUpdate = true;
});
}
state.renderer.render(state.scene, state.camera);
}
function rebuildUI () {
//Object.keys(nodeUI).forEach(function (nodeId) {
// scene.remove(nodeUI[nodeId]);
//});
//nodeUI = {};
//
//Object.keys(linkUI).forEach(function (linkId) {
// scene.remove(linkUI[linkId]);
//});
//linkUI = {};
//
//
//network.get( 'nodes' ).forEach(function (n) {
// createNodeUI(n);
//});
//network.get( 'edges' ).forEach(function (e) {
// createLinkUI(e);
//});
// Remove old UI
nodes.forEach(function (n) {
var node = state.elements.elements['nl' + n.data.id];
state.scene.remove(node.ui);
node.ui = undefined;
});
edges.forEach(function (e) {
var toID = e.data.target.substring(2);
var fromID = e.data.source.substring(2);
var link = state.elements.elements[ 'e' + fromID + toID ];
state.scene.remove(link.ui);
link.ui = undefined;
});
// Create new UI
nodes.forEach(function (n) {
createNodeUI(state.elements.elements['nl' + n.data.id]);
});
edges.forEach(function (e) {
var toID = e.data.target.substring(2);
var fromID = e.data.source.substring(2);
var link = state.elements.elements[ 'e' + fromID + toID ];
createLinkUI(link);
});
}
/**
* Check if the given Layout was already instantiated or is only a name.
* If a name -> create new Layout
* @param state
* @returns {*}
*/
function createLayout ( state ) {
var input = state.layout;
input = input === undefined ? 'ForceDirectedLayered' : input.name;
network = state.elements;
console.log(state);
if ( typeof input === 'string' ) {
var layout;
if ( input === 'Fixed' ) {
console.log(state.physicsSettings);
return new Fixed( network, state );
}
<|fim▁hole|> if ( input === 'ForceDirectedLayered' ) {
return new ForceDirectedLayered( network, state );
}
if ( input === 'ForceDirected' ) {
return new ForceDirected(network, settings);
}
if ( input === 'Manual' ) {
return new Manual( state.elements );
}
} else if ( input ) {
return input;
}
throw new Error ( "The layout " + input + " could not be created!" );
}
return {
THREE: THREE,
run: run,
resetStable: function () {
stable = false;
layout.resetStable();
},
/**
* You can set the nodeUIBuilder function yourself
* allowing for custom UI settings
* @param callback
*/
setNodeUI: function (callback) {
nodeUIBuilder = callback;
rebuildUI();
return this;
},
/**
* You can set the nodeUIBuilder function yourself
* allowing for custom UI settings
* @param callback
*/
setLinkUI: function (callback) {
linkUIBuilder = callback;
rebuildUI();
return this;
}
};
/**
* Create the three.js renderer
* @param container
* @returns {*}
*/
function createRenderer ( container ) {
var webGlSupport = webgl_detect();
var renderer = webGlSupport ? new THREE.WebGLRenderer( { container: container, antialias: true } ) : new THREE.CanvasRenderer( container );
var width = container.clientWidth || window.innerWidth;
var height = container.clientHeight || window.innerHeight;
renderer.setSize( width, height );
renderer.setClearColor( 0xffffff, 1 );
console.log(renderer);
if ( container ) {
container.appendChild( renderer.domElement );
} else {
document.body.appendChild( renderer.domElement );
}
return renderer;
}
/**
* http://stackoverflow.com/questions/11871077/proper-way-to-detect-webgl-support
* @param return_context
* @returns {*}
*/
function webgl_detect(return_context) {
if (!!window.WebGLRenderingContext) {
var canvas = document.createElement("canvas"),
names = ["webgl", "experimental-webgl", "moz-webgl", "webkit-3d"],
context = false;
for(var i=0;i<4;i++) {
try {
context = canvas.getContext(names[i]);
if (context && typeof context.getParameter == "function") {
// WebGL is enabled
if (return_context) {
// return WebGL object if the function's argument is present
return {name:names[i], gl:context};
}
// else, return just true
return true;
}
} catch(e) {}
}
// WebGL is supported, but disabled
return false;
}
// WebGL not supported
return false;
}
}
module.exports = Graphics;<|fim▁end|> | |
<|file_name|>exceptions.py<|end_file_name|><|fim▁begin|>import redmodel.containers
class Error(Exception):<|fim▁hole|>
class BadArgsError(Error):
pass
UniqueError = redmodel.containers.UniqueError<|fim▁end|> | pass
class NotFoundError(Error):
pass |
<|file_name|>surface.js<|end_file_name|><|fim▁begin|>/*
* Copyright (c) 2012-2014 André Bargull
* Alle Rechte vorbehalten / All Rights Reserved. Use is subject to license terms.
*
* <https://github.com/anba/es6draft>
*/
const {
assertSame,
assertDataProperty,
assertBuiltinConstructor,
assertBuiltinPrototype,
} = Assert;
function assertFunctionProperty(object, name, value = object[name]) {
return assertDataProperty(object, name, {value, writable: true, enumerable: false, configurable: true});
}
function assertCreateFunctionProperty(object, name, value = object[name]) {
return assertDataProperty(object, name, {value, writable: false, enumerable: false, configurable: true});
}
function assertConstructorProperty(object, name = "constructor", value = object[name]) {
return assertDataProperty(object, name, {value, writable: true, enumerable: false, configurable: true});
}
function assertPrototypeProperty(object, name = "prototype", value = object[name]) {
return assertDataProperty(object, name, {value, writable: false, enumerable: false, configurable: false});
}
/* Promise Objects */
assertBuiltinConstructor(Promise, "Promise", 1);
assertBuiltinPrototype(Promise.prototype);
assertSame(Promise, Promise.prototype.constructor);
/* Properties of the Promise Constructor */
assertPrototypeProperty(Promise);
assertCreateFunctionProperty(Promise, Symbol.create);
assertFunctionProperty(Promise, "all");
assertFunctionProperty(Promise, "cast");<|fim▁hole|>assertFunctionProperty(Promise, "reject");
assertFunctionProperty(Promise, "resolve");
/* Properties of the Promise Prototype Object */
assertConstructorProperty(Promise.prototype);
assertFunctionProperty(Promise.prototype, "catch");
assertFunctionProperty(Promise.prototype, "then");<|fim▁end|> | assertFunctionProperty(Promise, "race"); |
<|file_name|>train_util.py<|end_file_name|><|fim▁begin|># Copyright 2022 The DDSP Authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# Lint as: python3
"""Library of training functions."""
import inspect
import json
import os
import time
from absl import logging
from ddsp.training import cloud
import gin
import tensorflow.compat.v2 as tf
# ---------------------- Helper Functions --------------------------------------
def get_strategy(tpu='', cluster_config=''):
"""Create a distribution strategy for running on accelerators.
For CPU, single-GPU, or multi-GPU jobs on a single machine, call this function
without args to return a MirroredStrategy.
For TPU jobs, specify an address to the `tpu` argument.
For multi-machine GPU jobs, specify a `cluster_config` argument of the cluster
configuration.
Args:
tpu: Address of the TPU. No TPU if left blank.
cluster_config: Should be specified only for multi-worker jobs.
Task specific dictionary for cluster config dict in the TF_CONFIG format.
https://www.tensorflow.org/guide/distributed_training#setting_up_tf_config_environment_variable
If passed as a string, will be parsed to a dictionary. Two components
should be specified: cluster and task. Cluster provides information about
the training cluster, which is a dict consisting of different types of
jobs such as chief and worker. Task is information about the current task.
For example: "{"cluster": {"worker": ["host1:port", "host2:port"]},
"task": {"type": "worker", "index": 0}}"
Returns:
A distribution strategy. MirroredStrategy by default. TPUStrategy if `tpu`
arg is specified. MultiWorkerMirroredStrategy if `cluster_config` arg is
specified.
"""
if tpu:
logging.info('Use TPU at %s', tpu)
resolver = tf.distribute.cluster_resolver.TPUClusterResolver(tpu=tpu)
tf.config.experimental_connect_to_cluster(resolver)
tf.tpu.experimental.initialize_tpu_system(resolver)
strategy = tf.distribute.TPUStrategy(resolver)
elif cluster_config:
if not isinstance(cluster_config, dict):
cluster_config = json.loads(cluster_config)
cluster_spec = tf.train.ClusterSpec(cluster_config['cluster'])
resolver = tf.distribute.cluster_resolver.SimpleClusterResolver(
cluster_spec=cluster_spec,
task_type=cluster_config['task']['type'],
task_id=cluster_config['task']['index'],
num_accelerators={'GPU': len(tf.config.list_physical_devices('GPU'))},
rpc_layer='grpc')
strategy = tf.distribute.experimental.MultiWorkerMirroredStrategy(
cluster_resolver=resolver)
else:
logging.info('Defaulting to MirroredStrategy')
strategy = tf.distribute.MirroredStrategy()
return strategy
def expand_path(file_path):
return os.path.expanduser(os.path.expandvars(file_path))
def get_latest_file(dir_path, prefix='operative_config-', suffix='.gin'):
"""Returns latest file with pattern '/dir_path/prefix[iteration]suffix'.
Args:
dir_path: Path to the directory.
prefix: Filename prefix, not including directory.
suffix: Filename suffix, including extension.
Returns:
Path to the latest file
Raises:
FileNotFoundError: If no files match the pattern
'/dir_path/prefix[int]suffix'.
"""
dir_path = expand_path(dir_path)
dir_prefix = os.path.join(dir_path, prefix)
search_pattern = dir_prefix + '*' + suffix
file_paths = tf.io.gfile.glob(search_pattern)
if not file_paths:
raise FileNotFoundError(
f'No files found matching the pattern \'{search_pattern}\'.')
try:
# Filter to get highest iteration, no negative iterations.
get_iter = lambda fp: abs(int(fp.split(dir_prefix)[-1].split(suffix)[0]))
latest_file = max(file_paths, key=get_iter)
return latest_file
except ValueError as verror:
raise FileNotFoundError(
f'Files found with pattern \'{search_pattern}\' do not match '
f'the pattern \'{dir_prefix}[iteration_number]{suffix}\'.\n\n'
f'Files found:\n{file_paths}') from verror
def get_latest_checkpoint(checkpoint_path):
"""Helper function to get path to latest checkpoint.
Args:
checkpoint_path: Path to the directory containing model checkpoints, or
to a specific checkpoint (e.g. `/path/to/model.ckpt-iteration`).
Returns:
Path to latest checkpoint.
Raises:
FileNotFoundError: If no checkpoint is found.
"""
checkpoint_path = expand_path(checkpoint_path)
is_checkpoint = tf.io.gfile.exists(checkpoint_path + '.index')
if is_checkpoint:
# Return the path if it points to a checkpoint.
return checkpoint_path
else:
# Search using 'checkpoints' file.
# Returns None if no 'checkpoints' file, or directory doesn't exist.
ckpt = tf.train.latest_checkpoint(checkpoint_path)
if ckpt:
return ckpt
else:
# Last resort, look for '/path/ckpt-[iter].index' files.
ckpt_f = get_latest_file(checkpoint_path, prefix='ckpt-', suffix='.index')
return ckpt_f.split('.index')[0]
# ---------------------------------- Gin ---------------------------------------
def get_latest_operative_config(restore_dir):
"""Finds the most recently saved operative_config in a directory.
Args:
restore_dir: Path to directory with gin operative_configs. Will also work
if passing a path to a file in that directory such as a checkpoint.
Returns:
Filepath to most recent operative config.
Raises:
FileNotFoundError: If no config is found.
"""
try:<|fim▁hole|> os.path.dirname(restore_dir), prefix='operative_config-', suffix='.gin')
def write_gin_config(summary_writer, save_dir, step):
""""Writes gin operative_config to save_dir and tensorboard."""
config_str = gin.operative_config_str()
# Save the original config string to a file.
base_name = 'operative_config-{}'.format(step)
fname = os.path.join(save_dir, base_name + '.gin')
with tf.io.gfile.GFile(fname, 'w') as f:
f.write(config_str)
# Formatting hack copied from gin.tf.GinConfigSaverHook.
def format_for_tensorboard(line):
"""Convert a single line to markdown format."""
if not line.startswith('#'):
return ' ' + line
line = line[2:]
if line.startswith('===='):
return ''
if line.startswith('None'):
return ' # None.'
if line.endswith(':'):
return '#### ' + line
return line
# Convert config string to markdown.
md_lines = []
for line in config_str.splitlines():
md_line = format_for_tensorboard(line)
if md_line is not None:
md_lines.append(md_line)
md_config_str = '\n'.join(md_lines)
# Add to tensorboard.
with summary_writer.as_default():
text_tensor = tf.convert_to_tensor(md_config_str)
tf.summary.text(name='gin/' + base_name, data=text_tensor, step=step)
summary_writer.flush()
def gin_register_keras_layers():
"""Registers all keras layers and Sequential to be referenceable in gin."""
# Register sequential model.
gin.external_configurable(tf.keras.Sequential, 'tf.keras.Sequential')
# Register all the layers.
for k, v in inspect.getmembers(tf.keras.layers):
# Duck typing for tf.keras.layers.Layer since keras uses metaclasses.
if hasattr(v, 'variables'):
gin.external_configurable(v, f'tf.keras.layers.{k}')
# ------------------------ Training Loop ---------------------------------------
@gin.configurable
def train(data_provider,
trainer,
batch_size=32,
num_steps=1000000,
steps_per_summary=300,
steps_per_save=300,
save_dir='/tmp/ddsp',
restore_dir='/tmp/ddsp',
early_stop_loss_value=None,
report_loss_to_hypertune=False):
"""Main training loop.
Args:
data_provider: DataProvider object for training data.
trainer: Trainer object built with Model to train.
batch_size: Total batch size.
num_steps: Number of training steps.
steps_per_summary: Number of training steps per summary save.
steps_per_save: Number of training steps per checkpoint save.
save_dir: Directory where checkpoints and summaries will be saved.
If empty string, no checkpoints or summaries will be saved.
restore_dir: Directory where latest checkpoints for resuming the training
are stored. If there are no checkpoints in this directory, training will
begin anew.
early_stop_loss_value: Early stopping. When the total_loss reaches below this
value training stops. If None training will run for num_steps steps.
report_loss_to_hypertune: Report loss values to hypertune package for
hyperparameter tuning, such as on Google Cloud AI-Platform.
"""
# Get a distributed dataset iterator.
dataset = data_provider.get_batch(batch_size, shuffle=True, repeats=-1)
dataset = trainer.distribute_dataset(dataset)
dataset_iter = iter(dataset)
# Build model, easiest to just run forward pass.
trainer.build(next(dataset_iter))
# Load latest checkpoint if one exists in load directory.
try:
trainer.restore(restore_dir)
except FileNotFoundError:
logging.info('No existing checkpoint found in %s, skipping '
'checkpoint loading.', restore_dir)
if save_dir:
# Set up the summary writer and metrics.
summary_dir = os.path.join(save_dir, 'summaries', 'train')
summary_writer = tf.summary.create_file_writer(summary_dir)
# Save the gin config.
write_gin_config(summary_writer, save_dir, trainer.step.numpy())
else:
# Need to create a dummy writer, even if no save_dir is provided.
summary_writer = tf.summary.create_noop_writer()
# Train.
with summary_writer.as_default():
tick = time.time()
for iteration in range(num_steps):
step = trainer.step # Step is not iteration if restarting a model.
# Take a step.
losses = trainer.train_step(dataset_iter)
# Create training loss metrics when starting/restarting training.
if iteration == 0:
loss_names = list(losses.keys())
logging.info('Creating metrics for %s', loss_names)
avg_losses = {name: tf.keras.metrics.Mean(name=name, dtype=tf.float32)
for name in loss_names}
# Update metrics.
for k, v in losses.items():
avg_losses[k].update_state(v)
# Log the step.
log_str = 'step: {}\t'.format(int(step.numpy()))
for k, v in losses.items():
log_str += '{}: {:.2f}\t'.format(k, v)
logging.info(log_str)
# Write Summaries.
if step % steps_per_summary == 0 and save_dir:
# Speed.
steps_per_sec = steps_per_summary / (time.time() - tick)
tf.summary.scalar('steps_per_sec', steps_per_sec, step=step)
tick = time.time()
# Metrics.
for k, metric in avg_losses.items():
tf.summary.scalar('losses/{}'.format(k), metric.result(), step=step)
metric.reset_states()
# Report metrics for hyperparameter tuning if enabled.
if report_loss_to_hypertune:
cloud.report_metric_to_hypertune(losses['total_loss'], step.numpy())
# Stop the training when the loss reaches given value
if (early_stop_loss_value is not None and
losses['total_loss'] <= early_stop_loss_value):
logging.info('Total loss reached early stopping value of %s',
early_stop_loss_value)
# Write a final checkpoint.
if save_dir:
trainer.save(save_dir)
summary_writer.flush()
break
# Save Model.
if step % steps_per_save == 0 and save_dir:
trainer.save(save_dir)
summary_writer.flush()
logging.info('Training Finished!')<|fim▁end|> | return get_latest_file(
restore_dir, prefix='operative_config-', suffix='.gin')
except FileNotFoundError:
return get_latest_file( |
<|file_name|>card_validation_test.py<|end_file_name|><|fim▁begin|>import unittest
from card_validation import (
numberToMatrix,
getOddDigits,
getEvenDigits,
sumOfDoubleOddPlace,
sumOfEvenPlace,
getDigit,
isValid
)
class CardValidationTest(unittest.TestCase):
def __init__(self, *args, **kwargs):
super(CardValidationTest, self).__init__(*args, **kwargs)
self.card_number = "4388576018410707"
self.matrix = numberToMatrix(self.card_number)
self.odds = getOddDigits(self.matrix)
self.evens = getEvenDigits(self.matrix)
def test_numberToMatrix(self):
self.assertEqual(self.matrix.__class__, list)
def test_getOddDigits(self):
self.assertEqual(self.odds.__class__, list)
def test_getEvenDigits(self):
self.assertEqual(self.evens.__class__, list)
def test_sumOfDoubleOddPlace(self):
self.assertEqual(sumOfDoubleOddPlace(self.odds), 29)
<|fim▁hole|>
def test_isValid(self):
self.assertEqual(isValid(self.card_number), True)
if __name__ == "__main__":
unittest.main()<|fim▁end|> | def test_getDigit(self):
self.assertEqual(getDigit(9), 9) |
<|file_name|>helpers.go<|end_file_name|><|fim▁begin|>/*
Copyright 2016 The Kubernetes Authors.
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*/
package kuberuntime
import (
"fmt"
"path/filepath"
"strconv"
"github.com/golang/glog"
"k8s.io/kubernetes/pkg/api/v1"
runtimeapi "k8s.io/kubernetes/pkg/kubelet/api/v1alpha1/runtime"
kubecontainer "k8s.io/kubernetes/pkg/kubelet/container"
"k8s.io/kubernetes/pkg/types"
)
const (
// Taken from lmctfy https://github.com/google/lmctfy/blob/master/lmctfy/controllers/cpu_controller.cc
minShares = 2
sharesPerCPU = 1024
milliCPUToCPU = 1000
// 100000 is equivalent to 100ms
quotaPeriod = 100 * minQuotaPeriod
minQuotaPeriod = 1000
)
var (
// The default dns opt strings
defaultDNSOptions = []string{"ndots:5"}
)
type podsByID []*kubecontainer.Pod
func (b podsByID) Len() int { return len(b) }
func (b podsByID) Swap(i, j int) { b[i], b[j] = b[j], b[i] }
func (b podsByID) Less(i, j int) bool { return b[i].ID < b[j].ID }
type containersByID []*kubecontainer.Container
func (b containersByID) Len() int { return len(b) }
func (b containersByID) Swap(i, j int) { b[i], b[j] = b[j], b[i] }
func (b containersByID) Less(i, j int) bool { return b[i].ID.ID < b[j].ID.ID }
// Newest first.
type podSandboxByCreated []*runtimeapi.PodSandbox
func (p podSandboxByCreated) Len() int { return len(p) }
func (p podSandboxByCreated) Swap(i, j int) { p[i], p[j] = p[j], p[i] }
func (p podSandboxByCreated) Less(i, j int) bool { return p[i].GetCreatedAt() > p[j].GetCreatedAt() }
type containerStatusByCreated []*kubecontainer.ContainerStatus
func (c containerStatusByCreated) Len() int { return len(c) }
func (c containerStatusByCreated) Swap(i, j int) { c[i], c[j] = c[j], c[i] }
func (c containerStatusByCreated) Less(i, j int) bool { return c[i].CreatedAt.After(c[j].CreatedAt) }
// toKubeContainerState converts runtimeapi.ContainerState to kubecontainer.ContainerState.
func toKubeContainerState(state runtimeapi.ContainerState) kubecontainer.ContainerState {
switch state {
case runtimeapi.ContainerState_CONTAINER_CREATED:
return kubecontainer.ContainerStateCreated
case runtimeapi.ContainerState_CONTAINER_RUNNING:
return kubecontainer.ContainerStateRunning
case runtimeapi.ContainerState_CONTAINER_EXITED:
return kubecontainer.ContainerStateExited
case runtimeapi.ContainerState_CONTAINER_UNKNOWN:
return kubecontainer.ContainerStateUnknown
}
return kubecontainer.ContainerStateUnknown
}
// toRuntimeProtocol converts v1.Protocol to runtimeapi.Protocol.
func toRuntimeProtocol(protocol v1.Protocol) runtimeapi.Protocol {<|fim▁hole|> case v1.ProtocolUDP:
return runtimeapi.Protocol_UDP
}
glog.Warningf("Unknown protocol %q: defaulting to TCP", protocol)
return runtimeapi.Protocol_TCP
}
// toKubeContainer converts runtimeapi.Container to kubecontainer.Container.
func (m *kubeGenericRuntimeManager) toKubeContainer(c *runtimeapi.Container) (*kubecontainer.Container, error) {
if c == nil || c.Id == nil || c.Image == nil || c.State == nil {
return nil, fmt.Errorf("unable to convert a nil pointer to a runtime container")
}
labeledInfo := getContainerInfoFromLabels(c.Labels)
annotatedInfo := getContainerInfoFromAnnotations(c.Annotations)
return &kubecontainer.Container{
ID: kubecontainer.ContainerID{Type: m.runtimeName, ID: c.GetId()},
Name: labeledInfo.ContainerName,
Image: c.Image.GetImage(),
Hash: annotatedInfo.Hash,
State: toKubeContainerState(c.GetState()),
}, nil
}
// sandboxToKubeContainer converts runtimeapi.PodSandbox to kubecontainer.Container.
// This is only needed because we need to return sandboxes as if they were
// kubecontainer.Containers to avoid substantial changes to PLEG.
// TODO: Remove this once it becomes obsolete.
func (m *kubeGenericRuntimeManager) sandboxToKubeContainer(s *runtimeapi.PodSandbox) (*kubecontainer.Container, error) {
if s == nil || s.Id == nil || s.State == nil {
return nil, fmt.Errorf("unable to convert a nil pointer to a runtime container")
}
return &kubecontainer.Container{
ID: kubecontainer.ContainerID{Type: m.runtimeName, ID: s.GetId()},
State: kubecontainer.SandboxToContainerState(s.GetState()),
}, nil
}
// getImageUser gets uid or user name that will run the command(s) from image. The function
// guarantees that only one of them is set.
func (m *kubeGenericRuntimeManager) getImageUser(image string) (*int64, *string, error) {
imageStatus, err := m.imageService.ImageStatus(&runtimeapi.ImageSpec{Image: &image})
if err != nil {
return nil, nil, err
}
if imageStatus != nil && imageStatus.Uid != nil {
// If uid is set, return uid.
return imageStatus.Uid, nil, nil
}
if imageStatus != nil && imageStatus.Username != nil {
// If uid is not set, but user name is set, return user name.
return nil, imageStatus.Username, nil
}
// If non of them is set, treat it as root.
return new(int64), nil, nil
}
// isContainerFailed returns true if container has exited and exitcode is not zero.
func isContainerFailed(status *kubecontainer.ContainerStatus) bool {
if status.State == kubecontainer.ContainerStateExited && status.ExitCode != 0 {
return true
}
return false
}
// milliCPUToShares converts milliCPU to CPU shares
func milliCPUToShares(milliCPU int64) int64 {
if milliCPU == 0 {
// Return 2 here to really match kernel default for zero milliCPU.
return minShares
}
// Conceptually (milliCPU / milliCPUToCPU) * sharesPerCPU, but factored to improve rounding.
shares := (milliCPU * sharesPerCPU) / milliCPUToCPU
if shares < minShares {
return minShares
}
return shares
}
// milliCPUToQuota converts milliCPU to CFS quota and period values
func milliCPUToQuota(milliCPU int64) (quota int64, period int64) {
// CFS quota is measured in two values:
// - cfs_period_us=100ms (the amount of time to measure usage across)
// - cfs_quota=20ms (the amount of cpu time allowed to be used across a period)
// so in the above example, you are limited to 20% of a single CPU
// for multi-cpu environments, you just scale equivalent amounts
if milliCPU == 0 {
return
}
// we set the period to 100ms by default
period = quotaPeriod
// we then convert your milliCPU to a value normalized over a period
quota = (milliCPU * quotaPeriod) / milliCPUToCPU
// quota needs to be a minimum of 1ms.
if quota < minQuotaPeriod {
quota = minQuotaPeriod
}
return
}
// getStableKey generates a key (string) to uniquely identify a
// (pod, container) tuple. The key should include the content of the
// container, so that any change to the container generates a new key.
func getStableKey(pod *v1.Pod, container *v1.Container) string {
hash := strconv.FormatUint(kubecontainer.HashContainer(container), 16)
return fmt.Sprintf("%s_%s_%s_%s_%s", pod.Name, pod.Namespace, string(pod.UID), container.Name, hash)
}
// buildContainerLogsPath builds log path for container relative to pod logs directory.
func buildContainerLogsPath(containerName string, restartCount int) string {
return fmt.Sprintf("%s_%d.log", containerName, restartCount)
}
// buildFullContainerLogsPath builds absolute log path for container.
func buildFullContainerLogsPath(podUID types.UID, containerName string, restartCount int) string {
return filepath.Join(buildPodLogsDirectory(podUID), buildContainerLogsPath(containerName, restartCount))
}
// buildPodLogsDirectory builds absolute log directory path for a pod sandbox.
func buildPodLogsDirectory(podUID types.UID) string {
return filepath.Join(podLogsRootDirectory, string(podUID))
}
// toKubeRuntimeStatus converts the runtimeapi.RuntimeStatus to kubecontainer.RuntimeStatus.
func toKubeRuntimeStatus(status *runtimeapi.RuntimeStatus) *kubecontainer.RuntimeStatus {
conditions := []kubecontainer.RuntimeCondition{}
for _, c := range status.GetConditions() {
conditions = append(conditions, kubecontainer.RuntimeCondition{
Type: kubecontainer.RuntimeConditionType(c.GetType()),
Status: c.GetStatus(),
Reason: c.GetReason(),
Message: c.GetMessage(),
})
}
return &kubecontainer.RuntimeStatus{Conditions: conditions}
}<|fim▁end|> | switch protocol {
case v1.ProtocolTCP:
return runtimeapi.Protocol_TCP |
<|file_name|>summary.py<|end_file_name|><|fim▁begin|>#!/usr/bin/python
# -*- coding: utf-8 -*-
#
# Copyright 2014 The Plaso Project Authors.
# Please see the AUTHORS file for details on individual authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Plugin to parse the OLECF summary/document summary information items."""
from plaso.lib import event
from plaso.lib import eventdata
from plaso.parsers.olecf_plugins import interface
class OleCfSummaryInfoEvent(event.FiletimeEvent):
"""Convenience class for an OLECF Summary info event."""
DATA_TYPE = 'olecf:summary_info'
def __init__(self, timestamp, usage, attributes):
"""Initializes the event.
Args:
timestamp: The FILETIME timestamp value.
usage: The usage string, describing the timestamp value.
attributes: A dict object containing all extracted attributes.<|fim▁hole|> super(OleCfSummaryInfoEvent, self).__init__(
timestamp, usage)
self.name = u'Summary Information'
for attribute_name, attribute_value in attributes.iteritems():
setattr(self, attribute_name, attribute_value)
# TODO: Move this class to a higher level (to the interface)
# so the these functions can be shared by other plugins.
class OleCfSummaryInfo(object):
"""An OLECF Summary Info object."""
_CLASS_IDENTIFIER = 'f29f85e0-4ff9-1068-ab91-08002b27b3d9'
_PROPERTY_NAMES_INT32 = {
0x000e: 'number_of_pages', # PIDSI_PAGECOUNT
0x000f: 'number_of_words', # PIDSI_WORDCOUNT
0x0010: 'number_of_characters', # PIDSI_CHARCOUNT
0x0013: 'security', # PIDSI_SECURITY
}
_PROPERTY_NAMES_STRING = {
0x0002: 'title', # PIDSI_TITLE
0x0003: 'subject', # PIDSI_SUBJECT
0x0004: 'author', # PIDSI_AUTHOR
0x0005: 'keywords', # PIDSI_KEYWORDS
0x0006: 'comments', # PIDSI_COMMENTS
0x0007: 'template', # PIDSI_TEMPLATE
0x0008: 'last_saved_by', # PIDSI_LASTAUTHOR
0x0009: 'revision_number', # PIDSI_REVNUMBER
0x0012: 'application', # PIDSI_APPNAME
}
PIDSI_CODEPAGE = 0x0001
PIDSI_EDITTIME = 0x000a
PIDSI_LASTPRINTED = 0x000b
PIDSI_CREATE_DTM = 0x000c
PIDSI_LASTSAVE_DTM = 0x000d
PIDSI_THUMBNAIL = 0x0011
def __init__(self, olecf_item, root_creation_time, root_modification_time):
"""Initialize the OLECF summary object.
Args:
olecf_item: The OLECF item (instance of pyolecf.property_set_stream).
root_creation_time: The creation time of the root OLECF item.
root_modification_time: The modification time of the root OLECF item.
"""
super(OleCfSummaryInfo, self).__init__()
self._root_creation_time = root_creation_time
self._root_modification_time = root_modification_time
self._events = []
self.attributes = {}
self._InitFromPropertySet(olecf_item.set)
def _InitFromPropertySet(self, property_set):
"""Initializes the object from a property set.
Args:
property_set: The OLECF property set (pyolecf.property_set).
"""
# Combine the values of multiple property sections
# but do not override properties that are already set.
for property_section in property_set.sections:
if property_section.class_identifier != self._CLASS_IDENTIFIER:
continue
for property_value in property_section.properties:
self._InitFromPropertyValue(property_value)
def _InitFromPropertyValue(self, property_value):
"""Initializes the object from a property value.
Args:
property_value: The OLECF property value (pyolecf.property_value).
"""
if property_value.type == interface.OleDefinitions.VT_I2:
self._InitFromPropertyValueTypeInt16(property_value)
elif property_value.type == interface.OleDefinitions.VT_I4:
self._InitFromPropertyValueTypeInt32(property_value)
elif (property_value.type == interface.OleDefinitions.VT_LPSTR or
property_value.type == interface.OleDefinitions.VT_LPWSTR):
self._InitFromPropertyValueTypeString(property_value)
elif property_value.type == interface.OleDefinitions.VT_FILETIME:
self._InitFromPropertyValueTypeFiletime(property_value)
def _InitFromPropertyValueTypeInt16(self, property_value):
"""Initializes the object from a 16-bit int type property value.
Args:
property_value: The OLECF property value (pyolecf.property_value
of type VT_I2).
"""
if property_value.identifier == self.PIDSI_CODEPAGE:
# TODO: can the codepage vary per property section?
# And is it needed to interpret the ASCII strings?
# codepage = property_value.data_as_integer
pass
def _InitFromPropertyValueTypeInt32(self, property_value):
"""Initializes the object from a 32-bit int type property value.
Args:
property_value: The OLECF property value (pyolecf.property_value
of type VT_I4).
"""
property_name = self._PROPERTY_NAMES_INT32.get(
property_value.identifier, None)
if property_name and not property_name in self.attributes:
self.attributes[property_name] = property_value.data_as_integer
def _InitFromPropertyValueTypeString(self, property_value):
"""Initializes the object from a string type property value.
Args:
property_value: The OLECF property value (pyolecf.property_value
of type VT_LPSTR or VT_LPWSTR).
"""
property_name = self._PROPERTY_NAMES_STRING.get(
property_value.identifier, None)
if property_name and not property_name in self.attributes:
self.attributes[property_name] = property_value.data_as_string
def _InitFromPropertyValueTypeFiletime(self, property_value):
"""Initializes the object from a filetime type property value.
Args:
property_value: The OLECF property value (pyolecf.property_value
of type VT_FILETIME).
"""
if property_value.identifier == self.PIDSI_LASTPRINTED:
self._events.append(
(property_value.data_as_integer, 'Document Last Printed Time'))
elif property_value.identifier == self.PIDSI_CREATE_DTM:
self._events.append(
(property_value.data_as_integer, 'Document Creation Time'))
elif property_value.identifier == self.PIDSI_LASTSAVE_DTM:
self._events.append(
(property_value.data_as_integer, 'Document Last Save Time'))
elif property_value.identifier == self.PIDSI_EDITTIME:
# property_name = 'total_edit_time'
# TODO: handle duration.
pass
def GetEventObjects(self):
"""Yields extracted event objects."""
for timestamp, timestamp_description in self._events:
yield OleCfSummaryInfoEvent(
timestamp, timestamp_description, self.attributes)
if self._root_creation_time:
yield OleCfSummaryInfoEvent(
self._root_creation_time, eventdata.EventTimestamp.CREATION_TIME,
self.attributes)
if self._root_modification_time:
yield OleCfSummaryInfoEvent(
self._root_modification_time,
eventdata.EventTimestamp.MODIFICATION_TIME, self.attributes)
class OleCfDocumentSummaryInfoEvent(event.FiletimeEvent):
"""Convenience class for an OLECF Document Summary info event."""
DATA_TYPE = 'olecf:document_summary_info'
_CLASS_IDENTIFIER = 'd5cdd502-2e9c-101b-9397-08002b2cf9ae'
_PROPERTY_NAMES_BOOL = {
0x0013: 'shared_document', # PIDDSI_SHAREDDOC
}
_PROPERTY_NAMES_INT32 = {
0x0004: 'number_of_bytes', # PIDDSI_BYTECOUNT
0x0005: 'number_of_lines', # PIDDSI_LINECOUNT
0x0006: 'number_of_paragraphs', # PIDDSI_PARCOUNT
0x0007: 'number_of_slides', # PIDDSI_SLIDECOUNT
0x0008: 'number_of_notes', # PIDDSI_NOTECOUNT
0x0009: 'number_of_hidden_slides', # PIDDSI_HIDDENCOUNT
0x000a: 'number_of_clips', # PIDDSI_MMCLIPCOUNT
0x0011: 'number_of_characters_with_white_space', # PIDDSI_CCHWITHSPACES
0x0017: 'application_version', # PIDDSI_VERSION
}
_PROPERTY_NAMES_STRING = {
0x000e: 'manager', # PIDDSI_MANAGER
0x000f: 'company', # PIDDSI_COMPANY
0x001a: 'content_type', # PIDDSI_CONTENTTYPE
0x001b: 'content_status', # PIDDSI_CONTENTSTATUS
0x001c: 'language', # PIDDSI_LANGUAGE
0x001d: 'document_version', # PIDDSI_DOCVERSION
}
PIDDSI_CODEPAGE = 0x0001
PIDDSI_CATEGORY = 0x0002
PIDDSI_PRESFORMAT = 0x0003
PIDDSI_SCALE = 0x000b
PIDDSI_HEADINGPAIR = 0x000c
PIDDSI_DOCPARTS = 0x000d
PIDDSI_LINKSDIRTY = 0x0010
PIDDSI_VERSION = 0x0017
def __init__(self, timestamp, usage, olecf_item):
"""Initializes the event.
Args:
timestamp: The FILETIME timestamp value.
usage: The usage string, describing the timestamp value.
olecf_item: The OLECF item (pyolecf.property_set_stream).
"""
super(OleCfDocumentSummaryInfoEvent, self).__init__(
timestamp, usage)
self.name = u'Document Summary Information'
self._InitFromPropertySet(olecf_item.set)
def _InitFromPropertySet(self, property_set):
"""Initializes the event from a property set.
Args:
property_set: The OLECF property set (pyolecf.property_set).
"""
# Combine the values of multiple property sections
# but do not override properties that are already set.
for property_section in property_set.sections:
if property_section.class_identifier != self._CLASS_IDENTIFIER:
continue
for property_value in property_section.properties:
self._InitFromPropertyValue(property_value)
def _InitFromPropertyValue(self, property_value):
"""Initializes the event from a property value.
Args:
property_value: The OLECF property value (pyolecf.property_value).
"""
if property_value.type == interface.OleDefinitions.VT_I2:
self._InitFromPropertyValueTypeInt16(property_value)
elif property_value.type == interface.OleDefinitions.VT_I4:
self._InitFromPropertyValueTypeInt32(property_value)
elif property_value.type == interface.OleDefinitions.VT_BOOL:
self._InitFromPropertyValueTypeBool(property_value)
elif (property_value.type == interface.OleDefinitions.VT_LPSTR or
property_value.type == interface.OleDefinitions.VT_LPWSTR):
self._InitFromPropertyValueTypeString(property_value)
def _InitFromPropertyValueTypeInt16(self, property_value):
"""Initializes the event from a 16-bit int type property value.
Args:
property_value: The OLECF property value (pyolecf.property_value
of type VT_I2).
"""
if property_value.identifier == self.PIDDSI_CODEPAGE:
# TODO: can the codepage vary per property section?
# And is it needed to interpret the ASCII strings?
# codepage = property_value.data_as_integer
pass
def _InitFromPropertyValueTypeInt32(self, property_value):
"""Initializes the event from a 32-bit int type property value.
Args:
property_value: The OLECF property value (pyolecf.property_value
of type VT_I4).
"""
property_name = self._PROPERTY_NAMES_INT32.get(
property_value.identifier, None)
# The application version consists of 2 16-bit values that make up
# the version number. Where the upper 16-bit is the major number
# and the lower 16-bit the minor number.
if property_value.identifier == self.PIDDSI_VERSION:
application_version = property_value.data_as_integer
setattr(self, property_name, u'{0:d}.{1:d}'.format(
application_version >> 16, application_version & 0xffff))
elif property_name and not hasattr(self, property_name):
setattr(self, property_name, property_value.data_as_integer)
def _InitFromPropertyValueTypeBool(self, property_value):
"""Initializes the event from a boolean type property value.
Args:
property_value: The OLECF property value (pyolecf.property_value
of type VT_BOOL).
"""
property_name = self._PROPERTY_NAMES_BOOL.get(
property_value.identifier, None)
if property_name and not hasattr(self, property_name):
setattr(self, property_name, property_value.data_as_boolean)
def _InitFromPropertyValueTypeString(self, property_value):
"""Initializes the event from a string type property value.
Args:
property_value: The OLECF property value (pyolecf.property_value
of type VT_LPSTR or VT_LPWSTR).
"""
property_name = self._PROPERTY_NAMES_STRING.get(
property_value.identifier, None)
if property_name and not hasattr(self, property_name):
setattr(self, property_name, property_value.data_as_string)
class DocumentSummaryPlugin(interface.OlecfPlugin):
"""Plugin that parses DocumentSummary information from an OLECF file."""
NAME = 'olecf_document_summary'
REQUIRED_ITEMS = frozenset(['\005DocumentSummaryInformation'])
def GetEntries(self, root_item, items, **unused_kwargs):
"""Generate event based on the document summary item.
Args:
root_item: The root item of the OLECF file.
item_names: A list of all items discovered in the root.
Yields:
Event objects (instance of OleCfDocumentSummaryInfoEvent).
"""
creation_time, modification_time = self.GetTimestamps(root_item)
for item in items:
if creation_time:
yield OleCfDocumentSummaryInfoEvent(
creation_time, eventdata.EventTimestamp.CREATION_TIME, item)
if modification_time:
yield OleCfDocumentSummaryInfoEvent(
modification_time, eventdata.EventTimestamp.MODIFICATION_TIME,
item)
class SummaryInfoPlugin(interface.OlecfPlugin):
"""Plugin that parses the SummaryInformation item from an OLECF file."""
NAME = 'olecf_summary'
REQUIRED_ITEMS = frozenset(['\005SummaryInformation'])
def GetEntries(self, root_item, items, **unused_kwargs):
"""Generate event based on the summary information item.
Args:
root_item: The root item of the OLECF file.
item_names: A list of all items discovered in the root.
Yields:
Event objects (instance of OleCfSummaryInfoEvent).
"""
root_creation_time, root_modification_time = self.GetTimestamps(root_item)
for item in items:
summary_information_object = OleCfSummaryInfo(
item, root_creation_time, root_modification_time)
for event_object in summary_information_object.GetEventObjects():
yield event_object<|fim▁end|> | """ |
<|file_name|>tools.js<|end_file_name|><|fim▁begin|>Object.prototype.getKeyByValue = function( value ) {
for( var prop in this ) {
if( this.hasOwnProperty( prop ) ) {
if( this[ prop ] === value )<|fim▁hole|><|fim▁end|> | return prop;
}
}
} |
<|file_name|>__init__.py<|end_file_name|><|fim▁begin|>"""
pip.vendor is for vendoring dependencies of pip to prevent needing pip to
depend on something external.<|fim▁hole|>Files inside of pip.vendor should be considered immutable and should only be
updated to versions from upstream.
"""
from __future__ import absolute_import<|fim▁end|> | |
<|file_name|>lib.rs<|end_file_name|><|fim▁begin|>// Zinc, the bare metal stack for rust.
// Copyright 2014 Vladimir "farcaller" Pouzanov <[email protected]>
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
//! A cell that with volatile setter and getter.
#![feature(no_std, core_intrinsics)]
#![no_std]
#[cfg(feature="replayer")] #[macro_use(expect)] extern crate expectest;
#[cfg(feature="replayer")] #[macro_use] extern crate std;
#[cfg(feature="replayer")] use std::vec::Vec;
#[cfg(feature="replayer")] use expectest::prelude::*;
#[cfg(feature="replayer")] use std::string::String;
#[cfg(feature="replayer")] use std::fmt;
#[cfg(feature="replayer")] use core::cmp::PartialEq;
#[cfg(feature="replayer")] use core::clone::Clone;
#[cfg(feature="replayer")] use core::cell::RefCell;
#[cfg(not(feature="replayer"))] use core::intrinsics::{volatile_load, volatile_store};
#[cfg(feature="replayer")] use core::intrinsics::transmute;
// TODO(farcaller): why this needs copy/clone?
/// This structure is used to represent a hardware register.
/// It is mostly used by the ioreg family of macros.
#[derive(Copy, Clone)]
pub struct VolatileCell<T> {
value: T,
}
impl<T> VolatileCell<T> {
/// Create a cell with initial value.
pub fn new(value: T) -> VolatileCell<T> {
VolatileCell {
value: value,
}
}
/// Get register value.
#[cfg(not(feature="replayer"))]
#[inline]
pub fn get(&self) -> T {
unsafe {
volatile_load(&self.value)
}
}
/// Set register value.
#[cfg(not(feature="replayer"))]
#[inline]
pub fn set(&self, value: T) {
unsafe {
volatile_store(&self.value as *const T as *mut T, value)
}
}
}
#[cfg(feature="replayer")]
impl VolatileCell<u32> {
pub fn get(&self) -> u32 {
unsafe {
GLOBAL_REPLAYER.with(|gr| { gr.borrow_mut().get_cell(transmute(&self.value)) })
}
}
pub fn set(&self, value: u32) {
unsafe {<|fim▁hole|>
#[cfg(feature="replayer")]
impl VolatileCell<u16> {
pub fn get(&self) -> u16 {
unsafe {
GLOBAL_REPLAYER.with(|gr| { gr.borrow_mut().get_cell(transmute(&self.value)) }) as u16
}
}
pub fn set(&self, value: u16) {
unsafe {
GLOBAL_REPLAYER.with(|gr| { gr.borrow_mut().set_cell(transmute(&self.value), value as u32) })
}
}
}
#[cfg(feature="replayer")]
impl VolatileCell<u8> {
pub fn get(&self) -> u8 {
unsafe {
GLOBAL_REPLAYER.with(|gr| { gr.borrow_mut().get_cell(transmute(&self.value)) }) as u8
}
}
pub fn set(&self, value: u8) {
unsafe {
GLOBAL_REPLAYER.with(|gr| { gr.borrow_mut().set_cell(transmute(&self.value), value as u32) })
}
}
}
#[cfg(feature="replayer")]
struct ReplayRecord {
is_read: bool,
address: usize,
value: u32,
replayed: bool,
did_read: bool,
actual_address: usize,
actual_value: u32,
loc: expectest::core::SourceLocation,
}
#[cfg(feature="replayer")]
impl core::fmt::Display for ReplayRecord {
fn fmt(&self, f: &mut core::fmt::Formatter) -> core::fmt::Result {
match self.is_read {
true => write!(f, "read 0x{:x} from 0x{:x}", self.value, self.address),
false => write!(f, "write 0x{:x} to 0x{:x}", self.value, self.address),
}
}
}
#[cfg(feature="replayer")]
pub struct VolatileCellReplayer {
replays: Vec<ReplayRecord>,
current_replay: usize,
}
#[cfg(feature="replayer")]
impl VolatileCellReplayer {
pub fn new() -> VolatileCellReplayer {
VolatileCellReplayer {
replays: Vec::new(),
current_replay: 0,
}
}
pub fn expect_read(&mut self, address: usize, value: u32,
loc: expectest::core::SourceLocation) {
self.replays.push(ReplayRecord {
is_read: true,
address: address,
value: value,
replayed: false,
did_read: false,
actual_address: 0,
actual_value: 0,
loc: loc,
});
}
pub fn expect_write(&mut self, address: usize, value: u32,
loc: expectest::core::SourceLocation) {
self.replays.push(ReplayRecord {
is_read: false,
address: address,
value: value,
replayed: false,
did_read: false,
actual_address: 0,
actual_value: 0,
loc: loc,
});
}
pub fn verify(&self, loc: expectest::core::SourceLocation) {
expect(self.current_replay).location(loc).to(
be_equal_to_with_context(
self.replays.len(),
format!("expected {} replays, performed {}",
self.replays.len(), self.current_replay)));
for ref replay in &*self.replays {
expect(replay.replayed).location(replay.loc).to(be_equal_to_with_context(true,
format!("expected replay {} to be performed, was not", replay)));
expect(replay.is_read).location(replay.loc).to(be_equal_to_with_context(replay.did_read,
format!("expected replay to be {} replay, was {} replay",
if replay.is_read {"read"} else {"write"},
if replay.is_read {"write"} else {"read"})));
expect(replay.address).location(replay.loc).to(be_equal_to_with_context(replay.actual_address,
format!("expected replay address 0x{:x}, was 0x{:x}", replay.address, replay.actual_address)));
if !replay.is_read {
expect(replay.value).location(replay.loc).to(be_equal_to_with_context(replay.actual_value,
format!("expected replay to write 0x{:x}, written 0x{:x}", replay.value, replay.actual_value)));
}
}
}
pub fn get_cell(&mut self, address: usize) -> u32 {
if self.current_replay >= self.replays.len() {
panic!("get_cell(0x{:x}) faled, current replay: {}, total replays: {}",
address, self.current_replay+1, self.replays.len());
}
let replay: &mut ReplayRecord = &mut self.replays[self.current_replay];
replay.replayed = true;
replay.did_read = true;
replay.actual_address = address;
self.current_replay += 1;
replay.value
}
pub fn set_cell(&mut self, address: usize, value: u32) {
if self.current_replay >= self.replays.len() {
panic!("set_cell(0x{:x}, 0x{:x}) faled, current replay: {}, total replays: {}",
address, value, self.current_replay+1, self.replays.len());
}
let replay: &mut ReplayRecord = &mut self.replays[self.current_replay];
replay.replayed = true;
replay.did_read = false;
replay.actual_address = address;
replay.actual_value = value;
self.current_replay += 1;
}
}
#[cfg(feature="replayer")]
thread_local!(static GLOBAL_REPLAYER: RefCell<VolatileCellReplayer> = RefCell::new(VolatileCellReplayer::new()));
#[cfg(feature="replayer")]
pub fn set_replayer(replayer: VolatileCellReplayer) {
GLOBAL_REPLAYER.with(|gr| {
let mut bm = gr.borrow_mut();
*bm = replayer;
});
}
#[cfg(feature="replayer")]
pub fn with_mut_replayer<F>(f: F) where F: core::ops::FnOnce(&mut VolatileCellReplayer) {
GLOBAL_REPLAYER.with(|gr| {
let mut bm = gr.borrow_mut();
f(&mut *bm);
});
}
#[cfg(feature="replayer")]
struct BeEqualToWithContext<E> {
expected: E,
context: String,
}
#[cfg(feature="replayer")]
fn be_equal_to_with_context<E>(expected: E, context: String) -> BeEqualToWithContext<E> {
BeEqualToWithContext {
expected: expected,
context: context,
}
}
#[cfg(feature="replayer")]
impl<A, E> Matcher<A, E> for BeEqualToWithContext<E>
where
A: PartialEq<E> + fmt::Debug,
E: fmt::Debug {
fn failure_message(&self, _: expectest::core::Join, _: &A) -> String {
self.context.clone()
}
fn matches(&self, actual: &A) -> bool {
*actual == self.expected
}
}
#[macro_export]
macro_rules! expect_volatile_read {
($addr: expr, $val: expr) => (
$crate::with_mut_replayer(|r| {
r.expect_read($addr, $val, expectest::core::SourceLocation::new(file!(), line!()));
})
);
}
#[macro_export]
macro_rules! expect_volatile_write {
($addr: expr, $val: expr) => (
$crate::with_mut_replayer(|r| {
r.expect_write($addr, $val, expectest::core::SourceLocation::new(file!(), line!()));
})
);
}
#[macro_export]
macro_rules! expect_replayer_valid {
() => (
$crate::with_mut_replayer(|r| {
r.verify(expectest::core::SourceLocation::new(file!(), line!()));
})
);
}
#[macro_export]
macro_rules! init_replayer {
() => (
set_replayer(VolatileCellReplayer::new());
);
}<|fim▁end|> | GLOBAL_REPLAYER.with(|gr| { gr.borrow_mut().set_cell(transmute(&self.value), value) })
}
}
} |
<|file_name|>lib.rs<|end_file_name|><|fim▁begin|>//! An asynchronous implementation of [STUN][RFC 5389] server and client.
//!
//! # Examples
//!
//! An example that issues a `BINDING` request:
//!
//! ```
//! # extern crate fibers_global;
//! # extern crate fibers_transport;
//! # extern crate futures;
//! # extern crate rustun;
//! # extern crate stun_codec;
//! # extern crate trackable;
//! use fibers_transport::UdpTransporter;
//! use futures::Future;
//! use rustun::channel::Channel;
//! use rustun::client::Client;
//! use rustun::message::Request;
//! use rustun::server::{BindingHandler, UdpServer};
//! use rustun::transport::StunUdpTransporter;
//! use rustun::Error;
//! use stun_codec::{rfc5389, MessageDecoder, MessageEncoder};
//!
//! # fn main() -> Result<(), trackable::error::MainError> {
//! let addr = "127.0.0.1:0".parse().unwrap();
//!
//! // Starts UDP server
//! let server = fibers_global::execute(UdpServer::start(fibers_global::handle(), addr, BindingHandler))?;
//! let server_addr = server.local_addr();
//! fibers_global::spawn(server.map(|_| ()).map_err(|e| panic!("{}", e)));
//!
//! // Sents BINDING request
//! let response = UdpTransporter::<MessageEncoder<_>, MessageDecoder<_>>::bind(addr)
//! .map_err(Error::from)
//! .map(StunUdpTransporter::new)
//! .map(Channel::new)
//! .and_then(move |channel| {
//! let client = Client::new(&fibers_global::handle(), channel);
//! let request = Request::<rfc5389::Attribute>::new(rfc5389::methods::BINDING);
//! client.call(server_addr, request)
//! });
//!
//! // Waits BINDING response<|fim▁hole|>//! let response = fibers_global::execute(response)?;
//! assert!(response.is_ok());
//! # Ok(())
//! # }
//! ```
//!
//! You can run example server and client that handle `BINDING` method as follows:
//!
//! ```console
//! // Starts the STUN server in a shell.
//! $ cargo run --example binding_srv
//!
//! // Executes a STUN client in another shell.
//! $ cargo run --example binding_cli -- 127.0.0.1
//! Ok(SuccessResponse(Message {
//! class: SuccessResponse,
//! method: Method(1),
//! transaction_id: TransactionId(0x344A403694972F5E53B69465),
//! attributes: [Known { inner: XorMappedAddress(XorMappedAddress(V4(127.0.0.1:54754))),
//! padding: Some(Padding([])) }]
//! }))
//! ```
//!
//! # References
//!
//! - [RFC 5389 - Session Traversal Utilities for NAT (STUN)][RFC 5389]
//!
//! [RFC 5389]: https://tools.ietf.org/html/rfc5389
#![warn(missing_docs)]
extern crate bytecodec;
extern crate factory;
extern crate fibers;
#[cfg(test)]
extern crate fibers_global;
extern crate fibers_timeout_queue;
extern crate fibers_transport;
extern crate futures;
extern crate rand;
extern crate stun_codec;
#[macro_use]
extern crate trackable;
pub use error::{Error, ErrorKind};
pub mod channel;
pub mod client;
pub mod message;
pub mod server;
pub mod transport;
mod error;
/// A specialized `Result` type for this crate.
pub type Result<T> = std::result::Result<T, Error>;
#[cfg(test)]
mod tests {
use crate::channel::Channel;
use crate::client::Client;
use crate::message::Request;
use crate::server::{BindingHandler, TcpServer, UdpServer};
use crate::transport::{StunTcpTransporter, StunUdpTransporter};
use crate::Error;
use factory::DefaultFactory;
use fibers_global;
use fibers_transport::{TcpTransporter, UdpTransporter};
use futures::Future;
use std::thread;
use std::time::Duration;
use stun_codec::rfc5389;
use stun_codec::{MessageDecoder, MessageEncoder};
use trackable::error::MainError;
#[test]
fn basic_udp_test() -> Result<(), MainError> {
let server = fibers_global::execute(UdpServer::start(
fibers_global::handle(),
"127.0.0.1:0".parse().unwrap(),
BindingHandler,
))?;
let server_addr = server.local_addr();
fibers_global::spawn(server.map(|_| ()).map_err(|e| panic!("{}", e)));
let client_addr = "127.0.0.1:0".parse().unwrap();
let response = UdpTransporter::<MessageEncoder<_>, MessageDecoder<_>>::bind(client_addr)
.map_err(Error::from)
.map(StunUdpTransporter::new)
.map(Channel::new)
.and_then(move |channel| {
let client = Client::new(&fibers_global::handle(), channel);
let request = Request::<rfc5389::Attribute>::new(rfc5389::methods::BINDING);
client.call(server_addr, request)
});
let response = track!(fibers_global::execute(response))?;
assert!(response.is_ok());
Ok(())
}
#[test]
fn basic_tcp_test() -> Result<(), MainError> {
let server = fibers_global::execute(TcpServer::start(
fibers_global::handle(),
"127.0.0.1:0".parse().unwrap(),
DefaultFactory::<BindingHandler>::new(),
))?;
let server_addr = server.local_addr();
fibers_global::spawn(server.map(|_| ()).map_err(|e| panic!("{}", e)));
thread::sleep(Duration::from_millis(50));
let response = TcpTransporter::<MessageEncoder<_>, MessageDecoder<_>>::connect(server_addr)
.map_err(Error::from)
.map(StunTcpTransporter::new)
.map(Channel::new)
.and_then(move |channel| {
let client = Client::new(&fibers_global::handle(), channel);
let request = Request::<rfc5389::Attribute>::new(rfc5389::methods::BINDING);
client.call((), request)
});
let response = track!(fibers_global::execute(response))?;
assert!(response.is_ok());
Ok(())
}
}<|fim▁end|> | |
<|file_name|>element.rs<|end_file_name|><|fim▁begin|>/* This Source Code Form is subject to the terms of the Mozilla Public
* License, v. 2.0. If a copy of the MPL was not distributed with this
* file, You can obtain one at http://mozilla.org/MPL/2.0/. */
//! Element nodes.
use app_units::Au;
use cssparser::Color;
use devtools_traits::AttrInfo;
use dom::activation::Activatable;
use dom::attr::AttrValue;
use dom::attr::{Attr, AttrHelpersForLayout};
use dom::bindings::cell::DOMRefCell;
use dom::bindings::codegen::Bindings::AttrBinding::AttrMethods;
use dom::bindings::codegen::Bindings::ElementBinding;
use dom::bindings::codegen::Bindings::ElementBinding::ElementMethods;
use dom::bindings::codegen::Bindings::EventBinding::EventMethods;
use dom::bindings::codegen::Bindings::HTMLInputElementBinding::HTMLInputElementMethods;
use dom::bindings::codegen::Bindings::HTMLTemplateElementBinding::HTMLTemplateElementMethods;
use dom::bindings::codegen::Bindings::NamedNodeMapBinding::NamedNodeMapMethods;
use dom::bindings::codegen::Bindings::NodeBinding::NodeMethods;
use dom::bindings::codegen::UnionTypes::NodeOrString;
use dom::bindings::error::{Error, ErrorResult, Fallible};
use dom::bindings::global::GlobalRef;
use dom::bindings::inheritance::{Castable, ElementTypeId, HTMLElementTypeId, NodeTypeId};
use dom::bindings::js::{JS, LayoutJS, MutNullableHeap};
use dom::bindings::js::{Root, RootedReference};
use dom::bindings::trace::JSTraceable;
use dom::bindings::xmlname::XMLName::InvalidXMLName;
use dom::bindings::xmlname::{namespace_from_domstring, validate_and_extract, xml_name_type};
use dom::characterdata::CharacterData;
use dom::create::create_element;
use dom::document::{Document, LayoutDocumentHelpers};
use dom::domrect::DOMRect;
use dom::domrectlist::DOMRectList;
use dom::domtokenlist::DOMTokenList;
use dom::event::Event;
use dom::htmlanchorelement::HTMLAnchorElement;
use dom::htmlbodyelement::HTMLBodyElement;
use dom::htmlcollection::HTMLCollection;
use dom::htmlfieldsetelement::HTMLFieldSetElement;
use dom::htmlfontelement::HTMLFontElement;
use dom::htmliframeelement::HTMLIFrameElement;
use dom::htmlinputelement::{HTMLInputElement, LayoutHTMLInputElementHelpers};
use dom::htmllabelelement::HTMLLabelElement;
use dom::htmllegendelement::HTMLLegendElement;
use dom::htmloptgroupelement::HTMLOptGroupElement;
use dom::htmltablecellelement::{HTMLTableCellElement, HTMLTableCellElementLayoutHelpers};
use dom::htmltableelement::HTMLTableElement;
use dom::htmltablerowelement::HTMLTableRowElement;
use dom::htmltablesectionelement::HTMLTableSectionElement;
use dom::htmltemplateelement::HTMLTemplateElement;
use dom::htmltextareaelement::{HTMLTextAreaElement, RawLayoutHTMLTextAreaElementHelpers};
use dom::namednodemap::NamedNodeMap;
use dom::node::{CLICK_IN_PROGRESS, LayoutNodeHelpers, Node};
use dom::node::{NodeDamage, SEQUENTIALLY_FOCUSABLE};
use dom::node::{document_from_node, window_from_node};
use dom::nodelist::NodeList;
use dom::text::Text;
use dom::virtualmethods::{VirtualMethods, vtable_for};
use html5ever::serialize;
use html5ever::serialize::SerializeOpts;
use html5ever::serialize::TraversalScope;
use html5ever::serialize::TraversalScope::{ChildrenOnly, IncludeNode};
use html5ever::tree_builder::{LimitedQuirks, NoQuirks, Quirks};
use selectors::matching::{DeclarationBlock, matches};
use selectors::parser::{AttrSelector, NamespaceConstraint, parse_author_origin_selector_list_from_str};
use selectors::states::*;
use smallvec::VecLike;
use std::ascii::AsciiExt;
use std::borrow::{Cow, ToOwned};
use std::cell::{Cell, Ref};
use std::default::Default;
use std::mem;
use std::sync::Arc;
use string_cache::{Atom, Namespace, QualName};
use style::legacy::{UnsignedIntegerAttribute, from_declaration};
use style::properties::DeclaredValue;
use style::properties::longhands::{self, background_image, border_spacing, font_family, font_size};
use style::properties::{PropertyDeclaration, PropertyDeclarationBlock, parse_style_attribute};
use style::values::CSSFloat;
use style::values::specified::{self, CSSColor, CSSRGBA, LengthOrPercentage};
use url::UrlParser;
use util::mem::HeapSizeOf;
use util::str::{DOMString, LengthOrPercentageOrAuto};
// TODO: Update focus state when the top-level browsing context gains or loses system focus,
// and when the element enters or leaves a browsing context container.
// https://html.spec.whatwg.org/multipage/#selector-focus
#[dom_struct]
pub struct Element {
node: Node,
local_name: Atom,
namespace: Namespace,
prefix: Option<DOMString>,
attrs: DOMRefCell<Vec<JS<Attr>>>,
id_attribute: DOMRefCell<Option<Atom>>,
style_attribute: DOMRefCell<Option<PropertyDeclarationBlock>>,
attr_list: MutNullableHeap<JS<NamedNodeMap>>,
class_list: MutNullableHeap<JS<DOMTokenList>>,
state: Cell<ElementState>,
}
impl PartialEq for Element {
fn eq(&self, other: &Element) -> bool {
self as *const Element == &*other
}
}
#[derive(PartialEq, HeapSizeOf)]
pub enum ElementCreator {
ParserCreated,
ScriptCreated,
}
//
// Element methods
//
impl Element {
pub fn create(name: QualName, prefix: Option<Atom>,
document: &Document, creator: ElementCreator)
-> Root<Element> {
create_element(name, prefix, document, creator)
}
pub fn new_inherited(local_name: DOMString,
namespace: Namespace, prefix: Option<DOMString>,
document: &Document) -> Element {
Element::new_inherited_with_state(ElementState::empty(), local_name,
namespace, prefix, document)
}
pub fn new_inherited_with_state(state: ElementState, local_name: DOMString,
namespace: Namespace, prefix: Option<DOMString>,
document: &Document)
-> Element {
Element {
node: Node::new_inherited(document),
local_name: Atom::from_slice(&local_name),
namespace: namespace,
prefix: prefix,
attrs: DOMRefCell::new(vec!()),
id_attribute: DOMRefCell::new(None),
style_attribute: DOMRefCell::new(None),
attr_list: Default::default(),
class_list: Default::default(),
state: Cell::new(state),
}
}
pub fn new(local_name: DOMString,
namespace: Namespace,
prefix: Option<DOMString>,
document: &Document) -> Root<Element> {
Node::reflect_node(
box Element::new_inherited(local_name, namespace, prefix, document),
document,
ElementBinding::Wrap)
}
}
#[allow(unsafe_code)]
pub trait RawLayoutElementHelpers {
unsafe fn get_attr_for_layout<'a>(&'a self, namespace: &Namespace, name: &Atom)
-> Option<&'a AttrValue>;
unsafe fn get_attr_val_for_layout<'a>(&'a self, namespace: &Namespace, name: &Atom)
-> Option<&'a str>;
unsafe fn get_attr_vals_for_layout<'a>(&'a self, name: &Atom) -> Vec<&'a str>;
}
#[inline]
#[allow(unsafe_code)]
pub unsafe fn get_attr_for_layout<'a>(elem: &'a Element, namespace: &Namespace, name: &Atom)
-> Option<LayoutJS<Attr>> {
// cast to point to T in RefCell<T> directly
let attrs = elem.attrs.borrow_for_layout();
attrs.iter().find(|attr| {
let attr = attr.to_layout();
*name == attr.local_name_atom_forever() &&
(*attr.unsafe_get()).namespace() == namespace
}).map(|attr| attr.to_layout())
}
#[allow(unsafe_code)]
impl RawLayoutElementHelpers for Element {
#[inline]
unsafe fn get_attr_for_layout<'a>(&'a self, namespace: &Namespace, name: &Atom)
-> Option<&'a AttrValue> {
get_attr_for_layout(self, namespace, name).map(|attr| {
attr.value_forever()
})
}
unsafe fn get_attr_val_for_layout<'a>(&'a self, namespace: &Namespace, name: &Atom)
-> Option<&'a str> {
get_attr_for_layout(self, namespace, name).map(|attr| {
attr.value_ref_forever()
})
}
#[inline]
unsafe fn get_attr_vals_for_layout<'a>(&'a self, name: &Atom) -> Vec<&'a str> {
let attrs = self.attrs.borrow_for_layout();
attrs.iter().filter_map(|attr| {
let attr = attr.to_layout();
if *name == attr.local_name_atom_forever() {
Some(attr.value_ref_forever())
} else {
None
}
}).collect()
}
}
pub trait LayoutElementHelpers {
#[allow(unsafe_code)]
unsafe fn get_attr_atom_for_layout(&self, namespace: &Namespace, name: &Atom) -> Option<Atom>;
#[allow(unsafe_code)]
unsafe fn has_class_for_layout(&self, name: &Atom) -> bool;
#[allow(unsafe_code)]
unsafe fn get_classes_for_layout(&self) -> Option<&'static [Atom]>;
#[allow(unsafe_code)]
unsafe fn synthesize_presentational_hints_for_legacy_attributes<V>(&self, &mut V)
where V: VecLike<DeclarationBlock<Vec<PropertyDeclaration>>>;
#[allow(unsafe_code)]
unsafe fn get_unsigned_integer_attribute_for_layout(&self, attribute: UnsignedIntegerAttribute)
-> Option<u32>;
#[allow(unsafe_code)]
unsafe fn html_element_in_html_document_for_layout(&self) -> bool;
#[allow(unsafe_code)]
unsafe fn has_attr_for_layout(&self, namespace: &Namespace, name: &Atom) -> bool;
fn id_attribute(&self) -> *const Option<Atom>;
fn style_attribute(&self) -> *const Option<PropertyDeclarationBlock>;
fn local_name(&self) -> &Atom;
fn namespace(&self) -> &Namespace;
fn get_checked_state_for_layout(&self) -> bool;
fn get_indeterminate_state_for_layout(&self) -> bool;
fn get_state_for_layout(&self) -> ElementState;
}
impl LayoutElementHelpers for LayoutJS<Element> {
#[allow(unsafe_code)]
#[inline]
unsafe fn get_attr_atom_for_layout(&self, namespace: &Namespace, name: &Atom)
-> Option<Atom> {
get_attr_for_layout(&*self.unsafe_get(), namespace, name).and_then(|attr| {
attr.value_atom_forever()
})
}
#[allow(unsafe_code)]
#[inline]
unsafe fn has_class_for_layout(&self, name: &Atom) -> bool {
get_attr_for_layout(&*self.unsafe_get(), &ns!(""), &atom!("class")).map_or(false, |attr| {
attr.value_tokens_forever().unwrap().iter().any(|atom| atom == name)
})
}
#[allow(unsafe_code)]
#[inline]
unsafe fn get_classes_for_layout(&self) -> Option<&'static [Atom]> {
get_attr_for_layout(&*self.unsafe_get(), &ns!(""), &atom!("class")).map(|attr| {
attr.value_tokens_forever().unwrap()
})
}
#[allow(unsafe_code)]
unsafe fn synthesize_presentational_hints_for_legacy_attributes<V>(&self, hints: &mut V)
where V: VecLike<DeclarationBlock<Vec<PropertyDeclaration>>>
{
let bgcolor = if let Some(this) = self.downcast::<HTMLBodyElement>() {
(*this.unsafe_get()).get_background_color()
} else if let Some(this) = self.downcast::<HTMLTableElement>() {
(*this.unsafe_get()).get_background_color()
} else if let Some(this) = self.downcast::<HTMLTableCellElement>() {
this.get_background_color()
} else if let Some(this) = self.downcast::<HTMLTableRowElement>() {
(*this.unsafe_get()).get_background_color()
} else if let Some(this) = self.downcast::<HTMLTableSectionElement>() {
(*this.unsafe_get()).get_background_color()
} else {
None
};
if let Some(color) = bgcolor {
hints.push(from_declaration(
PropertyDeclaration::BackgroundColor(DeclaredValue::Value(
CSSColor { parsed: Color::RGBA(color), authored: None }))));
}
let background = if let Some(this) = self.downcast::<HTMLBodyElement>() {
(*this.unsafe_get()).get_background()
} else {
None
};
if let Some(url) = background {
hints.push(from_declaration(
PropertyDeclaration::BackgroundImage(DeclaredValue::Value(
background_image::SpecifiedValue(Some(specified::Image::Url(url)))))));
}
let color = if let Some(this) = self.downcast::<HTMLFontElement>() {
(*this.unsafe_get()).get_color()
} else if let Some(this) = self.downcast::<HTMLBodyElement>() {
// https://html.spec.whatwg.org/multipage/#the-page:the-body-element-20
(*this.unsafe_get()).get_color()
} else {
None
};
if let Some(color) = color {
hints.push(from_declaration(
PropertyDeclaration::Color(DeclaredValue::Value(CSSRGBA {
parsed: color,
authored: None,
}))));
}
let font_family = if let Some(this) = self.downcast::<HTMLFontElement>() {
(*this.unsafe_get()).get_face()
} else {
None
};
if let Some(font_family) = font_family {
hints.push(from_declaration(
PropertyDeclaration::FontFamily(
DeclaredValue::Value(
font_family::computed_value::T(vec![
font_family::computed_value::FontFamily::FamilyName(
font_family)])))));
}
let font_size = if let Some(this) = self.downcast::<HTMLFontElement>() {
(*this.unsafe_get()).get_size()
} else {
None
};
if let Some(font_size) = font_size {
hints.push(from_declaration(
PropertyDeclaration::FontSize(
DeclaredValue::Value(
font_size::SpecifiedValue(
LengthOrPercentage::Length(font_size))))))
}
let cellspacing = if let Some(this) = self.downcast::<HTMLTableElement>() {
(*this.unsafe_get()).get_cellspacing()
} else {
None
};
if let Some(cellspacing) = cellspacing {
let width_value = specified::Length::Absolute(Au::from_px(cellspacing as i32));
hints.push(from_declaration(
PropertyDeclaration::BorderSpacing(DeclaredValue::Value(
border_spacing::SpecifiedValue {
horizontal: width_value,
vertical: width_value,
}))));
}
let size = if let Some(this) = self.downcast::<HTMLInputElement>() {
// FIXME(pcwalton): More use of atoms, please!
// FIXME(Ms2ger): this is nonsense! Invalid values also end up as
// a text field
match (*self.unsafe_get()).get_attr_val_for_layout(&ns!(""), &atom!("type")) {
Some("text") | Some("password") => {
match this.get_size_for_layout() {
0 => None,
s => Some(s as i32),
}
}
_ => None
}
} else {
None
};
if let Some(size) = size {
let value = specified::Length::ServoCharacterWidth(
specified::CharacterWidth(size));
hints.push(from_declaration(
PropertyDeclaration::Width(DeclaredValue::Value(
specified::LengthOrPercentageOrAuto::Length(value)))));
}
let width = if let Some(this) = self.downcast::<HTMLIFrameElement>() {
(*this.unsafe_get()).get_width()
} else if let Some(this) = self.downcast::<HTMLTableElement>() {
(*this.unsafe_get()).get_width()
} else if let Some(this) = self.downcast::<HTMLTableCellElement>() {
this.get_width()
} else {
LengthOrPercentageOrAuto::Auto
};
match width {
LengthOrPercentageOrAuto::Auto => {}
LengthOrPercentageOrAuto::Percentage(percentage) => {
let width_value =
specified::LengthOrPercentageOrAuto::Percentage(specified::Percentage(percentage));
hints.push(from_declaration(
PropertyDeclaration::Width(DeclaredValue::Value(width_value))));
}
LengthOrPercentageOrAuto::Length(length) => {<|fim▁hole|> specified::Length::Absolute(length));
hints.push(from_declaration(
PropertyDeclaration::Width(DeclaredValue::Value(width_value))));
}
}
let height = if let Some(this) = self.downcast::<HTMLIFrameElement>() {
(*this.unsafe_get()).get_height()
} else {
LengthOrPercentageOrAuto::Auto
};
match height {
LengthOrPercentageOrAuto::Auto => {}
LengthOrPercentageOrAuto::Percentage(percentage) => {
let height_value =
specified::LengthOrPercentageOrAuto::Percentage(specified::Percentage(percentage));
hints.push(from_declaration(
PropertyDeclaration::Height(DeclaredValue::Value(height_value))));
}
LengthOrPercentageOrAuto::Length(length) => {
let height_value = specified::LengthOrPercentageOrAuto::Length(
specified::Length::Absolute(length));
hints.push(from_declaration(
PropertyDeclaration::Height(DeclaredValue::Value(height_value))));
}
}
let cols = if let Some(this) = self.downcast::<HTMLTextAreaElement>() {
match (*this.unsafe_get()).get_cols_for_layout() {
0 => None,
c => Some(c as i32),
}
} else {
None
};
if let Some(cols) = cols {
// TODO(mttr) ServoCharacterWidth uses the size math for <input type="text">, but
// the math for <textarea> is a little different since we need to take
// scrollbar size into consideration (but we don't have a scrollbar yet!)
//
// https://html.spec.whatwg.org/multipage/#textarea-effective-width
let value = specified::Length::ServoCharacterWidth(specified::CharacterWidth(cols));
hints.push(from_declaration(
PropertyDeclaration::Width(DeclaredValue::Value(
specified::LengthOrPercentageOrAuto::Length(value)))));
}
let rows = if let Some(this) = self.downcast::<HTMLTextAreaElement>() {
match (*this.unsafe_get()).get_rows_for_layout() {
0 => None,
r => Some(r as i32),
}
} else {
None
};
if let Some(rows) = rows {
// TODO(mttr) This should take scrollbar size into consideration.
//
// https://html.spec.whatwg.org/multipage/#textarea-effective-height
let value = specified::Length::FontRelative(specified::FontRelativeLength::Em(rows as CSSFloat));
hints.push(from_declaration(
PropertyDeclaration::Height(DeclaredValue::Value(
specified::LengthOrPercentageOrAuto::Length(value)))));
}
let border = if let Some(this) = self.downcast::<HTMLTableElement>() {
(*this.unsafe_get()).get_border()
} else {
None
};
if let Some(border) = border {
let width_value = specified::Length::Absolute(Au::from_px(border as i32));
hints.push(from_declaration(
PropertyDeclaration::BorderTopWidth(DeclaredValue::Value(
longhands::border_top_width::SpecifiedValue(width_value)))));
hints.push(from_declaration(
PropertyDeclaration::BorderLeftWidth(DeclaredValue::Value(
longhands::border_left_width::SpecifiedValue(width_value)))));
hints.push(from_declaration(
PropertyDeclaration::BorderBottomWidth(DeclaredValue::Value(
longhands::border_bottom_width::SpecifiedValue(width_value)))));
hints.push(from_declaration(
PropertyDeclaration::BorderRightWidth(DeclaredValue::Value(
longhands::border_right_width::SpecifiedValue(width_value)))));
}
}
#[allow(unsafe_code)]
unsafe fn get_unsigned_integer_attribute_for_layout(&self,
attribute: UnsignedIntegerAttribute)
-> Option<u32> {
match attribute {
UnsignedIntegerAttribute::ColSpan => {
if let Some(this) = self.downcast::<HTMLTableCellElement>() {
this.get_colspan()
} else {
// Don't panic since `display` can cause this to be called on arbitrary
// elements.
None
}
}
}
}
#[inline]
#[allow(unsafe_code)]
unsafe fn html_element_in_html_document_for_layout(&self) -> bool {
if (*self.unsafe_get()).namespace != ns!(HTML) {
return false
}
self.upcast::<Node>().owner_doc_for_layout().is_html_document_for_layout()
}
#[allow(unsafe_code)]
unsafe fn has_attr_for_layout(&self, namespace: &Namespace, name: &Atom) -> bool {
get_attr_for_layout(&*self.unsafe_get(), namespace, name).is_some()
}
#[allow(unsafe_code)]
fn id_attribute(&self) -> *const Option<Atom> {
unsafe {
(*self.unsafe_get()).id_attribute.borrow_for_layout()
}
}
#[allow(unsafe_code)]
fn style_attribute(&self) -> *const Option<PropertyDeclarationBlock> {
unsafe {
(*self.unsafe_get()).style_attribute.borrow_for_layout()
}
}
#[allow(unsafe_code)]
fn local_name(&self) -> &Atom {
unsafe {
&(*self.unsafe_get()).local_name
}
}
#[allow(unsafe_code)]
fn namespace(&self) -> &Namespace {
unsafe {
&(*self.unsafe_get()).namespace
}
}
#[inline]
#[allow(unsafe_code)]
fn get_checked_state_for_layout(&self) -> bool {
// TODO option and menuitem can also have a checked state.
match self.downcast::<HTMLInputElement>() {
Some(input) => unsafe {
input.get_checked_state_for_layout()
},
None => false,
}
}
#[inline]
#[allow(unsafe_code)]
fn get_indeterminate_state_for_layout(&self) -> bool {
// TODO progress elements can also be matched with :indeterminate
match self.downcast::<HTMLInputElement>() {
Some(input) => unsafe {
input.get_indeterminate_state_for_layout()
},
None => false,
}
}
#[inline]
#[allow(unsafe_code)]
fn get_state_for_layout(&self) -> ElementState {
unsafe {
(*self.unsafe_get()).state.get()
}
}
}
#[derive(PartialEq, Eq, Copy, Clone, HeapSizeOf)]
pub enum StylePriority {
Important,
Normal,
}
impl Element {
pub fn html_element_in_html_document(&self) -> bool {
self.namespace == ns!(HTML) && self.upcast::<Node>().is_in_html_doc()
}
pub fn local_name(&self) -> &Atom {
&self.local_name
}
pub fn parsed_name(&self, mut name: DOMString) -> Atom {
if self.html_element_in_html_document() {
name.make_ascii_lowercase();
}
Atom::from_slice(&name)
}
pub fn namespace(&self) -> &Namespace {
&self.namespace
}
pub fn prefix(&self) -> &Option<DOMString> {
&self.prefix
}
pub fn attrs(&self) -> Ref<Vec<JS<Attr>>> {
self.attrs.borrow()
}
pub fn style_attribute(&self) -> &DOMRefCell<Option<PropertyDeclarationBlock>> {
&self.style_attribute
}
pub fn summarize(&self) -> Vec<AttrInfo> {
let attrs = self.Attributes();
let mut summarized = vec!();
for i in 0..attrs.Length() {
let attr = attrs.Item(i).unwrap();
summarized.push(attr.summarize());
}
summarized
}
pub fn is_void(&self) -> bool {
if self.namespace != ns!(HTML) {
return false
}
match &*self.local_name {
/* List of void elements from
https://html.spec.whatwg.org/multipage/#html-fragment-serialisation-algorithm */
"area" | "base" | "basefont" | "bgsound" | "br" | "col" | "embed" |
"frame" | "hr" | "img" | "input" | "keygen" | "link" | "menuitem" |
"meta" | "param" | "source" | "track" | "wbr" => true,
_ => false
}
}
pub fn remove_inline_style_property(&self, property: &str) {
let mut inline_declarations = self.style_attribute.borrow_mut();
if let &mut Some(ref mut declarations) = &mut *inline_declarations {
let index = declarations.normal
.iter()
.position(|decl| decl.matches(property));
if let Some(index) = index {
Arc::make_mut(&mut declarations.normal).remove(index);
return;
}
let index = declarations.important
.iter()
.position(|decl| decl.matches(property));
if let Some(index) = index {
Arc::make_mut(&mut declarations.important).remove(index);
return;
}
}
}
pub fn update_inline_style(&self,
property_decl: PropertyDeclaration,
style_priority: StylePriority) {
let mut inline_declarations = self.style_attribute().borrow_mut();
if let &mut Some(ref mut declarations) = &mut *inline_declarations {
let existing_declarations = if style_priority == StylePriority::Important {
&mut declarations.important
} else {
&mut declarations.normal
};
// Usually, the reference count will be 1 here. But transitions could make it greater
// than that.
let existing_declarations = Arc::make_mut(existing_declarations);
for declaration in &mut *existing_declarations {
if declaration.name() == property_decl.name() {
*declaration = property_decl;
return;
}
}
existing_declarations.push(property_decl);
return;
}
let (important, normal) = if style_priority == StylePriority::Important {
(vec!(property_decl), vec!())
} else {
(vec!(), vec!(property_decl))
};
*inline_declarations = Some(PropertyDeclarationBlock {
important: Arc::new(important),
normal: Arc::new(normal),
});
}
pub fn set_inline_style_property_priority(&self, properties: &[&str], style_priority: StylePriority) {
let mut inline_declarations = self.style_attribute().borrow_mut();
if let &mut Some(ref mut declarations) = &mut *inline_declarations {
let (from, to) = if style_priority == StylePriority::Important {
(&mut declarations.normal, &mut declarations.important)
} else {
(&mut declarations.important, &mut declarations.normal)
};
// Usually, the reference counts of `from` and `to` will be 1 here. But transitions
// could make them greater than that.
let from = Arc::make_mut(from);
let to = Arc::make_mut(to);
let mut new_from = Vec::new();
for declaration in from.drain(..) {
let name = declaration.name();
if properties.iter().any(|p| name == **p) {
to.push(declaration)
} else {
new_from.push(declaration)
}
}
mem::replace(from, new_from);
}
}
pub fn get_inline_style_declaration(&self, property: &Atom) -> Option<Ref<PropertyDeclaration>> {
Ref::filter_map(self.style_attribute.borrow(), |inline_declarations| {
inline_declarations.as_ref().and_then(|declarations| {
declarations.normal
.iter()
.chain(declarations.important.iter())
.find(|decl| decl.matches(&property))
})
})
}
pub fn get_important_inline_style_declaration(&self, property: &Atom)
-> Option<Ref<PropertyDeclaration>> {
Ref::filter_map(self.style_attribute.borrow(), |inline_declarations| {
inline_declarations.as_ref().and_then(|declarations| {
declarations.important
.iter()
.find(|decl| decl.matches(&property))
})
})
}
pub fn serialize(&self, traversal_scope: TraversalScope) -> Fallible<DOMString> {
let mut writer = vec![];
match serialize(&mut writer, &self.upcast::<Node>(),
SerializeOpts {
traversal_scope: traversal_scope,
.. Default::default()
}) {
Ok(()) => Ok(DOMString(String::from_utf8(writer).unwrap())),
Err(_) => panic!("Cannot serialize element"),
}
}
// https://html.spec.whatwg.org/multipage/#root-element
pub fn get_root_element(&self) -> Root<Element> {
self.upcast::<Node>().inclusive_ancestors().filter_map(Root::downcast).last()
.expect("We know inclusive_ancestors will return `self` which is an element")
}
// https://dom.spec.whatwg.org/#locate-a-namespace-prefix
pub fn lookup_prefix(&self, namespace: Namespace) -> Option<DOMString> {
for node in self.upcast::<Node>().inclusive_ancestors() {
match node.downcast::<Element>() {
Some(element) => {
// Step 1.
if *element.namespace() == namespace {
if let Some(prefix) = element.GetPrefix() {
return Some(prefix);
}
}
// Step 2.
let attrs = element.Attributes();
for i in 0..attrs.Length() {
let attr = attrs.Item(i).unwrap();
if *attr.prefix() == Some(atom!("xmlns")) &&
**attr.value() == *namespace.0 {
return Some(attr.LocalName());
}
}
},
None => return None,
}
}
None
}
}
impl Element {
pub fn is_focusable_area(&self) -> bool {
if self.is_actually_disabled() {
return false;
}
// TODO: Check whether the element is being rendered (i.e. not hidden).
let node = self.upcast::<Node>();
if node.get_flag(SEQUENTIALLY_FOCUSABLE) {
return true;
}
// https://html.spec.whatwg.org/multipage/#specially-focusable
match node.type_id() {
NodeTypeId::Element(ElementTypeId::HTMLElement(HTMLElementTypeId::HTMLAnchorElement)) |
NodeTypeId::Element(ElementTypeId::HTMLElement(HTMLElementTypeId::HTMLInputElement)) |
NodeTypeId::Element(ElementTypeId::HTMLElement(HTMLElementTypeId::HTMLSelectElement)) |
NodeTypeId::Element(ElementTypeId::HTMLElement(HTMLElementTypeId::HTMLTextAreaElement)) => {
true
}
_ => false
}
}
pub fn is_actually_disabled(&self) -> bool {
let node = self.upcast::<Node>();
match node.type_id() {
NodeTypeId::Element(ElementTypeId::HTMLElement(HTMLElementTypeId::HTMLButtonElement)) |
NodeTypeId::Element(ElementTypeId::HTMLElement(HTMLElementTypeId::HTMLInputElement)) |
NodeTypeId::Element(ElementTypeId::HTMLElement(HTMLElementTypeId::HTMLSelectElement)) |
NodeTypeId::Element(ElementTypeId::HTMLElement(HTMLElementTypeId::HTMLTextAreaElement)) |
NodeTypeId::Element(ElementTypeId::HTMLElement(HTMLElementTypeId::HTMLOptionElement)) => {
self.get_disabled_state()
}
// TODO:
// an optgroup element that has a disabled attribute
// a menuitem element that has a disabled attribute
// a fieldset element that is a disabled fieldset
_ => false
}
}
}
impl Element {
pub fn push_new_attribute(&self,
local_name: Atom,
value: AttrValue,
name: Atom,
namespace: Namespace,
prefix: Option<Atom>) {
let window = window_from_node(self);
let in_empty_ns = namespace == ns!("");
let attr = Attr::new(&window, local_name, value, name, namespace, prefix, Some(self));
self.attrs.borrow_mut().push(JS::from_rooted(&attr));
if in_empty_ns {
vtable_for(self.upcast()).attribute_mutated(&attr, AttributeMutation::Set(None));
}
}
pub fn get_attribute(&self, namespace: &Namespace, local_name: &Atom) -> Option<Root<Attr>> {
self.attrs.borrow().iter().find(|attr| {
attr.local_name() == local_name && attr.namespace() == namespace
}).map(|js| Root::from_ref(&**js))
}
// https://dom.spec.whatwg.org/#concept-element-attributes-get-by-name
pub fn get_attribute_by_name(&self, name: DOMString) -> Option<Root<Attr>> {
let name = &self.parsed_name(name);
self.attrs.borrow().iter().find(|a| a.name() == name).map(|js| Root::from_ref(&**js))
}
pub fn set_attribute_from_parser(&self,
qname: QualName,
value: DOMString,
prefix: Option<Atom>) {
// Don't set if the attribute already exists, so we can handle add_attrs_if_missing
if self.attrs.borrow().iter()
.any(|a| *a.local_name() == qname.local && *a.namespace() == qname.ns) {
return;
}
let name = match prefix {
None => qname.local.clone(),
Some(ref prefix) => {
let name = format!("{}:{}", &**prefix, &*qname.local);
Atom::from_slice(&name)
},
};
let value = self.parse_attribute(&qname.ns, &qname.local, value);
self.push_new_attribute(qname.local, value, name, qname.ns, prefix);
}
pub fn set_attribute(&self, name: &Atom, value: AttrValue) {
assert!(&**name == name.to_ascii_lowercase());
assert!(!name.contains(":"));
self.set_first_matching_attribute(
name.clone(), value, name.clone(), ns!(""), None,
|attr| attr.local_name() == name);
}
// https://html.spec.whatwg.org/multipage/#attr-data-*
pub fn set_custom_attribute(&self, name: DOMString, value: DOMString) -> ErrorResult {
// Step 1.
match xml_name_type(&name) {
InvalidXMLName => return Err(Error::InvalidCharacter),
_ => {}
}
// Steps 2-5.
let name = Atom::from_slice(&name);
let value = self.parse_attribute(&ns!(""), &name, value);
self.set_first_matching_attribute(
name.clone(), value, name.clone(), ns!(""), None,
|attr| *attr.name() == name && *attr.namespace() == ns!(""));
Ok(())
}
fn set_first_matching_attribute<F>(&self,
local_name: Atom,
value: AttrValue,
name: Atom,
namespace: Namespace,
prefix: Option<Atom>,
find: F)
where F: Fn(&Attr)
-> bool {
let attr = self.attrs.borrow().iter()
.find(|attr| find(&attr)).map(|js| Root::from_ref(&**js));
if let Some(attr) = attr {
attr.set_value(value, self);
} else {
self.push_new_attribute(local_name, value, name, namespace, prefix);
};
}
pub fn parse_attribute(&self, namespace: &Namespace, local_name: &Atom,
value: DOMString) -> AttrValue {
if *namespace == ns!("") {
vtable_for(self.upcast()).parse_plain_attribute(local_name, value)
} else {
AttrValue::String(value)
}
}
pub fn remove_attribute(&self, namespace: &Namespace, local_name: &Atom)
-> Option<Root<Attr>> {
self.remove_first_matching_attribute(|attr| {
attr.namespace() == namespace && attr.local_name() == local_name
})
}
pub fn remove_attribute_by_name(&self, name: &Atom) -> Option<Root<Attr>> {
self.remove_first_matching_attribute(|attr| attr.name() == name)
}
fn remove_first_matching_attribute<F>(&self, find: F) -> Option<Root<Attr>>
where F: Fn(&Attr) -> bool
{
let idx = self.attrs.borrow().iter().position(|attr| find(&attr));
idx.map(|idx| {
let attr = Root::from_ref(&*(*self.attrs.borrow())[idx]);
self.attrs.borrow_mut().remove(idx);
attr.set_owner(None);
if attr.namespace() == &ns!("") {
vtable_for(self.upcast()).attribute_mutated(&attr, AttributeMutation::Removed);
}
attr
})
}
pub fn has_class(&self, name: &Atom) -> bool {
let quirks_mode = document_from_node(self).quirks_mode();
let is_equal = |lhs: &Atom, rhs: &Atom| match quirks_mode {
NoQuirks | LimitedQuirks => lhs == rhs,
Quirks => lhs.eq_ignore_ascii_case(&rhs)
};
self.get_attribute(&ns!(""), &atom!("class")).map(|attr| {
attr.value().as_tokens().iter().any(|atom| is_equal(name, atom))
}).unwrap_or(false)
}
pub fn set_atomic_attribute(&self, local_name: &Atom, value: DOMString) {
assert!(&**local_name == local_name.to_ascii_lowercase());
let value = AttrValue::from_atomic(value);
self.set_attribute(local_name, value);
}
pub fn has_attribute(&self, local_name: &Atom) -> bool {
assert!(local_name.bytes().all(|b| b.to_ascii_lowercase() == b));
self.attrs.borrow().iter().any(|attr| {
attr.local_name() == local_name && attr.namespace() == &ns!("")
})
}
pub fn set_bool_attribute(&self, local_name: &Atom, value: bool) {
if self.has_attribute(local_name) == value { return; }
if value {
self.set_string_attribute(local_name, DOMString::new());
} else {
self.remove_attribute(&ns!(""), local_name);
}
}
pub fn get_url_attribute(&self, local_name: &Atom) -> DOMString {
assert!(&**local_name == local_name.to_ascii_lowercase());
if !self.has_attribute(local_name) {
return DOMString::new();
}
let url = self.get_string_attribute(local_name);
let doc = document_from_node(self);
let base = doc.url();
// https://html.spec.whatwg.org/multipage/#reflect
// XXXManishearth this doesn't handle `javascript:` urls properly
DOMString(match UrlParser::new().base_url(&base).parse(&url) {
Ok(parsed) => parsed.serialize(),
Err(_) => "".to_owned()
})
}
pub fn set_url_attribute(&self, local_name: &Atom, value: DOMString) {
self.set_string_attribute(local_name, value);
}
pub fn get_string_attribute(&self, local_name: &Atom) -> DOMString {
match self.get_attribute(&ns!(""), local_name) {
Some(x) => x.Value(),
None => DOMString::new()
}
}
pub fn set_string_attribute(&self, local_name: &Atom, value: DOMString) {
assert!(&**local_name == local_name.to_ascii_lowercase());
self.set_attribute(local_name, AttrValue::String(value));
}
pub fn get_tokenlist_attribute(&self, local_name: &Atom) -> Vec<Atom> {
self.get_attribute(&ns!(""), local_name).map(|attr| {
attr.r()
.value()
.as_tokens()
.to_vec()
}).unwrap_or(vec!())
}
pub fn set_tokenlist_attribute(&self, local_name: &Atom, value: DOMString) {
assert!(&**local_name == local_name.to_ascii_lowercase());
self.set_attribute(local_name, AttrValue::from_serialized_tokenlist(value));
}
pub fn set_atomic_tokenlist_attribute(&self, local_name: &Atom, tokens: Vec<Atom>) {
assert!(&**local_name == local_name.to_ascii_lowercase());
self.set_attribute(local_name, AttrValue::from_atomic_tokens(tokens));
}
pub fn get_uint_attribute(&self, local_name: &Atom, default: u32) -> u32 {
assert!(local_name.chars().all(|ch| {
!ch.is_ascii() || ch.to_ascii_lowercase() == ch
}));
let attribute = self.get_attribute(&ns!(""), local_name);
match attribute {
Some(ref attribute) => {
match *attribute.value() {
AttrValue::UInt(_, value) => value,
_ => panic!("Expected an AttrValue::UInt: \
implement parse_plain_attribute"),
}
}
None => default,
}
}
pub fn set_uint_attribute(&self, local_name: &Atom, value: u32) {
assert!(&**local_name == local_name.to_ascii_lowercase());
self.set_attribute(local_name, AttrValue::UInt(DOMString(value.to_string()), value));
}
}
impl ElementMethods for Element {
// https://dom.spec.whatwg.org/#dom-element-namespaceuri
fn GetNamespaceURI(&self) -> Option<DOMString> {
Node::namespace_to_string(self.namespace.clone())
}
// https://dom.spec.whatwg.org/#dom-element-localname
fn LocalName(&self) -> DOMString {
DOMString((*self.local_name).to_owned())
}
// https://dom.spec.whatwg.org/#dom-element-prefix
fn GetPrefix(&self) -> Option<DOMString> {
self.prefix.clone()
}
// https://dom.spec.whatwg.org/#dom-element-tagname
fn TagName(&self) -> DOMString {
let qualified_name = match self.prefix {
Some(ref prefix) => {
Cow::Owned(format!("{}:{}", &**prefix, &*self.local_name))
},
None => Cow::Borrowed(&*self.local_name)
};
DOMString(if self.html_element_in_html_document() {
qualified_name.to_ascii_uppercase()
} else {
qualified_name.into_owned()
})
}
// https://dom.spec.whatwg.org/#dom-element-id
fn Id(&self) -> DOMString {
self.get_string_attribute(&atom!("id"))
}
// https://dom.spec.whatwg.org/#dom-element-id
fn SetId(&self, id: DOMString) {
self.set_atomic_attribute(&atom!("id"), id);
}
// https://dom.spec.whatwg.org/#dom-element-classname
fn ClassName(&self) -> DOMString {
self.get_string_attribute(&atom!("class"))
}
// https://dom.spec.whatwg.org/#dom-element-classname
fn SetClassName(&self, class: DOMString) {
self.set_tokenlist_attribute(&atom!("class"), class);
}
// https://dom.spec.whatwg.org/#dom-element-classlist
fn ClassList(&self) -> Root<DOMTokenList> {
self.class_list.or_init(|| DOMTokenList::new(self, &atom!("class")))
}
// https://dom.spec.whatwg.org/#dom-element-attributes
fn Attributes(&self) -> Root<NamedNodeMap> {
self.attr_list.or_init(|| NamedNodeMap::new(&window_from_node(self), self))
}
// https://dom.spec.whatwg.org/#dom-element-getattribute
fn GetAttribute(&self, name: DOMString) -> Option<DOMString> {
self.GetAttributeNode(name)
.map(|s| s.Value())
}
// https://dom.spec.whatwg.org/#dom-element-getattributens
fn GetAttributeNS(&self,
namespace: Option<DOMString>,
local_name: DOMString) -> Option<DOMString> {
self.GetAttributeNodeNS(namespace, local_name)
.map(|attr| attr.Value())
}
// https://dom.spec.whatwg.org/#dom-element-getattributenode
fn GetAttributeNode(&self, name: DOMString) -> Option<Root<Attr>> {
self.get_attribute_by_name(name)
}
// https://dom.spec.whatwg.org/#dom-element-getattributenodens
fn GetAttributeNodeNS(&self,
namespace: Option<DOMString>,
local_name: DOMString) -> Option<Root<Attr>> {
let namespace = &namespace_from_domstring(namespace);
self.get_attribute(namespace, &Atom::from_slice(&local_name))
}
// https://dom.spec.whatwg.org/#dom-element-setattribute
fn SetAttribute(&self,
name: DOMString,
value: DOMString) -> ErrorResult {
// Step 1.
if xml_name_type(&name) == InvalidXMLName {
return Err(Error::InvalidCharacter);
}
// Step 2.
let name = self.parsed_name(name);
// Step 3-5.
let value = self.parse_attribute(&ns!(""), &name, value);
self.set_first_matching_attribute(
name.clone(), value, name.clone(), ns!(""), None,
|attr| *attr.name() == name);
Ok(())
}
// https://dom.spec.whatwg.org/#dom-element-setattributens
fn SetAttributeNS(&self,
namespace: Option<DOMString>,
qualified_name: DOMString,
value: DOMString) -> ErrorResult {
let (namespace, prefix, local_name) =
try!(validate_and_extract(namespace, &qualified_name));
let qualified_name = Atom::from_slice(&qualified_name);
let value = self.parse_attribute(&namespace, &local_name, value);
self.set_first_matching_attribute(
local_name.clone(), value, qualified_name, namespace.clone(), prefix,
|attr| *attr.local_name() == local_name && *attr.namespace() == namespace);
Ok(())
}
// https://dom.spec.whatwg.org/#dom-element-removeattribute
fn RemoveAttribute(&self, name: DOMString) {
let name = self.parsed_name(name);
self.remove_attribute_by_name(&name);
}
// https://dom.spec.whatwg.org/#dom-element-removeattributens
fn RemoveAttributeNS(&self,
namespace: Option<DOMString>,
local_name: DOMString) {
let namespace = namespace_from_domstring(namespace);
let local_name = Atom::from_slice(&local_name);
self.remove_attribute(&namespace, &local_name);
}
// https://dom.spec.whatwg.org/#dom-element-hasattribute
fn HasAttribute(&self, name: DOMString) -> bool {
self.GetAttribute(name).is_some()
}
// https://dom.spec.whatwg.org/#dom-element-hasattributens
fn HasAttributeNS(&self,
namespace: Option<DOMString>,
local_name: DOMString) -> bool {
self.GetAttributeNS(namespace, local_name).is_some()
}
// https://dom.spec.whatwg.org/#dom-element-getelementsbytagname
fn GetElementsByTagName(&self, localname: DOMString) -> Root<HTMLCollection> {
let window = window_from_node(self);
HTMLCollection::by_tag_name(window.r(), self.upcast(), localname)
}
// https://dom.spec.whatwg.org/#dom-element-getelementsbytagnamens
fn GetElementsByTagNameNS(&self, maybe_ns: Option<DOMString>,
localname: DOMString) -> Root<HTMLCollection> {
let window = window_from_node(self);
HTMLCollection::by_tag_name_ns(window.r(), self.upcast(), localname, maybe_ns)
}
// https://dom.spec.whatwg.org/#dom-element-getelementsbyclassname
fn GetElementsByClassName(&self, classes: DOMString) -> Root<HTMLCollection> {
let window = window_from_node(self);
HTMLCollection::by_class_name(window.r(), self.upcast(), classes)
}
// https://drafts.csswg.org/cssom-view/#dom-element-getclientrects
fn GetClientRects(&self) -> Root<DOMRectList> {
let win = window_from_node(self);
let raw_rects = self.upcast::<Node>().get_content_boxes();
let rects = raw_rects.iter().map(|rect| {
DOMRect::new(GlobalRef::Window(win.r()),
rect.origin.x.to_f64_px(),
rect.origin.y.to_f64_px(),
rect.size.width.to_f64_px(),
rect.size.height.to_f64_px())
});
DOMRectList::new(win.r(), rects)
}
// https://drafts.csswg.org/cssom-view/#dom-element-getboundingclientrect
fn GetBoundingClientRect(&self) -> Root<DOMRect> {
let win = window_from_node(self);
let rect = self.upcast::<Node>().get_bounding_content_box();
DOMRect::new(GlobalRef::Window(win.r()),
rect.origin.x.to_f64_px(),
rect.origin.y.to_f64_px(),
rect.size.width.to_f64_px(),
rect.size.height.to_f64_px())
}
// https://drafts.csswg.org/cssom-view/#dom-element-clienttop
fn ClientTop(&self) -> i32 {
self.upcast::<Node>().get_client_rect().origin.y
}
// https://drafts.csswg.org/cssom-view/#dom-element-clientleft
fn ClientLeft(&self) -> i32 {
self.upcast::<Node>().get_client_rect().origin.x
}
// https://drafts.csswg.org/cssom-view/#dom-element-clientwidth
fn ClientWidth(&self) -> i32 {
self.upcast::<Node>().get_client_rect().size.width
}
// https://drafts.csswg.org/cssom-view/#dom-element-clientheight
fn ClientHeight(&self) -> i32 {
self.upcast::<Node>().get_client_rect().size.height
}
/// https://w3c.github.io/DOM-Parsing/#widl-Element-innerHTML
fn GetInnerHTML(&self) -> Fallible<DOMString> {
//XXX TODO: XML case
self.serialize(ChildrenOnly)
}
/// https://w3c.github.io/DOM-Parsing/#widl-Element-innerHTML
fn SetInnerHTML(&self, value: DOMString) -> Fallible<()> {
let context_node = self.upcast::<Node>();
// Step 1.
let frag = try!(context_node.parse_fragment(value));
// Step 2.
// https://github.com/w3c/DOM-Parsing/issues/1
let target = if let Some(template) = self.downcast::<HTMLTemplateElement>() {
Root::upcast(template.Content())
} else {
Root::from_ref(context_node)
};
Node::replace_all(Some(frag.upcast()), &target);
Ok(())
}
// https://dvcs.w3.org/hg/innerhtml/raw-file/tip/index.html#widl-Element-outerHTML
fn GetOuterHTML(&self) -> Fallible<DOMString> {
self.serialize(IncludeNode)
}
// https://dvcs.w3.org/hg/innerhtml/raw-file/tip/index.html#widl-Element-outerHTML
fn SetOuterHTML(&self, value: DOMString) -> Fallible<()> {
let context_document = document_from_node(self);
let context_node = self.upcast::<Node>();
// Step 1.
let context_parent = match context_node.GetParentNode() {
None => {
// Step 2.
return Ok(());
},
Some(parent) => parent,
};
let parent = match context_parent.type_id() {
// Step 3.
NodeTypeId::Document => return Err(Error::NoModificationAllowed),
// Step 4.
NodeTypeId::DocumentFragment => {
let body_elem = Element::create(QualName::new(ns!(HTML), atom!(body)),
None, context_document.r(),
ElementCreator::ScriptCreated);
Root::upcast(body_elem)
},
_ => context_node.GetParentNode().unwrap()
};
// Step 5.
let frag = try!(parent.parse_fragment(value));
// Step 6.
try!(context_parent.ReplaceChild(frag.upcast(), context_node));
Ok(())
}
// https://dom.spec.whatwg.org/#dom-nondocumenttypechildnode-previouselementsibling
fn GetPreviousElementSibling(&self) -> Option<Root<Element>> {
self.upcast::<Node>().preceding_siblings().filter_map(Root::downcast).next()
}
// https://dom.spec.whatwg.org/#dom-nondocumenttypechildnode-nextelementsibling
fn GetNextElementSibling(&self) -> Option<Root<Element>> {
self.upcast::<Node>().following_siblings().filter_map(Root::downcast).next()
}
// https://dom.spec.whatwg.org/#dom-parentnode-children
fn Children(&self) -> Root<HTMLCollection> {
let window = window_from_node(self);
HTMLCollection::children(window.r(), self.upcast())
}
// https://dom.spec.whatwg.org/#dom-parentnode-firstelementchild
fn GetFirstElementChild(&self) -> Option<Root<Element>> {
self.upcast::<Node>().child_elements().next()
}
// https://dom.spec.whatwg.org/#dom-parentnode-lastelementchild
fn GetLastElementChild(&self) -> Option<Root<Element>> {
self.upcast::<Node>().rev_children().filter_map(Root::downcast::<Element>).next()
}
// https://dom.spec.whatwg.org/#dom-parentnode-childelementcount
fn ChildElementCount(&self) -> u32 {
self.upcast::<Node>().child_elements().count() as u32
}
// https://dom.spec.whatwg.org/#dom-parentnode-prepend
fn Prepend(&self, nodes: Vec<NodeOrString>) -> ErrorResult {
self.upcast::<Node>().prepend(nodes)
}
// https://dom.spec.whatwg.org/#dom-parentnode-append
fn Append(&self, nodes: Vec<NodeOrString>) -> ErrorResult {
self.upcast::<Node>().append(nodes)
}
// https://dom.spec.whatwg.org/#dom-parentnode-queryselector
fn QuerySelector(&self, selectors: DOMString) -> Fallible<Option<Root<Element>>> {
let root = self.upcast::<Node>();
root.query_selector(selectors)
}
// https://dom.spec.whatwg.org/#dom-parentnode-queryselectorall
fn QuerySelectorAll(&self, selectors: DOMString) -> Fallible<Root<NodeList>> {
let root = self.upcast::<Node>();
root.query_selector_all(selectors)
}
// https://dom.spec.whatwg.org/#dom-childnode-before
fn Before(&self, nodes: Vec<NodeOrString>) -> ErrorResult {
self.upcast::<Node>().before(nodes)
}
// https://dom.spec.whatwg.org/#dom-childnode-after
fn After(&self, nodes: Vec<NodeOrString>) -> ErrorResult {
self.upcast::<Node>().after(nodes)
}
// https://dom.spec.whatwg.org/#dom-childnode-replacewith
fn ReplaceWith(&self, nodes: Vec<NodeOrString>) -> ErrorResult {
self.upcast::<Node>().replace_with(nodes)
}
// https://dom.spec.whatwg.org/#dom-childnode-remove
fn Remove(&self) {
self.upcast::<Node>().remove_self();
}
// https://dom.spec.whatwg.org/#dom-element-matches
fn Matches(&self, selectors: DOMString) -> Fallible<bool> {
match parse_author_origin_selector_list_from_str(&selectors) {
Err(()) => Err(Error::Syntax),
Ok(ref selectors) => {
Ok(matches(selectors, &Root::from_ref(self), None))
}
}
}
// https://dom.spec.whatwg.org/#dom-element-webkitmatchesselector
fn WebkitMatchesSelector(&self, selectors: DOMString) -> Fallible<bool> {
self.Matches(selectors)
}
// https://dom.spec.whatwg.org/#dom-element-closest
fn Closest(&self, selectors: DOMString) -> Fallible<Option<Root<Element>>> {
match parse_author_origin_selector_list_from_str(&selectors) {
Err(()) => Err(Error::Syntax),
Ok(ref selectors) => {
let root = self.upcast::<Node>();
for element in root.inclusive_ancestors() {
if let Some(element) = Root::downcast::<Element>(element) {
if matches(selectors, &element, None) {
return Ok(Some(element));
}
}
}
Ok(None)
}
}
}
}
impl VirtualMethods for Element {
fn super_type(&self) -> Option<&VirtualMethods> {
Some(self.upcast::<Node>() as &VirtualMethods)
}
fn attribute_mutated(&self, attr: &Attr, mutation: AttributeMutation) {
self.super_type().unwrap().attribute_mutated(attr, mutation);
let node = self.upcast::<Node>();
let doc = node.owner_doc();
let damage = match attr.local_name() {
&atom!(style) => {
// Modifying the `style` attribute might change style.
*self.style_attribute.borrow_mut() =
mutation.new_value(attr).map(|value| {
parse_style_attribute(&value, &doc.base_url())
});
NodeDamage::NodeStyleDamaged
},
&atom!(class) => {
// Modifying a class can change style.
NodeDamage::NodeStyleDamaged
},
&atom!(id) => {
*self.id_attribute.borrow_mut() =
mutation.new_value(attr).and_then(|value| {
let value = value.as_atom();
if value != &atom!("") {
Some(value.clone())
} else {
None
}
});
if node.is_in_doc() {
let value = attr.value().as_atom().clone();
match mutation {
AttributeMutation::Set(old_value) => {
if let Some(old_value) = old_value {
let old_value = old_value.as_atom().clone();
doc.unregister_named_element(self, old_value);
}
if value != atom!("") {
doc.register_named_element(self, value);
}
},
AttributeMutation::Removed => {
if value != atom!("") {
doc.unregister_named_element(self, value);
}
}
}
}
NodeDamage::NodeStyleDamaged
},
_ => {
// Modifying any other attribute might change arbitrary things.
NodeDamage::OtherNodeDamage
},
};
if node.is_in_doc() {
doc.content_changed(node, damage);
}
}
fn parse_plain_attribute(&self, name: &Atom, value: DOMString) -> AttrValue {
match name {
&atom!("id") => AttrValue::from_atomic(value),
&atom!("class") => AttrValue::from_serialized_tokenlist(value),
_ => self.super_type().unwrap().parse_plain_attribute(name, value),
}
}
fn bind_to_tree(&self, tree_in_doc: bool) {
if let Some(ref s) = self.super_type() {
s.bind_to_tree(tree_in_doc);
}
if !tree_in_doc { return; }
if let Some(ref value) = *self.id_attribute.borrow() {
let doc = document_from_node(self);
doc.register_named_element(self, value.clone());
}
}
fn unbind_from_tree(&self, tree_in_doc: bool) {
if let Some(ref s) = self.super_type() {
s.unbind_from_tree(tree_in_doc);
}
if !tree_in_doc { return; }
if let Some(ref value) = *self.id_attribute.borrow() {
let doc = document_from_node(self);
doc.unregister_named_element(self, value.clone());
}
}
}
macro_rules! state_getter {
($(
$(#[$Flag_attr: meta])*
state $css: expr => $variant: ident / $method: ident /
$flag: ident = $value: expr,
)+) => {
$( fn $method(&self) -> bool { Element::get_state(self).contains($flag) } )+
}
}
impl<'a> ::selectors::Element for Root<Element> {
fn parent_element(&self) -> Option<Root<Element>> {
self.upcast::<Node>().GetParentElement()
}
fn first_child_element(&self) -> Option<Root<Element>> {
self.node.child_elements().next()
}
fn last_child_element(&self) -> Option<Root<Element>> {
self.node.rev_children().filter_map(Root::downcast).next()
}
fn prev_sibling_element(&self) -> Option<Root<Element>> {
self.node.preceding_siblings().filter_map(Root::downcast).next()
}
fn next_sibling_element(&self) -> Option<Root<Element>> {
self.node.following_siblings().filter_map(Root::downcast).next()
}
fn is_root(&self) -> bool {
match self.node.GetParentNode() {
None => false,
Some(node) => node.is::<Document>(),
}
}
fn is_empty(&self) -> bool {
self.node.children().all(|node| !node.is::<Element>() && match node.downcast::<Text>() {
None => true,
Some(text) => text.upcast::<CharacterData>().data().is_empty()
})
}
fn is_link(&self) -> bool {
// FIXME: This is HTML only.
let node = self.upcast::<Node>();
match node.type_id() {
// https://html.spec.whatwg.org/multipage/#selector-link
NodeTypeId::Element(ElementTypeId::HTMLElement(HTMLElementTypeId::HTMLAnchorElement)) |
NodeTypeId::Element(ElementTypeId::HTMLElement(HTMLElementTypeId::HTMLAreaElement)) |
NodeTypeId::Element(ElementTypeId::HTMLElement(HTMLElementTypeId::HTMLLinkElement)) => {
self.has_attribute(&atom!("href"))
},
_ => false,
}
}
#[inline]
fn is_unvisited_link(&self) -> bool {
self.is_link()
}
#[inline]
fn is_visited_link(&self) -> bool {
false
}
fn get_local_name(&self) -> &Atom {
self.local_name()
}
fn get_namespace(&self) -> &Namespace {
self.namespace()
}
state_pseudo_classes!(state_getter);
fn get_id(&self) -> Option<Atom> {
self.id_attribute.borrow().clone()
}
fn has_class(&self, name: &Atom) -> bool {
Element::has_class(&**self, name)
}
fn each_class<F>(&self, mut callback: F)
where F: FnMut(&Atom)
{
if let Some(ref attr) = self.get_attribute(&ns!(""), &atom!("class")) {
let tokens = attr.value();
let tokens = tokens.as_tokens();
for token in tokens {
callback(token);
}
}
}
fn has_servo_nonzero_border(&self) -> bool {
match self.downcast::<HTMLTableElement>() {
None => false,
Some(this) => {
match this.get_border() {
None | Some(0) => false,
Some(_) => true,
}
}
}
}
fn match_attr<F>(&self, attr: &AttrSelector, test: F) -> bool
where F: Fn(&str) -> bool
{
let local_name = {
if self.is_html_element_in_html_document() {
&attr.lower_name
} else {
&attr.name
}
};
match attr.namespace {
NamespaceConstraint::Specific(ref ns) => {
self.get_attribute(ns, local_name)
.map_or(false, |attr| {
test(&attr.value())
})
},
NamespaceConstraint::Any => {
self.attrs.borrow().iter().any(|attr| {
attr.local_name() == local_name && test(&attr.value())
})
}
}
}
fn is_html_element_in_html_document(&self) -> bool {
self.html_element_in_html_document()
}
}
impl Element {
pub fn as_maybe_activatable(&self) -> Option<&Activatable> {
let element = match self.upcast::<Node>().type_id() {
NodeTypeId::Element(ElementTypeId::HTMLElement(HTMLElementTypeId::HTMLInputElement)) => {
let element = self.downcast::<HTMLInputElement>().unwrap();
Some(element as &Activatable)
},
NodeTypeId::Element(ElementTypeId::HTMLElement(HTMLElementTypeId::HTMLAnchorElement)) => {
let element = self.downcast::<HTMLAnchorElement>().unwrap();
Some(element as &Activatable)
},
NodeTypeId::Element(ElementTypeId::HTMLElement(HTMLElementTypeId::HTMLLabelElement)) => {
let element = self.downcast::<HTMLLabelElement>().unwrap();
Some(element as &Activatable)
},
_ => {
None
}
};
element.and_then(|elem| {
if elem.is_instance_activatable() {
Some(elem)
} else {
None
}
})
}
pub fn click_in_progress(&self) -> bool {
self.upcast::<Node>().get_flag(CLICK_IN_PROGRESS)
}
pub fn set_click_in_progress(&self, click: bool) {
self.upcast::<Node>().set_flag(CLICK_IN_PROGRESS, click)
}
// https://html.spec.whatwg.org/multipage/#nearest-activatable-element
pub fn nearest_activable_element(&self) -> Option<Root<Element>> {
match self.as_maybe_activatable() {
Some(el) => Some(Root::from_ref(el.as_element())),
None => {
let node = self.upcast::<Node>();
for node in node.ancestors() {
if let Some(node) = node.downcast::<Element>() {
if node.as_maybe_activatable().is_some() {
return Some(Root::from_ref(node))
}
}
}
None
}
}
}
/// Please call this method *only* for real click events
///
/// https://html.spec.whatwg.org/multipage/#run-authentic-click-activation-steps
///
/// Use an element's synthetic click activation (or handle_event) for any script-triggered clicks.
/// If the spec says otherwise, check with Manishearth first
pub fn authentic_click_activation(&self, event: &Event) {
// Not explicitly part of the spec, however this helps enforce the invariants
// required to save state between pre-activation and post-activation
// since we cannot nest authentic clicks (unlike synthetic click activation, where
// the script can generate more click events from the handler)
assert!(!self.click_in_progress());
let target = self.upcast();
// Step 2 (requires canvas support)
// Step 3
self.set_click_in_progress(true);
// Step 4
let e = self.nearest_activable_element();
match e {
Some(ref el) => match el.as_maybe_activatable() {
Some(elem) => {
// Step 5-6
elem.pre_click_activation();
event.fire(target);
if !event.DefaultPrevented() {
// post click activation
elem.activation_behavior(event, target);
} else {
elem.canceled_activation();
}
}
// Step 6
None => { event.fire(target); }
},
// Step 6
None => { event.fire(target); }
}
// Step 7
self.set_click_in_progress(false);
}
pub fn get_state(&self) -> ElementState {
self.state.get()
}
pub fn set_state(&self, which: ElementState, value: bool) {
let mut state = self.state.get();
if state.contains(which) == value {
return
}
let node = self.upcast::<Node>();
node.owner_doc().element_state_will_change(self);
match value {
true => state.insert(which),
false => state.remove(which),
};
self.state.set(state);
}
pub fn get_active_state(&self) -> bool {
self.state.get().contains(IN_ACTIVE_STATE)
}
pub fn set_active_state(&self, value: bool) {
self.set_state(IN_ACTIVE_STATE, value)
}
pub fn get_focus_state(&self) -> bool {
self.state.get().contains(IN_FOCUS_STATE)
}
pub fn set_focus_state(&self, value: bool) {
self.set_state(IN_FOCUS_STATE, value)
}
pub fn get_hover_state(&self) -> bool {
self.state.get().contains(IN_HOVER_STATE)
}
pub fn set_hover_state(&self, value: bool) {
self.set_state(IN_HOVER_STATE, value)
}
pub fn get_enabled_state(&self) -> bool {
self.state.get().contains(IN_ENABLED_STATE)
}
pub fn set_enabled_state(&self, value: bool) {
self.set_state(IN_ENABLED_STATE, value)
}
pub fn get_disabled_state(&self) -> bool {
self.state.get().contains(IN_DISABLED_STATE)
}
pub fn set_disabled_state(&self, value: bool) {
self.set_state(IN_DISABLED_STATE, value)
}
}
impl Element {
pub fn check_ancestors_disabled_state_for_form_control(&self) {
let node = self.upcast::<Node>();
if self.get_disabled_state() { return; }
for ancestor in node.ancestors() {
let ancestor = ancestor;
let ancestor = ancestor.r();
if !ancestor.is::<HTMLFieldSetElement>() { continue; }
if !ancestor.downcast::<Element>().unwrap().get_disabled_state() { continue; }
if ancestor.is_parent_of(node) {
self.set_disabled_state(true);
self.set_enabled_state(false);
return;
}
match ancestor.children()
.find(|child| child.is::<HTMLLegendElement>())
{
Some(ref legend) => {
// XXXabinader: should we save previous ancestor to avoid this iteration?
if node.ancestors().any(|ancestor| ancestor == *legend) { continue; }
},
None => ()
}
self.set_disabled_state(true);
self.set_enabled_state(false);
return;
}
}
pub fn check_parent_disabled_state_for_option(&self) {
if self.get_disabled_state() { return; }
let node = self.upcast::<Node>();
if let Some(ref parent) = node.GetParentNode() {
if parent.is::<HTMLOptGroupElement>() && parent.downcast::<Element>().unwrap().get_disabled_state() {
self.set_disabled_state(true);
self.set_enabled_state(false);
}
}
}
pub fn check_disabled_attribute(&self) {
let has_disabled_attrib = self.has_attribute(&atom!("disabled"));
self.set_disabled_state(has_disabled_attrib);
self.set_enabled_state(!has_disabled_attrib);
}
}
#[derive(Clone, Copy, PartialEq)]
pub enum AttributeMutation<'a> {
/// The attribute is set, keep track of old value.
/// https://dom.spec.whatwg.org/#attribute-is-set
Set(Option<&'a AttrValue>),
/// The attribute is removed.
/// https://dom.spec.whatwg.org/#attribute-is-removed
Removed
}
impl<'a> AttributeMutation<'a> {
pub fn new_value<'b>(&self, attr: &'b Attr) -> Option<Ref<'b, AttrValue>> {
match *self {
AttributeMutation::Set(_) => Some(attr.value()),
AttributeMutation::Removed => None,
}
}
}<|fim▁end|> | let width_value = specified::LengthOrPercentageOrAuto::Length( |
<|file_name|>ConnectionHandler.java<|end_file_name|><|fim▁begin|>/*************************************************************************
* Copyright 2009-2014 Eucalyptus Systems, Inc.
*
* This program is free software: you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
* the Free Software Foundation; version 3 of the License.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
* You should have received a copy of the GNU General Public License
* along with this program. If not, see http://www.gnu.org/licenses/.
*
* Please contact Eucalyptus Systems, Inc., 6755 Hollister Ave., Goleta
* CA 93117, USA or visit http://www.eucalyptus.com/licenses/ if you need
* additional information or have any questions.
*
* This file may incorporate work covered under the following copyright
* and permission notice:
*
* Software License Agreement (BSD License)
*
* Copyright (c) 2008, Regents of the University of California
* All rights reserved.
*
* Redistribution and use of this software in source and binary forms,
* with or without modification, are permitted provided that the
* following conditions are met:
*
* Redistributions of source code must retain the above copyright
* notice, this list of conditions and the following disclaimer.
*
* Redistributions in binary form must reproduce the above copyright
* notice, this list of conditions and the following disclaimer
* in the documentation and/or other materials provided with the
* distribution.
*
* THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
* "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
* LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS
* FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE
* COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT,
* INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING,
* BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
* LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
* CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT
* LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN
* ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
* POSSIBILITY OF SUCH DAMAGE. USERS OF THIS SOFTWARE ACKNOWLEDGE
* THE POSSIBLE PRESENCE OF OTHER OPEN SOURCE LICENSED MATERIAL,
* COPYRIGHTED MATERIAL OR PATENTED MATERIAL IN THIS SOFTWARE,
* AND IF ANY SUCH MATERIAL IS DISCOVERED THE PARTY DISCOVERING
* IT MAY INFORM DR. RICH WOLSKI AT THE UNIVERSITY OF CALIFORNIA,
* SANTA BARBARA WHO WILL THEN ASCERTAIN THE MOST APPROPRIATE REMEDY,
* WHICH IN THE REGENTS' DISCRETION MAY INCLUDE, WITHOUT LIMITATION,
* REPLACEMENT OF THE CODE SO IDENTIFIED, LICENSING OF THE CODE SO
* IDENTIFIED, OR WITHDRAWAL OF THE CODE CAPABILITY TO THE EXTENT
* NEEDED TO COMPLY WITH ANY SUCH LICENSES OR RIGHTS.
*
* This file may incorporate work covered under the following copyright
* and permission notice:
*
* Copyright (c) 1999-2004, Brian Wellington.
* All rights reserved.
*
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions
* are met:
*
* * Redistributions of source code must retain the above copyright
* notice, this list of conditions and the following disclaimer.
*
* * Redistributions in binary form must reproduce the above
* copyright notice, this list of conditions and the following
* disclaimer in the documentation and/or other materials provided
* with the distribution.
*
* THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
* "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
* LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS
* FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE
* COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT,
* INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING,
* BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
* LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
* CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT
* LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN
* ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
* POSSIBILITY OF SUCH DAMAGE.
************************************************************************/
package com.eucalyptus.cloud.ws;
import static com.eucalyptus.util.dns.DnsResolvers.DnsRequest;
import java.io.DataOutputStream;
import java.io.IOException;
import java.net.InetAddress;
import java.net.Socket;
import java.util.Iterator;
import java.util.Map;
import java.util.concurrent.ConcurrentHashMap;
import org.apache.log4j.Logger;
import org.xbill.DNS.CNAMERecord;
import org.xbill.DNS.Credibility;
import org.xbill.DNS.DClass;
import org.xbill.DNS.DNAMERecord;
import org.xbill.DNS.ExtendedFlags;
import org.xbill.DNS.Flags;
import org.xbill.DNS.Header;
import org.xbill.DNS.Message;
import org.xbill.DNS.Name;
import org.xbill.DNS.NameTooLongException;
import org.xbill.DNS.OPTRecord;
import org.xbill.DNS.Opcode;
import org.xbill.DNS.RRset;
import org.xbill.DNS.Rcode;
import org.xbill.DNS.Record;
import org.xbill.DNS.Section;
import org.xbill.DNS.SetResponse;
import org.xbill.DNS.TSIG;
import org.xbill.DNS.TSIGRecord;
import org.xbill.DNS.Type;
import com.eucalyptus.dns.Zone;
import com.eucalyptus.dns.Cache;
import com.eucalyptus.util.Pair;
import com.eucalyptus.util.dns.DnsResolvers;
import com.google.common.base.Function;
import com.google.common.base.Optional;
public class ConnectionHandler extends Thread {
static final int FLAG_DNSSECOK = 1;
static final int FLAG_SIGONLY = 2;
Map caches = new ConcurrentHashMap();
//Map TSIGs;
byte []
generateReply(Message query, byte [] in, int length, Socket s)
throws IOException
{
Header header;
boolean badversion;
int maxLength;
boolean sigonly;
SetResponse sr;
int flags = 0;
header = query.getHeader();
// if (header.getFlag(Flags.QR))
// return null;
if (header.getRcode() != Rcode.NOERROR)
return errorMessage(query, Rcode.FORMERR);
if (header.getOpcode() != Opcode.QUERY)
return errorMessage(query, Rcode.NOTIMP);
Record queryRecord = query.getQuestion();
TSIGRecord queryTSIG = query.getTSIG();
TSIG tsig = null;
/* if (queryTSIG != null) {
tsig = (TSIG) TSIGs.get(queryTSIG.getName());
if (tsig == null ||
tsig.verify(query, in, length, null) != Rcode.NOERROR)
return formerrMessage(in);
}*/
OPTRecord queryOPT = query.getOPT();
if (queryOPT != null && queryOPT.getVersion() > 0)
badversion = true;
if (s != null)
maxLength = 65535;
else if (queryOPT != null)
maxLength = Math.max(queryOPT.getPayloadSize(), 512);
else
maxLength = 512;
if (queryOPT != null && (queryOPT.getFlags() & ExtendedFlags.DO) != 0)
flags = FLAG_DNSSECOK;
Message response = new Message(query.getHeader().getID());
response.getHeader().setFlag(Flags.QR);
if (query.getHeader().getFlag(Flags.RD))
response.getHeader().setFlag(Flags.RD);
if(queryRecord != null) {
response.addRecord(queryRecord, Section.QUESTION);
<|fim▁hole|> int dclass = queryRecord.getDClass();
/* if (type == Type.AXFR && s != null)
return doAXFR(name, query, tsig, queryTSIG, s);
*/ if (!Type.isRR(type) && type != Type.ANY)
return errorMessage(query, Rcode.NOTIMP);
byte rcode = addAnswer(response, name, type, dclass, 0, flags);
if (rcode != Rcode.NOERROR && rcode != Rcode.NXDOMAIN)
return errorMessage(query, Rcode.SERVFAIL);
addAdditional(response, type, flags);
if (queryOPT != null) {
int optflags = (flags == FLAG_DNSSECOK) ? ExtendedFlags.DO : 0;
OPTRecord opt = new OPTRecord((short)4096, rcode, (byte)0, optflags);
response.addRecord(opt, Section.ADDITIONAL);
}
}
response.setTSIG(tsig, Rcode.NOERROR, queryTSIG);
return response.toWire(maxLength);
}
public Zone
findBestZone(Name name) {
Zone foundzone = null;
int labels = name.labels();
for (int i = 1; i < labels; i++) {
Name tname = new Name(name, i);
foundzone = (Zone) ZoneManager.getZone(tname);
if (foundzone != null)
return foundzone;
}
return null;
}
public RRset
findExactMatch(Name name, int type, int dclass, boolean glue) {
Zone zone = findBestZone(name);
if (zone != null)
return zone.findExactMatch(name, type);
else {
RRset [] rrsets;
Cache cache = getCache(dclass);
if (glue)
rrsets = cache.findAnyRecords(name, type);
else
rrsets = cache.findRecords(name, type);
if (rrsets == null)
return null;
else
return rrsets[0]; /* not quite right */
}
}
private void
addGlue(Message response, Name name, int type, int flags) {
RRset a = findExactMatch(name, type, DClass.IN, true);
if (a == null)
return;
addRRset(name, response, a, Section.ADDITIONAL, flags);
}
private void
addAdditional2(Message response, int section, int type, int flags) {
Record [] records = response.getSectionArray(section);
for (int i = 0; i < records.length; i++) {
Record r = records[i];
Name glueName = r.getAdditionalName();
if (glueName != null)
addGlue(response, glueName, type, flags);
}
}
private final void
addAdditional(Message response, int type, int flags) {
addAdditional2(response, Section.ANSWER, type, flags);
addAdditional2(response, Section.AUTHORITY, type, flags);
}
byte
addAnswer( final Message response, Name name, int type, int dclass,
int iterations, int flags)
{
SetResponse sr;
byte rcode = Rcode.NOERROR;
if (iterations > 6)
return Rcode.NOERROR;
if (type == Type.SIG || type == Type.RRSIG) {
type = Type.ANY;
flags |= FLAG_SIGONLY;
}
try {
sr = DnsResolvers.findRecords( response, new DnsRequest() {
@Override public Record getQuery() { return response.getQuestion( ); }
@Override public InetAddress getLocalAddress() { return ConnectionHandler.getLocalInetAddress(); }
@Override public InetAddress getRemoteAddress() { return ConnectionHandler.getRemoteInetAddress(); }
} );
if ( sr != null ) {
if ( sr.isSuccessful( ) ) {
return Rcode.NOERROR;
} else if ( sr.isNXDOMAIN( ) ) {
return Rcode.NXDOMAIN;
}
}
} catch ( Exception ex ) {
Logger.getLogger( DnsResolvers.class ).error( ex );
}
Zone zone = findBestZone(name);
if (zone != null) {
if (type == Type.AAAA) {
addSOA(response, zone);
response.getHeader().setFlag(Flags.AA);
return (Rcode.NOERROR);
}
sr = zone.findRecords(name, type, getLocalInetAddress( ));
}
else {
Cache cache = getCache(dclass);
sr = cache.lookupRecords(name, type, Credibility.NORMAL);
}
if (sr.isUnknown()) {
return (Rcode.SERVFAIL);
}
if (sr.isNXDOMAIN()) {
response.getHeader().setRcode(Rcode.NXDOMAIN);
if (zone != null) {
addSOA(response, zone);
if (iterations == 0)
response.getHeader().setFlag(Flags.AA);
}
rcode = Rcode.NXDOMAIN;
}
else if (sr.isNXRRSET()) {
if (zone != null) {
addSOA(response, zone);
if (iterations == 0)
response.getHeader().setFlag(Flags.AA);
}
}
else if (sr.isDelegation()) {
RRset nsRecords = sr.getNS();
addRRset(nsRecords.getName(), response, nsRecords,
Section.AUTHORITY, flags);
}
else if (sr.isCNAME()) {
CNAMERecord cname = sr.getCNAME();
RRset rrset = new RRset(cname);
addRRset(name, response, rrset, Section.ANSWER, flags);
if (zone != null && iterations == 0)
response.getHeader().setFlag(Flags.AA);
rcode = addAnswer(response, cname.getTarget(),
type, dclass, iterations + 1, flags);
}
else if (sr.isDNAME()) {
DNAMERecord dname = sr.getDNAME();
RRset rrset = new RRset(dname);
addRRset(name, response, rrset, Section.ANSWER, flags);
Name newname;
try {
newname = name.fromDNAME(dname);
}
catch (NameTooLongException e) {
return Rcode.YXDOMAIN;
}
if(newname != null) {
rrset = new RRset(new CNAMERecord(name, dclass, 0, newname));
addRRset(name, response, rrset, Section.ANSWER, flags);
if (zone != null && iterations == 0)
response.getHeader().setFlag(Flags.AA);
rcode = addAnswer(response, newname, type, dclass, iterations + 1, flags);
}
}
else if (sr.isSuccessful()) {
RRset [] rrsets = sr.answers();
if(rrsets != null) {
for (int i = 0; i < rrsets.length; i++)
addRRset(name, response, rrsets[i], Section.ANSWER, flags);
}
if (zone != null) {
addNS(response, zone, flags);
if (iterations == 0)
response.getHeader().setFlag(Flags.AA);
}
else
addCacheNS(response, getCache(dclass), name);
}
return rcode;
}
private final void
addSOA(Message response, Zone zone) {
response.addRecord(zone.getSOA(), Section.AUTHORITY);
}
private final void
addNS(Message response, Zone zone, int flags) {
RRset nsRecords = zone.getNS();
addRRset(nsRecords.getName(), response, nsRecords, Section.AUTHORITY, flags);
}
private final void
addCacheNS(Message response, Cache cache, Name name) {
SetResponse sr = cache.lookupRecords(name, Type.NS, Credibility.HINT);
if (!sr.isDelegation())
return;
RRset nsRecords = sr.getNS();
Iterator it = nsRecords.rrs();
while (it.hasNext()) {
Record r = (Record) it.next();
response.addRecord(r, Section.AUTHORITY);
}
}
byte []
doAXFR(Name name, Message query, TSIG tsig, TSIGRecord qtsig, Socket s) {
Zone zone = (Zone) ZoneManager.getZone(name);
boolean first = true;
if (zone == null)
return errorMessage(query, Rcode.REFUSED);
Iterator it = zone.AXFR();
try {
DataOutputStream dataOut;
dataOut = new DataOutputStream(s.getOutputStream());
int id = query.getHeader().getID();
while (it.hasNext()) {
RRset rrset = (RRset) it.next();
Message response = new Message(id);
Header header = response.getHeader();
header.setFlag(Flags.QR);
header.setFlag(Flags.AA);
addRRset(rrset.getName(), response, rrset,
Section.ANSWER, FLAG_DNSSECOK);
if (tsig != null) {
tsig.applyStream(response, qtsig, first);
qtsig = response.getTSIG();
}
first = false;
byte [] out = response.toWire();
dataOut.writeShort(out.length);
dataOut.write(out);
}
}
catch (IOException ex) {
System.out.println("AXFR failed");
}
try {
s.close();
}
catch (IOException ex) {
}
return null;
}
void
addRRset(Name name, Message response, RRset rrset, int section, int flags) {
for (int s = 1; s <= section; s++)
if (response.findRRset(name, rrset.getType(), s))
return;
if ((flags & FLAG_SIGONLY) == 0) {
Iterator it = rrset.rrs();
while (it.hasNext()) {
Record r = (Record) it.next();
if (r.getName().isWild() && !name.isWild())
r = r.withName(name);
response.addRecord(r, section);
}
}
if ((flags & (FLAG_SIGONLY | FLAG_DNSSECOK)) != 0) {
Iterator it = rrset.sigs();
while (it.hasNext()) {
Record r = (Record) it.next();
if (r.getName().isWild() && !name.isWild())
r = r.withName(name);
response.addRecord(r, section);
}
}
}
byte []
buildErrorMessage(Header header, int rcode, Record question) {
Message response = new Message();
response.setHeader(header);
for (int i = 0; i < 4; i++)
response.removeAllRecords(i);
if (rcode == Rcode.SERVFAIL)
response.addRecord(question, Section.QUESTION);
header.setRcode(rcode);
return response.toWire();
}
public byte []
errorMessage(Message query, int rcode) {
return buildErrorMessage(query.getHeader(), rcode,
query.getQuestion());
}
public byte []
formerrMessage(byte [] in) {
Header header;
try {
header = new Header(in);
}
catch (IOException e) {
return null;
}
return buildErrorMessage(header, Rcode.FORMERR, null);
}
public Cache
getCache(int dclass) {
Cache c = (Cache) caches.get(new Integer(dclass));
if (c == null) {
c = new Cache(dclass);
caches.put(new Integer(dclass), c);
}
return c;
}
private static final ThreadLocal<Pair<InetAddress,InetAddress>> localAndRemoteInetAddresses = new ThreadLocal<>();
private static InetAddress getInetAddress( final Function<Pair<InetAddress,InetAddress>,InetAddress> extractor ) {
return Optional.fromNullable( localAndRemoteInetAddresses.get( ) ).transform( extractor ).orNull( );
}
static InetAddress getLocalInetAddress( ) {
return getInetAddress( Pair.<InetAddress,InetAddress>left( ) );
}
static InetAddress getRemoteInetAddress( ) {
return getInetAddress( Pair.<InetAddress, InetAddress>right() );
}
static void setLocalAndRemoteInetAddresses( InetAddress local, InetAddress remote ) {
ConnectionHandler.localAndRemoteInetAddresses.set( Pair.pair( local, remote ) );
}
static void clearInetAddresses( ) {
ConnectionHandler.localAndRemoteInetAddresses.remove( );
}
}<|fim▁end|> | Name name = queryRecord.getName();
int type = queryRecord.getType(); |
<|file_name|>MapGen.py<|end_file_name|><|fim▁begin|>import math
import random
import GameData
from Util.TileTypes import *
from Util import Line, StarCallback
def initializeRandom( x, y ):
dist = math.sqrt( x ** 2 + y ** 2 )
angle = math.atan2( x, y ) / math.pi * 5
rand = ( random.random() * 7 ) - 3.5
val = ( ( dist + angle + rand ) % 10 )
if val > 5:
return 1
else:
return 0
def circle(x0, y0, radius, endRadius, cb):
stepSize = 1.0 / endRadius
angle = math.pi / 2
while angle >= 0:
c = math.cos( angle )<|fim▁hole|> cb( int( c * r ) + x0, int( s * r ) + y0 )
cb( int( s * r ) + x0, int( c * r ) + y0 )
cb(-int( c * r ) + x0, int( s * r ) + y0 )
cb(-int( s * r ) + x0, int( c * r ) + y0 )
cb( int( c * r ) + x0,-int( s * r ) + y0 )
cb( int( s * r ) + x0,-int( c * r ) + y0 )
cb(-int( c * r ) + x0,-int( s * r ) + y0 )
cb(-int( s * r ) + x0,-int( c * r ) + y0 )
r += 0.5
angle -= stepSize
def buildFixedWalls( self, I, _buffer, val ):
#Clear center room
centerX = int( self.width / 2 )
centerY = int( self.height / 2 )
for x in range( centerX - GameData.MapGen_CenterRoom_Size[0] - 1, centerX + GameData.MapGen_CenterRoom_Size[0] + 1 ):
for y in range( centerY - GameData.MapGen_CenterRoom_Size[1] - 1, centerY + GameData.MapGen_CenterRoom_Size[1] + 1 ):
_buffer[ I( x, y ) ] = 0
#Build center room walls
for x in range( centerX - GameData.MapGen_CenterRoom_Size[0] - 1, centerX + GameData.MapGen_CenterRoom_Size[0] + 1 ):
_buffer[ I( x, centerY - GameData.MapGen_CenterRoom_Size[1] - 1 ) ] = val
_buffer[ I( x, centerY + GameData.MapGen_CenterRoom_Size[1] ) ] = val
for y in range( centerY - GameData.MapGen_CenterRoom_Size[1] - 1, centerY + GameData.MapGen_CenterRoom_Size[1] + 1 ):
_buffer[ I( centerX - GameData.MapGen_CenterRoom_Size[0] - 1, y ) ] = val
_buffer[ I( centerX + GameData.MapGen_CenterRoom_Size[0], y ) ] = val
def preIterInit( self, I, _buffer ):
#Outer wall
for x in range( self.width ):
_buffer[ I( x, 0 ) ] = 1
_buffer[ I( x, self.height - 1 ) ] = 1
for y in range( self.height ):
_buffer[ I( 0, y ) ] = 1
_buffer[ I( self.width - 1, y ) ] = 1
#Area around outer wall
for x in range( 1, self.width- 1 ):
_buffer[ I( x, 1 ) ] = 0
_buffer[ I( x, self.height - 2 ) ] = 0
for y in range( 1, self.height - 1 ):
_buffer[ I( 1, y ) ] = 0
_buffer[ I( self.width - 2, y ) ] = 0
buildFixedWalls( self, I, _buffer, 1 )
def postInit( self, I, _buffer ):
centerX = int( self.width / 2 )
centerY = int( self.height / 2 )
for x in range( self.width ):
for y in range( self.height ):
i = I( x, y )
val = _buffer[ i ]
if val == 0:
_buffer[ i ] = TILE_AIR #NOOP, but for clarity
elif val == 1:
_buffer[ i ] = TILE_WALL
else:
raise Exception( "Incorrect tile type in postInit!" )
for x in range( self.width ):
_buffer[ I( x, 0 ) ] = TILE_FIXED_WALL
_buffer[ I( x, self.height - 1 ) ] = TILE_FIXED_WALL
for y in range( self.height ):
_buffer[ I( 0, y ) ] = TILE_FIXED_WALL
_buffer[ I( self.width - 1, y ) ] = TILE_FIXED_WALL
buildFixedWalls( self, I, _buffer, TILE_FIXED_WALL )
curSurface = ( GameData.MapGen_CenterRoom_Size[0] * 2 ) * ( GameData.MapGen_CenterRoom_Size[1] * 2 )
curRadius = -1
def setFixedWall( x, y ):
_buffer[ I( int( x ), int( y ) ) ] = TILE_FIXED_WALL
circleNum = 0
while curRadius < GameData.MapGen_MaxCircleRadius:
sectionCount = max( circleNum * GameData.MapGen_CircleSectionsPerLayer, 1 )
nextSurface = curSurface + ( GameData.MapGen_BaseSurface * sectionCount )
nextRadius = int( math.sqrt( nextSurface / math.pi ) )
circle( centerX, centerY, nextRadius, nextRadius + 2, setFixedWall )
#Seperate sections in circle
if sectionCount > 1:
for i in range( sectionCount ):
angle = i * math.pi * 2 / sectionCount
s = math.sin( angle )
c = math.cos( angle )
Line( int( s * ( curRadius + 1 ) ) + centerX, int( c * ( curRadius + 1 ) ) + centerY, int( s * nextRadius ) + centerX, int( c * nextRadius ) + centerY, StarCallback( setFixedWall ) )
curRadius = nextRadius
curSurface = int( curRadius ** 2 * math.pi )
circleNum += 1
print( curRadius )
curRadius += 1
curRadiusSquared = curRadius ** 2
for x in range( self.width ):
for y in range( self.height ):
if ( ( x - centerX ) ** 2 + ( y - centerY ) ** 2 ) > curRadiusSquared:
_buffer[ I( x, y ) ] = TILE_AIR #NOOP, but for clarity<|fim▁end|> | s = math.sin( angle )
r = radius
while r < endRadius: |
<|file_name|>require.min.js<|end_file_name|><|fim▁begin|>version https://git-lfs.github.com/spec/v1<|fim▁hole|><|fim▁end|> | oid sha256:505b4ccd47ed9526d0238c6f2d03a343ce476abc1c4aa79a9f22cabcbd0a3c16
size 12575 |
<|file_name|>cmd_user_info.py<|end_file_name|><|fim▁begin|># -*- coding: utf-8 -*-
# Copyright 2010-2012 Kolab Systems AG (http://www.kolabsys.com)
#<|fim▁hole|># Jeroen van Meeuwen (Kolab Systems) <vanmeeuwen a kolabsys.com>
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; version 3 or, at your option, any later version
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Library General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place - Suite 330, Boston, MA 02111-1307, USA.
#
import commands
import pykolab
from pykolab import utils
from pykolab.translate import _
log = pykolab.getLogger('pykolab.cli')
conf = pykolab.getConf()
def __init__():
commands.register('user_info', execute, description="Display user information.")
def execute(*args, **kw):
from pykolab import wap_client
try:
user = conf.cli_args.pop(0)
except IndexError, errmsg:
user = utils.ask_question(_("User"))
# Create the authentication object.
# TODO: Binds with superuser credentials!
wap_client.authenticate(username=conf.get("ldap", "bind_dn"), password=conf.get("ldap", "bind_pw"))
user = wap_client.user_info(user)
print user<|fim▁end|> | |
<|file_name|>AbstractDerivedDecimalType.java<|end_file_name|><|fim▁begin|>/*
* Copyright (c) 2013-2015 Josef Hardi <[email protected]>
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and<|fim▁hole|> * limitations under the License.
*/
package com.obidea.semantika.datatype;
import com.obidea.semantika.datatype.exception.InvalidLexicalFormException;
import com.obidea.semantika.datatype.primitive.XsdDecimal;
public abstract class AbstractDerivedDecimalType extends AbstractXmlType<Number>
{
protected AbstractDerivedDecimalType(String name)
{
super(name);
}
@Override
public IDatatype<?> getPrimitiveDatatype()
{
return XsdDecimal.getInstance();
}
@Override
public Number getValue(String lexicalForm)
{
return parseLexicalForm(lexicalForm);
}
@Override
public boolean isPrimitive()
{
return false;
}
/**
* Parse and validate a lexical form of the literal.
*
* @param lexicalForm
* the typing form of the literal.
* @return A <code>Number</code> representation of the literal
* @throws InvalidLexicalFormException
* if the literal form is invalid or the value is out of range
*/
protected abstract Number parseLexicalForm(String lexicalForm) throws InvalidLexicalFormException;
}<|fim▁end|> | |
<|file_name|>pandas_crosstab.py<|end_file_name|><|fim▁begin|>import pandas as pd
df = pd.read_csv('data/src/titanic_train.csv', index_col=0).drop(['Name', 'Ticket', 'SibSp', 'Parch'], axis=1)
print(df.head())
# Survived Pclass Sex Age Fare Cabin Embarked
# PassengerId
# 1 0 3 male 22.0 7.2500 NaN S
# 2 1 1 female 38.0 71.2833 C85 C
# 3 1 3 female 26.0 7.9250 NaN S
# 4 1 1 female 35.0 53.1000 C123 S
# 5 0 3 male 35.0 8.0500 NaN S
print(pd.crosstab(df['Sex'], df['Pclass']))
# Pclass 1 2 3
# Sex
# female 94 76 144
# male 122 108 347
print(type(pd.crosstab(df['Sex'], df['Pclass'])))
# <class 'pandas.core.frame.DataFrame'>
print(pd.crosstab([df['Sex'], df['Survived']], [df['Pclass'], df['Embarked']]))
# Pclass 1 2 3
# Embarked C Q S C Q S C Q S
# Sex Survived
# female 0 1 0 2 0 0 6 8 9 55
# 1 42 1 46 7 2 61 15 24 33
# male 0 25 1 51 8 1 82 33 36 231
# 1 17 0 28 2 0 15 10 3 34
print(pd.crosstab([df['Sex'], df['Survived']], [df['Pclass'], df['Embarked']],
margins=True))
# Pclass 1 2 3 All
# Embarked C Q S C Q S C Q S
# Sex Survived
# female 0 1 0 2 0 0 6 8 9 55 81
# 1 42 1 46 7 2 61 15 24 33 231
# male 0 25 1 51 8 1 82 33 36 231 468
# 1 17 0 28 2 0 15 10 3 34 109
# All 85 2 127 17 3 164 66 72 353 889
print(pd.crosstab([df['Sex'], df['Survived']], [df['Pclass'], df['Embarked']],
margins=True, margins_name='Total'))
# Pclass 1 2 3 Total
# Embarked C Q S C Q S C Q S
# Sex Survived
# female 0 1 0 2 0 0 6 8 9 55 81
# 1 42 1 46 7 2 61 15 24 33 231
# male 0 25 1 51 8 1 82 33 36 231 468
# 1 17 0 28 2 0 15 10 3 34 109
# Total 85 2 127 17 3 164 66 72 353 889
print(pd.crosstab(df['Sex'], df['Pclass'], margins=True, normalize=True))
# Pclass 1 2 3 All
# Sex
# female 0.105499 0.085297 0.161616 0.352413
# male 0.136925 0.121212 0.389450 0.647587
# All 0.242424 0.206510 0.551066 1.000000<|fim▁hole|>print(pd.crosstab(df['Sex'], df['Pclass'], margins=True, normalize='index'))
# Pclass 1 2 3
# Sex
# female 0.299363 0.242038 0.458599
# male 0.211438 0.187175 0.601386
# All 0.242424 0.206510 0.551066
print(pd.crosstab(df['Sex'], df['Pclass'], margins=True, normalize='columns'))
# Pclass 1 2 3 All
# Sex
# female 0.435185 0.413043 0.293279 0.352413
# male 0.564815 0.586957 0.706721 0.647587
# print(pd.crosstab(df['Sex'], [df['Pclass'], df['Embarked']],
# margins=True, normalize=True))
# TypeError: Expected tuple, got str
print(pd.crosstab(df['Sex'], [df['Pclass'], df['Embarked']],
margins=True, normalize='index'))
# Pclass 1 2 \
# Embarked C Q S C Q S
# Sex
# female 0.137821 0.003205 0.153846 0.022436 0.006410 0.214744
# male 0.072790 0.001733 0.136915 0.017331 0.001733 0.168111
# All 0.095613 0.002250 0.142857 0.019123 0.003375 0.184477
# Pclass 3
# Embarked C Q S
# Sex
# female 0.073718 0.105769 0.282051
# male 0.074523 0.067591 0.459272
# All 0.074241 0.080990 0.397075
# print(pd.crosstab(df['Sex'], [df['Pclass'], df['Embarked']],
# margins=True, normalize='columns'))
# ValueError: Length of new names must be 1, got 2
print(pd.crosstab(df['Sex'], [df['Pclass'], df['Embarked']], normalize=True))
# Pclass 1 2 \
# Embarked C Q S C Q S
# Sex
# female 0.048369 0.001125 0.053993 0.007874 0.002250 0.075366
# male 0.047244 0.001125 0.088864 0.011249 0.001125 0.109111
# Pclass 3
# Embarked C Q S
# Sex
# female 0.025872 0.03712 0.098988
# male 0.048369 0.04387 0.298088
print(pd.crosstab(df['Sex'], [df['Pclass'], df['Embarked']], normalize='index'))
# Pclass 1 2 \
# Embarked C Q S C Q S
# Sex
# female 0.137821 0.003205 0.153846 0.022436 0.006410 0.214744
# male 0.072790 0.001733 0.136915 0.017331 0.001733 0.168111
# Pclass 3
# Embarked C Q S
# Sex
# female 0.073718 0.105769 0.282051
# male 0.074523 0.067591 0.459272
print(pd.crosstab(df['Sex'], [df['Pclass'], df['Embarked']], normalize='columns'))
# Pclass 1 2 3 \
# Embarked C Q S C Q S C
# Sex
# female 0.505882 0.5 0.377953 0.411765 0.666667 0.408537 0.348485
# male 0.494118 0.5 0.622047 0.588235 0.333333 0.591463 0.651515
# Pclass
# Embarked Q S
# Sex
# female 0.458333 0.249292
# male 0.541667 0.750708<|fim▁end|> | |
<|file_name|>attribute-form-option-row-view.js<|end_file_name|><|fim▁begin|>define(function(require) {
'use strict';
const _ = require('underscore');
const $ = require('jquery');
const __ = require('orotranslation/js/translator');
const BaseView = require('oroui/js/app/views/base/view');
const Confirmation = require('oroui/js/delete-confirmation');
const AttributeFormOptionRowView = BaseView.extend({
tagName: 'tr',
events: {
'click .delete-form-option': 'triggerRemove',
'click .edit-form-option': 'triggerEdit'
},
options: {
template: null,
data: {
label: null,
property_path: null,
required: false
}
},
/**
* @inheritDoc
*/
constructor: function AttributeFormOptionRowView(options) {
AttributeFormOptionRowView.__super__.constructor.call(this, options);
},
/**
* @inheritDoc
*/<|fim▁hole|> initialize: function(options) {
this.options = _.defaults(options || {}, this.options);
const template = this.options.template || $('#attribute-form-option-row-template').html();
this.template = _.template(template);
},
update: function(data) {
this.options.data = data;
this.render();
},
triggerEdit: function(e) {
e.preventDefault();
this.trigger('editFormOption', this.options.data);
},
triggerRemove: function(e) {
e.preventDefault();
const confirm = new Confirmation({
content: __('Are you sure you want to delete this field?')
});
confirm.on('ok', _.bind(function() {
this.trigger('removeFormOption', this.options.data);
}, this));
confirm.open();
},
getTemplateData: function() {
return this.options.data;
}
});
return AttributeFormOptionRowView;
});<|fim▁end|> | |
<|file_name|>create_sleuth_condor_batch.py<|end_file_name|><|fim▁begin|>#!/usr/bin/env python
import os
import argparse
from os.path import join, abspath
from sh import which
import sleuth_automation as sleuth
from jinja2 import Environment, PackageLoader
description = """
This script will create a condor submit file for a batch of SLEUTH runs.
"""
parser = argparse.ArgumentParser(description=description)
parser.add_argument('--sleuth_path', required=True,
help='path to SLEUTH directory')
parser.add_argument('--region_dir',
required=True,
help='path to region dir containing locations')
parser.add_argument('--mpi_cores', default=0, type=int,
help="""number of cores available for MPI,
if 0 (default) don't use mpi""")
parser.add_argument('--predict_end', type=int, required=True,
help='ending year of prediction interval')<|fim▁hole|>
if args.mpi_cores > 0:
sleuth.configure(sleuth_path=args.sleuth_path,
use_mpi=True, mpi_cores=args.mpi_cores)
else:
sleuth.configure(sleuth_path=args.sleuth_path,
use_mpi=False)
r = sleuth.Region(region_dir=args.region_dir,
predict_end=args.predict_end,
monte_carlo_iterations=args.montecarlo_iterations)
print("wrote " + r.build())<|fim▁end|> | parser.add_argument('--montecarlo_iterations', type=int, default=50,
help='monte carlo iterations')
args = parser.parse_args()
|
<|file_name|>PartListElementJsonUnmarshaller.java<|end_file_name|><|fim▁begin|>/*
* Copyright 2012-2017 Amazon.com, Inc. or its affiliates. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License"). You may not use this file except in compliance with
* the License. A copy of the License is located at
*
* http://aws.amazon.com/apache2.0
*
* or in the "license" file accompanying this file. This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR
* CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions
* and limitations under the License.
*/
package com.amazonaws.services.glacier.model.transform;
import java.math.*;
import javax.annotation.Generated;
import com.amazonaws.services.glacier.model.*;
import com.amazonaws.transform.SimpleTypeJsonUnmarshallers.*;
import com.amazonaws.transform.*;
import com.fasterxml.jackson.core.JsonToken;
import static com.fasterxml.jackson.core.JsonToken.*;
/**
* PartListElement JSON Unmarshaller
*/
@Generated("com.amazonaws:aws-java-sdk-code-generator")
public class PartListElementJsonUnmarshaller implements Unmarshaller<PartListElement, JsonUnmarshallerContext> {
public PartListElement unmarshall(JsonUnmarshallerContext context) throws Exception {
PartListElement partListElement = new PartListElement();
int originalDepth = context.getCurrentDepth();
String currentParentElement = context.getCurrentParentElement();
int targetDepth = originalDepth + 1;
JsonToken token = context.getCurrentToken();
if (token == null)
token = context.nextToken();
if (token == VALUE_NULL) {
return null;
}
while (true) {
if (token == null)
break;
if (token == FIELD_NAME || token == START_OBJECT) {
if (context.testExpression("RangeInBytes", targetDepth)) {
context.nextToken();<|fim▁hole|> partListElement.setRangeInBytes(context.getUnmarshaller(String.class).unmarshall(context));
}
if (context.testExpression("SHA256TreeHash", targetDepth)) {
context.nextToken();
partListElement.setSHA256TreeHash(context.getUnmarshaller(String.class).unmarshall(context));
}
} else if (token == END_ARRAY || token == END_OBJECT) {
if (context.getLastParsedParentElement() == null || context.getLastParsedParentElement().equals(currentParentElement)) {
if (context.getCurrentDepth() <= originalDepth)
break;
}
}
token = context.nextToken();
}
return partListElement;
}
private static PartListElementJsonUnmarshaller instance;
public static PartListElementJsonUnmarshaller getInstance() {
if (instance == null)
instance = new PartListElementJsonUnmarshaller();
return instance;
}
}<|fim▁end|> | |
<|file_name|>parser.rs<|end_file_name|><|fim▁begin|>use std::error::Error;
use std::fmt;
use unicode_width::UnicodeWidthStr;
#[derive(Clone)]
pub enum AST {
Output,
Input,
Loop(Vec<AST>),
Right,
Left,
Inc,
Dec,
}
#[derive(Debug)]
pub enum ParseErrorType {
UnclosedLoop,
ExtraCloseLoop,
}
use ParseErrorType::*;
#[derive(Debug)]
pub struct ParseError {
err: ParseErrorType,
line: Vec<u8>,
linenum: usize,
offset: usize,
}
impl ParseError {
fn new(err: ParseErrorType, code: &[u8], i: usize) -> Self {
let (line, linenum, offset) = find_line(code, i);
Self {
err,
line: line.into(),
linenum,
offset,
}
}
}
impl fmt::Display for ParseError {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
let line = String::from_utf8_lossy(&self.line);
let width = UnicodeWidthStr::width(&line[0..self.offset]);
match self.err {
UnclosedLoop => {
writeln!(f, "reached EOF with unterminated loop")?;
writeln!(f, "Loop started at {}:{}", self.linenum, self.offset)?;
}
ExtraCloseLoop => {
writeln!(
f,
"[ found at {}:{} when not in a loop",
self.linenum, self.offset
)?;
}
};
writeln!(f, "{}", line)?;
write!(f, "{}^", " ".repeat(width))?;
Ok(())
}
}
impl Error for ParseError {}
/// Parses a string of brainfuck code to unoptimized AST
pub fn parse(code: &[u8]) -> Result<Vec<AST>, ParseError> {
let mut i = 0;
_parse(code, &mut i, 0)
}
<|fim▁hole|>
let mut tokens = Vec::new();
while let Some(c) = code.get(*i) {
*i += 1;
match c {
b'+' => tokens.push(AST::Inc),
b'-' => tokens.push(AST::Dec),
b'>' => tokens.push(AST::Right),
b'<' => tokens.push(AST::Left),
b'[' => tokens.push(AST::Loop(_parse(code, i, level + 1)?)),
b']' => {
return if level == 0 {
Err(ParseError::new(ExtraCloseLoop, code, *i - 1))
} else {
Ok(tokens)
};
}
b',' => tokens.push(AST::Input),
b'.' => tokens.push(AST::Output),
_ => (),
};
}
if level != 0 {
Err(ParseError::new(UnclosedLoop, code, start))
} else {
Ok(tokens)
}
}
fn find_line(code: &[u8], i: usize) -> (&[u8], usize, usize) {
let offset = code[0..i].iter().rev().take_while(|x| **x != b'\n').count();
let end = i + code[i..].iter().take_while(|x| **x != b'\n').count();
let linenum = code[0..(i - offset)]
.iter()
.filter(|x| **x == b'\n')
.count();
(&code[(i - offset)..end], linenum, offset)
}<|fim▁end|> | fn _parse(code: &[u8], i: &mut usize, level: u32) -> Result<Vec<AST>, ParseError> {
// Starting [ of the loop
let start = i.saturating_sub(1); |
<|file_name|>post.js<|end_file_name|><|fim▁begin|>// (C) Copyright 2014-2016 Hewlett Packard Enterprise Development Company, L.P.
import path from 'path';
import fecha from 'fecha';
import lunr from 'lunr';
import GithubPostDAO from '../persistance/GithubPostDAO';
import PostDAO from '../persistance/PostDAO';
export function loadPosts () {
return new PostDAO().getAll();
}
export function getPostById (id) {
return new PostDAO().getById(id);
}
export function postsMonthMap (posts) {
return posts.reduce((postMap, post) => {
let monthLabel = fecha.format(
new Date(post.createdAt), 'MMMM, YYYY'
);
if (postMap.hasOwnProperty(monthLabel)) {
postMap[monthLabel].push(post);
} else {
postMap[monthLabel] = [post];
}
return postMap;
}, {});
}
export function filterPostsMapByMonth (postsByMonth, year, month) {
let monthLabel = fecha.format(
new Date(year, month - 1), 'MMMM, YYYY'
);
let archive = {};
if (monthLabel in postsByMonth) {
archive[monthLabel] = postsByMonth[monthLabel];
}
return archive;
}
export function buildSearchIndex (posts) {
const index = lunr(function () {
this.field('title', {boost: 10});
this.field('author', {boost: 2});
this.field('content', {boost: 5});
this.field('tags');
this.ref('id');
});
posts.forEach((post) => index.add(post));
return index;
}
export function addPost (content, metadata, images) {
const titleId = metadata.title
.replace(/ /g, '-').replace(/[^a-zA-Z0-9\-]/g, '').toLowerCase();
const today = new Date();
const idDateFormat = fecha.format(today, 'YYYY/MM/DD');
const folderDateFormat = fecha.format(today, 'YYYY-MM-DD');
metadata.id = `${idDateFormat}/${titleId}`;
metadata.createdAt = today;
const postFolderName = `${folderDateFormat}__${titleId}`;
if (process.env.BLOG_PERSISTANCE === 'github') {
return new GithubPostDAO(postFolderName, content, metadata, images).add();
} else {
return new PostDAO(postFolderName, content, metadata, images).add(
path.resolve(path.join(__dirname, '../../'))
);
}
}
export function editPost (content, metadata, images) {
const titleId = metadata.title
.replace(/ /g, '-').replace(/[^a-zA-Z0-9\-]/g, '').toLowerCase();
const folderDateFormat = fecha.format(
new Date(metadata.createdAt), 'YYYY-MM-DD'
);
const postFolderName = `${folderDateFormat}__${titleId}`;
if (process.env.BLOG_PERSISTANCE === 'github') {
return new GithubPostDAO(postFolderName, content, metadata, images).edit();
} else {
return new PostDAO(postFolderName, content, metadata, images).edit(
path.resolve(path.join(__dirname, '../../'))
);
}
}
function getPostFolderName (id) {
const idGroup = id.split('/');
const postTitle = idGroup[idGroup.length - 1];
idGroup.pop();
const postDate = idGroup.join('-');
return `${postDate}__${postTitle}`;
}
export function deletePost (id) {
const postFolderName = getPostFolderName(id);
if (process.env.BLOG_PERSISTANCE === 'github') {
return new GithubPostDAO(postFolderName).delete();
} else {
return new PostDAO(postFolderName).delete(
path.resolve(path.join(__dirname, '../../'))
);
}
}
export function getAllPosts () {
return new GithubPostDAO().getAll();
}
export function cancelChange (post) {
const titleId = post.title
.replace(/ /g, '-').replace(/[^a-zA-Z0-9\-]/g, '').toLowerCase();<|fim▁hole|> );
const postFolderName = `${folderDateFormat}__${titleId}`;
return new GithubPostDAO(postFolderName).cancelChange(post.action);
}
export function getPendingPost (id) {
const postFolderName = getPostFolderName(id);
return new GithubPostDAO(postFolderName).getPending();
}
export function getImageAsBase64 (imagePath) {
const postPath = imagePath.split('server/posts/')[1];
const postFolderGroup = postPath.split('/images/');
const postFolderName = postFolderGroup[0];
const imageName = decodeURI(postFolderGroup[1]);
return new GithubPostDAO(postFolderName).getImageAsBase64(imageName);
}<|fim▁end|> | const folderDateFormat = fecha.format(
new Date(post.createdAt), 'YYYY-MM-DD' |
<|file_name|>fundamental-react-tests.tsx<|end_file_name|><|fim▁begin|>import {
ActionBar,
Avatar,
Breadcrumb,
BusyIndicator,
Button,
ButtonGroup,
Calendar,
ComboboxInput,
Counter,
DatePicker,
Dialog,
Checkbox,
FormFieldset,
FormGroup,
FormInput,
FormItem,
FormLabel,
FormLegend,
FormRadioGroup,
FormRadioItem,
FormSelect,
FormSet,
FormTextarea,
Icon,
Image,
InfoLabel,
InlineHelp,
InputGroup,
LayoutGrid,
LayoutPanel,
Link,
List,
LocalizationEditor,
Menu,
MessageStrip,
MultiInput,
ObjectStatus,
Pagination,
Popover,
SearchInput,
Select,
Shellbar,
SideNav,
StepInput,
Switch,
Tab,
TabGroup,
Table,
Tile,
Time,
TimePicker,
Title,
Token,
TreeView,
} from "fundamental-react";
import * as React from "react";
const counter = <Counter>101</Counter>;
const actionBars = (
<div>
<ActionBar
actions={(
<Popover
body={
<Menu>
<Menu.List>
<Menu.Item url='#'>Option 1</Menu.Item>
<Menu.Item url='#'>Option 2</Menu.Item>
<Menu.Item url='#'>Option 3</Menu.Item>
<Menu.Item url='#'>Option 4</Menu.Item>
</Menu.List>
</Menu>
}
control={<Button glyph='vertical-grip' option='transparent' />}
placement='bottom-end' />
)}
description={'Action Bar Description'}
title={'Page Title'}
/>
<ActionBar
actions={(
<>
<Button>Button</Button>
<Button option='emphasized'>Button</Button>
</>
)}
description={'Action Bar Description'}
title={'Page Title'}
/>
<ActionBar
actions={(
<>
<Button>Button</Button>
<Button option='emphasized'>Button</Button>
</>
)}
description={'Action Bar Description'}
onBackClick={() => null}
title={'Page Title'}
/>
</div>
);
const avatar = (
<Avatar color={1} tile={false} />
);
const breadcrumb = (
<Breadcrumb>
<Breadcrumb.Item name="Link Text" url="#" />
<Breadcrumb.Item name="Link Text" url="#" />
<Breadcrumb.Item name="Link Text" url="#" />
</Breadcrumb>
);
const busyIndicator = (
<BusyIndicator show/>
);
const buttons = (
<div>
<Button onClick={function w() {}} option="emphasized">
Emphasized Button
</Button>
<Button onClick={function w() {}}>Regular Button</Button>
<Button onClick={function w() {}} option="transparent">
Light Button
</Button>
<Button>Action Button</Button>
<Button type="standard">Standard Button</Button>
<Button type="positive">Positive Button</Button>
<Button type="medium">Medium Button</Button>
<Button type="negative">Negative Button</Button>
<Button glyph="cart" option="emphasized">
Add to Cart
</Button>
<Button glyph="cart">Add to Cart</Button>
<Button glyph="filter" option="transparent">
Add to Cart
</Button>
<Button glyph="accept" option="emphasized" type="positive">
Approve
</Button>
<Button glyph="decline" option="emphasized" type="negative">
Reject
</Button>
<Button glyph="alert" option="emphasized" type="medium">
Review
</Button>
<br />
<br />
<br />
<Button glyph="cart" option="emphasized" />
<Button glyph="cart" />
<Button glyph="filter" option="transparent" />
<Button glyph="accept" option="emphasized" type="positive" />
<Button glyph="decline" option="emphasized" type="negative" />
<Button glyph="alert" option="emphasized" type="medium" />
<Button>Default</Button>
<Button compact>Compact</Button>
<Button option="emphasized">Normal State</Button>
<Button option="emphasized" selected>
Selected State
</Button>
<Button disabled option="emphasized">
Disabled State
</Button>
<br />
<br />
<Button>Normal State</Button>
<Button selected>Selected State</Button>
<Button disabled>Disabled State</Button>
<br />
<br />
<Button option="transparent">Normal State</Button>
<Button option="transparent" selected>
Selected State
</Button>
<Button disabled option="transparent">
Disabled State
</Button>
<br />
<br />
<Button type="standard">Normal State</Button>
<Button selected type="standard">
Selected State
</Button>
<Button disabled type="standard">
Disabled State
</Button>
<br />
<br />
<Button type="positive">Normal State</Button>
<Button selected type="positive">
Selected State
</Button>
<Button disabled type="positive">
Disabled State
</Button>
<br />
<br />
<Button type="medium">Normal State</Button>
<Button selected type="medium">
Selected State
</Button>
<Button disabled type="medium">
Disabled State
</Button>
<br />
<br />
<Button type="negative">Normal State</Button>
<Button selected type="negative">
Selected State
</Button>
<Button disabled type="negative">
Disabled State
</Button>
</div>
);
const buttonGroups = (
<div>
<ButtonGroup>
<Button glyph="survey" />
<Button glyph="pie-chart" selected />
<Button glyph="pool" />
</ButtonGroup>
<ButtonGroup>
<Button compact>Left</Button>
<Button compact selected>
Middle
</Button>
<Button compact>Right</Button>
</ButtonGroup>
</div>
);
const calendars = (
<div>
<Calendar />
<Calendar
disableBeforeDate={new Date("2018-08-02T22:00:00.000Z")}
disableWeekends
/>
<Calendar
blockedDates={[
new Date("2018-01-31T23:00:00.000Z"),
new Date("2018-04-02T22:00:00.000Z")
]}
disableWeekday={["Monday", "Tuesday"]}
/>
<Calendar enableRangeSelection />
</div>
);
const comboboxInput = (
<ComboboxInput
list=''
menu={
<Menu>
<Menu.List>
<Menu.Item url="/">Pear</Menu.Item>
<Menu.Item url="/">Strawberry</Menu.Item>
<Menu.Item url="/">Raspberry</Menu.Item>
<Menu.Item isLink url="/">
+ New Item
</Menu.Item>
</Menu.List>
</Menu>
}
placeholder="Select Fruit"
/>
);
const contextualMenus = (
<div>
<Popover
body={
<Menu>
<Menu.List>
<Menu.Item url="/">Option 1</Menu.Item>
<Menu.Item url="/">Option 2</Menu.Item>
<Menu.Item url="/">Option 3</Menu.Item>
<Menu.Item url="/">Option 4</Menu.Item>
</Menu.List>
</Menu>
}
control={<Button glyph="vertical-grip" option="transparent" />}
noArrow
/>
<Popover
body={
<Menu>
<Menu.List>
<Menu.Item url="/">Option 1</Menu.Item>
<Menu.Item url="/">Option 2</Menu.Item>
<Menu.Item url="/">Option 3</Menu.Item>
<Menu.Item url="/">Option 4</Menu.Item>
</Menu.List>
</Menu>
}
control={<Button>More</Button>}
noArrow
/>
<Popover
body={
<Menu>
<Menu.List>
<Menu.Item url="/">Option 1</Menu.Item>
<Menu.Item url="/">Option 2</Menu.Item>
<Menu.Item url="/">Option 3</Menu.Item>
<Menu.Item url="/">Option 4</Menu.Item>
</Menu.List>
</Menu>
}
control={<Button option="transparent">More</Button>}
noArrow
/>
</div>
);
const datePickers = (
<div>
<DatePicker
disableBeforeDate={new Date("2018-12-23T23:00:00.000Z")}
disableWeekends
/>
<DatePicker
blockedDates={[
new Date("2018-11-30T23:00:00.000Z"),
new Date("2018-12-22T23:00:00.000Z")
]}
compact
disableWeekday={["Monday", "Tuesday"]}
/>
<DatePicker disableFutureDates enableRangeSelection />
<DatePicker compact disablePastDates enableRangeSelection />
</div>
);
const dialogs = (
<div>
<button className="fd-button" onClick={function w() {}}>
Show Information Modal
</button>
<Dialog actions={[]} onClose={function w() {}} title="Product Added">
<div>
<b>The new product have been added to your catalog.</b>
<br />
<br />
Automatic Product ID:
<b>PD-3465334</b>
<br />
<br />
Expiration date:
<b>13/03/2018</b>
<br />
<br />
</div>
</Dialog>
<Dialog
actions={[
<>
<Button onClick={function w() {}} type="standard">
No Way
</Button>
<Button onClick={function w() {}}>Sure</Button>
</>
]}
onClose={function w() {}}
title="Delete"
>
<div>
Do you want to delete item
<b>X</b>?
</div>
</Dialog>
<Dialog
actions={[
<>
<Button onClick={function w() {}} type="standard">
Cancel
</Button>
<Button onClick={function w() {}}>Invite</Button>
</>
]}
onClose={function w() {}}
title="Invite user"
>
<div className="fd-form__group">
<div className="fd-form__item">
<label className="fd-form__label is-required">Email</label>
<input
className="fd-form__control"
onChange={function w() {}}
type="text"
value=""
/>
</div>
</div>
</Dialog>
</div>
);
const forms = (
<div>
<FormSet>
<FormItem>
<FormLabel htmlFor="input-1">Default Input</FormLabel>
<FormInput
id="input-1"
placeholder="Field placeholder text"
type="text"
/>
</FormItem>
</FormSet>
<FormSet>
<FormItem>
<FormLabel htmlFor="input-2" required>
Required Input
</FormLabel>
<FormInput
id="input-2"
placeholder="Field placeholder text"
type="text"
/>
</FormItem>
</FormSet>
<FormSet>
<FormItem>
<FormLabel htmlFor="input-3" required>
Password
</FormLabel>
<FormInput
id="input-3"
placeholder="Field placeholder text"
type="password"
/>
</FormItem>
</FormSet>
<FormSet>
<FormItem>
<FormLabel htmlFor="textarea-1" required>
Text area
</FormLabel>
<FormTextarea
defaultValue=" Pellentesque metus lacus commodo eget justo ut rutrum varius nunc."
id="textarea-1"
/>
</FormItem>
</FormSet>
<FormSet>
<FormItem>
<FormLabel htmlFor="input-4">
Input with inline help
<span className="fd-inline-help fd-has-float-right">
<span className="fd-inline-help__content fd-inline-help__content--bottom-right">
Lorem ipsum dolor sit amet, consectetur adipiscing.
</span>
</span>
</FormLabel>
<FormInput id="input-4" type="text" />
</FormItem>
</FormSet>
<FormSet>
<FormItem>
<FormLabel htmlFor="input-5">Input with Help Message</FormLabel>
<FormInput id="input-5" type="text" />
</FormItem>
</FormSet>
<FormSet>
<FormItem>
<FormLabel htmlFor="OatmD552">Normal Input</FormLabel>
<FormInput
id="OatmD552"
placeholder="Field placeholder text"
type="text"
/>
</FormItem>
</FormSet>
<FormSet>
<FormItem>
<FormLabel htmlFor="OatmD553">Valid Input</FormLabel>
<FormInput
id="OatmD553"
placeholder="Field placeholder text"
state="valid"
type="text"
/>
</FormItem>
</FormSet>
<FormSet>
<FormItem>
<FormLabel htmlFor="OatmD554">Invalid Input</FormLabel>
<FormInput
id="OatmD554"
placeholder="Field placeholder text"
state="invalid"
type="text"
/>
</FormItem>
</FormSet>
<FormSet>
<FormItem>
<FormLabel htmlFor="OatmD555">Warning Input</FormLabel>
<FormInput
id="OatmD555"
placeholder="Field placeholder text"
state="warning"
type="text"
/>
</FormItem>
</FormSet>
<FormSet>
<FormItem>
<FormLabel htmlFor="OatmD557">Disabled Input</FormLabel>
<FormInput
disabled
id="OatmD557"
placeholder="Field placeholder text"
type="text"
/>
</FormItem>
</FormSet>
<FormSet>
<FormItem>
<FormLabel htmlFor="OatmD558">Readonly Input</FormLabel>
<FormInput
id="OatmD558"
placeholder="Field placeholder text"
readOnly
type="text"
/>
</FormItem>
</FormSet>
<FormSet>
<FormItem>
<FormLabel htmlFor="select-1">Default Select</FormLabel>
<FormSelect id="select-1">
<option value="1">
Duis malesuada odio volutpat elementum
</option>
<option value="2">Suspendisse ante ligula</option>
<option value="3">
Sed bibendum sapien at posuere interdum
</option>
</FormSelect>
</FormItem>
</FormSet>
<FormSet>
<FormItem>
<FormLabel htmlFor="select-1">Default Select</FormLabel>
<FormSelect disabled id="select-1">
<option value="1">
Duis malesuada odio volutpat elementum
</option>
<option value="2">Suspendisse ante ligula</option>
<option value="3">
Sed bibendum sapien at posuere interdum
</option>
</FormSelect>
</FormItem>
</FormSet>
<FormFieldset>
<FormLegend>Radio buttons</FormLegend>
<FormRadioGroup>
<FormRadioItem id="radio-1">Option 1</FormRadioItem>
<FormRadioItem id="radio-2">Option 2</FormRadioItem>
<FormRadioItem defaultChecked id="radio-3">
Option 3
</FormRadioItem>
</FormRadioGroup>
</FormFieldset>
<FormFieldset>
<FormLegend>Radio buttons disabled</FormLegend>
<FormRadioGroup>
<FormRadioItem defaultChecked disabled id="radio-4">
Option 1
</FormRadioItem>
<FormRadioItem disabled id="radio-5">
Option 2
</FormRadioItem>
<FormRadioItem disabled id="radio-6">
Option 3
</FormRadioItem>
</FormRadioGroup>
</FormFieldset>
<FormFieldset>
<FormLegend>Inline Radio buttons</FormLegend>
<FormRadioGroup inline>
<FormRadioItem id="radio-7">Option 1</FormRadioItem>
<FormRadioItem defaultChecked id="radio-8">
Option 2
</FormRadioItem>
<FormRadioItem id="radio-9">Option 3</FormRadioItem>
</FormRadioGroup>
</FormFieldset>
<FormFieldset>
<FormLegend>Checkboxes</FormLegend>
<FormItem isCheck>
<FormInput
id="checkbox-1"
name="checkbox-name-1"
type="checkbox"
value=""
/>
<FormLabel htmlFor="checkbox-1">Option One</FormLabel>
</FormItem>
<FormItem isCheck>
<FormInput
id="checkbox-2"
name="checkbox-name-2"
type="checkbox"
value=""
/>
<FormLabel htmlFor="checkbox-2">Option Two</FormLabel>
</FormItem>
<FormItem isCheck>
<FormInput
id="checkbox-3"
name="checkbox-name-3"
type="checkbox"
value=""
/>
<FormLabel htmlFor="checkbox-3">Option Three</FormLabel>
</FormItem>
</FormFieldset>
<FormFieldset>
<FormLegend>Inline Checkbox buttons</FormLegend>
<FormItem isCheck isInline>
<FormLabel htmlFor="checkbox-4">
<FormInput
id="checkbox-4"
name="checkbox-name-4"
type="checkbox"
value=""
/>
Option One
</FormLabel>
</FormItem>
<FormItem isCheck isInline>
<FormLabel htmlFor="checkbox-5">
<FormInput
id="checkbox-5"
name="checkbox-name-5"
type="checkbox"
value=""
/>
Option Two
</FormLabel>
</FormItem>
<FormItem isCheck isInline>
<FormLabel htmlFor="checkbox-6">
<Checkbox
id="checkbox-6"
name="checkbox-name-6"
type="checkbox"
value=""
/>
Option Three
</FormLabel>
</FormItem>
</FormFieldset>
</div>
);
const icons = (
<div>
<Icon glyph="cart" size="s" />
<Icon glyph="cart" />
<Icon glyph="cart" size="m" />
<Icon glyph="cart" size="l" />
<Icon glyph="cart" size="xl" />
</div>
);
const images = (
<div>
<Image photo="https://placeimg.com/400/400/nature" size="s" />
<Image photo="https://placeimg.com/400/400/nature" size="m" />
<Image photo="https://placeimg.com/400/400/nature" size="l" />
<Image
photo="https://placeimg.com/400/400/nature"
size="s"
type="circle"
/>
<Image
photo="https://placeimg.com/400/400/nature"
size="m"
type="circle"
/>
<Image
photo="https://placeimg.com/400/400/nature"
size="l"
type="circle"
/>
</div>
);
const infoLabels = (
<div>
<InfoLabel>Default</InfoLabel>
<InfoLabel glyph='key'>Icon</InfoLabel>
<InfoLabel glyph='upload-to-cloud' />
<InfoLabel numeric>1</InfoLabel>
<InfoLabel numeric>10000</InfoLabel>
<InfoLabel color={1}>Default</InfoLabel>
<InfoLabel color={2}>Default</InfoLabel>
<InfoLabel color={3}>Default</InfoLabel>
<InfoLabel color={4}>Default</InfoLabel>
<InfoLabel color={5}>Default</InfoLabel>
<InfoLabel color={6}>Default</InfoLabel>
<InfoLabel color={7}>Default</InfoLabel>
<InfoLabel color={8}>Default</InfoLabel>
<InfoLabel color={9}>Default</InfoLabel>
<InfoLabel color={10}>Default</InfoLabel>
</div>
);
const inlineHelp = (
<div>
Bottom Right (Default)
<InlineHelp
placement="bottom-right"
text="Lorem ipsum dolor sit amet, consectetur adipiscing."
/>
</div>
);
const formGroup = <FormGroup />;
const inputGroup = (
<div>
<FormGroup>
<FormLabel>Left Aligned Text Addon</FormLabel>
<FormItem>
<InputGroup
addon="$"
addonPos="before"
inputValue="1234567890"
/>
</FormItem>
</FormGroup>
<br />
<FormGroup>
<FormLabel>Right Aligned Text Addon</FormLabel>
<FormItem>
<InputGroup addon="€" inputValue="1234567890" />
</FormItem>
</FormGroup>
<br />
<p>Compact mode</p>
<FormGroup>
<FormLabel>Left Aligned Text Addon</FormLabel>
<FormItem>
<InputGroup
addon="$"
addonPos="before"
compact
inputValue="1234567890"
/>
</FormItem>
</FormGroup>
<br />
<FormGroup>
<FormLabel>Right Aligned Text Addon</FormLabel>
<FormItem>
<InputGroup addon="€" compact inputValue="1234567890" />
</FormItem>
</FormGroup>
<FormGroup>
<FormLabel>Right Aligned Text Addon</FormLabel>
<FormItem>
<InputGroup inputType="number" inputValue={100} />
</FormItem>
</FormGroup>
<br />
<p>Compact mode</p>
<FormGroup>
<FormLabel>Right Aligned Text Addon</FormLabel>
<FormItem>
<InputGroup compact inputType="number" inputValue={100} />
</FormItem>
</FormGroup>
<FormGroup>
<FormLabel>Search Input</FormLabel>
<FormItem>
<InputGroup inputPlaceholder="Search Term" inputType="search" />
</FormItem>
</FormGroup>
<br />
<p>Compact mode</p>
<FormGroup>
<FormLabel>Search Input</FormLabel>
<FormItem>
<InputGroup
compact
inputPlaceholder="Search Term"
inputType="search"
/>
</FormItem>
</FormGroup>
<br />
<br />
<FormGroup>
<FormLabel>Input with icon on the left</FormLabel>
<FormItem>
<InputGroup
addonPos="before"
glyph="globe"
inputValue="1234567890"
/>
</FormItem>
</FormGroup>
<br />
<p>Compact mode</p>
<FormGroup>
<FormLabel>Input with icon on the left</FormLabel>
<FormItem>
<InputGroup
addonPos="before"
compact
glyph="globe"
inputValue="1234567890"
/>
</FormItem>
</FormGroup>
<br />
<br />
<FormGroup>
<FormLabel>Input with icon on the right</FormLabel>
<FormItem>
<InputGroup glyph="hide" inputValue="1234567890" />
</FormItem>
</FormGroup>
<br />
<p>Compact mode</p>
<FormGroup>
<FormLabel>Input with icon on the right</FormLabel>
<FormItem>
<InputGroup compact glyph="hide" inputValue="1234567890" />
</FormItem>
</FormGroup>
<FormGroup>
<FormLabel>Input with text action</FormLabel>
<FormItem>
<InputGroup actions inputValue="1234567890">
<Button option="transparent">Button</Button>
</InputGroup>
</FormItem>
</FormGroup>
<br />
<p>Compact mode</p>
<FormGroup>
<FormLabel>Input with text action</FormLabel>
<FormItem>
<InputGroup actions compact inputValue="1234567890">
<Button option="transparent">Button</Button>
</InputGroup>
</FormItem>
</FormGroup>
<br />
<br />
<FormGroup>
<FormLabel>Input with icon text action</FormLabel>
<FormItem>
<InputGroup actions>
<Button glyph="navigation-down-arrow" option="transparent" />
</InputGroup>
</FormItem>
</FormGroup>
<br />
<p>Compact mode</p>
<FormGroup>
<FormLabel>Input with icon text action</FormLabel>
<FormItem>
<InputGroup actions compact>
<Button glyph="navigation-down-arrow" option="transparent" />
</InputGroup>
</FormItem>
</FormGroup>
</div>
);
const links = (
<div>
<Link href='#'>Default Link</Link>
<Link disabled href='#'>Disabled Link</Link>
</div>
);
const lists = (
<div>
<List compact noBorder>
<List.Header>List Header</List.Header>
<List.Item>
<List.Text>List Item 1</List.Text>
</List.Item>
<List.Item>
<List.Text>List Item 2</List.Text>
<List.Icon glyph='navigation-right-arrow' />
</List.Item>
<List.Item>
<List.Text>List Item 3</List.Text>
<List.Text secondary>secondary</List.Text>
</List.Item>
<List.Item>
<List.Text>List Item 4</List.Text>
</List.Item>
<List.Footer>List Footer</List.Footer>
</List>
</div>
);
const localizationEditors = (
<div>
<LocalizationEditor
control={{
label: "Localization Editor Label",
language: "EN*",
placeholder: "Enter Label"
}}
menu={[
{
language: "ES",
placeholder: "Enter Label"
},
{
language: "CH",
placeholder: "Enter Label"
},
{
language: "PL",
placeholder: "Enter Label"
}
]}
/>
<br />
<LocalizationEditor
compact
control={{
label: "Localization Editor Compact Mode",
language: "EN*",
placeholder: "Enter Label"
}}
menu={[
{
language: "ES",
placeholder: "Enter Label"
},
{
language: "CH",
placeholder: "Enter Label"
},
{
language: "PL",
placeholder: "Enter Label"
}
]}
/>
<LocalizationEditor
control={{
label: "Localization Editor Label",
language: "EN*",
placeholder: "Enter Label"
}}
menu={[
{
language: "ES",
placeholder: "Enter Label"
},
{
language: "CH",
placeholder: "Enter Label"
},
{
language: "PL",
placeholder: "Enter Label"
}
]}
textarea
/>
</div>
);
const menus = (
<div>
<Menu>
<Menu.List>
<Menu.Item url="/">Option 1</Menu.Item>
<Menu.Item url="/">Option 2</Menu.Item>
<Menu.Item url="/">Option 3</Menu.Item>
<Menu.Item url="/">Option 4</Menu.Item>
</Menu.List>
</Menu>
<Menu>
<Menu.List>
<Menu.Item>
<Menu.Item url="/">Option 1</Menu.Item>
</Menu.Item>
<Menu.Item>
<Menu.Item url="/">Option 1</Menu.Item>
</Menu.Item>
<Menu.Item>
<Menu.Item url="/">Option 1</Menu.Item>
</Menu.Item>
</Menu.List>
<Menu.Group title="Group Header">
<Menu.List>
<Menu.Item>
<Menu.Item url="/">Option 1</Menu.Item>
</Menu.Item>
<Menu.Item>
<Menu.Item url="/">Option 1</Menu.Item>
</Menu.Item>
<Menu.Item>
<Menu.Item url="/">Option 1</Menu.Item>
</Menu.Item>
</Menu.List>
</Menu.Group>
</Menu>
<Menu>
<Menu.List>
<Menu.Item>
<Menu.Item url="/">Option 1</Menu.Item>
</Menu.Item>
<Menu.Item>
<Menu.Item url="/">Option 1</Menu.Item>
</Menu.Item>
<Menu.Item>
<Menu.Item url="/">Option 1</Menu.Item>
</Menu.Item>
<Menu.Item>
<Menu.Item url="/">Option 1</Menu.Item>
</Menu.Item>
</Menu.List>
</Menu>
<Menu addonBefore>
<Menu.List>
<Menu.Item>
<Menu.Item url="/">Option 1</Menu.Item>
</Menu.Item>
<Menu.Item addon="accept">
<Menu.Item url="/">Option 1</Menu.Item>
</Menu.Item>
<Menu.Item>
<Menu.Item url="/">Option 1</Menu.Item>
</Menu.Item>
<Menu.Item>
<Menu.Item url="/">Option 1</Menu.Item>
</Menu.Item>
</Menu.List>
</Menu>
</div>
);
const messageStrips = (
<div>
<MessageStrip
dismissible
link='#'
linkText='link'>
Default MessageStrip
</MessageStrip>
<MessageStrip
dismissible
link='#'
linkText='Learn More'
type='error'>
Error Message.
</MessageStrip>
<MessageStrip noGlyph>
Error Message with no icon
</MessageStrip>
</div>
);
const multiInputs = (
<div>
<MultiInput
data={[
"Apple",
"Apricot",
"Acai",
"African Moringa",
"Bearberry",
"Bilberry",
"Blood orange",
"Barbadine",
"Barbados cherry",
"Balsam Apple",
"Chokeberry",
"Cranberry",
"Cupuacu"
]}
onTagsUpdate={function w() {}}
placeHolder="Select a Fruit"
/>
<MultiInput
compact
data={[
"Apple",
"Apricot",
"Acai",
"African Moringa",
"Bearberry",
"Bilberry",
"Blood orange",
"Barbadine",
"Barbados cherry",
"Balsam Apple",
"Chokeberry",
"Cranberry",
"Cupuacu"
]}
onTagsUpdate={function w() {}}
placeHolder="Select a Fruit"
/>
</div>
);
const objectStatus = (
<ObjectStatus indication={1} size={'l'}/>
);
const paginations = (
<div>
<Pagination itemsTotal={101} onClick={function w() {}} />
<Pagination
initialPage={11}
itemsTotal={101}
onClick={function w() {}}
/>
<Pagination
itemsPerPage={25}
itemsTotal={101}
onClick={function w() {}}
/>
<Pagination
displayTotal={false}
itemsTotal={101}
onClick={function w() {}}
/>
<Pagination
itemsTotal={101}
onClick={function w() {}}
totalText="Dalmations"
/>
</div>
);
const layoutPanel = (
<LayoutPanel>
<LayoutPanel.Header>
LayoutPanel Head
</LayoutPanel.Header>
<LayoutPanel.Body>
LayoutPanel Body
</LayoutPanel.Body>
</LayoutPanel>
);
const popovers = (
<div>
<div>
<div className="fd-container">
<div className="fd-col--shift-3 fd-col--2 fd-has-text-align-center">
<Popover
body={
<Menu>
<Menu.List>
<Menu.Item url="/">Option 1</Menu.Item>
<Menu.Item url="/">Option 2</Menu.Item>
<Menu.Item url="/">Option 3</Menu.Item>
<Menu.Item url="/">Option 4</Menu.Item>
</Menu.List>
</Menu>
}
control={
<Button
glyph="navigation-up-arrow"
option="transparent"
/>
}
placement="top-start"
/>
</div>
<div className="fd-col--2 fd-has-text-align-center">
<Popover
body={
<Menu>
<Menu.List>
<Menu.Item url="/">Option 1</Menu.Item>
<Menu.Item url="/">Option 2</Menu.Item>
<Menu.Item url="/">Option 3</Menu.Item>
<Menu.Item url="/">Option 4</Menu.Item>
</Menu.List>
</Menu>
}
control={
<Button
glyph="navigation-up-arrow"
option="transparent"
/>
}
placement="top"
/>
</div>
<div className="fd-col--2 fd-has-text-align-center">
<Popover
body={
<Menu>
<Menu.List>
<Menu.Item url="/">Option 1</Menu.Item>
<Menu.Item url="/">Option 2</Menu.Item>
<Menu.Item url="/">Option 3</Menu.Item>
<Menu.Item url="/">Option 4</Menu.Item>
</Menu.List>
</Menu>
}
control={
<Button
glyph="navigation-up-arrow"
option="transparent"
/>
}
placement="top-end"
/>
</div>
</div>
<div className="fd-container">
<div className="fd-col--shift-2 fd-col--2">
<Popover
body={
<Menu>
<Menu.List>
<Menu.Item url="/">Option 1</Menu.Item>
<Menu.Item url="/">Option 2</Menu.Item>
<Menu.Item url="/">Option 3</Menu.Item>
<Menu.Item url="/">Option 4</Menu.Item>
</Menu.List>
</Menu>
}
control={
<Button
glyph="navigation-left-arrow"
option="transparent"
/>
}
placement="left-start"
/>
</div>
<div className="fd-col--shift-4 fd-col--2 fd-has-text-align-right">
<Popover
body={
<Menu>
<Menu.List>
<Menu.Item url="/">Option 1</Menu.Item>
<Menu.Item url="/">Option 2</Menu.Item>
<Menu.Item url="/">Option 3</Menu.Item>
<Menu.Item url="/">Option 4</Menu.Item>
</Menu.List>
</Menu>
}
control={
<Button
glyph="navigation-right-arrow"
option="transparent"
/>
}
placement="right-start"
/>
</div>
</div>
<div className="fd-container">
<div className="fd-col--shift-2 fd-col--2">
<Popover
body={
<Menu>
<Menu.List>
<Menu.Item url="/">Option 1</Menu.Item>
<Menu.Item url="/">Option 2</Menu.Item>
<Menu.Item url="/">Option 3</Menu.Item>
<Menu.Item url="/">Option 4</Menu.Item>
</Menu.List>
</Menu>
}
control={
<Button
glyph="navigation-left-arrow"
option="transparent"
/>
}
placement="left"
/>
</div>
<div className="fd-col--shift-4 fd-col--2 fd-has-text-align-right">
<Popover
body={
<Menu>
<Menu.List>
<Menu.Item url="/">Option 1</Menu.Item>
<Menu.Item url="/">Option 2</Menu.Item>
<Menu.Item url="/">Option 3</Menu.Item>
<Menu.Item url="/">Option 4</Menu.Item>
</Menu.List>
</Menu>
}
control={
<Button
glyph="navigation-right-arrow"
option="transparent"
/>
}
placement="right"
/>
</div>
</div>
<div className="fd-container">
<div className="fd-col--shift-2 fd-col--2">
<Popover
body={
<Menu>
<Menu.List>
<Menu.Item url="/">Option 1</Menu.Item>
<Menu.Item url="/">Option 2</Menu.Item>
<Menu.Item url="/">Option 3</Menu.Item>
<Menu.Item url="/">Option 4</Menu.Item>
</Menu.List>
</Menu>
}
control={
<Button
glyph="navigation-left-arrow"
option="transparent"
/>
}
placement="left-end"
/>
</div>
<div className="fd-col--shift-4 fd-col--2 fd-has-text-align-right">
<Popover
body={
<Menu>
<Menu.List>
<Menu.Item url="/">Option 1</Menu.Item>
<Menu.Item url="/">Option 2</Menu.Item>
<Menu.Item url="/">Option 3</Menu.Item>
<Menu.Item url="/">Option 4</Menu.Item>
</Menu.List>
</Menu>
}
control={
<Button
glyph="navigation-right-arrow"
option="transparent"
/>
}
placement="right-end"
/>
</div>
</div>
<div className="fd-container">
<div className="fd-col--shift-3 fd-col--2 fd-has-text-align-center">
<Popover
body={
<Menu>
<Menu.List>
<Menu.Item url="/">Option 1</Menu.Item>
<Menu.Item url="/">Option 2</Menu.Item>
<Menu.Item url="/">Option 3</Menu.Item>
<Menu.Item url="/">Option 4</Menu.Item>
</Menu.List>
</Menu>
}
control={
<Button
glyph="navigation-down-arrow"
option="transparent"
/>
}
placement="bottom-start"
/>
</div>
<div className="fd-col--2 fd-has-text-align-center">
<Popover
body={
<Menu>
<Menu.List>
<Menu.Item url="/">Option 1</Menu.Item>
<Menu.Item url="/">Option 2</Menu.Item>
<Menu.Item url="/">Option 3</Menu.Item>
<Menu.Item url="/">Option 4</Menu.Item>
</Menu.List>
</Menu>
}
control={
<Button
glyph="navigation-down-arrow"
option="transparent"
/>
}
placement="bottom"
/>
</div>
<div className="fd-col--2 fd-has-text-align-center">
<Popover
body={
<Menu>
<Menu.List>
<Menu.Item url="/">Option 1</Menu.Item>
<Menu.Item url="/">Option 2</Menu.Item>
<Menu.Item url="/">Option 3</Menu.Item>
<Menu.Item url="/">Option 4</Menu.Item>
</Menu.List><|fim▁hole|> glyph="navigation-down-arrow"
option="transparent"
/>
}
placement="bottom-end"
/>
</div>
</div>
</div>
<div className="fd-doc__margin--popover">
<Popover
body={
<Menu>
<Menu.List>
<Menu.Item url="/">Option 1</Menu.Item>
<Menu.Item url="/">Option 2</Menu.Item>
<Menu.Item url="/">Option 3</Menu.Item>
<Menu.Item url="/">Option 4</Menu.Item>
</Menu.List>
</Menu>
}
control={<Icon glyph="cart" size="xl" />}
noArrow
placement="left"
/>
<Popover
body={
<Menu>
<Menu.List>
<Menu.Item url="/">Option 1</Menu.Item>
<Menu.Item url="/">Option 2</Menu.Item>
<Menu.Item url="/">Option 3</Menu.Item>
<Menu.Item url="/">Option 4</Menu.Item>
</Menu.List>
</Menu>
}
control={
<Image
photo="https://placeimg.com/400/400/nature"
size="m"
type="circle"
/>
}
noArrow
placement="top"
/>
<Popover
body={
<Menu>
<Menu.List>
<Menu.Item url="/">Option 1</Menu.Item>
<Menu.Item url="/">Option 2</Menu.Item>
<Menu.Item url="/">Option 3</Menu.Item>
<Menu.Item url="/">Option 4</Menu.Item>
</Menu.List>
</Menu>
}
control={<Icon glyph="menu2" size="xl" />}
noArrow
placement="bottom"
/>
<Popover
body={
<Menu>
<Menu.List>
<Menu.Item url="/">Option 1</Menu.Item>
<Menu.Item url="/">Option 2</Menu.Item>
<Menu.Item url="/">Option 3</Menu.Item>
<Menu.Item url="/">Option 4</Menu.Item>
</Menu.List>
</Menu>
}
control={<Icon glyph="menu2" size="xl" />}
noArrow
placement="right"
/>
</div>
<div>
<Button onClick={function w() {}}>Show Modal</Button>
<Dialog
actions={[]}
bodyProps={{
style: {
height: "200px",
overflowY: "auto",
textAlign: "center",
width: "400px"
}
}}
onClose={function w() {}}
title="Overflow Example"
>
<p>
Click the icon to show the popover and then scroll within
the modal body...
</p>
<br />
<br />
<Popover
body={
<Menu>
<Menu.List>
<Menu.Item url="/">Option 1</Menu.Item>
<Menu.Item url="/">Option 2</Menu.Item>
<Menu.Item url="/">Option 3</Menu.Item>
<Menu.Item url="/">Option 4</Menu.Item>
</Menu.List>
</Menu>
}
control={<Icon glyph="menu2" size="xl" />}
placement="bottom"
/>
<br />
<br />
<br />
<br />
<br />
<br />
<br />
<br />
<br />
<br />
<br />
<br />
<br />
<br />
</Dialog>
</div>
</div>
);
const searchInputs = (
<div>
<SearchInput
onEnter={function w() {}}
placeholder="Enter a fruit"
searchList={[
{
callback: function w() {},
text: "apple"
},
{
callback: function w() {},
text: "apricot"
},
{
callback: function w() {},
text: "banana"
},
{
callback: function w() {},
text: "blueberry"
},
{
callback: function w() {},
text: "blackberry"
},
{
callback: function w() {},
text: "calabash"
},
{
callback: function w() {},
text: "clementines"
},
{
callback: function w() {},
text: "kiwi"
},
{
callback: function w() {},
text: "orange"
}
]}
/>
<br />
<SearchInput
noSearchBtn
onChange={function w() {}}
placeholder="Enter a fruit"
/>
<br />
<SearchInput
compact
onEnter={function w() {}}
placeholder="Enter a fruit"
searchList={[
{
callback: function w() {},
text: "apple"
},
{
callback: function w() {},
text: "apricot"
},
{
callback: function w() {},
text: "banana"
},
{
callback: function w() {},
text: "blueberry"
},
{
callback: function w() {},
text: "blackberry"
},
{
callback: function w() {},
text: "calabash"
},
{
callback: function w() {},
text: "clementines"
},
{
callback: function w() {},
text: "kiwi"
},
{
callback: function w() {},
text: "orange"
}
]}
/>
</div>
);
const selects = (
<div>
<Select placeholder='Select' options={[
{text: "List Item 1", key: "1"},
{text: "List Item 2", key: "2"}
]} selectedKey={"2"}>
</Select>
<Select compact validationState={{state: 'warning', text: 'Validated'}}>
</Select>
</div>
);
const shellbars = (
<div>
<Shellbar
logo={
<img
alt="SAP"
src="//unpkg.com/fiori-fundamentals/dist/images/sap-logo.png"
/>
}
productTitle="Corporate Portal"
profile={{
colorAccent: 8,
initials: "JS",
userName: "John Snow"
}}
profileMenu={[
{
callback: function w() {},
glyph: "action-settings",
name: "Settings",
size: "s"
},
{
callback: function w() {},
glyph: "log",
name: "Sign Out",
size: "s"
}
]}
/>
<Shellbar
backAction={function w() {}}
logo={
<img
alt="SAP"
src="//unpkg.com/fiori-fundamentals/dist/images/sap-logo.png"
/>
}
productTitle="Corporate Portal"
profile={{
colorAccent: 8,
initials: "JS",
userName: "John Snow"
}}
profileMenu={[
{
callback: function w() {},
glyph: "action-settings",
name: "Settings",
size: "s"
},
{
callback: function w() {},
glyph: "log",
name: "Sign Out",
size: "s"
}
]}
/>
<Shellbar
logoSAP
notifications={{
callback: function w() {},
label: "Notifications",
notificationCount: 2
}}
productMenu={[
{
callback: function w() {},
name: "Application A"
},
{
callback: function w() {},
name: "Application B"
},
{
callback: function w() {},
name: "Application C"
},
{
callback: function w() {},
name: "Application D"
}
]}
productTitle="Corporate Portal"
profile={{
image:
"/fundamental-react/static/media/headshot-male.10d4e22e.jpg",
userName: "John Snow"
}}
profileMenu={[
{
callback: function w() {},
glyph: "action-settings",
name: "Settings",
size: "s"
},
{
callback: function w() {},
glyph: "log",
name: "Sign Out",
size: "s"
}
]}
searchInput={{
callback: function w() {},
label: "Search",
onSearch: function w() {},
placeholder: "Enter a fruit"
}}
subtitle="Subtitle"
/>
<Shellbar
actions={[
{
callback: function w() {},
glyph: "settings",
label: "Settings",
menu: (
<Menu>
<Menu.List>
<Menu.Item url="/">Option 1</Menu.Item>
<Menu.Item url="/">Option 2</Menu.Item>
<Menu.Item url="/">Option 3</Menu.Item>
</Menu.List>
</Menu>
),
notificationCount: 5
}
]}
copilot
logoSAP
notifications={{
callback: function w() {},
label: "Notifications",
noNotificationsBody: (
<Menu>
<Menu.List>
<Menu.Item>There are no notifications</Menu.Item>
</Menu.List>
</Menu>
),
notificationCount: 2,
notificationsBody: (
<Menu>
<Menu.List>
<Menu.Item url="/">Notification 1</Menu.Item>
<Menu.Item url="/">Notification 2</Menu.Item>
<Menu.Item url="/">Notification 3</Menu.Item>
</Menu.List>
</Menu>
)
}}
productMenu={[
{
callback: function w() {},
name: "Application A"
},
{
callback: function w() {},
name: "Application B"
},
{
callback: function w() {},
name: "Application C"
},
{
callback: function w() {},
name: "Application D"
}
]}
productSwitch={{
label: "Product Switcher"
}}
productSwitchList={[
{
callback: function w() {},
glyph: "home",
image:
"data:image/gif;base64,R0lGODlhAQABAIAAAP///////yH5BAEKAAEALAAAAAABAAEAAAICTAEAOw==",
title: "Fiori Home"
},
{
callback: function w() {},
glyph: "cloud",
image:
"data:image/gif;base64,R0lGODlhAQABAIAAAP///////yH5BAEKAAEALAAAAAABAAEAAAICTAEAOw==",
title: "S/4 HANA Cloud"
},
{
callback: function w() {},
glyph: "business-objects-experience",
image:
"data:image/gif;base64,R0lGODlhAQABAIAAAP///////yH5BAEKAAEALAAAAAABAAEAAAICTAEAOw==",
title: "Analytics Cloud"
},
{
callback: function w() {},
glyph: "activate",
image:
"data:image/gif;base64,R0lGODlhAQABAIAAAP///////yH5BAEKAAEALAAAAAABAAEAAAICTAEAOw==",
title: "Ariba"
},
{
callback: function w() {},
glyph: "message-success",
image:
"data:image/gif;base64,R0lGODlhAQABAIAAAP///////yH5BAEKAAEALAAAAAABAAEAAAICTAEAOw==",
title: "SuccessFactors"
},
{
callback: function w() {},
glyph: "retail-store",
image:
"data:image/gif;base64,R0lGODlhAQABAIAAAP///////yH5BAEKAAEALAAAAAABAAEAAAICTAEAOw==",
title: "Commerce Cloud"
},
{
callback: function w() {},
glyph: "customer-view",
image:
"data:image/gif;base64,R0lGODlhAQABAIAAAP///////yH5BAEKAAEALAAAAAABAAEAAAICTAEAOw==",
title: "Gigya"
},
{
callback: function w() {},
glyph: "globe",
image:
"data:image/gif;base64,R0lGODlhAQABAIAAAP///////yH5BAEKAAEALAAAAAABAAEAAAICTAEAOw==",
title: "Callidus Cloud"
},
{
callback: function w() {},
glyph: "work-history",
image:
"data:image/gif;base64,R0lGODlhAQABAIAAAP///////yH5BAEKAAEALAAAAAABAAEAAAICTAEAOw==",
title: "Fieldglass"
},
{
callback: function w() {},
glyph: "area-chart",
image:
"data:image/gif;base64,R0lGODlhAQABAIAAAP///////yH5BAEKAAEALAAAAAABAAEAAAICTAEAOw==",
title: "Concur"
},
{
callback: function w() {},
glyph: "customer-view",
image:
"data:image/gif;base64,R0lGODlhAQABAIAAAP///////yH5BAEKAAEALAAAAAABAAEAAAICTAEAOw==",
title: "Cloud for Customer"
},
{
callback: function w() {},
glyph: "customer",
image:
"data:image/gif;base64,R0lGODlhAQABAIAAAP///////yH5BAEKAAEALAAAAAABAAEAAAICTAEAOw==",
title: "Cloud Portal"
}
]}
productTitle="Corporate Portal"
profile={{
image:
"/fundamental-react/static/media/headshot-male.10d4e22e.jpg",
userName: "John Snow"
}}
profileMenu={[
{
callback: function w() {},
glyph: "action-settings",
name: "Settings",
size: "s"
},
{
callback: function w() {},
glyph: "log",
name: "Sign Out",
size: "s"
}
]}
searchInput={{
callback: function w() {},
label: "Search",
onSearch: function w() {},
placeholder: "Enter a fruit",
searchList: [
{
callback: function w() {},
text: "apple"
},
{
callback: function w() {},
text: "apricot"
},
{
callback: function w() {},
text: "acai"
},
{
callback: function w() {},
text: "banana"
},
{
callback: function w() {},
text: "berry"
},
{
callback: function w() {},
text: "blueberry"
},
{
callback: function w() {},
text: "blackberry"
},
{
callback: function w() {},
text: "cranberry"
},
{
callback: function w() {},
text: "conkerberry"
},
{
callback: function w() {},
text: "calabash"
},
{
callback: function w() {},
text: "clementines"
},
{
callback: function w() {},
text: "kiwi"
},
{
callback: function w() {},
text: "orange"
}
]
}}
subtitle="Subtitle"
/>
</div>
);
const sideNavs = (
<div>
<SideNav selectedId="item-2">
<SideNav.List>
<SideNav.ListItem id="item-1" name="Link Item" url="#" />
<SideNav.ListItem id="item-2" name="Link Item" url="#" />
<SideNav.ListItem id="item-3" name="Link Item" url="#" />
<SideNav.ListItem id="item-4" name="Link Item" url="#" />
<SideNav.ListItem id="item-5" name="Link Item" url="#" />
</SideNav.List>
</SideNav>
<SideNav selectedId="item_2">
<SideNav.List title="Group Title">
<SideNav.ListItem id="item_1" name="Link Item" url="#" />
<SideNav.ListItem id="item_2">
<a href="#">Link Item</a>
</SideNav.ListItem>
<SideNav.ListItem id="item_3">
<a href="#">Link Item</a>
</SideNav.ListItem>
<SideNav.ListItem id="item_4">
<a href="#">Link Item</a>
</SideNav.ListItem>
<SideNav.ListItem id="item_5">
<a href="#">Link Item</a>
</SideNav.ListItem>
</SideNav.List>
<SideNav.List title="Group Title">
<SideNav.ListItem id="item_6">
<a href="#">Link Item</a>
</SideNav.ListItem>
<SideNav.ListItem id="item_7">
<a href="#">Link Item</a>
</SideNav.ListItem>
<SideNav.ListItem id="item_8">
<a href="#">Link Item</a>
</SideNav.ListItem>
<SideNav.ListItem id="item_9">
<a href="#">Link Item</a>
</SideNav.ListItem>
<SideNav.ListItem id="item_10">
<a href="#">Link Item</a>
</SideNav.ListItem>
</SideNav.List>
</SideNav>
<SideNav selectedId="item-2">
<SideNav.List>
<SideNav.ListItem id="item-1" name="Link Item 1" url="#" />
<SideNav.ListItem id="item-2" name="Link Item 2" url="#">
<SideNav.List>
<SideNav.ListItem
id="subitem_21"
name="Item 1"
url="#"
/>
<SideNav.ListItem
id="subitem_22"
name="Item 2"
url="#"
/>
<SideNav.ListItem
id="subitem_23"
name="Item 3"
url="#"
/>
<SideNav.ListItem
id="subitem_24"
name="Item 4"
url="#"
/>
</SideNav.List>
</SideNav.ListItem>
<SideNav.ListItem id="item_3" name="Link Item 3" url="#" />
<SideNav.ListItem id="item_4" name="Link Item 4" url="#">
<SideNav.List>
<SideNav.ListItem
id="subitem_41"
name="Item 1"
url="#"
/>
<SideNav.ListItem
id="subitem_42"
name="Item 2"
url="#"
/>
<SideNav.ListItem
id="subitem_43"
name="Item 3"
url="#"
/>
<SideNav.ListItem
id="subitem_44"
name="Item 4"
url="#"
/>
</SideNav.List>
</SideNav.ListItem>
<SideNav.ListItem id="item_5" name="Link Item 5" url="#" />
</SideNav.List>
</SideNav>
<SideNav selectedId="item-2">
<SideNav.List>
<SideNav.ListItem glyph="home" id="item-1">
<a href="#">Link Item</a>
</SideNav.ListItem>
<SideNav.ListItem glyph="home" id="item-2">
<a href="#">Link Item</a>
</SideNav.ListItem>
<SideNav.ListItem glyph="home" id="item-3">
<a href="#">Link Item</a>
</SideNav.ListItem>
<SideNav.ListItem
glyph="home"
id="item-4"
name="Link Item"
url="#"
/>
<SideNav.ListItem
glyph="home"
id="item-5"
name="Link Item"
url="#"
/>
</SideNav.List>
</SideNav>
<SideNav selectedId="item-2">
<SideNav.List>
<SideNav.ListItem glyph="home" id="item-1" url="#" />
<SideNav.ListItem glyph="home" id="item-2" url="#" />
<SideNav.ListItem glyph="home" id="item-3" url="#" />
<SideNav.ListItem glyph="home" id="item-4" url="#" />
<SideNav.ListItem glyph="home" id="item-5" url="#" />
</SideNav.List>
</SideNav>
</div>
);
const stepInputs = (
<div>
<StepInput disabled value={10} />
<StepInput readOnly value={10} />
<StepInput
placeholder='Error'
validationState={{
state: 'error',
text: 'Test validation state'
}} />
</div>
);
const tables = (
<div>
<Table
headers={[
"Column Header 1",
"Column Header 2",
"Column Header 3",
"Column Header 4"
]}
tableData={[
{
rowData: ["Data 1", "Data 2", "Data 3", "Data 4"]
},
{
rowData: ["Data 5", "Data 6", "Data 7", "Data 8"]
}
]}
/>
<Table
headers={[
<input type="checkbox" />,
"Avatar",
"email",
"First Name",
"Last Name",
"Date",
" "
]}
tableData={[
{
rowData: [
<input type="checkbox" />,
<Image
photo="https://robohash.org/green?size=50x50"
size="m"
/>,
<a className="fd-has-font-weight-semi" href="#">
[email protected]
</a>,
"First Name",
"Last Name",
"01/26/17",
<Popover
body={
<Menu>
<Menu.List>
<Menu.Item url="/">Option 1</Menu.Item>
<Menu.Item url="/">Option 2</Menu.Item>
<Menu.Item url="/">Option 3</Menu.Item>
<Menu.Item url="/">Option 4</Menu.Item>
</Menu.List>
</Menu>
}
control={
<Button glyph="vertical-grip" option="transparent" />
}
placement="bottom-end"
/>
]
},
{
rowData: [
<input type="checkbox" />,
<Image
photo="https://robohash.org/brown?size=50x50"
size="m"
/>,
<a className="fd-has-font-weight-semi" href="#">
[email protected]
</a>,
"First Name",
"Last Name",
"07/29/18",
<Popover
body={
<Menu>
<Menu.List>
<Menu.Item url="/">Option 1</Menu.Item>
<Menu.Item url="/">Option 2</Menu.Item>
<Menu.Item url="/">Option 3</Menu.Item>
<Menu.Item url="/">Option 4</Menu.Item>
</Menu.List>
</Menu>
}
control={
<Button glyph="vertical-grip" option="transparent" />
}
placement="bottom-end"
/>
]
},
{
rowData: [
<input type="checkbox" />,
<Image
photo="https://robohash.org/Q27.png?set=set1&size=50x50"
size="m"
/>,
<a className="fd-has-font-weight-semi" href="#">
[email protected]
</a>,
"First Name",
"Last Name",
"05/26/18",
<Popover
body={
<Menu>
<Menu.List>
<Menu.Item url="/">Option 1</Menu.Item>
<Menu.Item url="/">Option 2</Menu.Item>
<Menu.Item url="/">Option 3</Menu.Item>
<Menu.Item url="/">Option 4</Menu.Item>
</Menu.List>
</Menu>
}
control={
<Button glyph="vertical-grip" option="transparent" />
}
placement="bottom-end"
/>
]
},
{
rowData: [
<input type="checkbox" />,
<Image
photo="https://robohash.org/water?&size=50x50"
size="m"
/>,
<a className="fd-has-font-weight-semi" href="#">
[email protected]
</a>,
"First Name",
"Last Name",
"01/26/14",
<Popover
body={
<Menu>
<Menu.List>
<Menu.Item url="/">Option 1</Menu.Item>
<Menu.Item url="/">Option 2</Menu.Item>
<Menu.Item url="/">Option 3</Menu.Item>
<Menu.Item url="/">Option 4</Menu.Item>
</Menu.List>
</Menu>
}
control={
<Button glyph="vertical-grip" option="transparent" />
}
placement="bottom-end"
/>
]
}
]}
/>
</div>
);
const tabs = (
<div>
<TabGroup>
<Tab id="1" title="Tab 1">
Lorem ipsum dolor sit amet consectetur adipisicing elit.
</Tab>
<Tab id="2" title="Tab 2">
Numquam libero id corporis odit animi voluptat, Lorem ipsum
dolor sit amet consectetur adipisicing elit. Possimus quia
tempore eligendi tempora repellat officia rerum laudantium,
veritatis officiis asperiores ipsum nam, distinctio, dolor
provident culpa voluptatibus esse deserunt animi?
</Tab>
<Tab disabled id="3" title="Tab 3">
Lorem ipsum dolor sit amet consectetur adipisicing elit.
</Tab>
<Tab glyph="cart" id="4">
Please review your shopping chart.
</Tab>
</TabGroup>
</div>
);
const tiles = (
<div>
<Tile>
<Tile.Content title="Tile Title">
<p>Tile Description</p>
</Tile.Content>
</Tile>
<Tile>
<Tile.Media>
<Image photo="https://placeimg.com/400/400/nature" size="m" />
</Tile.Media>
<Tile.Content title="Tile Title" />
</Tile>
<br />
<Tile role="button">
<Tile.Media>
<Image
photo="https://placeimg.com/400/400/nature"
size="l"
type="circle"
/>
</Tile.Media>
<Tile.Content title="Tile Title">
<p>Tile Description</p>
</Tile.Content>
</Tile>
<br />
<Tile role="button">
<Tile.Content title="Tile Title">
<p>Tile Description</p>
</Tile.Content>
</Tile>
<Tile>
<Tile.Content title="Tile Title" />
<Tile.Actions>
<Popover
body={
<Menu>
<Menu.List>
<Menu.Item url="/">Option 1</Menu.Item>
<Menu.Item url="/">Option 2</Menu.Item>
<Menu.Item url="/">Option 3</Menu.Item>
<Menu.Item url="/">Option 4</Menu.Item>
</Menu.List>
</Menu>
}
control={<Button glyph="vertical-grip" option="transparent" />}
placement="bottom-end"
/>
</Tile.Actions>
</Tile>
<Tile product role="button">
<Tile.Media image="https://techne.yaas.io/images/product-thumbnail-wide.png" />
<Tile.Content title="Tile Title">
<p>Tile Description</p>
</Tile.Content>
</Tile>
<br />
<Tile product disabled>
<Tile.Media image="https://techne.yaas.io/images/product-thumbnail-wide.png" />
<Tile.Content title="Tile Title">
<p>Tile Description</p>
</Tile.Content>
</Tile>
<LayoutGrid cols={4}>
<Tile colorAccent={7} rowSpan={2}>
<Tile.Content title="Tile Title">
<p>Tile Description</p>
</Tile.Content>
</Tile>
<Tile>
<Tile.Media>
<Image
photo="https://placeimg.com/400/400/nature"
size="l"
type="circle"
/>
</Tile.Media>
<Tile.Content title="Tile Title">
<p>Tile Description</p>
</Tile.Content>
</Tile>
<Tile>
<Tile.Content title="Tile Title">
<p>Tile Description</p>
</Tile.Content>
</Tile>
<Tile role="button">
<Tile.Content title="Tile Title" />
</Tile>
<Tile>
<Tile.Content title="Tile Title">
<p>Tile Description</p>
</Tile.Content>
</Tile>
<Tile colorAccent={4} columnSpan={2}>
<Tile.Content title="Tile Title">
<p>Tile Description</p>
</Tile.Content>
</Tile>
</LayoutGrid>
</div>
);
const times = (
<div>
<Time />
<Time format12Hours />
<Time spinners={false} />
<Time showSecond={false} />
<Time disabled />
</div>
);
const timePickers = (
<div>
<TimePicker />
<TimePicker format12Hours />
<TimePicker showSecond={false} />
<TimePicker disabled />
</div>
);
const title = (
<div>
<Title level={1}>Fundamental React Title 1</Title>
<Title level={2}>Fundamental React Title 2</Title>
<Title level={3}>Fundamental React Title 3</Title>
<Title level={4}>Fundamental React Title 4</Title>
<Title level={5}>Fundamental React Title 5</Title>
<Title level={6}>Fundamental React Title 6</Title>
</div>
);
const switches = (
<div>
<Switch>
switch
</Switch>
</div>
);
const tokens = (
<div>
<Token onClick={function w() {}}>Bibendum</Token>
<Token onClick={function w() {}}>Lorem</Token>
<Token onClick={function w() {}}>Dolor</Token>
<Token onClick={function w() {}}>Filter</Token>
</div>
);
const treeViews = (
<div>
<TreeView>
<TreeView.Tree>
<TreeView.Item>
<TreeView.Row>
<TreeView.Col>Row 1</TreeView.Col>
</TreeView.Row>
</TreeView.Item>
<TreeView.Item>
<TreeView.Row>
<TreeView.Col>Row 2</TreeView.Col>
</TreeView.Row>
<TreeView.Branch>
<TreeView.Item>
<TreeView.Row>
<TreeView.Col>Child 1</TreeView.Col>
</TreeView.Row>
</TreeView.Item>
<TreeView.Item>
<TreeView.Row>
<TreeView.Col>Child 2</TreeView.Col>
</TreeView.Row>
</TreeView.Item>
</TreeView.Branch>
</TreeView.Item>
<TreeView.Item>
<TreeView.Row>
<TreeView.Col>Row 3</TreeView.Col>
</TreeView.Row>
<TreeView.Branch>
<TreeView.Item>
<TreeView.Row>
<TreeView.Col>Child 1</TreeView.Col>
</TreeView.Row>
<TreeView.Branch>
<TreeView.Item>
<TreeView.Row>
<TreeView.Col>
Grandchild 1
</TreeView.Col>
</TreeView.Row>
</TreeView.Item>
<TreeView.Item>
<TreeView.Row>
<TreeView.Col>
Grandchild 2
</TreeView.Col>
</TreeView.Row>
</TreeView.Item>
</TreeView.Branch>
</TreeView.Item>
</TreeView.Branch>
</TreeView.Item>
</TreeView.Tree>
</TreeView>
<TreeView>
<TreeView.Head>
<TreeView.Col>Column Header 1</TreeView.Col>
<TreeView.Col>Column Header 2</TreeView.Col>
<TreeView.Col>Column Header 3</TreeView.Col>
<TreeView.Col>Column Header 4</TreeView.Col>
</TreeView.Head>
<TreeView.Tree>
<TreeView.Item>
<TreeView.Row>
<TreeView.Col>Row 1</TreeView.Col>
<TreeView.Col>Data Col 2</TreeView.Col>
<TreeView.Col>Data Col 3</TreeView.Col>
<TreeView.Col>Data Col 4</TreeView.Col>
</TreeView.Row>
<TreeView.Branch>
<TreeView.Item>
<TreeView.Row>
<TreeView.Col>Child 1</TreeView.Col>
<TreeView.Col>Data Col 2</TreeView.Col>
<TreeView.Col>Data Col 3</TreeView.Col>
<TreeView.Col>Data Col 4</TreeView.Col>
</TreeView.Row>
<TreeView.Branch>
<TreeView.Item>
<TreeView.Row>
<TreeView.Col>
Grandchild 1
</TreeView.Col>
<TreeView.Col>Data Col 2</TreeView.Col>
<TreeView.Col>Data Col 3</TreeView.Col>
<TreeView.Col>Data Col 4</TreeView.Col>
</TreeView.Row>
<TreeView.Branch>
<TreeView.Item>
<TreeView.Row>
<TreeView.Col>
Great Grandchild 1
</TreeView.Col>
<TreeView.Col>
Data Col 2
</TreeView.Col>
<TreeView.Col>
Data Col 3
</TreeView.Col>
<TreeView.Col>
Data Col 4
</TreeView.Col>
</TreeView.Row>
</TreeView.Item>
</TreeView.Branch>
</TreeView.Item>
</TreeView.Branch>
</TreeView.Item>
<TreeView.Item>
<TreeView.Row>
<TreeView.Col>Child 2</TreeView.Col>
<TreeView.Col>Data Col 2</TreeView.Col>
<TreeView.Col>Data Col 3</TreeView.Col>
<TreeView.Col>Data Col 4</TreeView.Col>
</TreeView.Row>
</TreeView.Item>
</TreeView.Branch>
</TreeView.Item>
<TreeView.Item>
<TreeView.Row>
<TreeView.Col>Row 2</TreeView.Col>
<TreeView.Col>Data Col 2</TreeView.Col>
<TreeView.Col>Data Col 3</TreeView.Col>
<TreeView.Col>Data Col 4</TreeView.Col>
</TreeView.Row>
<TreeView.Branch>
<TreeView.Item>
<TreeView.Row>
<TreeView.Col>Child 1</TreeView.Col>
<TreeView.Col>Data Col 2</TreeView.Col>
<TreeView.Col>Data Col 3</TreeView.Col>
<TreeView.Col>Data Col 4</TreeView.Col>
</TreeView.Row>
</TreeView.Item>
<TreeView.Item>
<TreeView.Row>
<TreeView.Col>Child 2</TreeView.Col>
<TreeView.Col>Data Col 2</TreeView.Col>
<TreeView.Col>Data Col 3</TreeView.Col>
<TreeView.Col>Data Col 4</TreeView.Col>
</TreeView.Row>
</TreeView.Item>
</TreeView.Branch>
</TreeView.Item>
<TreeView.Item>
<TreeView.Row>
<TreeView.Col>Row 3</TreeView.Col>
<TreeView.Col>Data Col 2</TreeView.Col>
<TreeView.Col>Data Col 3</TreeView.Col>
<TreeView.Col>Data Col 4</TreeView.Col>
</TreeView.Row>
</TreeView.Item>
<TreeView.Item>
<TreeView.Row>
<TreeView.Col>Row 4</TreeView.Col>
<TreeView.Col>Data Col 2</TreeView.Col>
<TreeView.Col>Data Col 3</TreeView.Col>
<TreeView.Col>Data Col 4</TreeView.Col>
</TreeView.Row>
<TreeView.Branch>
<TreeView.Item>
<TreeView.Row>
<TreeView.Col>Child 1</TreeView.Col>
<TreeView.Col>Data Col 2</TreeView.Col>
<TreeView.Col>Data Col 3</TreeView.Col>
<TreeView.Col>Data Col 4</TreeView.Col>
</TreeView.Row>
</TreeView.Item>
</TreeView.Branch>
</TreeView.Item>
</TreeView.Tree>
</TreeView>
<TreeView>
<TreeView.Head>
<TreeView.Col>Column Header 1</TreeView.Col>
<TreeView.Col>Column Header 2</TreeView.Col>
<TreeView.Col>Column Header 3</TreeView.Col>
<TreeView.Col>Column Header 4</TreeView.Col>
</TreeView.Head>
<TreeView.Tree>
<TreeView.Item>
<TreeView.Row>
<TreeView.Col>Row 1</TreeView.Col>
</TreeView.Row>
<TreeView.Branch>
<TreeView.Item>
<TreeView.Row>
<TreeView.Col />
<TreeView.Col>
<a href="http://www.google.com">Google</a>
</TreeView.Col>
<TreeView.Col>
<a href="http://www.bing.com">Bing</a>
</TreeView.Col>
<TreeView.Col>
<a href="http://www.yahoo.com">Yahoo</a>
</TreeView.Col>
</TreeView.Row>
</TreeView.Item>
</TreeView.Branch>
</TreeView.Item>
</TreeView.Tree>
</TreeView>
</div>
);<|fim▁end|> | </Menu>
}
control={
<Button |
<|file_name|>table_wrapper.rs<|end_file_name|><|fim▁begin|>/* This Source Code Form is subject to the terms of the Mozilla Public
* License, v. 2.0. If a copy of the MPL was not distributed with this
* file, You can obtain one at http://mozilla.org/MPL/2.0/. */
//! CSS tables.
//!
//! This follows the "More Precise Definitions of Inline Layout and Table Layout" proposal written
//! by L. David Baron (Mozilla) here:
//!
//! http://dbaron.org/css/intrinsic/
//!
//! Hereafter this document is referred to as INTRINSIC.
use app_units::Au;
use block::{AbsoluteNonReplaced, BlockFlow, FloatNonReplaced, ISizeAndMarginsComputer, ISizeConstraintInput};
use block::{ISizeConstraintSolution, MarginsMayCollapseFlag};
use context::LayoutContext;
use display_list::{BlockFlowDisplayListBuilding, DisplayListBuildState, StackingContextCollectionFlags};
use display_list::StackingContextCollectionState;
use euclid::Point2D;
use floats::FloatKind;
use flow::{Flow, FlowClass, ImmutableFlowUtils, FlowFlags, OpaqueFlow};
use fragment::{Fragment, FragmentBorderBoxIterator, Overflow};
use gfx_traits::print_tree::PrintTree;
use model::MaybeAuto;
use std::cmp::{max, min};
use std::fmt;
use std::ops::Add;
use style::computed_values::{position, table_layout};
use style::context::SharedStyleContext;
use style::logical_geometry::{LogicalRect, LogicalSize};
use style::properties::ComputedValues;
use style::values::CSSFloat;
use style::values::computed::LengthOrPercentageOrAuto;
use table::{ColumnComputedInlineSize, ColumnIntrinsicInlineSize};
#[derive(Clone, Copy, Debug, Serialize)]
pub enum TableLayout {
Fixed,
Auto,
}
#[allow(unsafe_code)]
unsafe impl ::flow::HasBaseFlow for TableWrapperFlow {}
/// A table wrapper flow based on a block formatting context.
#[derive(Serialize)]
#[repr(C)]
pub struct TableWrapperFlow {
pub block_flow: BlockFlow,
/// Intrinsic column inline sizes according to INTRINSIC § 4.1
pub column_intrinsic_inline_sizes: Vec<ColumnIntrinsicInlineSize>,
/// Table-layout property
pub table_layout: TableLayout,
}
impl TableWrapperFlow {
pub fn from_fragment(fragment: Fragment) -> TableWrapperFlow {
TableWrapperFlow::from_fragment_and_float_kind(fragment, None)
}
pub fn from_fragment_and_float_kind(
fragment: Fragment,
float_kind: Option<FloatKind>,
) -> TableWrapperFlow {
let mut block_flow = BlockFlow::from_fragment_and_float_kind(fragment, float_kind);
let table_layout =
if block_flow.fragment().style().get_table().table_layout == table_layout::T::Fixed {
TableLayout::Fixed
} else {
TableLayout::Auto
};
TableWrapperFlow {
block_flow: block_flow,
column_intrinsic_inline_sizes: vec![],
table_layout: table_layout,
}
}
fn border_padding_and_spacing(&mut self) -> (Au, Au) {
let (mut table_border_padding, mut spacing) = (Au(0), Au(0));
for kid in self.block_flow.base.child_iter_mut() {
if kid.is_table() {
let kid_table = kid.as_table();
spacing = kid_table.total_horizontal_spacing();
table_border_padding = kid_table
.block_flow
.fragment
.border_padding
.inline_start_end();
break;
}
}
(table_border_padding, spacing)
}
// Instructs our first child, which is the table itself, to compute its border and padding.
//
// This is a little weird because we're computing border/padding/margins for our child,
// when normally the child computes it itself. But it has to be this way because the
// padding will affect where we place the child. This is an odd artifact of the way that
// tables are separated into table flows and table wrapper flows.
fn compute_border_and_padding_of_table(&mut self) {
let available_inline_size = self.block_flow.base.block_container_inline_size;
for kid in self.block_flow.base.child_iter_mut() {
if !kid.is_table() {
continue;
}
let kid_table = kid.as_mut_table();
let kid_block_flow = &mut kid_table.block_flow;
kid_block_flow
.fragment
.compute_border_and_padding(available_inline_size);
kid_block_flow
.fragment
.compute_block_direction_margins(available_inline_size);
kid_block_flow
.fragment
.compute_inline_direction_margins(available_inline_size);
return;
}
}
/// Calculates table column sizes for automatic layout per INTRINSIC § 4.3.
fn calculate_table_column_sizes_for_automatic_layout(
&mut self,
intermediate_column_inline_sizes: &mut [IntermediateColumnInlineSize],
) {
let available_inline_size = self.available_inline_size();
// Compute all the guesses for the column sizes, and sum them.
let mut total_guess = AutoLayoutCandidateGuess::new();
let guesses: Vec<AutoLayoutCandidateGuess> = self
.column_intrinsic_inline_sizes
.iter()
.map(|column_intrinsic_inline_size| {
let guess = AutoLayoutCandidateGuess::from_column_intrinsic_inline_size(
column_intrinsic_inline_size,
available_inline_size,
);
total_guess = &total_guess + &guess;
guess
}).collect();
// Assign inline sizes.
let selection =
SelectedAutoLayoutCandidateGuess::select(&total_guess, available_inline_size);
let mut total_used_inline_size = Au(0);
for (intermediate_column_inline_size, guess) in intermediate_column_inline_sizes
.iter_mut()
.zip(guesses.iter())
{
intermediate_column_inline_size.size = guess.calculate(selection);
intermediate_column_inline_size.percentage = 0.0;
total_used_inline_size = total_used_inline_size + intermediate_column_inline_size.size
}
// Distribute excess inline-size if necessary per INTRINSIC § 4.4.
//
// FIXME(pcwalton, spec): How do I deal with fractional excess?
let excess_inline_size = available_inline_size - total_used_inline_size;
if excess_inline_size > Au(0) &&
selection ==
SelectedAutoLayoutCandidateGuess::UsePreferredGuessAndDistributeExcessInlineSize
{
let mut info = ExcessInlineSizeDistributionInfo::new();
for column_intrinsic_inline_size in &self.column_intrinsic_inline_sizes {
info.update(column_intrinsic_inline_size)
}
let mut total_distributed_excess_size = Au(0);
for (intermediate_column_inline_size, column_intrinsic_inline_size) in
intermediate_column_inline_sizes
.iter_mut()
.zip(self.column_intrinsic_inline_sizes.iter())
{
info.distribute_excess_inline_size_to_column(
intermediate_column_inline_size,
column_intrinsic_inline_size,
excess_inline_size,
&mut total_distributed_excess_size,
)
}
total_used_inline_size = available_inline_size
}
self.set_inline_size(total_used_inline_size)
}
fn available_inline_size(&mut self) -> Au {
let available_inline_size = self.block_flow.fragment.border_box.size.inline;
let (table_border_padding, spacing) = self.border_padding_and_spacing();
// FIXME(pcwalton, spec): INTRINSIC § 8 does not properly define how to compute this, but
// says "the basic idea is the same as the shrink-to-fit width that CSS2.1 defines". So we
// just use the shrink-to-fit inline size.
let available_inline_size = match self.block_flow.fragment.style().content_inline_size() {
LengthOrPercentageOrAuto::Auto => {
self.block_flow
.get_shrink_to_fit_inline_size(available_inline_size) -
table_border_padding
},
// FIXME(mttr): This fixes #4421 without breaking our current reftests, but I'm not
// completely sure this is "correct".
//
// That said, `available_inline_size` is, as far as I can tell, equal to the table's
// computed width property (W) and is used from this point forward in a way that seems
// to correspond with CSS 2.1 § 17.5.2.2 under "Column and caption widths influence the
// final table width as follows: …"
_ => available_inline_size,
};
available_inline_size - spacing
}
fn set_inline_size(&mut self, total_used_inline_size: Au) {
let (table_border_padding, spacing) = self.border_padding_and_spacing();
self.block_flow.fragment.border_box.size.inline =
total_used_inline_size + table_border_padding + spacing;
self.block_flow.base.position.size.inline = total_used_inline_size +
table_border_padding +
spacing +
self.block_flow.fragment.margin.inline_start_end();
let writing_mode = self.block_flow.base.writing_mode;
let container_mode = self.block_flow.base.block_container_writing_mode;
if writing_mode.is_bidi_ltr() != container_mode.is_bidi_ltr() {
// If our "start" direction is different from our parent flow, then `border_box.start.i`
// depends on `border_box.size.inline`.
self.block_flow.fragment.border_box.start.i =
self.block_flow.base.block_container_inline_size -
self.block_flow.fragment.margin.inline_end -
self.block_flow.fragment.border_box.size.inline;
}
}
fn compute_used_inline_size(
&mut self,
shared_context: &SharedStyleContext,
parent_flow_inline_size: Au,
intermediate_column_inline_sizes: &[IntermediateColumnInlineSize],
) {
let (border_padding, spacing) = self.border_padding_and_spacing();
let minimum_width_of_all_columns = intermediate_column_inline_sizes.iter().fold(
border_padding + spacing,
|accumulator, intermediate_column_inline_sizes| {
accumulator + intermediate_column_inline_sizes.size
},
);
let preferred_width_of_all_columns = self.column_intrinsic_inline_sizes.iter().fold(
border_padding + spacing,
|accumulator, column_intrinsic_inline_sizes| {
accumulator + column_intrinsic_inline_sizes.preferred
},
);
// Delegate to the appropriate inline size computer to find the constraint inputs and write
// the constraint solutions in.
if self.block_flow.base.flags.is_float() {
let inline_size_computer = FloatedTable {
minimum_width_of_all_columns: minimum_width_of_all_columns,
preferred_width_of_all_columns: preferred_width_of_all_columns,
table_border_padding: border_padding,
};
let input = inline_size_computer.compute_inline_size_constraint_inputs(
&mut self.block_flow,
parent_flow_inline_size,
shared_context,
);
let solution =
inline_size_computer.solve_inline_size_constraints(&mut self.block_flow, &input);
inline_size_computer
.set_inline_size_constraint_solutions(&mut self.block_flow, solution);
inline_size_computer
.set_inline_position_of_flow_if_necessary(&mut self.block_flow, solution);
return;
}
if !self
.block_flow
.base
.flags
.contains(FlowFlags::INLINE_POSITION_IS_STATIC)
{
let inline_size_computer = AbsoluteTable {
minimum_width_of_all_columns: minimum_width_of_all_columns,
preferred_width_of_all_columns: preferred_width_of_all_columns,
table_border_padding: border_padding,
};
let input = inline_size_computer.compute_inline_size_constraint_inputs(
&mut self.block_flow,
parent_flow_inline_size,
shared_context,
);
let solution =
inline_size_computer.solve_inline_size_constraints(&mut self.block_flow, &input);
inline_size_computer
.set_inline_size_constraint_solutions(&mut self.block_flow, solution);
inline_size_computer
.set_inline_position_of_flow_if_necessary(&mut self.block_flow, solution);
return;
}
let inline_size_computer = Table {
minimum_width_of_all_columns: minimum_width_of_all_columns,
preferred_width_of_all_columns: preferred_width_of_all_columns,
table_border_padding: border_padding,
};
let input = inline_size_computer.compute_inline_size_constraint_inputs(
&mut self.block_flow,
parent_flow_inline_size,
shared_context,
);
let solution =
inline_size_computer.solve_inline_size_constraints(&mut self.block_flow, &input);
inline_size_computer.set_inline_size_constraint_solutions(&mut self.block_flow, solution);
inline_size_computer
.set_inline_position_of_flow_if_necessary(&mut self.block_flow, solution);
}
}
impl Flow for TableWrapperFlow {
fn class(&self) -> FlowClass {
FlowClass::TableWrapper
}
fn as_mut_table_wrapper(&mut self) -> &mut TableWrapperFlow {
self
}
fn as_table_wrapper(&self) -> &TableWrapperFlow {
self
}
fn as_mut_block(&mut self) -> &mut BlockFlow {
&mut self.block_flow
}
fn as_block(&self) -> &BlockFlow {
&self.block_flow
}
fn mark_as_root(&mut self) {
self.block_flow.mark_as_root();
}
fn bubble_inline_sizes(&mut self) {
// Get the intrinsic column inline-sizes info from the table flow.
for kid in self.block_flow.base.child_iter_mut() {
debug_assert!(kid.is_table_caption() || kid.is_table());
if kid.is_table() {
let table = kid.as_table();
self.column_intrinsic_inline_sizes = table.column_intrinsic_inline_sizes.clone();
}
}
self.block_flow.bubble_inline_sizes();
}
fn assign_inline_sizes(&mut self, layout_context: &LayoutContext) {
debug!(
"assign_inline_sizes({}): assigning inline_size for flow",
if self.block_flow.base.flags.is_float() {
"floated table_wrapper"
} else {
"table_wrapper"
}
);
let shared_context = layout_context.shared_context();
self.block_flow
.initialize_container_size_for_root(shared_context);
let mut intermediate_column_inline_sizes = self
.column_intrinsic_inline_sizes
.iter()
.map(
|column_intrinsic_inline_size| IntermediateColumnInlineSize {
size: column_intrinsic_inline_size.minimum_length,
percentage: column_intrinsic_inline_size.percentage,
},
).collect::<Vec<_>>();
// Our inline-size was set to the inline-size of the containing block by the flow's parent.
// Now compute the real value.
let containing_block_inline_size = self.block_flow.base.block_container_inline_size;
if self.block_flow.base.flags.is_float() {
self.block_flow
.float
.as_mut()
.unwrap()
.containing_inline_size = containing_block_inline_size;
}
// This has to be done before computing our inline size because `compute_used_inline_size`
// internally consults the border and padding of the table.
self.compute_border_and_padding_of_table();
self.compute_used_inline_size(
shared_context,
containing_block_inline_size,
&intermediate_column_inline_sizes,
);
match self.table_layout {
TableLayout::Auto => self.calculate_table_column_sizes_for_automatic_layout(
&mut intermediate_column_inline_sizes,
),
TableLayout::Fixed => {},
}
let inline_start_content_edge = self.block_flow.fragment.border_box.start.i;
let content_inline_size = self.block_flow.fragment.border_box.size.inline;
let inline_end_content_edge = self.block_flow.fragment.border_padding.inline_end +
self.block_flow.fragment.margin.inline_end;
// In case of fixed layout, column inline-sizes are calculated in table flow.
let assigned_column_inline_sizes = match self.table_layout {
TableLayout::Fixed => None,
TableLayout::Auto => Some(
intermediate_column_inline_sizes
.iter()
.map(|sizes| ColumnComputedInlineSize { size: sizes.size })
.collect::<Vec<_>>(),
),
};
match assigned_column_inline_sizes {
None => self.block_flow.propagate_assigned_inline_size_to_children(
shared_context,
inline_start_content_edge,
inline_end_content_edge,
content_inline_size,
|_, _, _, _, _, _| {},<|fim▁hole|> ),
Some(ref assigned_column_inline_sizes) => {
self.block_flow.propagate_assigned_inline_size_to_children(
shared_context,
inline_start_content_edge,
inline_end_content_edge,
content_inline_size,
|child_flow, _, _, _, _, _| {
if child_flow.class() == FlowClass::Table {
child_flow.as_mut_table().column_computed_inline_sizes =
assigned_column_inline_sizes.to_vec();
}
},
)
},
}
}
fn assign_block_size(&mut self, layout_context: &LayoutContext) {
debug!("assign_block_size: assigning block_size for table_wrapper");
let remaining = self.block_flow.assign_block_size_block_base(
layout_context,
None,
MarginsMayCollapseFlag::MarginsMayNotCollapse,
);
debug_assert!(remaining.is_none());
}
fn compute_stacking_relative_position(&mut self, layout_context: &LayoutContext) {
self.block_flow
.compute_stacking_relative_position(layout_context)
}
fn place_float_if_applicable<'a>(&mut self) {
self.block_flow.place_float_if_applicable()
}
fn assign_block_size_for_inorder_child_if_necessary(
&mut self,
layout_context: &LayoutContext,
parent_thread_id: u8,
content_box: LogicalRect<Au>,
) -> bool {
self.block_flow
.assign_block_size_for_inorder_child_if_necessary(
layout_context,
parent_thread_id,
content_box,
)
}
fn update_late_computed_inline_position_if_necessary(&mut self, inline_position: Au) {
self.block_flow
.update_late_computed_inline_position_if_necessary(inline_position)
}
fn update_late_computed_block_position_if_necessary(&mut self, block_position: Au) {
self.block_flow
.update_late_computed_block_position_if_necessary(block_position)
}
fn generated_containing_block_size(&self, flow: OpaqueFlow) -> LogicalSize<Au> {
self.block_flow.generated_containing_block_size(flow)
}
fn build_display_list(&mut self, state: &mut DisplayListBuildState) {
self.block_flow.build_display_list(state);
}
fn collect_stacking_contexts(&mut self, state: &mut StackingContextCollectionState) {
self.block_flow.collect_stacking_contexts_for_block(
state,
StackingContextCollectionFlags::POSITION_NEVER_CREATES_CONTAINING_BLOCK |
StackingContextCollectionFlags::NEVER_CREATES_CLIP_SCROLL_NODE,
);
}
fn repair_style(&mut self, new_style: &::ServoArc<ComputedValues>) {
self.block_flow.repair_style(new_style)
}
fn compute_overflow(&self) -> Overflow {
self.block_flow.compute_overflow()
}
fn iterate_through_fragment_border_boxes(
&self,
iterator: &mut FragmentBorderBoxIterator,
level: i32,
stacking_context_position: &Point2D<Au>,
) {
self.block_flow.iterate_through_fragment_border_boxes(
iterator,
level,
stacking_context_position,
)
}
fn mutate_fragments(&mut self, mutator: &mut FnMut(&mut Fragment)) {
self.block_flow.mutate_fragments(mutator)
}
fn print_extra_flow_children(&self, print_tree: &mut PrintTree) {
self.block_flow.print_extra_flow_children(print_tree);
}
fn positioning(&self) -> position::T {
self.block_flow.positioning()
}
}
impl fmt::Debug for TableWrapperFlow {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
if self.block_flow.base.flags.is_float() {
write!(f, "TableWrapperFlow(Float): {:?}", self.block_flow)
} else {
write!(f, "TableWrapperFlow: {:?}", self.block_flow)
}
}
}
/// The layout "guesses" defined in INTRINSIC § 4.3.
struct AutoLayoutCandidateGuess {
/// The column inline-size assignment where each column is assigned its intrinsic minimum
/// inline-size.
minimum_guess: Au,
/// The column inline-size assignment where:
/// * A column with an intrinsic percentage inline-size greater than 0% is assigned the
/// larger of:
/// - Its intrinsic percentage inline-size times the assignable inline-size;
/// - Its intrinsic minimum inline-size;
/// * Other columns receive their intrinsic minimum inline-size.
minimum_percentage_guess: Au,
/// The column inline-size assignment where:
/// * Each column with an intrinsic percentage inline-size greater than 0% is assigned the
/// larger of:
/// - Its intrinsic percentage inline-size times the assignable inline-size;
/// - Its intrinsic minimum inline-size;
/// * Any other column that is constrained is assigned its intrinsic preferred inline-size;
/// * Other columns are assigned their intrinsic minimum inline-size.
minimum_specified_guess: Au,
/// The column inline-size assignment where:
/// * Each column with an intrinsic percentage inline-size greater than 0% is assigned the
/// larger of:
/// - Its intrinsic percentage inline-size times the assignable inline-size;
/// - Its intrinsic minimum inline-size;
/// * Other columns are assigned their intrinsic preferred inline-size.
preferred_guess: Au,
}
impl AutoLayoutCandidateGuess {
/// Creates a guess with all elements initialized to zero.
fn new() -> AutoLayoutCandidateGuess {
AutoLayoutCandidateGuess {
minimum_guess: Au(0),
minimum_percentage_guess: Au(0),
minimum_specified_guess: Au(0),
preferred_guess: Au(0),
}
}
/// Fills in the inline-size guesses for this column per INTRINSIC § 4.3.
fn from_column_intrinsic_inline_size(
column_intrinsic_inline_size: &ColumnIntrinsicInlineSize,
assignable_inline_size: Au,
) -> AutoLayoutCandidateGuess {
let minimum_percentage_guess = max(
assignable_inline_size.scale_by(column_intrinsic_inline_size.percentage),
column_intrinsic_inline_size.minimum_length,
);
AutoLayoutCandidateGuess {
minimum_guess: column_intrinsic_inline_size.minimum_length,
minimum_percentage_guess: minimum_percentage_guess,
// FIXME(pcwalton): We need the notion of *constrainedness* per INTRINSIC § 4 to
// implement this one correctly.
minimum_specified_guess: if column_intrinsic_inline_size.percentage > 0.0 {
minimum_percentage_guess
} else if column_intrinsic_inline_size.constrained {
column_intrinsic_inline_size.preferred
} else {
column_intrinsic_inline_size.minimum_length
},
preferred_guess: if column_intrinsic_inline_size.percentage > 0.0 {
minimum_percentage_guess
} else {
column_intrinsic_inline_size.preferred
},
}
}
/// Calculates the inline-size, interpolating appropriately based on the value of `selection`.
///
/// This does *not* distribute excess inline-size. That must be done later if necessary.
fn calculate(&self, selection: SelectedAutoLayoutCandidateGuess) -> Au {
match selection {
SelectedAutoLayoutCandidateGuess::UseMinimumGuess => self.minimum_guess,
SelectedAutoLayoutCandidateGuess::
InterpolateBetweenMinimumGuessAndMinimumPercentageGuess(weight) => {
interp(self.minimum_guess, self.minimum_percentage_guess, weight)
}
SelectedAutoLayoutCandidateGuess::
InterpolateBetweenMinimumPercentageGuessAndMinimumSpecifiedGuess(weight) => {
interp(self.minimum_percentage_guess, self.minimum_specified_guess, weight)
}
SelectedAutoLayoutCandidateGuess::
InterpolateBetweenMinimumSpecifiedGuessAndPreferredGuess(weight) => {
interp(self.minimum_specified_guess, self.preferred_guess, weight)
}
SelectedAutoLayoutCandidateGuess::UsePreferredGuessAndDistributeExcessInlineSize => {
self.preferred_guess
}
}
}
}
impl<'a> Add for &'a AutoLayoutCandidateGuess {
type Output = AutoLayoutCandidateGuess;
#[inline]
fn add(self, other: &AutoLayoutCandidateGuess) -> AutoLayoutCandidateGuess {
AutoLayoutCandidateGuess {
minimum_guess: self.minimum_guess + other.minimum_guess,
minimum_percentage_guess: self.minimum_percentage_guess +
other.minimum_percentage_guess,
minimum_specified_guess: self.minimum_specified_guess + other.minimum_specified_guess,
preferred_guess: self.preferred_guess + other.preferred_guess,
}
}
}
/// The `CSSFloat` member specifies the weight of the smaller of the two guesses, on a scale from
/// 0.0 to 1.0.
#[derive(Clone, Copy, Debug, PartialEq)]
enum SelectedAutoLayoutCandidateGuess {
UseMinimumGuess,
InterpolateBetweenMinimumGuessAndMinimumPercentageGuess(CSSFloat),
InterpolateBetweenMinimumPercentageGuessAndMinimumSpecifiedGuess(CSSFloat),
InterpolateBetweenMinimumSpecifiedGuessAndPreferredGuess(CSSFloat),
UsePreferredGuessAndDistributeExcessInlineSize,
}
impl SelectedAutoLayoutCandidateGuess {
/// See INTRINSIC § 4.3.
///
/// FIXME(pcwalton, INTRINSIC spec): INTRINSIC doesn't specify whether these are exclusive or
/// inclusive ranges.
fn select(
guess: &AutoLayoutCandidateGuess,
assignable_inline_size: Au,
) -> SelectedAutoLayoutCandidateGuess {
if assignable_inline_size < guess.minimum_guess {
SelectedAutoLayoutCandidateGuess::UseMinimumGuess
} else if assignable_inline_size < guess.minimum_percentage_guess {
let weight = weight(
guess.minimum_guess,
assignable_inline_size,
guess.minimum_percentage_guess,
);
SelectedAutoLayoutCandidateGuess::InterpolateBetweenMinimumGuessAndMinimumPercentageGuess(weight)
} else if assignable_inline_size < guess.minimum_specified_guess {
let weight = weight(
guess.minimum_percentage_guess,
assignable_inline_size,
guess.minimum_specified_guess,
);
SelectedAutoLayoutCandidateGuess::InterpolateBetweenMinimumPercentageGuessAndMinimumSpecifiedGuess(weight)
} else if assignable_inline_size < guess.preferred_guess {
let weight = weight(
guess.minimum_specified_guess,
assignable_inline_size,
guess.preferred_guess,
);
SelectedAutoLayoutCandidateGuess::InterpolateBetweenMinimumSpecifiedGuessAndPreferredGuess(weight)
} else {
SelectedAutoLayoutCandidateGuess::UsePreferredGuessAndDistributeExcessInlineSize
}
}
}
/// Computes the weight needed to linearly interpolate `middle` between two guesses `low` and
/// `high` as specified by INTRINSIC § 4.3.
fn weight(low: Au, middle: Au, high: Au) -> CSSFloat {
(middle - low).to_f32_px() / (high - low).to_f32_px()
}
/// Linearly interpolates between two guesses, as specified by INTRINSIC § 4.3.
fn interp(low: Au, high: Au, weight: CSSFloat) -> Au {
low + (high - low).scale_by(weight)
}
struct ExcessInlineSizeDistributionInfo {
preferred_inline_size_of_nonconstrained_columns_with_no_percentage: Au,
count_of_nonconstrained_columns_with_no_percentage: u32,
preferred_inline_size_of_constrained_columns_with_no_percentage: Au,
total_percentage: CSSFloat,
column_count: u32,
}
impl ExcessInlineSizeDistributionInfo {
fn new() -> ExcessInlineSizeDistributionInfo {
ExcessInlineSizeDistributionInfo {
preferred_inline_size_of_nonconstrained_columns_with_no_percentage: Au(0),
count_of_nonconstrained_columns_with_no_percentage: 0,
preferred_inline_size_of_constrained_columns_with_no_percentage: Au(0),
total_percentage: 0.0,
column_count: 0,
}
}
fn update(&mut self, column_intrinsic_inline_size: &ColumnIntrinsicInlineSize) {
if !column_intrinsic_inline_size.constrained &&
column_intrinsic_inline_size.percentage == 0.0
{
self.preferred_inline_size_of_nonconstrained_columns_with_no_percentage =
self.preferred_inline_size_of_nonconstrained_columns_with_no_percentage +
column_intrinsic_inline_size.preferred;
self.count_of_nonconstrained_columns_with_no_percentage += 1
}
if column_intrinsic_inline_size.constrained &&
column_intrinsic_inline_size.percentage == 0.0
{
self.preferred_inline_size_of_constrained_columns_with_no_percentage =
self.preferred_inline_size_of_constrained_columns_with_no_percentage +
column_intrinsic_inline_size.preferred
}
self.total_percentage += column_intrinsic_inline_size.percentage;
self.column_count += 1
}
/// Based on the information here, distributes excess inline-size to the given column per
/// INTRINSIC § 4.4.
///
/// `#[inline]` so the compiler will hoist out the branch, which is loop-invariant.
#[inline]
fn distribute_excess_inline_size_to_column(
&self,
intermediate_column_inline_size: &mut IntermediateColumnInlineSize,
column_intrinsic_inline_size: &ColumnIntrinsicInlineSize,
excess_inline_size: Au,
total_distributed_excess_size: &mut Au,
) {
let proportion =
if self.preferred_inline_size_of_nonconstrained_columns_with_no_percentage > Au(0) {
// FIXME(spec, pcwalton): Gecko and WebKit do *something* here when there are
// nonconstrained columns with no percentage *and* no preferred width. What do they
// do?
if !column_intrinsic_inline_size.constrained &&
column_intrinsic_inline_size.percentage == 0.0
{
column_intrinsic_inline_size.preferred.to_f32_px() / self
.preferred_inline_size_of_nonconstrained_columns_with_no_percentage
.to_f32_px()
} else {
0.0
}
} else if self.count_of_nonconstrained_columns_with_no_percentage > 0 {
1.0 / (self.count_of_nonconstrained_columns_with_no_percentage as CSSFloat)
} else if self.preferred_inline_size_of_constrained_columns_with_no_percentage > Au(0) {
column_intrinsic_inline_size.preferred.to_f32_px() / self
.preferred_inline_size_of_constrained_columns_with_no_percentage
.to_f32_px()
} else if self.total_percentage > 0.0 {
column_intrinsic_inline_size.percentage / self.total_percentage
} else {
1.0 / (self.column_count as CSSFloat)
};
// The `min` here has the effect of throwing away fractional excess at the end of the
// table.
let amount_to_distribute = min(
excess_inline_size.scale_by(proportion),
excess_inline_size - *total_distributed_excess_size,
);
*total_distributed_excess_size = *total_distributed_excess_size + amount_to_distribute;
intermediate_column_inline_size.size =
intermediate_column_inline_size.size + amount_to_distribute
}
}
/// An intermediate column size assignment.
struct IntermediateColumnInlineSize {
size: Au,
percentage: f32,
}
/// Returns the computed inline size of the table wrapper represented by `block`.
///
/// `table_border_padding` is the sum of the sizes of all border and padding in the inline
/// direction of the table contained within this table wrapper.
fn initial_computed_inline_size(
block: &mut BlockFlow,
containing_block_inline_size: Au,
minimum_width_of_all_columns: Au,
preferred_width_of_all_columns: Au,
table_border_padding: Au,
) -> MaybeAuto {
let inline_size_from_style = MaybeAuto::from_style(
block.fragment.style.content_inline_size(),
containing_block_inline_size,
);
match inline_size_from_style {
MaybeAuto::Auto => {
if preferred_width_of_all_columns + table_border_padding <= containing_block_inline_size
{
MaybeAuto::Specified(preferred_width_of_all_columns + table_border_padding)
} else if minimum_width_of_all_columns > containing_block_inline_size {
MaybeAuto::Specified(minimum_width_of_all_columns)
} else {
MaybeAuto::Auto
}
},
MaybeAuto::Specified(inline_size_from_style) => MaybeAuto::Specified(max(
inline_size_from_style - table_border_padding,
minimum_width_of_all_columns,
)),
}
}
struct Table {
minimum_width_of_all_columns: Au,
preferred_width_of_all_columns: Au,
table_border_padding: Au,
}
impl ISizeAndMarginsComputer for Table {
fn compute_border_and_padding(&self, block: &mut BlockFlow, containing_block_inline_size: Au) {
block
.fragment
.compute_border_and_padding(containing_block_inline_size)
}
fn initial_computed_inline_size(
&self,
block: &mut BlockFlow,
parent_flow_inline_size: Au,
shared_context: &SharedStyleContext,
) -> MaybeAuto {
let containing_block_inline_size =
self.containing_block_inline_size(block, parent_flow_inline_size, shared_context);
initial_computed_inline_size(
block,
containing_block_inline_size,
self.minimum_width_of_all_columns,
self.preferred_width_of_all_columns,
self.table_border_padding,
)
}
fn solve_inline_size_constraints(
&self,
block: &mut BlockFlow,
input: &ISizeConstraintInput,
) -> ISizeConstraintSolution {
self.solve_block_inline_size_constraints(block, input)
}
}
struct FloatedTable {
minimum_width_of_all_columns: Au,
preferred_width_of_all_columns: Au,
table_border_padding: Au,
}
impl ISizeAndMarginsComputer for FloatedTable {
fn compute_border_and_padding(&self, block: &mut BlockFlow, containing_block_inline_size: Au) {
block
.fragment
.compute_border_and_padding(containing_block_inline_size)
}
fn initial_computed_inline_size(
&self,
block: &mut BlockFlow,
parent_flow_inline_size: Au,
shared_context: &SharedStyleContext,
) -> MaybeAuto {
let containing_block_inline_size =
self.containing_block_inline_size(block, parent_flow_inline_size, shared_context);
initial_computed_inline_size(
block,
containing_block_inline_size,
self.minimum_width_of_all_columns,
self.preferred_width_of_all_columns,
self.table_border_padding,
)
}
fn solve_inline_size_constraints(
&self,
block: &mut BlockFlow,
input: &ISizeConstraintInput,
) -> ISizeConstraintSolution {
FloatNonReplaced.solve_inline_size_constraints(block, input)
}
}
struct AbsoluteTable {
minimum_width_of_all_columns: Au,
preferred_width_of_all_columns: Au,
table_border_padding: Au,
}
impl ISizeAndMarginsComputer for AbsoluteTable {
fn compute_border_and_padding(&self, block: &mut BlockFlow, containing_block_inline_size: Au) {
block
.fragment
.compute_border_and_padding(containing_block_inline_size)
}
fn initial_computed_inline_size(
&self,
block: &mut BlockFlow,
parent_flow_inline_size: Au,
shared_context: &SharedStyleContext,
) -> MaybeAuto {
let containing_block_inline_size =
self.containing_block_inline_size(block, parent_flow_inline_size, shared_context);
initial_computed_inline_size(
block,
containing_block_inline_size,
self.minimum_width_of_all_columns,
self.preferred_width_of_all_columns,
self.table_border_padding,
)
}
fn containing_block_inline_size(
&self,
block: &mut BlockFlow,
parent_flow_inline_size: Au,
shared_context: &SharedStyleContext,
) -> Au {
AbsoluteNonReplaced.containing_block_inline_size(
block,
parent_flow_inline_size,
shared_context,
)
}
fn solve_inline_size_constraints(
&self,
block: &mut BlockFlow,
input: &ISizeConstraintInput,
) -> ISizeConstraintSolution {
AbsoluteNonReplaced.solve_inline_size_constraints(block, input)
}
fn set_inline_position_of_flow_if_necessary(
&self,
block: &mut BlockFlow,
solution: ISizeConstraintSolution,
) {
AbsoluteNonReplaced.set_inline_position_of_flow_if_necessary(block, solution);
}
}<|fim▁end|> | |
<|file_name|>character_count.py<|end_file_name|><|fim▁begin|>from collections import defaultdict
import codecs
def count(corpus, output_file):
debug = False
dic = defaultdict(int)
other = set()<|fim▁hole|> if len(word) % 3 == 0:
for i in xrange(len(word) / 3):
dic[word[i:i+3]] += 1
else:
other.add(word)
fout.write('%i %i\n' % (len(dic), len(other)))
record_list = [(y, x) for x, y in dic.items()]
record_list.sort()
record_list.reverse()
i = 0
for x, y in record_list:
#print y.decode('utf8'), x
try:
yy = y.decode('GBK')
except:
print y
yy = 'N/A'
fout.write('%s %i\n' % (yy, x))
i += 1
if i > 10 and debug:
break
other_list = list(other)
other_list.sort()
for item in other_list:
#print item.decode('utf8')
item2 = item.decode('utf8')
fout.write(item2)
fout.write('\n')
i += 1
if i > 20 and debug:
break
fout.close()
if __name__ =='__main__':
count('data/train.zh_parsed', 'output/count.zh')
count('data/train.ja_parsed', 'output/count.ja')<|fim▁end|> | fout = codecs.open(output_file, 'w', 'utf8')
for line in open(corpus, 'r'):
words = line.split()
for word in words: |
<|file_name|>grep.py<|end_file_name|><|fim▁begin|># coding=utf-8
import os.path
import sys
import types
import getopt
from getopt import GetoptError
import text_file
import regex_utils
import string_utils as str_utils
def grep(target, pattern, number = False, model = 'e'):
'''
grep: print lines matching a pattern
@param target:string list or text file name
@param pattern: regex pattern or line number pattern or reduce function: bool=action(str)
@param number: with line number
@param model: s: substring model, e: regex model, n: line number model, a: action model
@summary: list= ['1:huiyugeng:male', '2:zhuzhu:male', '3:maomao:female']
print grep.grep(list, '^(?!.*female).*$', ':', [1])
output: ['huiyugeng', 'zhuzhu']
'''
if isinstance(target, basestring):
text = text_file.read_file(target)
elif isinstance(target, list):
text = target
else:
text = None
if not text:
return None
line_num = 1;
result = []
for line_text in text:
line_text = str(line_text)
if __match(line_num, line_text, model, pattern):
line_text = __print(line_num, line_text, number)
if line_text != None:
result.append(line_text)
line_num = line_num + 1
return result
def __match(line_num, line_text, model, pattern):
if str_utils.is_blank(line_text):
return False
if str_utils.is_blank(pattern):
return True
patterns = []
if type(pattern) == types.ListType:
patterns = pattern
elif type(pattern) == types.FunctionType:
patterns = [pattern]
else:
patterns = [str(pattern)]
if str_utils.is_empty(model) :
model = 's'
model = model.lower()
for match_pattern in patterns:
if model == 's':
if match_pattern in line_text:
return True
elif model == 'n':
_min, _max = __split_region(match_pattern)
if line_num >= _min and line_num <= _max:
return True
elif model == 'e':
if regex_utils.check_line(match_pattern, line_text):
return True
elif model == 'a':
if type(pattern) == types.FunctionType:
if pattern(line_text):
return True
return False
def __split_region(pattern):
if pattern.startswith('[') and pattern.endswith(']') and ',' in pattern:
region = pattern[1: len(pattern) - 1].split(',')<|fim▁hole|> return 0, 0
def __print(line, text, number):
if number:
return str(line) + ':' + text.strip()
else:
return text.strip()
def exec_cmd(argv):
try:
filename = None
pattern = None
number = False
model = 'e'
if len(argv) > 2:
opts, _ = getopt.getopt(argv[2:],'hf:p:nm:', ['help', '--file', '--pattern', '--number', '--model'])
for name, value in opts:
if name in ('-h', '--help'):
show_help()
if name in ('-f', '--file'):
filename = value
if name in ('-p', '--pattern'):
pattern = value
if name in ('-n', '--number'):
number = True
if name in ('-m', '--model'):
model = value
if str_utils.is_empty(filename) or not os.path.exists(filename):
print 'error : could not find file : ' + filename
sys.exit()
if str_utils.is_empty(pattern):
print 'error : pattern is empty'
sys.exit()
result = grep(filename, pattern, number, model)
if result and isinstance(result, list):
for line in result:
print line
else:
show_help()
except GetoptError, e:
print 'error : ' + e.msg
except Exception, e:
print 'error : ' + e.message
def show_help():
pass<|fim▁end|> | if region != None and len(region) == 2:
_min = int(region[0].strip())
_max = int(region[1].strip())
return _min, _max |
<|file_name|>abst_ecu.py<|end_file_name|><|fim▁begin|>from components.base.automotive_component import AutomotiveComponent
from config import project_registration as proj
from tools.ecu_logging import ECULogger as L
import random
class AbstractECU(AutomotiveComponent):
'''
This abstract class defines the interface of
an ECU as it is found in an automotive network
'''
def __init__(self, sim_env, ecu_id, data_rate):
''' Constructor
Input: sim_env simpy.Environment environment of this component
ecu_id string id of the corresponding AbstractECU
data_rate float datarate of the ecu
Output: -
'''
AutomotiveComponent.__init__(self, sim_env)
self._ABSTRACT_ECU = True
self._ecu_id = ecu_id # ID of the ECU
self.ecuSW = None # what is done
self.ecuHW = None # what is used to make it happen
self.MessageClass = proj.BUS_MSG_CLASS # what kind of messages are exchanged <|fim▁hole|> self._effective_bittime = 0 # seconds
self._jitter = 1
self.startup_delay = False
def set_startup_delay(self, start_time):
''' this method sets the startup delay. When this delay is set
this ECU is activated after the defined start time
Input: start_time float time when the ECU starts running
Output: -
'''
self.startup_delay = start_time
if start_time:
self.ecuHW.transceiver.ecu_is_active = False
def set_jitter(self, jitter_range):
''' sets the jitter which will be multiplied onto each
timeout value. It will be within jitter_range
e.g. jitter_range of 0.1 means that any random value
between 1.0 and 1.1 will be used
Input: jitter_range: float dispersion from 1.0
Output: -
'''
# determine jitter
self._jitter = 1 + (random.random() * jitter_range)
# apply jitter on layers
try: self.ecuSW.comm_mod.physical_lay.transceiver._jitter = self._jitter
except: pass
try: self.ecuSW.comm_mod._jitter = self._jitter
except: pass
try: self.ecuSW.comm_mod.transp_lay._jitter = self._jitter
except: pass
try: self.ecuSW.comm_mod.datalink_lay._jitter = self._jitter
except: pass
try: self.ecuSW.comm_mod.physical_lay.transceiver._jitter = self._jitter
except: pass
try: self.ecuSW.app_lay._jitter = self._jitter
except: pass
def _connect_hw_sw(self):
''' connect all hardware components with their
associated software connections
Input: -
Output: -
'''
# application Layer
self.ecuSW.app_lay.microcontroller = self.ecuHW.mic_controller
# physical and data link layer '''
self.ecuSW.comm_mod.datalink_lay.controller = self.ecuHW.controller
self.ecuSW.comm_mod.physical_lay.transceiver = self.ecuHW.transceiver
self.ecuSW.comm_mod.datalink_lay.effective_bittime = self._effective_bittime
def connect_to(self, bus):
''' connects the bus to the ECU
Input: bus CANBus Bus that will be connected
Output: -
'''
self.ecuHW.transceiver.connect_bus(bus)
self.connected_bus = bus
def get_type_id(self):
''' returns the id of this ECU type
Input: -
Output: ecu_type string type of this ECU; e.g.'TLSECU'
'''
raise NotImplementedError(" get_type_id() was not implemented by class %s" % self.__class__)
def get_rec_buffer_items(self):
''' returns the current content of the receiving buffer
Input: -
Output: rec_buffer list list of items in the receiving buffer
'''
return self.ecuHW.controller.receive_buffer.items
def get_trans_buffer_items(self):
''' returns the current content of the transmit buffer
Input: -
Output: trans_buffer list list of items in the transmit buffer
'''
return self.ecuHW.controller.transmit_buffer.items
def install_hw_filter(self, allowed_items_list):
''' installs a hardware filter that filters all
message ids that are not defined in the passed
list. This filter is applied on the transceiver
Input: allowed_items_list list list of message_ids that are let pass by the transceiver
Output: -
'''
try:
self.ecuHW.transceiver.install_filter(allowed_items_list)
except:
L().log_err(300)
def _GET_ABSTRACT_ECU(self):
''' marker that this is a AbstractECU '''
return self._ABSTRACT_ECU
@property
def ecu_id(self):
return self._ecu_id
@ecu_id.setter
def ecu_id(self, value):
self._ecu_id = value
def set_monitor(self, monitor):
self.monitor = monitor<|fim▁end|> | self.connected_bus = None # Bus that is connected to the ECU
self.data_rate = proj.BUS_ECU_DATARATE # Datarate with which bits are put on the bus
self._effective_datarate = 0 # Bit per second |
<|file_name|>user.py<|end_file_name|><|fim▁begin|>from typing import Optional
from lib.data import CustomFieldArgs
<|fim▁hole|> if args.field.lower() == 'user' or args.field.lower() == 'nick':
if args.nick:
return (args.prefix or '') + args.nick + (args.suffix or '')
else:
return args.default or ''
return None<|fim▁end|> | async def fieldUser(args: CustomFieldArgs) -> Optional[str]: |
<|file_name|>models.go<|end_file_name|><|fim▁begin|>// +build go1.9
// Copyright 2019 Microsoft Corporation
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
// This code was auto-generated by:
// github.com/Azure/azure-sdk-for-go/tools/profileBuilder
package scheduler
import (
"context"
original "github.com/Azure/azure-sdk-for-go/services/preview/scheduler/mgmt/2014-08-01-preview/scheduler"
)
const (
DefaultBaseURI = original.DefaultBaseURI
)
type DayOfWeek = original.DayOfWeek
const (
Friday DayOfWeek = original.Friday
Monday DayOfWeek = original.Monday
Saturday DayOfWeek = original.Saturday
Sunday DayOfWeek = original.Sunday
Thursday DayOfWeek = original.Thursday
Tuesday DayOfWeek = original.Tuesday
Wednesday DayOfWeek = original.Wednesday
)
type HTTPAuthenticationType = original.HTTPAuthenticationType
const (
ActiveDirectoryOAuth HTTPAuthenticationType = original.ActiveDirectoryOAuth
Basic HTTPAuthenticationType = original.Basic
ClientCertificate HTTPAuthenticationType = original.ClientCertificate
NotSpecified HTTPAuthenticationType = original.NotSpecified
)
type JobActionType = original.JobActionType
const (
HTTP JobActionType = original.HTTP
HTTPS JobActionType = original.HTTPS
ServiceBusQueue JobActionType = original.ServiceBusQueue
ServiceBusTopic JobActionType = original.ServiceBusTopic
StorageQueue JobActionType = original.StorageQueue
)
type JobCollectionState = original.JobCollectionState
const (
Deleted JobCollectionState = original.Deleted
Disabled JobCollectionState = original.Disabled
Enabled JobCollectionState = original.Enabled
Suspended JobCollectionState = original.Suspended
)
type JobExecutionStatus = original.JobExecutionStatus
const (
Completed JobExecutionStatus = original.Completed
Failed JobExecutionStatus = original.Failed
Postponed JobExecutionStatus = original.Postponed<|fim▁hole|>
const (
ErrorAction JobHistoryActionName = original.ErrorAction
MainAction JobHistoryActionName = original.MainAction
)
type JobScheduleDay = original.JobScheduleDay
const (
JobScheduleDayFriday JobScheduleDay = original.JobScheduleDayFriday
JobScheduleDayMonday JobScheduleDay = original.JobScheduleDayMonday
JobScheduleDaySaturday JobScheduleDay = original.JobScheduleDaySaturday
JobScheduleDaySunday JobScheduleDay = original.JobScheduleDaySunday
JobScheduleDayThursday JobScheduleDay = original.JobScheduleDayThursday
JobScheduleDayTuesday JobScheduleDay = original.JobScheduleDayTuesday
JobScheduleDayWednesday JobScheduleDay = original.JobScheduleDayWednesday
)
type JobState = original.JobState
const (
JobStateCompleted JobState = original.JobStateCompleted
JobStateDisabled JobState = original.JobStateDisabled
JobStateEnabled JobState = original.JobStateEnabled
JobStateFaulted JobState = original.JobStateFaulted
)
type RecurrenceFrequency = original.RecurrenceFrequency
const (
Day RecurrenceFrequency = original.Day
Hour RecurrenceFrequency = original.Hour
Minute RecurrenceFrequency = original.Minute
Month RecurrenceFrequency = original.Month
Week RecurrenceFrequency = original.Week
)
type RetryType = original.RetryType
const (
Fixed RetryType = original.Fixed
None RetryType = original.None
)
type ServiceBusAuthenticationType = original.ServiceBusAuthenticationType
const (
ServiceBusAuthenticationTypeNotSpecified ServiceBusAuthenticationType = original.ServiceBusAuthenticationTypeNotSpecified
ServiceBusAuthenticationTypeSharedAccessKey ServiceBusAuthenticationType = original.ServiceBusAuthenticationTypeSharedAccessKey
)
type ServiceBusTransportType = original.ServiceBusTransportType
const (
ServiceBusTransportTypeAMQP ServiceBusTransportType = original.ServiceBusTransportTypeAMQP
ServiceBusTransportTypeNetMessaging ServiceBusTransportType = original.ServiceBusTransportTypeNetMessaging
ServiceBusTransportTypeNotSpecified ServiceBusTransportType = original.ServiceBusTransportTypeNotSpecified
)
type SkuDefinition = original.SkuDefinition
const (
Free SkuDefinition = original.Free
Premium SkuDefinition = original.Premium
Standard SkuDefinition = original.Standard
)
type BaseClient = original.BaseClient
type BasicAuthentication = original.BasicAuthentication
type ClientCertAuthentication = original.ClientCertAuthentication
type HTTPAuthentication = original.HTTPAuthentication
type HTTPRequest = original.HTTPRequest
type JobAction = original.JobAction
type JobCollectionDefinition = original.JobCollectionDefinition
type JobCollectionListResult = original.JobCollectionListResult
type JobCollectionListResultIterator = original.JobCollectionListResultIterator
type JobCollectionListResultPage = original.JobCollectionListResultPage
type JobCollectionProperties = original.JobCollectionProperties
type JobCollectionQuota = original.JobCollectionQuota
type JobCollectionsClient = original.JobCollectionsClient
type JobDefinition = original.JobDefinition
type JobErrorAction = original.JobErrorAction
type JobHistoryDefinition = original.JobHistoryDefinition
type JobHistoryDefinitionProperties = original.JobHistoryDefinitionProperties
type JobHistoryFilter = original.JobHistoryFilter
type JobHistoryListResult = original.JobHistoryListResult
type JobHistoryListResultIterator = original.JobHistoryListResultIterator
type JobHistoryListResultPage = original.JobHistoryListResultPage
type JobListResult = original.JobListResult
type JobListResultIterator = original.JobListResultIterator
type JobListResultPage = original.JobListResultPage
type JobMaxRecurrence = original.JobMaxRecurrence
type JobProperties = original.JobProperties
type JobRecurrence = original.JobRecurrence
type JobRecurrenceSchedule = original.JobRecurrenceSchedule
type JobRecurrenceScheduleMonthlyOccurrence = original.JobRecurrenceScheduleMonthlyOccurrence
type JobStateFilter = original.JobStateFilter
type JobStatus = original.JobStatus
type JobsClient = original.JobsClient
type OAuthAuthentication = original.OAuthAuthentication
type RetryPolicy = original.RetryPolicy
type ServiceBusAuthentication = original.ServiceBusAuthentication
type ServiceBusBrokeredMessageProperties = original.ServiceBusBrokeredMessageProperties
type ServiceBusMessage = original.ServiceBusMessage
type ServiceBusQueueMessage = original.ServiceBusQueueMessage
type ServiceBusTopicMessage = original.ServiceBusTopicMessage
type Sku = original.Sku
type StorageQueueMessage = original.StorageQueueMessage
func New(subscriptionID string) BaseClient {
return original.New(subscriptionID)
}
func NewJobCollectionListResultIterator(page JobCollectionListResultPage) JobCollectionListResultIterator {
return original.NewJobCollectionListResultIterator(page)
}
func NewJobCollectionListResultPage(getNextPage func(context.Context, JobCollectionListResult) (JobCollectionListResult, error)) JobCollectionListResultPage {
return original.NewJobCollectionListResultPage(getNextPage)
}
func NewJobCollectionsClient(subscriptionID string) JobCollectionsClient {
return original.NewJobCollectionsClient(subscriptionID)
}
func NewJobCollectionsClientWithBaseURI(baseURI string, subscriptionID string) JobCollectionsClient {
return original.NewJobCollectionsClientWithBaseURI(baseURI, subscriptionID)
}
func NewJobHistoryListResultIterator(page JobHistoryListResultPage) JobHistoryListResultIterator {
return original.NewJobHistoryListResultIterator(page)
}
func NewJobHistoryListResultPage(getNextPage func(context.Context, JobHistoryListResult) (JobHistoryListResult, error)) JobHistoryListResultPage {
return original.NewJobHistoryListResultPage(getNextPage)
}
func NewJobListResultIterator(page JobListResultPage) JobListResultIterator {
return original.NewJobListResultIterator(page)
}
func NewJobListResultPage(getNextPage func(context.Context, JobListResult) (JobListResult, error)) JobListResultPage {
return original.NewJobListResultPage(getNextPage)
}
func NewJobsClient(subscriptionID string) JobsClient {
return original.NewJobsClient(subscriptionID)
}
func NewJobsClientWithBaseURI(baseURI string, subscriptionID string) JobsClient {
return original.NewJobsClientWithBaseURI(baseURI, subscriptionID)
}
func NewWithBaseURI(baseURI string, subscriptionID string) BaseClient {
return original.NewWithBaseURI(baseURI, subscriptionID)
}
func PossibleDayOfWeekValues() []DayOfWeek {
return original.PossibleDayOfWeekValues()
}
func PossibleHTTPAuthenticationTypeValues() []HTTPAuthenticationType {
return original.PossibleHTTPAuthenticationTypeValues()
}
func PossibleJobActionTypeValues() []JobActionType {
return original.PossibleJobActionTypeValues()
}
func PossibleJobCollectionStateValues() []JobCollectionState {
return original.PossibleJobCollectionStateValues()
}
func PossibleJobExecutionStatusValues() []JobExecutionStatus {
return original.PossibleJobExecutionStatusValues()
}
func PossibleJobHistoryActionNameValues() []JobHistoryActionName {
return original.PossibleJobHistoryActionNameValues()
}
func PossibleJobScheduleDayValues() []JobScheduleDay {
return original.PossibleJobScheduleDayValues()
}
func PossibleJobStateValues() []JobState {
return original.PossibleJobStateValues()
}
func PossibleRecurrenceFrequencyValues() []RecurrenceFrequency {
return original.PossibleRecurrenceFrequencyValues()
}
func PossibleRetryTypeValues() []RetryType {
return original.PossibleRetryTypeValues()
}
func PossibleServiceBusAuthenticationTypeValues() []ServiceBusAuthenticationType {
return original.PossibleServiceBusAuthenticationTypeValues()
}
func PossibleServiceBusTransportTypeValues() []ServiceBusTransportType {
return original.PossibleServiceBusTransportTypeValues()
}
func PossibleSkuDefinitionValues() []SkuDefinition {
return original.PossibleSkuDefinitionValues()
}
func UserAgent() string {
return original.UserAgent() + " profiles/preview"
}
func Version() string {
return original.Version()
}<|fim▁end|> | )
type JobHistoryActionName = original.JobHistoryActionName |
<|file_name|>index.js<|end_file_name|><|fim▁begin|>"use strict"
const messages = require("..").messages
const ruleName = require("..").ruleName
const rules = require("../../../rules")
const rule = rules[ruleName]
testRule(rule, {
ruleName,
config: ["always"],
accept: [ {
code: "a { color :pink }",
description: "space only before",
}, {
code: "a { color : pink }",
description: "space before and after",
}, {
code: "a { color :\npink }",
description: "space before and newline after",
}, {
code: "a { color :\r\npink }",
description: "space before and CRLF after",
}, {
code: "$map:(key:value)",
description: "SCSS map with no newlines",
}, {
code: "$list:('value1', 'value2')",
description: "SCSS list with no newlines",
}, {
code: "a { background : url(data:application/font-woff;...); }",
description: "data URI",
} ],
reject: [ {
code: "a { color: pink; }",
description: "no space before",
message: messages.expectedBefore(),
line: 1,
column: 11,
}, {
code: "a { color : pink; }",
description: "two spaces before",
message: messages.expectedBefore(),
line: 1,
column: 11,
}, {
code: "a { color\t: pink; }",
description: "tab before",
message: messages.expectedBefore(),
line: 1,
column: 11,
}, {
code: "a { color\n: pink; }",
description: "newline before",
message: messages.expectedBefore(),
line: 2,
column: 1,
}, {
code: "a { color\r\n: pink; }",
description: "CRLF before",
message: messages.expectedBefore(),
line: 1,
column: 11,
} ],
})
testRule(rule, {
ruleName,
config: ["never"],
accept: [ {
code: "a { color:pink }",
description: "no space before and after",<|fim▁hole|> }, {
code: "a { color:\npink }",
description: "no space before and newline after",
}, {
code: "a { color:\r\npink }",
description: "no space before and CRLF after",
}, {
code: "$map :(key :value)",
description: "SCSS map with no newlines",
} ],
reject: [ {
code: "a { color : pink; }",
description: "space before",
message: messages.rejectedBefore(),
line: 1,
column: 11,
}, {
code: "a { color : pink; }",
description: "two spaces before",
message: messages.rejectedBefore(),
line: 1,
column: 11,
}, {
code: "a { color\t: pink; }",
description: "tab before",
message: messages.rejectedBefore(),
line: 1,
column: 11,
}, {
code: "a { color\n: pink; }",
description: "newline before",
message: messages.rejectedBefore(),
line: 2,
column: 1,
}, {
code: "a { color\r\n: pink; }",
description: "CRLF before",
message: messages.rejectedBefore(),
line: 1,
column: 11,
} ],
})<|fim▁end|> | }, {
code: "a { color: pink }",
description: "no space before and space after", |
<|file_name|>invert_slip_fixed_azimuth.py<|end_file_name|><|fim▁begin|>"""
Restores the uplifted horizons while restricting slip along the fault to the
specified azimuth.
"""
import numpy as np
import matplotlib.pyplot as plt
from fault_kinematics.homogeneous_simple_shear import invert_slip
import data
import basic
def main():
azimuth = data.fault_strike + 90
# azimuth = 304 # Plate motion from Loveless & Meade
def func(*args, **kwargs):
return forced_direction_inversion(azimuth, *args, **kwargs)
slips, heaves, variances, planar_variances = basic.restore_horizons(func)
basic.plot_restored_locations(slips, heaves)
plt.show()<|fim▁hole|> """Forces the inversion to only consider slip along the given azimuth."""
azimuth = np.radians(90 - azimuth)
dx, dy = np.cos(azimuth), np.sin(azimuth)
direc = [[dx, dy], [dx, dy]]
return invert_slip(fault, xyz, alpha, direc=direc, **kwargs)
if __name__ == '__main__':
main()<|fim▁end|> |
def forced_direction_inversion(azimuth, fault, xyz, alpha, **kwargs): |
<|file_name|>test_multicast.rs<|end_file_name|><|fim▁begin|>use mio::*;
use mio::deprecated::{EventLoop, Handler};
use mio::udp::*;
use bytes::{Buf, MutBuf, RingBuf, SliceBuf};
use std::str;
use std::net::{SocketAddr, Ipv4Addr};
use localhost;
const LISTENER: Token = Token(0);
const SENDER: Token = Token(1);
pub struct UdpHandler {
tx: UdpSocket,<|fim▁hole|> msg: &'static str,
buf: SliceBuf<'static>,
rx_buf: RingBuf
}
impl UdpHandler {
fn new(tx: UdpSocket, rx: UdpSocket, msg: &'static str) -> UdpHandler {
UdpHandler {
tx: tx,
rx: rx,
msg: msg,
buf: SliceBuf::wrap(msg.as_bytes()),
rx_buf: RingBuf::new(1024)
}
}
fn handle_read(&mut self, event_loop: &mut EventLoop<UdpHandler>, token: Token, _: Ready) {
match token {
LISTENER => {
debug!("We are receiving a datagram now...");
match unsafe { self.rx.recv_from(self.rx_buf.mut_bytes()) } {
Ok(Some((cnt, SocketAddr::V4(addr)))) => {
unsafe { MutBuf::advance(&mut self.rx_buf, cnt); }
assert_eq!(*addr.ip(), Ipv4Addr::new(127, 0, 0, 1));
}
_ => panic!("unexpected result"),
}
assert!(str::from_utf8(self.rx_buf.bytes()).unwrap() == self.msg);
event_loop.shutdown();
},
_ => ()
}
}
fn handle_write(&mut self, _: &mut EventLoop<UdpHandler>, token: Token, _: Ready) {
match token {
SENDER => {
let addr = self.rx.local_addr().unwrap();
let cnt = self.tx.send_to(self.buf.bytes(), &addr)
.unwrap().unwrap();
self.buf.advance(cnt);
},
_ => ()
}
}
}
impl Handler for UdpHandler {
type Timeout = usize;
type Message = ();
fn ready(&mut self, event_loop: &mut EventLoop<UdpHandler>, token: Token, events: Ready) {
if events.is_readable() {
self.handle_read(event_loop, token, events);
}
if events.is_writable() {
self.handle_write(event_loop, token, events);
}
}
}
#[test]
pub fn test_multicast() {
debug!("Starting TEST_UDP_CONNECTIONLESS");
let mut event_loop = EventLoop::new().unwrap();
let addr = localhost();
let any = "0.0.0.0:0".parse().unwrap();
let tx = UdpSocket::bind(&any).unwrap();
let rx = UdpSocket::bind(&addr).unwrap();
info!("Joining group 227.1.1.100");
let any = "0.0.0.0".parse().unwrap();
rx.join_multicast_v4(&"227.1.1.100".parse().unwrap(), &any).unwrap();
info!("Joining group 227.1.1.101");
rx.join_multicast_v4(&"227.1.1.101".parse().unwrap(), &any).unwrap();
info!("Registering SENDER");
event_loop.register(&tx, SENDER, Ready::writable(), PollOpt::edge()).unwrap();
info!("Registering LISTENER");
event_loop.register(&rx, LISTENER, Ready::readable(), PollOpt::edge()).unwrap();
info!("Starting event loop to test with...");
event_loop.run(&mut UdpHandler::new(tx, rx, "hello world")).unwrap();
}<|fim▁end|> | rx: UdpSocket, |
<|file_name|>dockerclient.go<|end_file_name|><|fim▁begin|>package dockerclient
import (
"bytes"
"crypto/tls"
"encoding/json"
"errors"
"fmt"
"io"
"io/ioutil"
"net/http"
"net/url"
"strconv"
"strings"
"sync/atomic"
"time"
)
const (
APIVersion = "v1.15"
)
var (
ErrNotFound = errors.New("Not found")
defaultTimeout = 30 * time.Second
)
type DockerClient struct {
URL *url.URL
HTTPClient *http.Client
TLSConfig *tls.Config
monitorEvents int32
}
type Error struct {
StatusCode int
Status string
msg string
}
func (e Error) Error() string {
return fmt.Sprintf("%s: %s", e.Status, e.msg)
}
func NewDockerClient(daemonUrl string, tlsConfig *tls.Config) (*DockerClient, error) {
return NewDockerClientTimeout(daemonUrl, tlsConfig, time.Duration(defaultTimeout))
}
func NewDockerClientTimeout(daemonUrl string, tlsConfig *tls.Config, timeout time.Duration) (*DockerClient, error) {
u, err := url.Parse(daemonUrl)
if err != nil {
return nil, err
}
if u.Scheme == "" || u.Scheme == "tcp" {
if tlsConfig == nil {
u.Scheme = "http"
} else {
u.Scheme = "https"
}
}
httpClient := newHTTPClient(u, tlsConfig, timeout)
return &DockerClient{u, httpClient, tlsConfig, 0}, nil
}
func (client *DockerClient) doRequest(method string, path string, body []byte, headers map[string]string) ([]byte, error) {
b := bytes.NewBuffer(body)
req, err := http.NewRequest(method, client.URL.String()+path, b)
if err != nil {
return nil, err
}
req.Header.Add("Content-Type", "application/json")
if headers != nil {
for header, value := range headers {
req.Header.Add(header, value)
}
}
resp, err := client.HTTPClient.Do(req)
if err != nil {
if !strings.Contains(err.Error(), "connection refused") && client.TLSConfig == nil {
return nil, fmt.Errorf("%v. Are you trying to connect to a TLS-enabled daemon without TLS?", err)
}
return nil, err
}
defer resp.Body.Close()
data, err := ioutil.ReadAll(resp.Body)
if err != nil {
return nil, err
}
if resp.StatusCode == 404 {
return nil, ErrNotFound
}
if resp.StatusCode >= 400 {
return nil, Error{StatusCode: resp.StatusCode, Status: resp.Status, msg: string(data)}
}
return data, nil
}
func (client *DockerClient) Info() (*Info, error) {
uri := fmt.Sprintf("/%s/info", APIVersion)
data, err := client.doRequest("GET", uri, nil, nil)
if err != nil {
return nil, err
}
ret := &Info{}
err = json.Unmarshal(data, &ret)
if err != nil {
return nil, err
}
return ret, nil
}
func (client *DockerClient) ListContainers(all bool, size bool, filters string) ([]Container, error) {
argAll := 0
if all == true {
argAll = 1
}
showSize := 0
if size == true {
showSize = 1
}
uri := fmt.Sprintf("/%s/containers/json?all=%d&size=%d", APIVersion, argAll, showSize)
if filters != "" {
uri += "&filters=" + filters
}
data, err := client.doRequest("GET", uri, nil, nil)
if err != nil {
return nil, err
}
ret := []Container{}
err = json.Unmarshal(data, &ret)
if err != nil {
return nil, err
}
return ret, nil
}
func (client *DockerClient) InspectContainer(id string) (*ContainerInfo, error) {
uri := fmt.Sprintf("/%s/containers/%s/json", APIVersion, id)
data, err := client.doRequest("GET", uri, nil, nil)
if err != nil {
return nil, err
}
info := &ContainerInfo{}
err = json.Unmarshal(data, info)
if err != nil {
return nil, err
}
return info, nil
}
func (client *DockerClient) CreateContainer(config *ContainerConfig, name string) (string, error) {
data, err := json.Marshal(config)
if err != nil {
return "", err
}
uri := fmt.Sprintf("/%s/containers/create", APIVersion)
if name != "" {
v := url.Values{}
v.Set("name", name)
uri = fmt.Sprintf("%s?%s", uri, v.Encode())
}
data, err = client.doRequest("POST", uri, data, nil)
if err != nil {
return "", err
}
result := &RespContainersCreate{}
err = json.Unmarshal(data, result)
if err != nil {
return "", err
}
return result.Id, nil
}
func (client *DockerClient) ContainerLogs(id string, options *LogOptions) (io.ReadCloser, error) {
v := url.Values{}
v.Add("follow", strconv.FormatBool(options.Follow))
v.Add("stdout", strconv.FormatBool(options.Stdout))
v.Add("stderr", strconv.FormatBool(options.Stderr))
v.Add("timestamps", strconv.FormatBool(options.Timestamps))
if options.Tail > 0 {
v.Add("tail", strconv.FormatInt(options.Tail, 10))
}
uri := fmt.Sprintf("/%s/containers/%s/logs?%s", APIVersion, id, v.Encode())
req, err := http.NewRequest("GET", client.URL.String()+uri, nil)
if err != nil {
return nil, err
}
req.Header.Add("Content-Type", "application/json")
resp, err := client.HTTPClient.Do(req)
if err != nil {
return nil, err
}
return resp.Body, nil
}
func (client *DockerClient) StartContainer(id string, config *HostConfig) error {
data, err := json.Marshal(config)
if err != nil {
return err
}
uri := fmt.Sprintf("/%s/containers/%s/start", APIVersion, id)
_, err = client.doRequest("POST", uri, data, nil)
if err != nil {
return err
}
return nil
}
func (client *DockerClient) StopContainer(id string, timeout int) error {
uri := fmt.Sprintf("/%s/containers/%s/stop?t=%d", APIVersion, id, timeout)
_, err := client.doRequest("POST", uri, nil, nil)
if err != nil {
return err
}
return nil
}
func (client *DockerClient) RestartContainer(id string, timeout int) error {
uri := fmt.Sprintf("/%s/containers/%s/restart?t=%d", APIVersion, id, timeout)
_, err := client.doRequest("POST", uri, nil, nil)
if err != nil {
return err
}
return nil
}
<|fim▁hole|> uri := fmt.Sprintf("/%s/containers/%s/kill?signal=%s", APIVersion, id, signal)
_, err := client.doRequest("POST", uri, nil, nil)
if err != nil {
return err
}
return nil
}
func (client *DockerClient) StartMonitorEvents(cb Callback, ec chan error, args ...interface{}) {
atomic.StoreInt32(&client.monitorEvents, 1)
go client.getEvents(cb, ec, args...)
}
func (client *DockerClient) getEvents(cb Callback, ec chan error, args ...interface{}) {
uri := fmt.Sprintf("%s/%s/events", client.URL.String(), APIVersion)
resp, err := client.HTTPClient.Get(uri)
if err != nil {
ec <- err
return
}
defer resp.Body.Close()
dec := json.NewDecoder(resp.Body)
for atomic.LoadInt32(&client.monitorEvents) > 0 {
var event *Event
if err := dec.Decode(&event); err != nil {
ec <- err
return
}
cb(event, ec, args...)
}
}
func (client *DockerClient) StopAllMonitorEvents() {
atomic.StoreInt32(&client.monitorEvents, 0)
}
func (client *DockerClient) Version() (*Version, error) {
uri := fmt.Sprintf("/%s/version", APIVersion)
data, err := client.doRequest("GET", uri, nil, nil)
if err != nil {
return nil, err
}
version := &Version{}
err = json.Unmarshal(data, version)
if err != nil {
return nil, err
}
return version, nil
}
func (client *DockerClient) PullImage(name string, auth *AuthConfig) error {
v := url.Values{}
v.Set("fromImage", name)
uri := fmt.Sprintf("/%s/images/create?%s", APIVersion, v.Encode())
req, err := http.NewRequest("POST", client.URL.String()+uri, nil)
if auth != nil {
req.Header.Add("X-Registry-Auth", auth.encode())
}
resp, err := client.HTTPClient.Do(req)
if err != nil {
return err
}
defer resp.Body.Close()
var finalObj map[string]interface{}
for decoder := json.NewDecoder(resp.Body); err == nil; err = decoder.Decode(&finalObj) {
}
if err != io.EOF {
return err
}
if err, ok := finalObj["error"]; ok {
return fmt.Errorf("%v", err)
}
return nil
}
func (client *DockerClient) RemoveContainer(id string, force, volumes bool) error {
argForce := 0
argVolumes := 0
if force == true {
argForce = 1
}
if volumes == true {
argVolumes = 1
}
args := fmt.Sprintf("force=%d&v=%d", argForce, argVolumes)
uri := fmt.Sprintf("/%s/containers/%s?%s", APIVersion, id, args)
_, err := client.doRequest("DELETE", uri, nil, nil)
return err
}
func (client *DockerClient) ListImages() ([]*Image, error) {
uri := fmt.Sprintf("/%s/images/json", APIVersion)
data, err := client.doRequest("GET", uri, nil, nil)
if err != nil {
return nil, err
}
var images []*Image
if err := json.Unmarshal(data, &images); err != nil {
return nil, err
}
return images, nil
}
func (client *DockerClient) RemoveImage(name string) error {
uri := fmt.Sprintf("/%s/images/%s", APIVersion, name)
_, err := client.doRequest("DELETE", uri, nil, nil)
return err
}
func (client *DockerClient) PauseContainer(id string) error {
uri := fmt.Sprintf("/%s/containers/%s/pause", APIVersion, id)
_, err := client.doRequest("POST", uri, nil, nil)
if err != nil {
return err
}
return nil
}
func (client *DockerClient) UnpauseContainer(id string) error {
uri := fmt.Sprintf("/%s/containers/%s/unpause", APIVersion, id)
_, err := client.doRequest("POST", uri, nil, nil)
if err != nil {
return err
}
return nil
}
func (client *DockerClient) Exec(config *ExecConfig) (string, error) {
data, err := json.Marshal(config)
if err != nil {
return "", err
}
uri := fmt.Sprintf("/containers/%s/exec", config.Container)
resp, err := client.doRequest("POST", uri, data, nil)
if err != nil {
return "", err
}
var createExecResp struct {
Id string
}
if err = json.Unmarshal(resp, &createExecResp); err != nil {
return "", err
}
uri = fmt.Sprintf("/exec/%s/start", createExecResp.Id)
resp, err = client.doRequest("POST", uri, data, nil)
if err != nil {
return "", err
}
return createExecResp.Id, nil
}<|fim▁end|> | func (client *DockerClient) KillContainer(id, signal string) error { |
<|file_name|>markers.py<|end_file_name|><|fim▁begin|># The plot server must be running
# Go to http://localhost:5006/bokeh to view this plot
from numpy.random import random
from bokeh.plotting import *
def mscatter(p, x, y, typestr):
p.scatter(x, y, marker=typestr,<|fim▁hole|> line_color="#6666ee", fill_color="#ee6666", fill_alpha=0.5, size=12)
def mtext(p, x, y, textstr):
p.text(x, y, text=textstr,
text_color="#449944", text_align="center", text_font_size="10pt")
output_server("markers")
p = figure(title="markers.py example")
N = 10
mscatter(p, random(N)+2, random(N)+1, "circle")
mscatter(p, random(N)+4, random(N)+1, "square")
mscatter(p, random(N)+6, random(N)+1, "triangle")
mscatter(p, random(N)+8, random(N)+1, "asterisk")
mscatter(p, random(N)+2, random(N)+4, "circle_x")
mscatter(p, random(N)+4, random(N)+4, "square_x")
mscatter(p, random(N)+6, random(N)+4, "inverted_triangle")
mscatter(p, random(N)+8, random(N)+4, "x")
mscatter(p, random(N)+2, random(N)+7, "circle_cross")
mscatter(p, random(N)+4, random(N)+7, "square_cross")
mscatter(p, random(N)+6, random(N)+7, "diamond")
mscatter(p, random(N)+8, random(N)+7, "cross")
mtext(p, [2.5], [0.5], "circle / o")
mtext(p, [4.5], [0.5], "square")
mtext(p, [6.5], [0.5], "triangle")
mtext(p, [8.5], [0.5], "asterisk / *")
mtext(p, [2.5], [3.5], "circle_x / ox")
mtext(p, [4.5], [3.5], "square_x")
mtext(p, [6.5], [3.5], "inverted_triangle")
mtext(p, [8.5], [3.5], "x")
mtext(p, [2.5], [6.5], "circle_cross / o+")
mtext(p, [4.5], [6.5], "square_cross")
mtext(p, [6.5], [6.5], "diamond")
mtext(p, [8.5], [6.5], "cross / +")
show(p) # open a browser<|fim▁end|> | |
<|file_name|>test_adapter.py<|end_file_name|><|fim▁begin|>import numpy as np
from elephas.mllib.adapter import *
from pyspark.mllib.linalg import Matrices, Vectors
def test_to_matrix():
x = np.ones((4, 2))<|fim▁hole|>
def test_from_matrix():
mat = Matrices.dense(1, 2, [13, 37])
x = from_matrix(mat)
assert x.shape == (1, 2)
def test_from_vector():
x = np.ones((3,))
vector = to_vector(x)
assert len(vector) == 3
def test_to_vector():
vector = Vectors.dense([4, 2])
x = from_vector(vector)
assert x.shape == (2,)<|fim▁end|> | mat = to_matrix(x)
assert mat.numRows == 4
assert mat.numCols == 2 |
<|file_name|>notify.py<|end_file_name|><|fim▁begin|>"""Support for LaMetric notifications."""
import logging
from requests.exceptions import ConnectionError as RequestsConnectionError
import voluptuous as vol
from homeassistant.components.notify import (
ATTR_DATA, ATTR_TARGET, PLATFORM_SCHEMA, BaseNotificationService)
from homeassistant.const import CONF_ICON
import homeassistant.helpers.config_validation as cv
from . import DOMAIN as LAMETRIC_DOMAIN
REQUIREMENTS = ['lmnotify==0.0.4']
_LOGGER = logging.getLogger(__name__)
AVAILABLE_PRIORITIES = ['info', 'warning', 'critical']
CONF_CYCLES = 'cycles'
CONF_LIFETIME = 'lifetime'
CONF_PRIORITY = 'priority'
DEPENDENCIES = ['lametric']
PLATFORM_SCHEMA = PLATFORM_SCHEMA.extend({
vol.Optional(CONF_ICON, default='a7956'): cv.string,
vol.Optional(CONF_LIFETIME, default=10): cv.positive_int,
vol.Optional(CONF_CYCLES, default=1): cv.positive_int,
vol.Optional(CONF_PRIORITY, default='warning'):
vol.In(AVAILABLE_PRIORITIES),
})
def get_service(hass, config, discovery_info=None):
"""Get the LaMetric notification service."""
hlmn = hass.data.get(LAMETRIC_DOMAIN)
return LaMetricNotificationService(
hlmn, config[CONF_ICON], config[CONF_LIFETIME] * 1000,
config[CONF_CYCLES], config[CONF_PRIORITY])
class LaMetricNotificationService(BaseNotificationService):
"""Implement the notification service for LaMetric."""
def __init__(self, hasslametricmanager, icon, lifetime, cycles, priority):
"""Initialize the service."""
self.hasslametricmanager = hasslametricmanager
self._icon = icon
self._lifetime = lifetime
self._cycles = cycles
self._priority = priority
self._devices = []
def send_message(self, message="", **kwargs):
"""Send a message to some LaMetric device."""
from lmnotify import SimpleFrame, Sound, Model
from oauthlib.oauth2 import TokenExpiredError
targets = kwargs.get(ATTR_TARGET)
data = kwargs.get(ATTR_DATA)
_LOGGER.debug("Targets/Data: %s/%s", targets, data)
icon = self._icon
cycles = self._cycles
sound = None
priority = self._priority
# Additional data?
if data is not None:
if "icon" in data:
icon = data["icon"]
if "sound" in data:
try:
sound = Sound(category="notifications",
sound_id=data["sound"])
_LOGGER.debug("Adding notification sound %s",
data["sound"])
except AssertionError:
_LOGGER.error("Sound ID %s unknown, ignoring",
data["sound"])
if "cycles" in data:
cycles = int(data['cycles'])
if "priority" in data:
if data['priority'] in AVAILABLE_PRIORITIES:
priority = data['priority']
else:
_LOGGER.warning("Priority %s invalid, using default %s",
data['priority'], priority)
text_frame = SimpleFrame(icon, message)
_LOGGER.debug("Icon/Message/Cycles/Lifetime: %s, %s, %d, %d",
icon, message, self._cycles, self._lifetime)
frames = [text_frame]
model = Model(frames=frames, cycles=cycles, sound=sound)
lmn = self.hasslametricmanager.manager
try:
self._devices = lmn.get_devices()
except TokenExpiredError:
_LOGGER.debug("Token expired, fetching new token")
lmn.get_token()
self._devices = lmn.get_devices()
except RequestsConnectionError:
_LOGGER.warning("Problem connecting to LaMetric, "
"using cached devices instead")
for dev in self._devices:
if targets is None or dev["name"] in targets:<|fim▁hole|> priority=priority)
_LOGGER.debug("Sent notification to LaMetric %s",
dev["name"])
except OSError:
_LOGGER.warning("Cannot connect to LaMetric %s",
dev["name"])<|fim▁end|> | try:
lmn.set_device(dev)
lmn.send_notification(model, lifetime=self._lifetime, |
<|file_name|>0001_initial.py<|end_file_name|><|fim▁begin|># -*- coding: utf-8 -*-
import datetime
from south.db import db
from south.v2 import SchemaMigration
from django.db import models
class Migration(SchemaMigration):
def forwards(self, orm):
# Adding model 'Post'
db.create_table(u'blog_post', (
(u'id', self.gf('django.db.models.fields.AutoField')(primary_key=True)),
('title', self.gf('django.db.models.fields.CharField')(max_length=60)),
('body', self.gf('django.db.models.fields.TextField')()),
('tags', self.gf('django.db.models.fields.TextField')()),
('created', self.gf('django.db.models.fields.DateTimeField')(auto_now_add=True, blank=True)),
))
db.send_create_signal(u'blog', ['Post'])
<|fim▁hole|>
def backwards(self, orm):
# Deleting model 'Post'
db.delete_table(u'blog_post')
models = {
u'blog.post': {
'Meta': {'object_name': 'Post'},
'body': ('django.db.models.fields.TextField', [], {}),
'created': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'tags': ('django.db.models.fields.TextField', [], {}),
'title': ('django.db.models.fields.CharField', [], {'max_length': '60'})
}
}
complete_apps = ['blog']<|fim▁end|> | |
<|file_name|>demo.js<|end_file_name|><|fim▁begin|>FusionCharts.ready(function () {
var salesChart = new FusionCharts({
type: 'bulb',
renderAt: 'chart-container',
id: 'myChart',
width: '200',
height: '200',
dataFormat: 'json',
dataSource: {
"chart": {
"caption": "Temperature status of deep freezers",
"upperlimit": "-5",
"lowerlimit": "-60",
"captionPadding":"30",
"showshadow":"0",
"showvalue": "1",
"useColorNameAsValue":"1",
"placeValuesInside":"1",
"valueFontSize": "16",
//Cosmetics
"baseFontColor" : "#333333",
"baseFont" : "Helvetica Neue,Arial",
"captionFontSize" : "14",
"showborder": "0",
"bgcolor": "#FFFFFF",
"toolTipColor" : "#ffffff",
"toolTipBorderThickness" : "0",
"toolTipBgColor" : "#000000",
"toolTipBgAlpha" : "80",
"toolTipBorderRadius" : "2",
"toolTipPadding" : "5",
},
"colorrange": {
"color": [
{
"minvalue": "-60",
"maxvalue": "-35",
"label": "Problem detected!",
"code": "#ff0000"
},
{
"minvalue": "-35",
"maxvalue": "-25",
"label": "Alert!",
"code": "#ff9900"
},
{
"minvalue": "-25",
"maxvalue": "-5",
"label": "All well!",
"code": "#00ff00"
}<|fim▁hole|> ]
},
"value": "-5"
},
"events":{
"rendered": function(evtObj, argObj){
setInterval(function () {
var num = (Math.floor(Math.random() * 55)*-1) -5;
FusionCharts("myChart").feedData("&value=" + num);
}, 10000);
}
}
});
salesChart.render();
});<|fim▁end|> | |
<|file_name|>bounds.py<|end_file_name|><|fim▁begin|>import sys
from PyFBA import lp, log_and_message
def reaction_bounds(reactions, reactions_with_upsr, media, lower=-1000.0, mid=0.0, upper=1000.0, verbose=False):
"""
Set the bounds for each reaction. We set the reactions to run between
either lower/mid, mid/upper, or lower/upper depending on whether the
reaction runs <=, =>, or <=> respectively.
:param reactions: The dict of all reactions we know about
:type reactions: dict of metabolism.Reaction
:param reactions_with_upsr: The sorted list of reactions to run
:type reactions_with_upsr: set
:param media: The media compounds
:type media: set
:param lower: The default lower bound
:type lower: float
:param mid: The default mid value (typically 0)
:type mid: float
:param upper: The default upper bound
:type upper: float
:return: A dict of the reaction ID and the tuple of bounds
:rtype: dict
"""
rbvals = {}
media_uptake_secretion_count = 0
other_uptake_secretion_count = 0
for r in reactions_with_upsr:
if r == 'BIOMASS_EQN':
rbvals[r] = (mid, upper)
continue
# if we already know the bounds, eg from an SBML file or from our uptake/secretion reactions
if reactions[r].lower_bound != None and reactions[r].upper_bound != None:
rbvals[r] = (reactions[r].lower_bound, reactions[r].upper_bound)
continue<|fim▁hole|> else:
sys.stderr.write("Did not find {} in reactions\n".format(r))
direction = "="
"""
RAE 16/6/21
We no longer use this block to check for media components. Instead, we us the uptake_and_secretion_reactions
in external_reactions.py to do so.
We assume that if you provide uptake_and_secretion_reactions you have already culled them for the media, though
perhaps we should add a test for that.
"""
if False and (reactions[r].is_uptake_secretion or reactions[r].is_transport or reactions[r].is_input_reaction()):
in_media = False
override = False # if we have external compounds that are not in the media, we don't want to run this as a media reaction
for c in reactions[r].left_compounds:
if c.location == 'e':
if c in media:
in_media = True
else:
override = True
# in this case, we have some external compounds that we should not import.
# for example, H+ is used to translocate things
if override:
in_media = False
if in_media:
# This is what I think it should be:
rbvals[r] = (lower, upper)
#rbvals[r] = (0.0, upper)
media_uptake_secretion_count += 1
else:
rbvals[r] = (0.0, upper)
#rbvals[r] = (lower, upper)
other_uptake_secretion_count += 1
continue
if direction == "=":
# This is what I think it should be:
rbvals[r] = (lower, upper)
# rbvals[r] = (mid, upper)
elif direction == ">":
# This is what I think it should be:
rbvals[r] = (mid, upper)
# rbvals[r] = (lower, upper)
elif direction == "<":
# This is what I think it should be:
# rbvals[r] = (lower, mid)
rbvals[r] = (lower, upper)
else:
sys.stderr.write("DO NOT UNDERSTAND DIRECTION " + direction + " for " + r + "\n")
rbvals[r] = (mid, upper)
if verbose:
sys.stderr.write("In parsing the bounds we found {} media uptake ".format(media_uptake_secretion_count) +
"and secretion reactions and {} other u/s reactions\n".format(other_uptake_secretion_count))
rbounds = [rbvals[r] for r in reactions_with_upsr]
for r in reactions_with_upsr:
if r in reactions:
reactions[r].lower_bound, reactions[r].upper_bound = rbvals[r]
lp.col_bounds(rbounds)
return rbvals
def compound_bounds(cp, lower=0, upper=0):
"""
Impose constraints on the compounds. These constraints limit what
the variation of each compound can be and is essentially 0 for
most compounds except those that are in the media or otherwise
external.
This is the zero flux vector.
Parameters:
cp: the list of compound ids
lower: the default lower value
upper: the default upper value
"""
cbounds = [(lower, upper) for c in cp]
cbvals = {c: (lower, upper) for c in cp}
lp.row_bounds(cbounds)
return cbvals<|fim▁end|> |
if r in reactions:
direction = reactions[r].direction |
<|file_name|>testEditDistance.js<|end_file_name|><|fim▁begin|>/**
* Copyright (c) 2013-present, Facebook, Inc.
* All rights reserved.
*
* This source code is licensed under the BSD-style license found in the
* LICENSE file in the root directory of this source tree. An additional grant
* of patent rights can be found in the PATENTS file in the same directory.
*
* @providesModule testEditDistance
* @flow
* @typechecks
*
*/
/* eslint-disable no-shadow */
/**
* @internal
*
* Determines whether the edit distance between two strings is at or below the
* specified threshold distance, using the approach described by Ukkonen (1985)
* in "Algorithms for Approximate String Matching"[0] and then improved upon by
* Berghel and Roach (1996) in "An Extension of Ukkonen's Enhanced Dynamic
* Programming ASM Algorithm"[1].
*
* Given two strings of length `m` and `n` respectively, and threshold `t`,
* uses `O(t*min(m,n))` time and `O(min(t,m,n))` space.
*
* @see [0]: http://www.cs.helsinki.fi/u/ukkonen/InfCont85.PDF
* @see [1]: http://berghel.net/publications/asm/asm.pdf
*/
function testEditDistance(a: string, b: string, threshold: number): boolean {
// Ensure `b` is at least as long as `a`, swapping if necessary.
let m = a.length;
let n = b.length;
if (n < m) {
[n, m] = [m, n];
[b, a] = [a, b];
}
if (!m) {
return n <= threshold;
}
const zeroK = n;
const maxK = zeroK * 2 + 1;
const fkp = Array.from(Array(maxK), () => []);
for (let k = -zeroK; k < 0; k++) {
const p = -k - 1;
fkp[k + zeroK][p + 1] = -k - 1;
fkp[k + zeroK][p] = -Infinity;
}
fkp[zeroK][0] = -1;
for (let k = 1; k <= zeroK; k++) {
const p = k - 1;
fkp[k + zeroK][p + 1] = -1;
fkp[k + zeroK][p] = -Infinity;
}
// This loop is the alternative form suggested in the afterword of Berghel &
// Roach.
let p = n - m - 1;
do {
if (p > threshold) {
return false;
}
p++;
for (let i = Math.floor((p - (n - m)) / 2); i >= 1; i--) {
f(n - m + i, p - i);
}
for (let i = Math.floor((n - m + p) / 2); i >= 1; i--) {
f(n - m - i, p - i);
}
f(n - m, p);
} while (fkp[n - m + zeroK][p] !== m);
return true;
function f(k, p) {
let t = fkp[k + zeroK][p] + 1;
let t2 = t;<|fim▁hole|> // Check for transposed characters.
if (a[t - 1] === b[k + t] && a[t] === b[k + t - 1]) {
t2 = t + 1;
}
t = Math.max(
t,
fkp[k - 1 + zeroK][p],
fkp[k + 1 + zeroK][p] + 1,
t2
);
while (a[t] === b[t + k] && t < Math.min(m, n - k)) {
t++;
}
fkp[k + zeroK][p + 1] = t;
}
}
module.exports = testEditDistance;<|fim▁end|> | |
<|file_name|>borrowck-field-sensitivity.rs<|end_file_name|><|fim▁begin|>// Copyright 2014 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.<|fim▁hole|>
#![feature(box_syntax)]
struct A { a: int, b: Box<int> }
fn deref_after_move() {
let x = A { a: 1, b: box 2 };
drop(x.b);
drop(*x.b); //~ ERROR use of moved value: `*x.b`
}
fn deref_after_fu_move() {
let x = A { a: 1, b: box 2 };
let y = A { a: 3, .. x };
drop(*x.b); //~ ERROR use of moved value: `*x.b`
}
fn borrow_after_move() {
let x = A { a: 1, b: box 2 };
drop(x.b);
let p = &x.b; //~ ERROR use of moved value: `x.b`
drop(**p);
}
fn borrow_after_fu_move() {
let x = A { a: 1, b: box 2 };
let _y = A { a: 3, .. x };
let p = &x.b; //~ ERROR use of moved value: `x.b`
drop(**p);
}
fn move_after_borrow() {
let x = A { a: 1, b: box 2 };
let p = &x.b;
drop(x.b); //~ ERROR cannot move out of `x.b` because it is borrowed
drop(**p);
}
fn fu_move_after_borrow() {
let x = A { a: 1, b: box 2 };
let p = &x.b;
let _y = A { a: 3, .. x }; //~ ERROR cannot move out of `x.b` because it is borrowed
drop(**p);
}
fn mut_borrow_after_mut_borrow() {
let mut x = A { a: 1, b: box 2 };
let p = &mut x.a;
let q = &mut x.a; //~ ERROR cannot borrow `x.a` as mutable more than once at a time
drop(*p);
drop(*q);
}
fn move_after_move() {
let x = A { a: 1, b: box 2 };
drop(x.b);
drop(x.b); //~ ERROR use of moved value: `x.b`
}
fn move_after_fu_move() {
let x = A { a: 1, b: box 2 };
let _y = A { a: 3, .. x };
drop(x.b); //~ ERROR use of moved value: `x.b`
}
fn fu_move_after_move() {
let x = A { a: 1, b: box 2 };
drop(x.b);
let _z = A { a: 3, .. x }; //~ ERROR use of moved value: `x.b`
}
fn fu_move_after_fu_move() {
let x = A { a: 1, b: box 2 };
let _y = A { a: 3, .. x };
let _z = A { a: 4, .. x }; //~ ERROR use of moved value: `x.b`
}
// The following functions aren't yet accepted, but they should be.
fn copy_after_field_assign_after_uninit() {
let mut x: A;
x.a = 1;
drop(x.a); //~ ERROR use of possibly uninitialized variable: `x.a`
}
fn borrow_after_field_assign_after_uninit() {
let mut x: A;
x.a = 1;
let p = &x.a; //~ ERROR use of possibly uninitialized variable: `x.a`
drop(*p);
}
fn move_after_field_assign_after_uninit() {
let mut x: A;
x.b = box 1;
drop(x.b); //~ ERROR use of possibly uninitialized variable: `x.b`
}
fn main() {
deref_after_move();
deref_after_fu_move();
borrow_after_move();
borrow_after_fu_move();
move_after_borrow();
fu_move_after_borrow();
mut_borrow_after_mut_borrow();
move_after_move();
move_after_fu_move();
fu_move_after_move();
fu_move_after_fu_move();
copy_after_field_assign_after_uninit();
borrow_after_field_assign_after_uninit();
move_after_field_assign_after_uninit();
}<|fim▁end|> | |
<|file_name|>16.d.ts<|end_file_name|><|fim▁begin|><|fim▁hole|>export { Moon16 as default } from "../../";<|fim▁end|> | |
<|file_name|>replace.py<|end_file_name|><|fim▁begin|>#This script is for produsing a new list of sites extracted from alexa top site list
import re
prefix = 'http://'
#suffix = '</td><td></td></tr><tr><td>waitForPageToLoad</td><td></td><td>3000</td></tr>'<|fim▁hole|> line=found
newlines.append(line.replace(',', ''))
with open('urls.txt', 'w') as f:
for line in newlines:
#f.write('%s%s%s\n' % (prefix, line.rstrip('\n'), suffix))
f.write('%s%s\n' % (prefix, line.rstrip('\n')))<|fim▁end|> | with open('top100_alexa.txt','r') as f:
newlines = []
for line in f.readlines():
found=re.sub(r'\d+', '', line) |
<|file_name|>BinaryName.java<|end_file_name|><|fim▁begin|>package com.thomasjensen.checkstyle.addons.checks;
/*
* Checkstyle-Addons - Additional Checkstyle checks
* Copyright (c) 2015-2020, the Checkstyle Addons contributors
*
* This program is free software: you can redistribute it and/or modify it under the
* terms of the GNU General Public License, version 3, as published by the Free
* Software Foundation.
*
* This program is distributed in the hope that it will be useful, but WITHOUT ANY
* WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR A
* PARTICULAR PURPOSE. See the GNU General Public License for more details.
*
* You should have received a copy of the GNU General Public License along with this
* program. If not, see <http://www.gnu.org/licenses/>.
*/
import java.util.ArrayList;
import java.util.Collection;
import java.util.Collections;
import java.util.Iterator;
import java.util.List;
import javax.annotation.CheckForNull;
import javax.annotation.Nonnull;
import javax.annotation.Nullable;
import net.jcip.annotations.Immutable;
/**
* Represents a Java binary class name for reference types, in the form of its fragments. This is the only way to tell
* the difference between a class called <code>A$B</code> and a class called <code>A</code> that has an inner class
* <code>B</code>.
*/
@Immutable
public final class BinaryName
{
private final String pkg;
private final List<String> cls;
/**
* Constructor.
*
* @param pPkg package name
* @param pOuterCls outer class simple name
* @param pInnerCls inner class simple names in descending order of their nesting
*/
public BinaryName(@Nullable final String pPkg, @Nonnull final String pOuterCls, @Nullable final String... pInnerCls)
{
pkg = pPkg;
List<String> nameList = new ArrayList<>();
if (pOuterCls != null) {
nameList.add(pOuterCls);
}
else {
throw new IllegalArgumentException("pOuterCls was null");
}
if (pInnerCls != null) {
for (final String inner : pInnerCls) {
nameList.add(inner);
}
}
cls = Collections.unmodifiableList(nameList);
}
/**
* Constructor.
*
* @param pPkg package name
* @param pClsNames class simple names in descending order of their nesting
*/
public BinaryName(@Nullable final String pPkg, @Nonnull final Collection<String> pClsNames)
{
pkg = pPkg;
if (pClsNames.size() == 0) {
throw new IllegalArgumentException("pClsNames is empty");
}
cls = Collections.unmodifiableList(new ArrayList<>(pClsNames));
}
@Override
public String toString()
{
StringBuilder sb = new StringBuilder();
if (pkg != null) {
sb.append(pkg);
sb.append('.');
}
for (final Iterator<String> iter = cls.iterator(); iter.hasNext();) {
sb.append(iter.next());
if (iter.hasNext()) {<|fim▁hole|> }
return sb.toString();
}
@Override
public boolean equals(final Object pOther)
{
if (this == pOther) {
return true;
}
if (pOther == null || getClass() != pOther.getClass()) {
return false;
}
BinaryName other = (BinaryName) pOther;
if (pkg != null ? !pkg.equals(other.pkg) : other.pkg != null) {
return false;
}
if (!cls.equals(other.cls)) {
return false;
}
return true;
}
@Override
public int hashCode()
{
int result = pkg != null ? pkg.hashCode() : 0;
result = 31 * result + cls.hashCode();
return result;
}
public String getPackage()
{
return pkg;
}
/**
* Getter.
*
* @return the simple name of the outer class (even if this binary name represents an inner class)
*/
public String getOuterSimpleName()
{
return cls.get(0);
}
/**
* Getter.
*
* @return the simple name of the inner class represented by this binary name. <code>null</code> if this binary name
* does not represent an inner class
*/
public String getInnerSimpleName()
{
return cls.size() > 1 ? cls.get(cls.size() - 1) : null;
}
/**
* The fully qualified name of the outer class.
*
* @return that, or <code>null</code> if the simple name of the outer class is unknown
*/
@CheckForNull
public String getOuterFqcn()
{
return (pkg != null ? (pkg + ".") : "") + getOuterSimpleName();
}
}<|fim▁end|> | sb.append('$');
} |
<|file_name|>EvictionDUnitTest.java<|end_file_name|><|fim▁begin|>/*
* Licensed to the Apache Software Foundation (ASF) under one or more contributor license
* agreements. See the NOTICE file distributed with this work for additional information regarding
* copyright ownership. The ASF licenses this file to You under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance with the License. You may obtain a
* copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software distributed under the License
* is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express
* or implied. See the License for the specific language governing permissions and limitations under
* the License.
*/
package org.apache.geode.cache.lucene;
import static org.apache.geode.cache.lucene.test.LuceneTestUtilities.INDEX_NAME;
import static org.apache.geode.cache.lucene.test.LuceneTestUtilities.REGION_NAME;
import static org.apache.geode.test.awaitility.GeodeAwaitility.await;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertNull;
import static org.junit.Assert.assertTrue;
import java.util.List;
import java.util.stream.IntStream;
import junitparams.JUnitParamsRunner;
import junitparams.Parameters;
import org.junit.Test;
import org.junit.experimental.categories.Category;
import org.junit.runner.RunWith;
import org.apache.geode.cache.Cache;
import org.apache.geode.cache.Region;
import org.apache.geode.internal.cache.GemFireCacheImpl;
import org.apache.geode.internal.cache.PartitionedRegion;
import org.apache.geode.internal.cache.control.HeapMemoryMonitor;
import org.apache.geode.test.dunit.SerializableRunnableIF;
import org.apache.geode.test.junit.categories.LuceneTest;
@Category({LuceneTest.class})
@RunWith(JUnitParamsRunner.class)
public class EvictionDUnitTest extends LuceneQueriesAccessorBase {
protected static final float INITIAL_EVICTION_HEAP_PERCENTAGE = 50.9f;
protected static final float EVICTION_HEAP_PERCENTAGE_FAKE_NOTIFICATION = 85.0f;
protected static final int TEST_MAX_MEMORY = 100;
protected static final int MEMORY_USED_FAKE_NOTIFICATION = 90;
protected RegionTestableType[] getPartitionRedundantOverflowEvictionRegionType() {
return new RegionTestableType[] {
RegionTestableType.PARTITION_PERSISTENT_REDUNDANT_EVICTION_OVERFLOW};
}
protected RegionTestableType[] getPartitionRedundantLocalDestroyEvictionRegionType() {
return new RegionTestableType[] {RegionTestableType.PARTITION_REDUNDANT_EVICTION_LOCAL_DESTROY,
RegionTestableType.PARTITION_REDUNDANT_PERSISTENT_EVICTION_LOCAL_DESTROY,
RegionTestableType.PARTITION_EVICTION_LOCAL_DESTROY,
RegionTestableType.PARTITION_PERSISTENT_EVICTION_LOCAL_DESTROY};
}
@Test
@Parameters(method = "getPartitionRedundantLocalDestroyEvictionRegionType")
public void regionWithEvictionWithLocalDestroyMustNotbeAbleToCreateLuceneIndexes(
RegionTestableType regionTestType) {
SerializableRunnableIF createIndex = getSerializableRunnableIFCreateIndex();
dataStore1.invoke(() -> {
try {
initDataStore(createIndex, regionTestType);
} catch (UnsupportedOperationException e) {
assertEquals(
"Lucene indexes on regions with eviction and action local destroy are not supported",
e.getMessage());
assertNull(getCache().getRegion(REGION_NAME));
}
});
}<|fim▁hole|> LuceneService luceneService = LuceneServiceProvider.get(getCache());
luceneService.createIndexFactory().setFields("text").create(INDEX_NAME, REGION_NAME);
};
}
@Test
@Parameters(method = "getPartitionRedundantOverflowEvictionRegionType")
public void regionsWithEvictionWithOverflowMustBeAbleToCreateLuceneIndexes(
RegionTestableType regionTestType) {
SerializableRunnableIF createIndex = () -> {
LuceneService luceneService = LuceneServiceProvider.get(getCache());
luceneService.createIndexFactory().setFields("text").create(INDEX_NAME, REGION_NAME);
};
dataStore1.invoke(() -> initDataStore(createIndex, regionTestType));
accessor.invoke(() -> initDataStore(createIndex, regionTestType));
accessor.invoke(() -> {
Cache cache = getCache();
Region region = cache.getRegion(REGION_NAME);
IntStream.range(0, NUM_BUCKETS).forEach(i -> region.put(i, new TestObject("hello world")));
});
waitForFlushBeforeExecuteTextSearch(accessor, 60000);
dataStore1.invoke(() -> {
try {
getCache().getResourceManager().setEvictionHeapPercentage(INITIAL_EVICTION_HEAP_PERCENTAGE);
final PartitionedRegion partitionedRegion = (PartitionedRegion) getRootRegion(REGION_NAME);
raiseFakeNotification();
await().untilAsserted(() -> {
assertTrue(partitionedRegion.getDiskRegionStats().getNumOverflowOnDisk() > 0);
});
} finally {
cleanUpAfterFakeNotification();
}
});
accessor.invoke(() -> {
LuceneService luceneService = LuceneServiceProvider.get(getCache());
LuceneQuery<Integer, TestObject> query = luceneService.createLuceneQueryFactory()
.setLimit(100).create(INDEX_NAME, REGION_NAME, "world", "text");
List<LuceneResultStruct<Integer, TestObject>> resultList = query.findResults();
assertEquals(NUM_BUCKETS, resultList.size());
});
}
protected void raiseFakeNotification() {
((GemFireCacheImpl) getCache()).getHeapEvictor().setTestAbortAfterLoopCount(1);
HeapMemoryMonitor.setTestDisableMemoryUpdates(true);
getCache().getResourceManager()
.setEvictionHeapPercentage(EVICTION_HEAP_PERCENTAGE_FAKE_NOTIFICATION);
HeapMemoryMonitor heapMemoryMonitor =
((GemFireCacheImpl) getCache()).getInternalResourceManager().getHeapMonitor();
heapMemoryMonitor.setTestMaxMemoryBytes(TEST_MAX_MEMORY);
heapMemoryMonitor.updateStateAndSendEvent(MEMORY_USED_FAKE_NOTIFICATION, "test");
}
protected void cleanUpAfterFakeNotification() {
((GemFireCacheImpl) getCache()).getHeapEvictor().setTestAbortAfterLoopCount(Integer.MAX_VALUE);
HeapMemoryMonitor.setTestDisableMemoryUpdates(false);
}
}<|fim▁end|> |
private SerializableRunnableIF getSerializableRunnableIFCreateIndex() {
return () -> { |
<|file_name|>Convert_PostgreSQL_Native.java<|end_file_name|><|fim▁begin|>package org.compiere.dbPort;
import java.util.Collections;
import java.util.List;
/**
* Native PostgreSQL (pass-through) implementation of {@link Convert}
*
* @author tsa
*
*/
public final class Convert_PostgreSQL_Native extends Convert<|fim▁hole|>
@Override
protected final List<String> convertStatement(final String sqlStatement)
{
return Collections.singletonList(sqlStatement);
}
}<|fim▁end|> | { |
<|file_name|>test.array.js<|end_file_name|><|fim▁begin|>/* global describe, it, require */
'use strict';
// MODULES //
var // Expectation library:
chai = require( 'chai' ),
// Check whether an element is a finite number
isFiniteNumber = require( 'validate.io-finite' ),
// Module to be tested:
quantile = require( './../lib/array.js' );
// VARIABLES //
var expect = chai.expect,
assert = chai.assert;
// TESTS //
describe( 'array quantile', function tests() {
var validationData = require( './fixtures/array.json' ),
lambda = validationData.lambda;
<|fim▁hole|> expect( quantile ).to.be.a( 'function' );
});
it( 'should evaluate the quantile function', function test() {
var data, actual, expected, i;
data = validationData.data;
actual = new Array( data.length );
actual = quantile( actual, data, lambda );
expected = validationData.expected.map( function( d ) {
if (d === 'Inf' ) {
return Number.POSITIVE_INFINITY;
}
if ( d === '-Inf' ) {
return Number.NEGATIVE_INFINITY;
}
return d;
});
for ( i = 0; i < actual.length; i++ ) {
if ( isFiniteNumber( actual[ i ] ) && isFiniteNumber( expected[ i ] ) ) {
assert.closeTo( actual[ i ], expected[ i ], 1e-12 );
}
}
});
it( 'should return an empty array if provided an empty array', function test() {
assert.deepEqual( quantile( [], [], lambda ), [] );
});
it( 'should handle non-numeric values by setting the element to NaN', function test() {
var data, actual, expected;
data = [ true, null, [], {} ];
actual = new Array( data.length );
actual = quantile( actual, data, lambda );
expected = [ NaN, NaN, NaN, NaN ];
assert.deepEqual( actual, expected );
});
});<|fim▁end|> | it( 'should export a function', function test() { |
<|file_name|>webpack.base.babel.js<|end_file_name|><|fim▁begin|>/**
* COMMON WEBPACK CONFIGURATION
*/
const path = require('path');
const webpack = require('webpack');
module.exports = (options) => ({
entry: options.entry,
output: Object.assign({ // Compile into js/build.js
path: path.resolve(process.cwd(), 'build'),
publicPath: '/',
}, options.output), // Merge with env dependent settings
module: {
loaders: [{
test: /\.js$/, // Transform all .js files required somewhere with Babel
loader: 'babel',
exclude: /node_modules/,
query: options.babelQuery,
}, {
// Do not transform vendor's CSS with CSS-modules
// The point is that they remain in global scope.
// Since we require these CSS files in our JS or CSS files,
// they will be a part of our compilation either way.
// So, no need for ExtractTextPlugin here.
test: /\.css$/,
include: /node_modules/,
loaders: ['style-loader', 'css-loader'],
}, {
test: /\.(eot|svg|ttf|woff|woff2)$/,
loader: 'file-loader',
}, {
test: /\.(jpg|png|gif)$/,
loaders: [
'file-loader',
{
loader: 'image-webpack',
query: {
progressive: true,
optimizationLevel: 7,
interlaced: false,
pngquant: {
quality: '65-90',
speed: 4,
},
},
},
],
}, {
test: /\.html$/,
loader: 'html-loader',
}, {<|fim▁hole|> test: /\.json$/,
loader: 'json-loader',
}, {
test: /\.(mp4|webm)$/,
loader: 'url-loader',
query: {
limit: 10000,
},
}],
},
plugins: options.plugins.concat([
new webpack.ProvidePlugin({
// make fetch available
fetch: 'exports?self.fetch!whatwg-fetch',
}),
// Always expose NODE_ENV to webpack, in order to use `process.env.NODE_ENV`
// inside your code for any environment checks; UglifyJS will automatically
// drop any unreachable code.
new webpack.DefinePlugin({
'process.env': {
NODE_ENV: JSON.stringify(process.env.NODE_ENV),
},
}),
new webpack.NamedModulesPlugin(),
]),
resolve: {
modules: ['app', 'node_modules'],
extensions: [
'.js',
'.jsx',
'.react.js',
],
mainFields: [
'browser',
'jsnext:main',
'main',
],
},
devtool: options.devtool,
target: 'web', // Make web variables accessible to webpack, e.g. window
});<|fim▁end|> | |
<|file_name|>helper.py<|end_file_name|><|fim▁begin|>import pkg_resources
import unittest
def with_requires(*requirements):
"""Run a test case only when given requirements are satisfied.
<|fim▁hole|> .. admonition:: Example
This test case runs only when `numpy>=1.10` is installed.
>>> from chainer import testing
... class Test(unittest.TestCase):
... @testing.with_requires('numpy>=1.10')
... def test_for_numpy_1_10(self):
... pass
Args:
requirements: A list of string representing requirement condition to
run a given test case.
"""
ws = pkg_resources.WorkingSet()
try:
ws.require(*requirements)
skip = False
except pkg_resources.VersionConflict:
skip = True
msg = 'requires: {}'.format(','.join(requirements))
return unittest.skipIf(skip, msg)<|fim▁end|> | |
<|file_name|>systems.py<|end_file_name|><|fim▁begin|>import ecs
import raidersem
import components as comp
import constants as cst
import utils
import assets
import math
from sfml import sf
### Graphics ###
class DrawMap(ecs.System):
def __init__(self, window, mapObstacles, rs):
self.window = window
self.mapObstacles = mapObstacles
self.rs = rs
def update(self, em, eventManager, dt):
drawableMap = em.getEntitiesWithComponents([comp.DrawableMap])[0].component(comp.DrawableMap)
tilemap = drawableMap.tilemap
width = tilemap["width"]
height = tilemap["height"]
tile = sf.RectangleShape()
vlx, vly = self.window.map_pixel_to_coords((0, 0))
vhx, vhy = self.window.map_pixel_to_coords((cst.WINDOW_WIDTH, cst.WINDOW_HEIGHT))
states = sf.RenderStates()
states.texture = self.rs.tileset.texture
x0 = math.floor(vlx/cst.TILE_SIZE)
x1 = math.ceil(vhx/cst.TILE_SIZE)
y0 = math.floor(vly/cst.TILE_SIZE)
y1 = math.ceil(vhy/cst.TILE_SIZE)
for chunk in drawableMap.chunkset.visibleChunks(x0, x1, y0, y1):
self.window.draw(chunk, states)
# House debug
for wall in self.mapObstacles.staticWalls | self.mapObstacles.dynamicWalls:
if utils.isHorizontal(wall):
line = sf.RectangleShape((4, cst.TILE_SIZE))
line.origin = (2, 0)
else:
line = sf.RectangleShape((cst.TILE_SIZE, 4))
line.origin = (0, 2)
if wall.isdoor:
if not wall.active:
continue
line.fill_color = sf.Color(255, 0, 255)
else:
line.fill_color = sf.Color(255, 255, 0)
line.position = (wall.edge[1][0] * cst.TILE_SIZE, wall.edge[1][1] * cst.TILE_SIZE)
self.window.draw(line)
<|fim▁hole|>class DrawFighter(ecs.System):
def __init__(self, window, mapObstacles):
self.window = window
self.mapObstacles = mapObstacles
self.team = -1
def update(self, em, eventManager, dt):
allies = em.teamMembers(self.team)
for e in em.getEntitiesWithComponents([comp.DrawableFighter, comp.Position, comp.Fighter]):
if e.component(comp.Fighter).team != self.team and not utils.oneCanSee(allies, e, self.mapObstacles):
continue
pos = e.component(comp.Position)
shape = e.component(comp.DrawableFighter).surface
shape.position = (pos.x + 0.5*cst.TILE_SIZE - shape.radius, pos.y + 0.5*cst.TILE_SIZE - shape.radius)
if e.component(comp.Fighter).team == self.team:
shape.fill_color = sf.Color.BLUE
else:
shape.fill_color = sf.Color.RED
self.window.draw(shape)
class DrawHealthBar(ecs.System):
def __init__(self, window, view, mapObstacles):
self.window = window
self.view = view
self.team = -1
self.mapObstacles = mapObstacles
def update(self, em, eventManager, dt):
allies = em.teamMembers(self.team)
zoom_factor = cst.WINDOW_WIDTH / self.view.size.x
for e in em.getEntitiesWithComponents([comp.DrawableHUD, comp.Position, comp.Vulnerable]):
if e.component(comp.Vulnerable).visibility == cst.BarVisibility.HIDDEN:
continue
if e.component(comp.Fighter).team != self.team and not utils.oneCanSee(allies, e, self.mapObstacles):
continue
# TODO: can divide by 0, handle with care
hpratio = e.component(comp.Vulnerable).currenthp / e.component(comp.Vulnerable).hpmax
if e.component(comp.Vulnerable).visibility == cst.BarVisibility.DAMAGED and hpratio == 1:
continue
# Draw hp bar
x, y = e.component(comp.Position).x, e.component(comp.Position).y
bar_position = self.window.map_coords_to_pixel((x + cst.BAR_X, y + cst.TILE_SIZE + cst.BAR_Y), self.view)
redbar = sf.RectangleShape()
redbar.position = bar_position
redbar.size = (cst.BAR_WIDTH * zoom_factor, cst.BAR_HEIGHT * zoom_factor)
redbar.fill_color = sf.Color.RED
redbar.outline_thickness = 1
redbar.outline_color = sf.Color.BLACK
self.window.draw(redbar)
if hpratio != 0:
greenbar = sf.RectangleShape()
greenbar.position = bar_position
greenbar.size = (hpratio * cst.BAR_WIDTH * zoom_factor, cst.BAR_HEIGHT * zoom_factor)
greenbar.fill_color = sf.Color.GREEN
self.window.draw(greenbar)
class DrawWeaponRange(ecs.System):
def __init__(self, window, view):
self.window = window
self.view = view
def update(self, em, eventManager, dt):
zoom_factor = cst.WINDOW_WIDTH / self.view.size.x
for e in em.getEntitiesWithComponents([comp.DrawableHUD, comp.Position, comp.Fighter, comp.Weapon, comp.Selected]):
pos = e.component(comp.Position)
pos = self.window.map_coords_to_pixel((pos.x + .5 * cst.TILE_SIZE, pos.y + .5 * cst.TILE_SIZE), self.view)
rangeCircle = sf.CircleShape()
rangeCircle.radius = e.component(comp.Weapon).atkRange * zoom_factor
rangeCircle.origin = (rangeCircle.radius, rangeCircle.radius)
rangeCircle.position = (pos.x, pos.y)
rangeCircle.fill_color = sf.Color.TRANSPARENT
rangeCircle.outline_thickness = 1
rangeCircle.outline_color = sf.Color(0, 0, 0, 128)
self.window.draw(rangeCircle)
class DrawTeamHUD(ecs.System):
def __init__(self, window, rs):
self.window = window
self.rs = rs
self.team = -1
def update(self, em, eventManager, dt):
allies = em.teamMembers(self.team)
leaderPortrait = sf.RectangleShape((cst.PORTRAIT_LEADER_SIZE, cst.PORTRAIT_LEADER_SIZE))
leaderPortrait.origin = (0, cst.PORTRAIT_LEADER_SIZE)
leaderPortrait.position = (cst.PORTRAIT_X_MARGIN, cst.WINDOW_HEIGHT - cst.PORTRAIT_Y_MARGIN)
text = sf.Text()
text.font = self.rs.font
text.character_size = 30
text.color = sf.Color(128, 128, 128)
self.window.draw(leaderPortrait)
leader = [e for e in allies if e.hasComponent(comp.Leader)]
if len(leader): # Should be always true
text.string = leader[0].component(comp.Fighter).name[0]
text.origin = (text.global_bounds.width / 2, text.global_bounds.height / 2)
text.position = (leaderPortrait.position.x + cst.PORTRAIT_LEADER_SIZE / 2, leaderPortrait.position.y - cst.PORTRAIT_LEADER_SIZE / 2)
self.window.draw(text)
allies.remove(leader[0])
text.character_size = 16
for i in range(cst.MAX_TEAM_SIZE - 1):
emptySlot = (i >= len(allies))
portrait = sf.RectangleShape((cst.PORTRAIT_NORMAL_SIZE, cst.PORTRAIT_NORMAL_SIZE))
portrait.origin = (0, cst.PORTRAIT_NORMAL_SIZE)
portrait.position = (cst.PORTRAIT_X_MARGIN + cst.PORTRAIT_LEADER_SIZE + i * cst.PORTRAIT_NORMAL_SIZE + (i+1) * cst.PORTRAIT_INTER, cst.WINDOW_HEIGHT - cst.PORTRAIT_Y_MARGIN)
if emptySlot:
portrait.fill_color = sf.Color(128, 128, 128)
self.window.draw(portrait)
if not emptySlot:
text.string = allies[i].component(comp.Fighter).name[0]
text.origin = (text.global_bounds.width / 2, text.global_bounds.height / 2)
text.position = (portrait.position.x + cst.PORTRAIT_NORMAL_SIZE / 2, portrait.position.y - cst.PORTRAIT_NORMAL_SIZE / 2)
self.window.draw(text)
class DrawFPS(ecs.System):
def __init__(self, window, rs):
self.window = window
self.rs = rs
self.sum_dt = 0
self.num_dt = 0
self.old_fps = "60"
def update(self, em, eventManager, dt):
self.sum_dt += dt
self.num_dt += 1
fps = sf.Text()
fps.font = self.rs.font
fps.character_size = 16
fps.color = sf.Color.RED
if self.sum_dt >= 0.5:
self.old_fps = str(int(self.num_dt/self.sum_dt))
self.sum_dt, self.num_dt = 0, 0
fps.string = self.old_fps
fps.origin = (fps.global_bounds.width, 0)
fps.position = (cst.WINDOW_WIDTH - cst.HUD_MARGIN, cst.HUD_MARGIN)
self.window.draw(fps)
### Core ###
class Teleportation(ecs.System):
def __init__(self):
pass
def update(self, em, eventManager, dt):
for e in em.getEntitiesWithComponents([comp.Position, comp.MovementTarget]):
pos = e.component(comp.Position)
targetTile = e.component(comp.MovementTarget).target
pos.x, pos.y = targetTile[0] * cst.TILE_SIZE, targetTile[1] * cst.TILE_SIZE
e.removeComponent(comp.MovementTarget)
e.removeComponent(comp.Selected)
class MovementAI(ecs.System):
def __init__(self, mapObstacles, mapData):
self.mapObstacles = mapObstacles
self.mapData = mapData
def update(self, em, eventManager, dt):
for e in em.getEntitiesWithComponents([comp.Position, comp.MovementTarget, comp.Fighter]):
pos = e.component(comp.Position)
currentTile = utils.world2grid((pos.x, pos.y))
targetTile = e.component(comp.MovementTarget).target
targetWorld = utils.grid2world(targetTile)
if utils.norm2(utils.vec2((pos.x, pos.y), targetWorld)) < 1:
pos.x, pos.y = targetWorld # Align the actual position along the tile
e.removeComponent(comp.MovementTarget)
e.removeComponent(comp.Path)
else:
if not e.hasComponent(comp.Path):
p = utils.pathToTile(e, targetTile, self.mapObstacles, self.mapData)
if p == None: # No path found
e.removeComponent(comp.MovementTarget)
e.removeComponent(comp.Selected)
continue
e.addComponent(comp.Path(p, 0))
path = e.component(comp.Path)
fighter = e.component(comp.Fighter)
delta = utils.vec2((pos.x, pos.y), utils.grid2world(path.path[path.currentIndex]))
if utils.norm2(delta) < 1:
path.currentIndex += 1
# Apply velocity of the underlying tile
velocity = cst.tileVelocity[cst.TileType(self.mapData["tiles"][currentTile[0] + currentTile[1] * self.mapData["width"]])]
length = utils.norm(delta)
if length > fighter.movSpeed * dt * velocity: # If destination is too far to be reached within one turn
movement = (delta[0] * fighter.movSpeed * dt * velocity / length, delta[1] * fighter.movSpeed * dt * velocity / length)
else: # If destination can be reached within one turn
movement = (delta[0], delta[1])
pos.x += movement[0]
pos.y += movement[1]
class PlayerAttack(ecs.System):
def __init__(self):
pass
def update(self, em, eventManager, dt):
for e in em.getEntitiesWithComponents([comp.Position, comp.Fighter, comp.Weapon, comp.AttackTarget]):
target = e.component(comp.AttackTarget)
foe = target.target
if foe.component(comp.Vulnerable).currenthp <= 0:
e.removeComponent(comp.AttackTarget)
continue
if not utils.inWeaponRange(e, foe):
pos = e.component(comp.Position)
foepos = foe.component(comp.Position)
currentMoveT = e.component(comp.MovementTarget) if e.hasComponent(comp.MovementTarget) else None
moveT = utils.closestTileInRange((pos.x, pos.y), (foepos.x, foepos.y), e.component(comp.Weapon).atkRange)
if currentMoveT == None or currentMoveT.target != moveT:
e.removeComponent(comp.Path)
e.addComponent(comp.MovementTarget(moveT))
continue
atkSpeed = e.component(comp.Weapon).atkSpeed
nHits = int(target.dt * atkSpeed)
effectiveDmg = nHits * utils.effectiveDmg(e, foe)
diff = foe.component(comp.Vulnerable).currenthp - effectiveDmg
foe.component(comp.Vulnerable).currenthp = diff if diff > 0 else 0
target.dt += dt
target.dt -= nHits / atkSpeed<|fim▁end|> | |
<|file_name|>lookahead.rs<|end_file_name|><|fim▁begin|>//use std::collections::LinkedList;
use token::Token;
use std::mem::replace;
// test case: x=0;y=g=1;alert(eval("while(x)break\n/y/g.exec('y')"))
// see: https://groups.google.com/d/msg/mozilla.dev.tech.js-engine.internals/2JLH5jRcr7E/Mxc7ZKc5r6sJ
pub struct Buffer {
//tokens: LinkedList<Token>
token: Option<Token>
}
impl Buffer {
pub fn new() -> Buffer {
Buffer {
token: None
//tokens: LinkedList::new()
}
}
pub fn is_empty(&mut self) -> bool {
//self.tokens.len() == 0
self.token.is_none()
}
pub fn push_token(&mut self, token: Token) {
//assert!(self.tokens.len() == 0);
//self.tokens.push_back(token);
debug_assert!(self.token.is_none());
self.token = Some(token)
}
pub fn read_token(&mut self) -> Token {
//assert!(self.tokens.len() > 0);
//self.tokens.pop_front().unwrap()<|fim▁hole|> replace(&mut self.token, None).unwrap()
}
pub fn peek_token(&mut self) -> &Token {
//assert!(self.tokens.len() > 0);
//self.tokens.front().unwrap()
debug_assert!(self.token.is_some());
self.token.as_ref().unwrap()
}
pub fn unread_token(&mut self, token: Token) {
//assert!(self.tokens.len() < 3);
//self.tokens.push_front(token);
debug_assert!(self.token.is_none());
self.token = Some(token);
}
}<|fim▁end|> | debug_assert!(self.token.is_some()); |
<|file_name|>error.rs<|end_file_name|><|fim▁begin|>// Copyright 2018 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
use std::{fmt, env};
use hir::map::definitions::DefPathData;
use mir;
use ty::{self, Ty, layout};
use ty::layout::{Size, Align, LayoutError};
use rustc_target::spec::abi::Abi;
use super::{RawConst, Pointer, InboundsCheck, ScalarMaybeUndef};
use backtrace::Backtrace;
use ty::query::TyCtxtAt;
use errors::DiagnosticBuilder;
use syntax_pos::{Pos, Span};
use syntax::ast;
use syntax::symbol::Symbol;
#[derive(Debug, Copy, Clone, PartialEq, Eq)]
pub enum ErrorHandled {
/// Already reported a lint or an error for this evaluation
Reported,
/// Don't emit an error, the evaluation failed because the MIR was generic
/// and the substs didn't fully monomorphize it.
TooGeneric,
}
impl ErrorHandled {
pub fn assert_reported(self) {
match self {
ErrorHandled::Reported => {},
ErrorHandled::TooGeneric => bug!("MIR interpretation failed without reporting an error \
even though it was fully monomorphized"),
}
}
}
pub type ConstEvalRawResult<'tcx> = Result<RawConst<'tcx>, ErrorHandled>;
pub type ConstEvalResult<'tcx> = Result<&'tcx ty::Const<'tcx>, ErrorHandled>;
#[derive(Clone, Debug, RustcEncodable, RustcDecodable)]
pub struct ConstEvalErr<'tcx> {
pub span: Span,
pub error: ::mir::interpret::EvalErrorKind<'tcx, u64>,
pub stacktrace: Vec<FrameInfo<'tcx>>,
}
#[derive(Clone, Debug, RustcEncodable, RustcDecodable)]
pub struct FrameInfo<'tcx> {
pub call_site: Span, // this span is in the caller!
pub instance: ty::Instance<'tcx>,
pub lint_root: Option<ast::NodeId>,
}
impl<'tcx> fmt::Display for FrameInfo<'tcx> {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
ty::tls::with(|tcx| {
if tcx.def_key(self.instance.def_id()).disambiguated_data.data
== DefPathData::ClosureExpr
{
write!(f, "inside call to closure")?;
} else {
write!(f, "inside call to `{}`", self.instance)?;
}
if !self.call_site.is_dummy() {
let lo = tcx.sess.source_map().lookup_char_pos_adj(self.call_site.lo());
write!(f, " at {}:{}:{}", lo.filename, lo.line, lo.col.to_usize() + 1)?;
}
Ok(())
})
}
}
impl<'a, 'gcx, 'tcx> ConstEvalErr<'tcx> {
pub fn struct_error(&self,
tcx: TyCtxtAt<'a, 'gcx, 'tcx>,
message: &str)
-> Result<DiagnosticBuilder<'tcx>, ErrorHandled>
{
self.struct_generic(tcx, message, None)
}
pub fn report_as_error(&self,
tcx: TyCtxtAt<'a, 'gcx, 'tcx>,
message: &str
) -> ErrorHandled {
let err = self.struct_error(tcx, message);
match err {
Ok(mut err) => {
err.emit();
ErrorHandled::Reported
},
Err(err) => err,
}
}
pub fn report_as_lint(&self,
tcx: TyCtxtAt<'a, 'gcx, 'tcx>,
message: &str,
lint_root: ast::NodeId,
) -> ErrorHandled {
let lint = self.struct_generic(
tcx,
message,
Some(lint_root),
);
match lint {
Ok(mut lint) => {
lint.emit();
ErrorHandled::Reported
},
Err(err) => err,
}
}
fn struct_generic(
&self,
tcx: TyCtxtAt<'a, 'gcx, 'tcx>,
message: &str,
lint_root: Option<ast::NodeId>,
) -> Result<DiagnosticBuilder<'tcx>, ErrorHandled> {
match self.error {
EvalErrorKind::Layout(LayoutError::Unknown(_)) |
EvalErrorKind::TooGeneric => return Err(ErrorHandled::TooGeneric),
EvalErrorKind::Layout(LayoutError::SizeOverflow(_)) |
EvalErrorKind::TypeckError => return Err(ErrorHandled::Reported),
_ => {},
}
trace!("reporting const eval failure at {:?}", self.span);
let mut err = if let Some(lint_root) = lint_root {
let node_id = self.stacktrace
.iter()
.rev()
.filter_map(|frame| frame.lint_root)
.next()
.unwrap_or(lint_root);
tcx.struct_span_lint_node(
::rustc::lint::builtin::CONST_ERR,
node_id,
tcx.span,
message,
)
} else {
struct_error(tcx, message)
};
err.span_label(self.span, self.error.to_string());
// Skip the last, which is just the environment of the constant. The stacktrace
// is sometimes empty because we create "fake" eval contexts in CTFE to do work
// on constant values.
if self.stacktrace.len() > 0 {
for frame_info in &self.stacktrace[..self.stacktrace.len()-1] {
err.span_label(frame_info.call_site, frame_info.to_string());
}
}
Ok(err)
}
}
pub fn struct_error<'a, 'gcx, 'tcx>(
tcx: TyCtxtAt<'a, 'gcx, 'tcx>,
msg: &str,
) -> DiagnosticBuilder<'tcx> {
struct_span_err!(tcx.sess, tcx.span, E0080, "{}", msg)
}
#[derive(Debug, Clone)]
pub struct EvalError<'tcx> {
pub kind: EvalErrorKind<'tcx, u64>,
pub backtrace: Option<Box<Backtrace>>,
}
impl<'tcx> EvalError<'tcx> {
pub fn print_backtrace(&mut self) {
if let Some(ref mut backtrace) = self.backtrace {
print_backtrace(&mut *backtrace);
}
}
}
fn print_backtrace(backtrace: &mut Backtrace) {
backtrace.resolve();
eprintln!("\n\nAn error occurred in miri:\n{:?}", backtrace);
}
impl<'tcx> From<EvalErrorKind<'tcx, u64>> for EvalError<'tcx> {
fn from(kind: EvalErrorKind<'tcx, u64>) -> Self {
let backtrace = match env::var("RUST_CTFE_BACKTRACE") {
// matching RUST_BACKTRACE, we treat "0" the same as "not present".
Ok(ref val) if val != "0" => {
let mut backtrace = Backtrace::new_unresolved();
if val == "immediate" {
// Print it now
print_backtrace(&mut backtrace);
None
} else {
Some(Box::new(backtrace))
}
},
_ => None,
};
EvalError {
kind,
backtrace,
}
}
}
pub type AssertMessage<'tcx> = EvalErrorKind<'tcx, mir::Operand<'tcx>>;
#[derive(Clone, RustcEncodable, RustcDecodable)]
pub enum EvalErrorKind<'tcx, O> {
/// This variant is used by machines to signal their own errors that do not
/// match an existing variant
MachineError(String),
FunctionAbiMismatch(Abi, Abi),
FunctionArgMismatch(Ty<'tcx>, Ty<'tcx>),
FunctionRetMismatch(Ty<'tcx>, Ty<'tcx>),
FunctionArgCountMismatch,
NoMirFor(String),
UnterminatedCString(Pointer),
DanglingPointerDeref,
DoubleFree,
InvalidMemoryAccess,
InvalidFunctionPointer,
InvalidBool,
InvalidDiscriminant(ScalarMaybeUndef),
PointerOutOfBounds {
ptr: Pointer,
check: InboundsCheck,
allocation_size: Size,
},
InvalidNullPointerUsage,
ReadPointerAsBytes,
ReadBytesAsPointer,
ReadForeignStatic,
InvalidPointerMath,
ReadUndefBytes(Size),
DeadLocal,
InvalidBoolOp(mir::BinOp),
Unimplemented(String),
DerefFunctionPointer,
ExecuteMemory,
BoundsCheck { len: O, index: O },
Overflow(mir::BinOp),
OverflowNeg,
DivisionByZero,
RemainderByZero,
Intrinsic(String),
InvalidChar(u128),
StackFrameLimitReached,
OutOfTls,
TlsOutOfBounds,
AbiViolation(String),
AlignmentCheckFailed {
required: Align,
has: Align,
},
ValidationFailure(String),
CalledClosureAsFunction,
VtableForArgumentlessMethod,
ModifiedConstantMemory,
ModifiedStatic,
AssumptionNotHeld,
InlineAsm,
TypeNotPrimitive(Ty<'tcx>),
ReallocatedWrongMemoryKind(String, String),
DeallocatedWrongMemoryKind(String, String),
ReallocateNonBasePtr,
DeallocateNonBasePtr,
IncorrectAllocationInformation(Size, Size, Align, Align),
Layout(layout::LayoutError<'tcx>),
HeapAllocZeroBytes,
HeapAllocNonPowerOfTwoAlignment(u64),
Unreachable,
Panic {
msg: Symbol,
line: u32,
col: u32,
file: Symbol,
},
ReadFromReturnPointer,
PathNotFound(Vec<String>),
UnimplementedTraitSelection,
/// Abort in case type errors are reached
TypeckError,
/// Resolution can fail if we are in a too generic context
TooGeneric,
/// Cannot compute this constant because it depends on another one
/// which already produced an error
ReferencedConstant,
GeneratorResumedAfterReturn,
GeneratorResumedAfterPanic,
InfiniteLoop,
}
pub type EvalResult<'tcx, T = ()> = Result<T, EvalError<'tcx>>;
impl<'tcx, O> EvalErrorKind<'tcx, O> {
pub fn description(&self) -> &str {
use self::EvalErrorKind::*;
match *self {
MachineError(ref inner) => inner,
FunctionAbiMismatch(..) | FunctionArgMismatch(..) | FunctionRetMismatch(..)
| FunctionArgCountMismatch =>
"tried to call a function through a function pointer of incompatible type",
InvalidMemoryAccess =>
"tried to access memory through an invalid pointer",
DanglingPointerDeref =>
"dangling pointer was dereferenced",
DoubleFree =>
"tried to deallocate dangling pointer",
InvalidFunctionPointer =>
"tried to use a function pointer after offsetting it",
InvalidBool =>
"invalid boolean value read",
InvalidDiscriminant(..) =>
"invalid enum discriminant value read",
PointerOutOfBounds { .. } =>
"pointer offset outside bounds of allocation",
InvalidNullPointerUsage =>
"invalid use of NULL pointer",
ValidationFailure(..) =>
"type validation failed",
ReadPointerAsBytes =>
"a raw memory access tried to access part of a pointer value as raw bytes",
ReadBytesAsPointer =>
"a memory access tried to interpret some bytes as a pointer",
ReadForeignStatic =>
"tried to read from foreign (extern) static",
InvalidPointerMath =>
"attempted to do invalid arithmetic on pointers that would leak base addresses, \
e.g., comparing pointers into different allocations",
ReadUndefBytes(_) =>
"attempted to read undefined bytes",
DeadLocal =>
"tried to access a dead local variable",
InvalidBoolOp(_) =>
"invalid boolean operation",
Unimplemented(ref msg) => msg,
DerefFunctionPointer =><|fim▁hole|> "array index out of bounds",
Intrinsic(..) =>
"intrinsic failed",
NoMirFor(..) =>
"mir not found",
InvalidChar(..) =>
"tried to interpret an invalid 32-bit value as a char",
StackFrameLimitReached =>
"reached the configured maximum number of stack frames",
OutOfTls =>
"reached the maximum number of representable TLS keys",
TlsOutOfBounds =>
"accessed an invalid (unallocated) TLS key",
AbiViolation(ref msg) => msg,
AlignmentCheckFailed{..} =>
"tried to execute a misaligned read or write",
CalledClosureAsFunction =>
"tried to call a closure through a function pointer",
VtableForArgumentlessMethod =>
"tried to call a vtable function without arguments",
ModifiedConstantMemory =>
"tried to modify constant memory",
ModifiedStatic =>
"tried to modify a static's initial value from another static's initializer",
AssumptionNotHeld =>
"`assume` argument was false",
InlineAsm =>
"miri does not support inline assembly",
TypeNotPrimitive(_) =>
"expected primitive type, got nonprimitive",
ReallocatedWrongMemoryKind(_, _) =>
"tried to reallocate memory from one kind to another",
DeallocatedWrongMemoryKind(_, _) =>
"tried to deallocate memory of the wrong kind",
ReallocateNonBasePtr =>
"tried to reallocate with a pointer not to the beginning of an existing object",
DeallocateNonBasePtr =>
"tried to deallocate with a pointer not to the beginning of an existing object",
IncorrectAllocationInformation(..) =>
"tried to deallocate or reallocate using incorrect alignment or size",
Layout(_) =>
"rustc layout computation failed",
UnterminatedCString(_) =>
"attempted to get length of a null terminated string, but no null found before end \
of allocation",
HeapAllocZeroBytes =>
"tried to re-, de- or allocate zero bytes on the heap",
HeapAllocNonPowerOfTwoAlignment(_) =>
"tried to re-, de-, or allocate heap memory with alignment that is not a power of \
two",
Unreachable =>
"entered unreachable code",
Panic { .. } =>
"the evaluated program panicked",
ReadFromReturnPointer =>
"tried to read from the return pointer",
PathNotFound(_) =>
"a path could not be resolved, maybe the crate is not loaded",
UnimplementedTraitSelection =>
"there were unresolved type arguments during trait selection",
TypeckError =>
"encountered constants with type errors, stopping evaluation",
TooGeneric =>
"encountered overly generic constant",
ReferencedConstant =>
"referenced constant has errors",
Overflow(mir::BinOp::Add) => "attempt to add with overflow",
Overflow(mir::BinOp::Sub) => "attempt to subtract with overflow",
Overflow(mir::BinOp::Mul) => "attempt to multiply with overflow",
Overflow(mir::BinOp::Div) => "attempt to divide with overflow",
Overflow(mir::BinOp::Rem) => "attempt to calculate the remainder with overflow",
OverflowNeg => "attempt to negate with overflow",
Overflow(mir::BinOp::Shr) => "attempt to shift right with overflow",
Overflow(mir::BinOp::Shl) => "attempt to shift left with overflow",
Overflow(op) => bug!("{:?} cannot overflow", op),
DivisionByZero => "attempt to divide by zero",
RemainderByZero => "attempt to calculate the remainder with a divisor of zero",
GeneratorResumedAfterReturn => "generator resumed after completion",
GeneratorResumedAfterPanic => "generator resumed after panicking",
InfiniteLoop =>
"duplicate interpreter state observed here, const evaluation will never terminate",
}
}
}
impl<'tcx> fmt::Display for EvalError<'tcx> {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
write!(f, "{}", self.kind)
}
}
impl<'tcx> fmt::Display for EvalErrorKind<'tcx, u64> {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
write!(f, "{:?}", self)
}
}
impl<'tcx, O: fmt::Debug> fmt::Debug for EvalErrorKind<'tcx, O> {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
use self::EvalErrorKind::*;
match *self {
PointerOutOfBounds { ptr, check, allocation_size } => {
write!(f, "Pointer must be in-bounds{} at offset {}, but is outside bounds of \
allocation {} which has size {}",
match check {
InboundsCheck::Live => " and live",
InboundsCheck::MaybeDead => "",
},
ptr.offset.bytes(), ptr.alloc_id, allocation_size.bytes())
},
ValidationFailure(ref err) => {
write!(f, "type validation failed: {}", err)
}
NoMirFor(ref func) => write!(f, "no mir for `{}`", func),
FunctionAbiMismatch(caller_abi, callee_abi) =>
write!(f, "tried to call a function with ABI {:?} using caller ABI {:?}",
callee_abi, caller_abi),
FunctionArgMismatch(caller_ty, callee_ty) =>
write!(f, "tried to call a function with argument of type {:?} \
passing data of type {:?}",
callee_ty, caller_ty),
FunctionRetMismatch(caller_ty, callee_ty) =>
write!(f, "tried to call a function with return type {:?} \
passing return place of type {:?}",
callee_ty, caller_ty),
FunctionArgCountMismatch =>
write!(f, "tried to call a function with incorrect number of arguments"),
BoundsCheck { ref len, ref index } =>
write!(f, "index out of bounds: the len is {:?} but the index is {:?}", len, index),
ReallocatedWrongMemoryKind(ref old, ref new) =>
write!(f, "tried to reallocate memory from {} to {}", old, new),
DeallocatedWrongMemoryKind(ref old, ref new) =>
write!(f, "tried to deallocate {} memory but gave {} as the kind", old, new),
Intrinsic(ref err) =>
write!(f, "{}", err),
InvalidChar(c) =>
write!(f, "tried to interpret an invalid 32-bit value as a char: {}", c),
AlignmentCheckFailed { required, has } =>
write!(f, "tried to access memory with alignment {}, but alignment {} is required",
has.bytes(), required.bytes()),
TypeNotPrimitive(ty) =>
write!(f, "expected primitive type, got {}", ty),
Layout(ref err) =>
write!(f, "rustc layout computation failed: {:?}", err),
PathNotFound(ref path) =>
write!(f, "Cannot find path {:?}", path),
MachineError(ref inner) =>
write!(f, "{}", inner),
IncorrectAllocationInformation(size, size2, align, align2) =>
write!(f, "incorrect alloc info: expected size {} and align {}, \
got size {} and align {}",
size.bytes(), align.bytes(), size2.bytes(), align2.bytes()),
Panic { ref msg, line, col, ref file } =>
write!(f, "the evaluated program panicked at '{}', {}:{}:{}", msg, file, line, col),
InvalidDiscriminant(val) =>
write!(f, "encountered invalid enum discriminant {}", val),
_ => write!(f, "{}", self.description()),
}
}
}<|fim▁end|> | "tried to dereference a function pointer",
ExecuteMemory =>
"tried to treat a memory pointer as a function pointer",
BoundsCheck{..} => |
<|file_name|>TestConversions.java<|end_file_name|><|fim▁begin|>package de.jpaw.fixedpoint.tests;
import java.math.BigDecimal;
import org.junit.jupiter.api.Assertions;
import org.junit.jupiter.api.Test;<|fim▁hole|>public class TestConversions {
@Test
public void testFromConversions() throws Exception {
MicroUnits fromLong = MicroUnits.valueOf(2);
MicroUnits fromDouble = MicroUnits.valueOf(2.0);
MicroUnits fromString = MicroUnits.valueOf("2.0");
MicroUnits fromBigDecimal = MicroUnits.valueOf(BigDecimal.valueOf(2));
MicroUnits fromMantissa = MicroUnits.of(2_000_000L);
Assertions.assertEquals(fromMantissa, fromBigDecimal, "from BigDecimal");
Assertions.assertEquals(fromMantissa, fromString, "from String");
Assertions.assertEquals(fromMantissa, fromDouble, "from double");
Assertions.assertEquals(fromMantissa, fromLong, "from long");
}
@Test
public void testToConversions() throws Exception {
MicroUnits value = MicroUnits.valueOf(2);
Assertions.assertEquals("2", value.toString(), "to String");
Assertions.assertEquals(BigDecimal.valueOf(2).setScale(6), value.toBigDecimal(), "to BigDecimal");
Assertions.assertEquals(2, value.intValue(), "to int");
Assertions.assertEquals(2.0, value.doubleValue(), "to double");
Assertions.assertEquals(2_000_000L, value.getMantissa(), "to Mantissa");
}
}<|fim▁end|> |
import de.jpaw.fixedpoint.types.MicroUnits;
|
<|file_name|>document.js<|end_file_name|><|fim▁begin|>import { Class, clone, isArray } from './lib/objects';
import { diffs } from './lib/diffs';
import { eq } from './lib/eq';
import { PathNotFoundException } from './lib/exceptions';
/**
`Document` is a complete implementation of the JSON PATCH spec detailed in
[RFC 6902](http://tools.ietf.org/html/rfc6902).
A document can be manipulated via a `transform` method that accepts an
`operation`, or with the methods `add`, `remove`, `replace`, `move`, `copy` and
`test`.
Data at a particular path can be retrieved from a `Document` with `retrieve()`.
@class Document
@namespace Orbit
@param {Object} [data] The initial data for the document
@param {Object} [options]
@param {Boolean} [options.arrayBasedPaths=false] Should paths be array based, or `'/'` delimited (the default)?
@constructor
*/
var Document = Class.extend({
init: function(data, options) {
options = options || {};
this.arrayBasedPaths = options.arrayBasedPaths !== undefined ? options.arrayBasedPaths : false;
this.reset(data);
},
/**
Reset the contents of the whole document.
If no data is specified, the contents of the document will be reset to an
empty object.
@method reset
@param {Object} [data] New root object
*/
reset: function(data) {
this._data = data || {};
},
/**
Retrieve the value at a path.
If the path does not exist in the document, `PathNotFoundException` will be
thrown by default. If `quiet` is truthy, `undefined` will be returned
instead.
@method retrieve
@param {Array or String} [path]
@param {Boolean} [quiet=false] Return `undefined` instead of throwing an exception if `path` can't be found?
@returns {Object} Value at the specified `path` or `undefined`
*/
retrieve: function(path, quiet) {
return this._retrieve(this.deserializePath(path), quiet);
},
/**
Sets the value at a path.
If the target location specifies an array index, inserts a new value
into the array at the specified index.
If the target location specifies an object member that does not
already exist, adds a new member to the object.
If the target location specifies an object member that does exist,
replaces that member's value.
If the target location does not exist, throws `PathNotFoundException`.
@method add
@param {Array or String} path
@param {Object} value
@param {Boolean} [invert=false] Return the inverse operations?
@returns {Array} Array of inverse operations if `invert === true`
*/
add: function(path, value, invert) {
return this._add(this.deserializePath(path), value, invert);
},
/**
Removes the value from a path.
If removing an element from an array, shifts any elements above the
specified index one position to the left.
If the target location does not exist, throws `PathNotFoundException`.
@method remove
@param {Array or String} path
@param {Boolean} [invert=false] Return the inverse operations?
@returns {Array} Array of inverse operations if `invert === true`
*/
remove: function(path, invert) {
return this._remove(this.deserializePath(path), invert);
},
/**
Replaces the value at a path.
This operation is functionally identical to a "remove" operation for
a value, followed immediately by an "add" operation at the same
location with the replacement value.
If the target location does not exist, throws `PathNotFoundException`.
@method replace
@param {Array or String} path
@param {Object} value
@param {Boolean} [invert=false] Return the inverse operations?
@returns {Array} Array of inverse operations if `invert === true`
*/
replace: function(path, value, invert) {
return this._replace(this.deserializePath(path), value, invert);
},
/**
Moves an object from one path to another.
Identical to calling `remove()` followed by `add()`.
Throws `PathNotFoundException` if either path does not exist in the document.
@method move
@param {Array or String} fromPath
@param {Array or String} toPath
@param {Boolean} [invert=false] Return the inverse operations?
@returns {Array} Array of inverse operations if `invert === true`
*/
move: function(fromPath, toPath, invert) {
return this._move(this.deserializePath(fromPath), this.deserializePath(toPath), invert);
},
/**
Copies an object at one path and adds it to another.
Identical to calling `add()` with the value at `fromPath`.
Throws `PathNotFoundException` if either path does not exist in the document.
@method copy
@param {Array or String} fromPath
@param {Array or String} toPath
@param {Boolean} [invert=false] Return the inverse operations?
@returns {Array} Array of inverse operations if `invert === true`
*/
copy: function(fromPath, toPath, invert) {
return this._copy(this.deserializePath(fromPath), this.deserializePath(toPath), invert);
},
/**
Tests that the value at a path matches an expectation.
Uses `Orbit.eq` to test equality.
Throws `PathNotFoundException` if the path does not exist in the document.
@method test
@param {Array or String} [path]
@param {Object} [value] Expected value to test
@param {Boolean} [quiet=false] Use `undefined` instead of throwing an exception if `path` can't be found?
@returns {Boolean} Does the value at `path` equal `value`?
*/
test: function(path, value, quiet) {
return eq(this._retrieve(this.deserializePath(path), quiet), value);
},
/**
Transforms the document with an RFC 6902-compliant operation.
Throws `PathNotFoundException` if the path does not exist in the document.
@method transform
@param {Object} operation
@param {String} operation.op Must be "add", "remove", "replace", "move", "copy", or "test"
@param {Array or String} operation.path Path to target location
@param {Array or String} operation.from Path to source target location. Required for "copy" and "move"
@param {Object} operation.value Value to set. Required for "add", "replace" and "test"
@param {Boolean} [invert=false] Return the inverse operations?
@returns {Array} Array of inverse operations if `invert === true`
*/
transform: function(operation, invert) {
if (operation.op === 'add') {
return this.add(operation.path, operation.value, invert);
} else if (operation.op === 'remove') {
return this.remove(operation.path, invert);
} else if (operation.op === 'replace') {
return this.replace(operation.path, operation.value, invert);
} else if (operation.op === 'move') {
return this.move(operation.from, operation.path, invert);
} else if (operation.op === 'copy') {
return this.copy(operation.from, operation.path, invert);
} else if (operation.op === 'test') {
return this.copy(operation.path, operation.value);
}
},
serializePath: function(path) {
if (this.arrayBasedPaths) {
return path;
} else {
if (path.length === 0) {
return '/';
} else {
return '/' + path.join('/');
}
}
},
deserializePath: function(path) {
if (typeof path === 'string') {
if (path.indexOf('/') === 0) {
path = path.substr(1);
}
if (path.length === 0) {
return [];
} else {
return path.split('/');
}
} else {
return path;
}
},
/////////////////////////////////////////////////////////////////////////////
// Internals
/////////////////////////////////////////////////////////////////////////////
_pathNotFound: function(path, quiet) {
if (quiet) {
return undefined;
} else {
throw new PathNotFoundException(this.serializePath(path));
}
},
_retrieve: function(path, quiet) {
var ptr = this._data,
segment;
if (path) {
for (var i = 0, len = path.length; i < len; i++) {
segment = path[i];
if (isArray(ptr)) {
if (segment === '-') {
ptr = ptr[ptr.length-1];
} else {
ptr = ptr[parseInt(segment, 10)];
}
} else {
ptr = ptr[segment];
}
if (ptr === undefined) {
return this._pathNotFound(path, quiet);
}
}
}
return ptr;
},
_add: function(path, value, invert) {
var inverse;
value = clone(value);
if (path.length > 0) {
var parentKey = path[path.length-1];
if (path.length > 1) {
var grandparent = this._retrieve(path.slice(0, -1));
if (isArray(grandparent)) {
if (parentKey === '-') {
if (invert) {
inverse = [{op: 'remove', path: this.serializePath(path)}];
}
grandparent.push(value);
} else {
var parentIndex = parseInt(parentKey, 10);
if (parentIndex > grandparent.length) {
this._pathNotFound(path);
} else {
if (invert) {
inverse = [{op: 'remove', path: this.serializePath(path)}];
}
grandparent.splice(parentIndex, 0, value);
}
}
} else {
if (invert) {
if (grandparent.hasOwnProperty(parentKey)) {
inverse = [{op: 'replace', path: this.serializePath(path), value: clone(grandparent[parentKey])}];
} else {
inverse = [{op: 'remove', path: this.serializePath(path)}];
}
}
grandparent[parentKey] = value;
}
} else {
if (invert) {
if (this._data.hasOwnProperty(parentKey)) {
inverse = [{op: 'replace', path: this.serializePath(path), value: clone(this._data[parentKey])}];
} else {
inverse = [{op: 'remove', path: this.serializePath(path)}];
}
}
this._data[parentKey] = value;
}
} else {
if (invert) {
inverse = [{op: 'replace', path: this.serializePath([]), value: clone(this._data)}];
}
this._data = value;
}
return inverse;
},
_remove: function(path, invert) {
var inverse;
if (path.length > 0) {
var parentKey = path[path.length-1];
if (path.length > 1) {
var grandparent = this._retrieve(path.slice(0, -1));
if (isArray(grandparent)) {
if (grandparent.length > 0) {
if (parentKey === '-') {
if (invert) {
inverse = [{op: 'add', path: this.serializePath(path), value: clone(grandparent.pop())}];
} else {
grandparent.pop();
}
} else {
var parentIndex = parseInt(parentKey, 10);
if (grandparent[parentIndex] === undefined) {
this._pathNotFound(path);
} else {
if (invert) {
inverse = [{op: 'add', path: this.serializePath(path), value: clone(grandparent.splice(parentIndex, 1)[0])}];
} else {
grandparent.splice(parentIndex, 1);
}
}
}
} else {
this._pathNotFound(path);
}
} else if (grandparent[parentKey] === undefined) {
this._pathNotFound(path);
} else {
if (invert) {
inverse = [{op: 'add', path: this.serializePath(path), value: clone(grandparent[parentKey])}];
}
delete grandparent[parentKey];
}
} else if (this._data[parentKey] === undefined) {
this._pathNotFound(path);
} else {<|fim▁hole|> if (invert) {
inverse = [{op: 'add', path: this.serializePath(path), value: clone(this._data[parentKey])}];
}
delete this._data[parentKey];
}
} else {
if (invert) {
inverse = [{op: 'add', path: this.serializePath(path), value: clone(this._data)}];
}
this._data = {};
}
return inverse;
},
_replace: function(path, value, invert) {
var inverse;
value = clone(value);
if (path.length > 0) {
var parentKey = path[path.length-1];
if (path.length > 1) {
var grandparent = this._retrieve(path.slice(0, -1));
if (isArray(grandparent)) {
if (grandparent.length > 0) {
if (parentKey === '-') {
if (invert) {
inverse = [{op: 'replace', path: this.serializePath(path), value: clone(grandparent[grandparent.length-1])}];
}
grandparent[grandparent.length-1] = value;
} else {
var parentIndex = parseInt(parentKey, 10);
if (grandparent[parentIndex] === undefined) {
this._pathNotFound(path);
} else {
if (invert) {
inverse = [{op: 'replace', path: this.serializePath(path), value: clone(grandparent.splice(parentIndex, 1, value)[0])}];
} else {
grandparent.splice(parentIndex, 1, value);
}
}
}
} else {
this._pathNotFound(path);
}
} else {
if (invert) {
inverse = [{op: 'replace', path: this.serializePath(path), value: clone(grandparent[parentKey])}];
}
grandparent[parentKey] = value;
}
} else {
if (invert) {
inverse = [{op: 'replace', path: this.serializePath(path), value: clone(this._data[parentKey])}];
}
this._data[parentKey] = value;
}
} else {
if (invert) {
inverse = [{op: 'replace', path: this.serializePath([]), value: clone(this._data)}];
}
this._data = value;
}
return inverse;
},
_move: function(fromPath, toPath, invert) {
if (eq(fromPath, toPath)) {
if (invert) return [];
return;
} else {
var value = this._retrieve(fromPath);
if (invert) {
return this._remove(fromPath, true)
.concat(this._add(toPath, value, true))
.reverse();
} else {
this._remove(fromPath);
this._add(toPath, value);
}
}
},
_copy: function(fromPath, toPath, invert) {
if (eq(fromPath, toPath)) {
if (invert) return [];
return;
} else {
return this._add(toPath, this._retrieve(fromPath), invert);
}
}
});
export default Document;<|fim▁end|> | |
<|file_name|>Toolbar.js<|end_file_name|><|fim▁begin|>Miogen.require(['Component.BaseComponent',
'Component.Button'], function () {
Miogen.define('Component.Toolbar', Miogen.Component.BaseComponent.extend({
construct: function (cfg) {
this._super(cfg);<|fim▁hole|> build: function (cb) {
var t = this;
//
// t.addComponent(new Miogen.Component.Button());
// t.addComponent(new Miogen.Component.Button());
// Render the base widget and newly added components
this._super(function () {
cb.call(this);
this.el.addClass('toolbar');
});
//this.el = $('<div class="miogen-widget"><div class="pad">Collection widget</div></div>');
}
}));
});<|fim▁end|> | },
|
<|file_name|>fix-objects.d.ts<|end_file_name|><|fim▁begin|><|fim▁hole|> */
export declare function fixAdapterObjects(): Promise<void>;
export declare function ensureInstanceObjects(): Promise<void>;<|fim▁end|> | /**
* Fixes/updates/deletes existing adapter objects,
* so they don't have to be deleted manually |
<|file_name|>auto_conf_signals.py<|end_file_name|><|fim▁begin|>import sys
__author__ = 'weijia'
import django.dispatch
before_server_start = django.dispatch.Signal(providing_args=[])
before_server_stop = django.dispatch.Signal(providing_args=[])
class ServerSignalTrigger(object):
def trigger_server_start_if_needed(self):
if sys.argv[1] == "runserver":
before_server_start.send(sender=self)
def trigger_server_stop_if_needed(self):
if sys.argv[1] == "runserver":
before_server_stop.send(sender=self)<|fim▁hole|> print "Process exiting"<|fim▁end|> | |
<|file_name|>make_version.py<|end_file_name|><|fim▁begin|>print """
Version Info
============
To obtain version info::
from scan.version import __version__, version_history
print __version__
print version_history<|fim▁hole|>"""
import sys
sys.path.append("..")
from scan.version import __version__, version_history
print "Version history::"
for line in version_history.splitlines():
print (" " + line)<|fim▁end|> | |
<|file_name|>Gruntfile.js<|end_file_name|><|fim▁begin|>module.exports = function(grunt) {
grunt.initConfig({
web_server: {
options: {
cors: true,
port: 8000,
nevercache: true,
logRequests: true
},
foo: 'bar'
},
uglify: {
my_target: {
files: {
'dist/ng-video-preview.min.js': ['video-preview.js']
}
}
},
jshint: {
files: ['Gruntfile.js', 'src/**/*.js', 'test/**/*.js'],
options: {
globals: {
jQuery: true
}
}
},<|fim▁hole|> }
});
grunt.loadNpmTasks('grunt-contrib-jshint');
grunt.loadNpmTasks('grunt-contrib-watch');
grunt.loadNpmTasks('grunt-web-server');
grunt.loadNpmTasks('grunt-contrib-uglify');
grunt.registerTask('default', ['jshint']);
};<|fim▁end|> | watch: {
files: ['<%= jshint.files %>'],
tasks: ['jshint'] |
<|file_name|>test_mongoengine.py<|end_file_name|><|fim▁begin|># -*- coding: utf-8 -*-
# Copyright (c) 2013 Romain Command&
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in
# all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
# THE SOFTWARE.
"""Tests for factory_boy/MongoEngine interactions."""
import factory
import os<|fim▁hole|>from .compat import unittest
try:
import mongoengine
except ImportError:
mongoengine = None
if os.environ.get('SKIP_MONGOENGINE') == '1':
mongoengine = None
if mongoengine:
from factory.mongoengine import MongoEngineFactory
class Address(mongoengine.EmbeddedDocument):
street = mongoengine.StringField()
class Person(mongoengine.Document):
name = mongoengine.StringField()
address = mongoengine.EmbeddedDocumentField(Address)
class AddressFactory(MongoEngineFactory):
class Meta:
model = Address
street = factory.Sequence(lambda n: 'street%d' % n)
class PersonFactory(MongoEngineFactory):
class Meta:
model = Person
name = factory.Sequence(lambda n: 'name%d' % n)
address = factory.SubFactory(AddressFactory)
@unittest.skipIf(mongoengine is None, "mongoengine not installed.")
class MongoEngineTestCase(unittest.TestCase):
db_name = os.environ.get('MONGO_DATABASE', 'factory_boy_test')
db_host = os.environ.get('MONGO_HOST', 'localhost')
db_port = int(os.environ.get('MONGO_PORT', '27017'))
server_timeout_ms = int(os.environ.get('MONGO_TIMEOUT', '300'))
@classmethod
def setUpClass(cls):
from pymongo import read_preferences as mongo_rp
cls.db = mongoengine.connect(
db=cls.db_name,
host=cls.db_host,
port=cls.db_port,
# PyMongo>=2.1 requires an explicit read_preference.
read_preference=mongo_rp.ReadPreference.PRIMARY,
# PyMongo>=2.1 has a 20s timeout, use 100ms instead
serverselectiontimeoutms=cls.server_timeout_ms,
)
@classmethod
def tearDownClass(cls):
cls.db.drop_database(cls.db_name)
def setUp(self):
mongoengine.connect('factory_boy_test')
def test_build(self):
std = PersonFactory.build()
self.assertEqual('name0', std.name)
self.assertEqual('street0', std.address.street)
self.assertIsNone(std.id)
def test_creation(self):
std1 = PersonFactory.create()
self.assertEqual('name1', std1.name)
self.assertEqual('street1', std1.address.street)
self.assertIsNotNone(std1.id)<|fim▁end|> | |
<|file_name|>validators.py<|end_file_name|><|fim▁begin|>from django.core import validators
from django.utils.deconstruct import deconstructible
from django.utils.translation import gettext_lazy as _
@deconstructible<|fim▁hole|>class UsernameValidator(validators.RegexValidator):
regex = r'^[\w.]+$'
message = _(
'Enter a valid username. This value may contain only letters, '
'numbers, underscores and periods.'
)<|fim▁end|> | |
<|file_name|>tests.py<|end_file_name|><|fim▁begin|>from operator import attrgetter
from django.contrib.contenttypes.models import ContentType
from django.contrib.sessions.backends.db import SessionStore
from django.db.models import Count
from django.db.models.loading import cache
from django.test import TestCase
from models import (ResolveThis, Item, RelatedItem, Child, Leaf, Proxy,
SimpleItem, Feature)
class DeferRegressionTest(TestCase):
def test_basic(self):
# Deferred fields should really be deferred and not accidentally use
# the field's default value just because they aren't passed to __init__
Item.objects.create(name="first", value=42)
obj = Item.objects.only("name", "other_value").get(name="first")
# Accessing "name" doesn't trigger a new database query. Accessing
# "value" or "text" should.
def test():
self.assertEqual(obj.name, "first")
self.assertEqual(obj.other_value, 0)
self.assertNumQueries(0, test)
def test():
self.assertEqual(obj.value, 42)
self.assertNumQueries(1, test)
def test():
self.assertEqual(obj.text, "xyzzy")
self.assertNumQueries(1, test)
def test():
self.assertEqual(obj.text, "xyzzy")
self.assertNumQueries(0, test)
# Regression test for #10695. Make sure different instances don't
# inadvertently share data in the deferred descriptor objects.
i = Item.objects.create(name="no I'm first", value=37)
items = Item.objects.only("value").order_by("-value")
self.assertEqual(items[0].name, "first")
self.assertEqual(items[1].name, "no I'm first")
RelatedItem.objects.create(item=i)
r = RelatedItem.objects.defer("item").get()
self.assertEqual(r.item_id, i.id)
self.assertEqual(r.item, i)
# Some further checks for select_related() and inherited model
# behaviour (regression for #10710).
c1 = Child.objects.create(name="c1", value=42)
c2 = Child.objects.create(name="c2", value=37)
Leaf.objects.create(name="l1", child=c1, second_child=c2)
obj = Leaf.objects.only("name", "child").select_related()[0]
self.assertEqual(obj.child.name, "c1")
self.assertQuerysetEqual(
Leaf.objects.select_related().only("child__name", "second_child__name"), [
"l1",
],
attrgetter("name")<|fim▁hole|> # Models instances with deferred fields should still return the same
# content types as their non-deferred versions (bug #10738).
ctype = ContentType.objects.get_for_model
c1 = ctype(Item.objects.all()[0])
c2 = ctype(Item.objects.defer("name")[0])
c3 = ctype(Item.objects.only("name")[0])
self.assertTrue(c1 is c2 is c3)
# Regression for #10733 - only() can be used on a model with two
# foreign keys.
results = Leaf.objects.only("name", "child", "second_child").select_related()
self.assertEqual(results[0].child.name, "c1")
self.assertEqual(results[0].second_child.name, "c2")
results = Leaf.objects.only("name", "child", "second_child", "child__name", "second_child__name").select_related()
self.assertEqual(results[0].child.name, "c1")
self.assertEqual(results[0].second_child.name, "c2")
# Test for #12163 - Pickling error saving session with unsaved model
# instances.
SESSION_KEY = '2b1189a188b44ad18c35e1baac6ceead'
item = Item()
item._deferred = False
s = SessionStore(SESSION_KEY)
s.clear()
s["item"] = item
s.save()
s = SessionStore(SESSION_KEY)
s.modified = True
s.save()
i2 = s["item"]
self.assertFalse(i2._deferred)
# Regression for #11936 - loading.get_models should not return deferred
# models by default.
klasses = sorted(
cache.get_models(cache.get_app("defer_regress")),
key=lambda klass: klass.__name__
)
self.assertEqual(
klasses, [
Child,
Feature,
Item,
Leaf,
Proxy,
RelatedItem,
ResolveThis,
SimpleItem,
]
)
klasses = sorted(
map(
attrgetter("__name__"),
cache.get_models(
cache.get_app("defer_regress"), include_deferred=True
),
)
)
self.assertEqual(
klasses, [
"Child",
"Child_Deferred_value",
"Feature",
"Item",
"Item_Deferred_name",
"Item_Deferred_name_other_value_text",
"Item_Deferred_name_other_value_value",
"Item_Deferred_other_value_text_value",
"Item_Deferred_text_value",
"Leaf",
"Leaf_Deferred_child_id_second_child_id_value",
"Leaf_Deferred_name_value",
"Leaf_Deferred_second_child_value",
"Leaf_Deferred_value",
"Proxy",
"RelatedItem",
"RelatedItem_Deferred_",
"RelatedItem_Deferred_item_id",
"ResolveThis",
"SimpleItem",
]
)
# Regression for #16409 - make sure defer() and only() work with annotate()
self.assertIsInstance(list(SimpleItem.objects.annotate(Count('feature')).defer('name')), list)
self.assertIsInstance(list(SimpleItem.objects.annotate(Count('feature')).only('name')), list)
def test_only_and_defer_usage_on_proxy_models(self):
# Regression for #15790 - only() broken for proxy models
proxy = Proxy.objects.create(name="proxy", value=42)
msg = 'QuerySet.only() return bogus results with proxy models'
dp = Proxy.objects.only('other_value').get(pk=proxy.pk)
self.assertEqual(dp.name, proxy.name, msg=msg)
self.assertEqual(dp.value, proxy.value, msg=msg)
# also test things with .defer()
msg = 'QuerySet.defer() return bogus results with proxy models'
dp = Proxy.objects.defer('name', 'text', 'value').get(pk=proxy.pk)
self.assertEqual(dp.name, proxy.name, msg=msg)
self.assertEqual(dp.value, proxy.value, msg=msg)
def test_resolve_columns(self):
rt = ResolveThis.objects.create(num=5.0, name='Foobar')
qs = ResolveThis.objects.defer('num')
self.assertEqual(1, qs.count())
self.assertEqual('Foobar', qs[0].name)<|fim▁end|> | )
|
<|file_name|>celery.py<|end_file_name|><|fim▁begin|># -*- coding: utf-8 -*-
# Copyright (C) 2014-2017 Andrey Antukh <[email protected]>
# Copyright (C) 2014-2017 Jesús Espino <[email protected]>
# Copyright (C) 2014-2017 David Barragán <[email protected]>
# Copyright (C) 2014-2017 Alejandro Alonso <[email protected]>
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#<|fim▁hole|>
import os
from celery import Celery
os.environ.setdefault('DJANGO_SETTINGS_MODULE', 'settings')
from django.conf import settings
try:
from settings import celery_local as celery_settings
except ImportError:
from settings import celery as celery_settings
app = Celery('taiga')
app.config_from_object(celery_settings)
app.autodiscover_tasks(lambda: settings.INSTALLED_APPS)<|fim▁end|> | # You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>. |
<|file_name|>models.py<|end_file_name|><|fim▁begin|>import json
import django
from django.db import models
from django.conf import settings
from django.contrib.contenttypes.models import ContentType
from django.utils.translation import ugettext_lazy as _
from django.core.urlresolvers import reverse
from django.core.serializers.json import DjangoJSONEncoder
from django.db.models.base import ModelBase
from django.utils.encoding import smart_unicode
from django.db.models.signals import post_syncdb
from django.contrib.auth.models import Permission
import sys
import datetime
import decimal
if 4 < django.VERSION[1] < 7:
AUTH_USER_MODEL = django.contrib.auth.get_user_model()
else:
AUTH_USER_MODEL = getattr(settings, 'AUTH_USER_MODEL', 'auth.User')
def add_view_permissions(sender, **kwargs):
"""
This syncdb hooks takes care of adding a view permission too all our
content types.
"""
argv = sys.argv
permissions_with_tests = getattr(settings, "XADMIN_TEST_VIEW_PERMISSIONS", True)
if not permissions_with_tests and len(argv) > 1 \
and (argv[1] == "test" or argv[1] == "jenkins"):
return
# for each of our content types
for content_type in ContentType.objects.all():
# build our permission slug
codename = "view_%s" % content_type.model
# if it doesn't exist..
if not Permission.objects.filter(content_type=content_type, codename=codename):
# add it
Permission.objects.create(content_type=content_type,
codename=codename,
name="Can view %s" % content_type.name)
#print "Added view permission for %s" % content_type.name
# check for all our view permissions after a syncdb
post_syncdb.connect(add_view_permissions)
class Bookmark(models.Model):
title = models.CharField(_(u'Title'), max_length=128)
user = models.ForeignKey(AUTH_USER_MODEL, verbose_name=_(u"user"), blank=True, null=True)
url_name = models.CharField(_(u'Url Name'), max_length=64)
content_type = models.ForeignKey(ContentType)
query = models.CharField(_(u'Query String'), max_length=1000, blank=True)
is_share = models.BooleanField(_(u'Is Shared'), default=False)
@property
def url(self):
base_url = reverse(self.url_name)
if self.query:
base_url = base_url + '?' + self.query
return base_url
def __unicode__(self):
return self.title
class Meta:
verbose_name = _(u'Bookmark')
verbose_name_plural = _('Bookmarks')
class JSONEncoder(DjangoJSONEncoder):
def default(self, o):<|fim▁hole|> elif isinstance(o, datetime.datetime):
return o.strftime('%Y-%m-%d %H:%M:%S')
elif isinstance(o, decimal.Decimal):
return str(o)
elif isinstance(o, ModelBase):
return '%s.%s' % (o._meta.app_label, o._meta.model_name)
else:
try:
return super(JSONEncoder, self).default(o)
except Exception:
return smart_unicode(o)
class UserSettings(models.Model):
user = models.ForeignKey(AUTH_USER_MODEL, verbose_name=_(u"user"))
key = models.CharField(_('Settings Key'), max_length=256)
value = models.TextField(_('Settings Content'))
def json_value(self):
return json.loads(self.value)
def set_json(self, obj):
self.value = json.dumps(obj, cls=JSONEncoder, ensure_ascii=False)
def __unicode__(self):
return "%s %s" % (self.user, self.key)
class Meta:
verbose_name = _(u'User Setting')
verbose_name_plural = _('User Settings')
class UserWidget(models.Model):
user = models.ForeignKey(AUTH_USER_MODEL, verbose_name=_(u"user"))
page_id = models.CharField(_(u"Page"), max_length=256)
widget_type = models.CharField(_(u"Widget Type"), max_length=50)
value = models.TextField(_(u"Widget Params"))
def get_value(self):
value = json.loads(self.value)
value['id'] = self.id
value['type'] = self.widget_type
return value
def set_value(self, obj):
self.value = json.dumps(obj, cls=JSONEncoder, ensure_ascii=False)
def save(self, *args, **kwargs):
created = self.pk is None
super(UserWidget, self).save(*args, **kwargs)
if created:
try:
portal_pos = UserSettings.objects.get(
user=self.user, key="dashboard:%s:pos" % self.page_id)
portal_pos.value = "%s,%s" % (self.pk, portal_pos.value) if portal_pos.value else self.pk
portal_pos.save()
except Exception:
pass
def __unicode__(self):
return "%s %s widget" % (self.user, self.widget_type)
class Meta:
verbose_name = _(u'User Widget')
verbose_name_plural = _('User Widgets')<|fim▁end|> | if isinstance(o, datetime.date):
return o.strftime('%Y-%m-%d') |
<|file_name|>main.rs<|end_file_name|><|fim▁begin|><|fim▁hole|><|fim▁end|> | uucore::bin!(uu_tee); |
<|file_name|>Forwarding.py<|end_file_name|><|fim▁begin|>#!/usr/bin/python
# -*- coding: utf-8 -*-
# Copyright 2010 British Broadcasting Corporation and Kamaelia Contributors(1)
#
# (1) Kamaelia Contributors are listed in the AUTHORS file and at
# http://www.kamaelia.org/AUTHORS - please extend this file,
# not this notice.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import time
import Axon
from Kamaelia.Util.Backplane import *
from Kamaelia.Util.Console import *
from Kamaelia.Chassis.Pipeline import Pipeline
class Source(Axon.ThreadedComponent.threadedcomponent):
value = 1
sleep = 1
def main(self):
while 1:
self.send(str(self.value), "outbox")
time.sleep(self.sleep)
Backplane("broadcast").activate()
<|fim▁hole|>Pipeline(
Source(),
SubscribeTo("broadcast"),
ConsoleEchoer(),
).activate()
Pipeline(
ConsoleReader(),
PublishTo("broadcast", forwarder=True),
ConsoleEchoer(),
).run()<|fim▁end|> | |
<|file_name|>MysqlHelper.py<|end_file_name|><|fim▁begin|>#encoding=utf-8
import pymysql
import json
class MysqlHelper:
"""mysql 帮助类"""
@staticmethod
def insert(word,asymbol,esymbol,explain,cizu,liju,xiangguancihui,aspoken,espoken):
db=pymysql.connect(host="192.168.180.187",user="root",password="123456",db="lytest",charset="utf8")
cursor=db.cursor()
print(word.encode("utf8"))
print("--------------------------------insert into mysql db")
cursor.execute("insert into mfg_t_wordtest (f_word,f_asymbol,f_esymbol,f_explain,f_cizu,f_liju,f_xiangguancihui,f_aspoken,f_espoken,f_biaoji,f_type) values(%s,%s,%s,%s,%s,%s,%s,%s,%s,0,0)",(word,asymbol,esymbol,"{"+json.dumps(explain,ensure_ascii=False,indent=2)+"}",json.dumps(cizu,ensure_ascii=False,indent=2),json.dumps(liju,ensure_ascii=False,indent=2),json.dumps(xiangguancihui,ensure_ascii=False,indent=2),aspoken,espoken))
db.commit()<|fim▁hole|> db.close()<|fim▁end|> | |
<|file_name|>default.cc<|end_file_name|><|fim▁begin|>// { dg-options "-std=c++0x" }
// { dg-require-cstdint "" }
//
// 2008-11-24 Edward M. Smith-Rowland <[email protected]>
//
// Copyright (C) 2008-2013 Free Software Foundation, Inc.
//
// This file is part of the GNU ISO C++ Library. This library is free
// software; you can redistribute it and/or modify it under the
// terms of the GNU General Public License as published by the
// Free Software Foundation; either version 3, or (at your option)
// any later version.
//
// This library is distributed in the hope that it will be useful,
// but WITHOUT ANY WARRANTY; without even the implied warranty of
// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
// GNU General Public License for more details.<|fim▁hole|>// with this library; see the file COPYING3. If not see
// <http://www.gnu.org/licenses/>.
#include <random>
#include <testsuite_hooks.h>
void
test01()
{
bool test __attribute__((unused)) = true;
std::subtract_with_carry_engine<unsigned long, 24, 10, 24> x;
VERIFY( x.min() == 0 );
VERIFY( x.max() == ((1UL << 24) - 1) );
VERIFY( x() == 15039276 );
}
int main()
{
test01();
return 0;
}<|fim▁end|> | //
// You should have received a copy of the GNU General Public License along |
<|file_name|>untagged_value.rs<|end_file_name|><|fim▁begin|>#[macro_use]
extern crate unsafe_unions;
unsafe_unions!{
union UntaggedValue: [u64; 3] {<|fim▁hole|> string: String,
}
}
fn main(){
unsafe {
let mut val = UntaggedValue::<()>::integer(200);
assert_eq!(*val.by_ref().integer(), 200);
*val.by_mut().boolean() = false;
assert_eq!(*val.by_ref().boolean(), false);
val.write().string("foobar".to_owned());
assert_eq!(&**val.by_ref().string(), "foobar");
drop(val.read().string());
}
}<|fim▁end|> | nil: (),
boolean: bool,
integer: i64,
floating: f64, |
<|file_name|>objecttoreturn.py<|end_file_name|><|fim▁begin|>try:
import exceptions
except ImportError: # Python 3
import builtins as exceptions
class ObjectToReturn:
<|fim▁hole|> self.name = name
def __str__(self):
return self.name
def exception(self, name, msg=""):
exception = getattr(exceptions, name)
raise exception(msg)<|fim▁end|> | def __init__(self, name): |
<|file_name|>mod.rs<|end_file_name|><|fim▁begin|><|fim▁hole|>pub mod solver;
pub mod utilities;
pub mod target_function;<|fim▁end|> | pub mod inf_num;
pub mod function;
pub mod constraint; |
<|file_name|>cgnv6_ddos_protection_stats.py<|end_file_name|><|fim▁begin|>from a10sdk.common.A10BaseClass import A10BaseClass
class Stats(A10BaseClass):
"""This class does not support CRUD Operations please use parent.
:param ip_other_block_alloc: {"optional": true, "size": "8", "type": "number", "oid": "17", "format": "counter"}
:param entry_match_drop: {"optional": true, "size": "8", "type": "number", "oid": "6", "format": "counter"}
:param ip_port_block_free: {"optional": true, "size": "8", "type": "number", "oid": "15", "format": "counter"}
:param ip_node_alloc_failure: {"optional": true, "size": "8", "type": "number", "oid": "13", "format": "counter"}
:param entry_list_alloc_failure: {"optional": true, "size": "8", "type": "number", "oid": "10", "format": "counter"}
:param ip_node_alloc: {"optional": true, "size": "8", "type": "number", "oid": "11", "format": "counter"}
:param entry_added_shadow: {"optional": true, "size": "8", "type": "number", "oid": "20", "format": "counter"}
:param ip_port_block_alloc_failure: {"optional": true, "size": "8", "type": "number", "oid": "16", "format": "counter"}
:param ip_other_block_alloc_failure: {"optional": true, "size": "8", "type": "number", "oid": "19", "format": "counter"}
:param entry_removed_from_hw: {"optional": true, "size": "8", "type": "number", "oid": "4", "format": "counter"}
:param entry_deleted: {"optional": true, "size": "8", "type": "number", "oid": "2", "format": "counter"}
:param entry_list_alloc: {"optional": true, "size": "8", "type": "number", "oid": "8", "format": "counter"}
:param entry_list_free: {"optional": true, "size": "8", "type": "number", "oid": "9", "format": "counter"}
:param entry_added_to_hw: {"optional": true, "size": "8", "type": "number", "oid": "3", "format": "counter"}
:param ip_node_free: {"optional": true, "size": "8", "type": "number", "oid": "12", "format": "counter"}
:param entry_added: {"optional": true, "size": "8", "type": "number", "oid": "1", "format": "counter"}
:param ip_other_block_free: {"optional": true, "size": "8", "type": "number", "oid": "18", "format": "counter"}
:param entry_invalidated: {"optional": true, "size": "8", "type": "number", "oid": "21", "format": "counter"}
:param ip_port_block_alloc: {"optional": true, "size": "8", "type": "number", "oid": "14", "format": "counter"}
:param entry_match_drop_hw: {"optional": true, "size": "8", "type": "number", "oid": "7", "format": "counter"}
:param hw_out_of_entries: {"optional": true, "size": "8", "type": "number", "oid": "5", "format": "counter"}
:param DeviceProxy: The device proxy for REST operations and session handling. Refer to `common/device_proxy.py`
"""
def __init__(self, **kwargs):
self.ERROR_MSG = ""
self.b_key = "stats"
self.DeviceProxy = ""
self.ip_other_block_alloc = ""
self.entry_match_drop = ""
self.ip_port_block_free = ""
self.ip_node_alloc_failure = ""
self.entry_list_alloc_failure = ""
self.ip_node_alloc = ""
self.entry_added_shadow = ""
self.ip_port_block_alloc_failure = ""
self.ip_other_block_alloc_failure = ""
self.entry_removed_from_hw = ""
self.entry_deleted = ""
self.entry_list_alloc = ""
self.entry_list_free = ""
self.entry_added_to_hw = ""
self.ip_node_free = ""
self.entry_added = ""
self.ip_other_block_free = ""
self.entry_invalidated = ""
self.ip_port_block_alloc = ""
self.entry_match_drop_hw = ""
self.hw_out_of_entries = ""
for keys, value in kwargs.items():
setattr(self,keys, value)
class DdosProtection(A10BaseClass):
<|fim▁hole|> Class ddos-protection supports CRUD Operations and inherits from `common/A10BaseClass`.
This class is the `"PARENT"` class for this module.`
:param DeviceProxy: The device proxy for REST operations and session handling. Refer to `common/device_proxy.py`
URL for this object::
`https://<Hostname|Ip address>//axapi/v3/cgnv6/ddos-protection/stats`.
"""
def __init__(self, **kwargs):
self.ERROR_MSG = ""
self.required=[]
self.b_key = "ddos-protection"
self.a10_url="/axapi/v3/cgnv6/ddos-protection/stats"
self.DeviceProxy = ""
self.stats = {}
for keys, value in kwargs.items():
setattr(self,keys, value)<|fim▁end|> | """Class Description::
Statistics for the object ddos-protection.
|
<|file_name|>crawl.py<|end_file_name|><|fim▁begin|>#!python
import sys
import io
import re
import urllib
import urllib2
import urlparse
import lxml.etree
def get_outlinks(url):
'''
url: the url to the page to crawl
'''
result = []
if url is None:
return result
html = None
resp = None
try:
url = url.strip()
<|fim▁hole|> html = resp.read()
except (urllib2.URLError, Exception) as e:
print "can't access {0}: {1}".format(url, e)
finally:
if resp is not None:
resp.close()
if html is None:
return result
html_parser = lxml.etree.HTMLParser()
try:
uhtml = html.decode('utf-8', 'ignore')
tree = lxml.etree.parse(io.StringIO(uhtml), html_parser)
anchors = tree.xpath('//a')
for anchor in anchors:
href = anchor.attrib.get('href', None)
if href is not None:
href = href.strip()
dest = urlparse.urljoin(url, href)
if dest.startswith('http://'):
result.append(dest)
except Exception as e:
print "can't parse {0}: {1}".format(url, e)
return result
def crawl(urls,
max_to_handle,
handle_url,
crawl_test = None,
handle_test = None):
handled = []
visited = set()
i = 0
p = 0
while len(handled) < max_to_handle and i < len(urls):
url = urls[i]
if url not in visited and crawl_test(url):
outlinks = get_outlinks(url)
visited.add(url)
urls.extend(outlinks)
if handle_test(url) and url not in handled:
handle_url(url, p + 1, max_to_handle)
handled.append(url)
p += 1
i += 1
return handled
def call_semantics_service(url, i, max_to_handle):
service_pattern = "http://ecology-service.cse.tamu.edu/BigSemanticsService/metadata.xml?url={0}"
qurl = urllib.quote(url)
surl = service_pattern.format(qurl)
resp = urllib2.urlopen(surl)
content = resp.read()
is_downloaded = content.find('DOWNLOAD_DONE') >= 0
is_typed = content.find('</amazon_product>') >= 0
if resp.code == 200 and is_downloaded and is_typed:
print "[{0}/{1}] service called on {2}".format(
i, max_to_handle, url)
else:
print "[{0}/{1}] error calling service: {2}: c={3}, d={4}, t={5}".format(
i, max_to_handle, surl, resp.code, is_downloaded, is_typed)
def call_downloader_service(url, i, max_to_handle):
agent = "Mozilla%2F5.0%20(Windows%20NT%206.2%3B%20Win64%3B%20x64)%20AppleWebKit%2F537.36%20(KHTML%2C%20like%20Gecko)%20Chrome%2F32.0.1667.0%20Safari%2F537.36"
service_pattern = "http://ecology-service.cse.tamu.edu/DownloaderPool/page/download.xml?url={0}&agent={1}"
qurl = urllib.quote(url)
resp = urllib2.urlopen(service_pattern.format(qurl, agent))
if resp.code == 200:
print "[{0}/{1}] successful downloading invocation on {2}".format(
i, max_to_handle, url)
else:
print "[{0}/{1}] downloading error code {2} for {3}".format(
i, max_to_handle, resp.code, url)
if __name__ == '__main__':
if len(sys.argv) < 3:
print "usage: {0} <url_lst_file> <max_to_handle>".format(sys.argv[0])
quit()
f = open(sys.argv[1])
urls = f.readlines()
n = int(sys.argv[2])
crawl_test = lambda(url): url.find('amazon.com') > 0;
p_prod = r'^http://www.amazon.com/([^/]+/)?dp/[^/]+';
handle_test = lambda(url): re.search(p_prod, url) is not None;
handled = crawl(urls, n, call_semantics_service, crawl_test, handle_test);
for url in handled:
print url<|fim▁end|> | resp = urllib2.urlopen(url)
if resp.code == 200:
|
<|file_name|>sync-rwlock-write-mode-shouldnt-escape.rs<|end_file_name|><|fim▁begin|>// Copyright 2012 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
// error-pattern: lifetime of variable does not enclose its declaration
extern mod extra;
use extra::sync;
fn main() {
let x = ~sync::RWLock::new();
let mut y = None;
do x.write_downgrade |write_mode| {
y = Some(write_mode);
}
// Adding this line causes a method unification failure instead<|fim▁hole|><|fim▁end|> | // do (&option::unwrap(y)).write { }
} |
<|file_name|>db.py<|end_file_name|><|fim▁begin|># Copyright 2015-2016 Hewlett Packard Enterprise Development Company, LP
#
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from neutron_lib.api.definitions import constants as api_const
from neutron_lib.api.definitions import l3 as l3_apidef
from neutron_lib.api.definitions import network as net_def
from neutron_lib.callbacks import events
from neutron_lib.callbacks import registry
from neutron_lib.callbacks import resources
from neutron_lib.db import api as db_api
from neutron_lib.db import utils as db_utils
from neutron_lib import exceptions as n_exc
from neutron_lib.objects import exceptions as obj_exc
from neutron_lib.plugins import constants
from neutron_lib.plugins import directory
from neutron_lib.plugins import utils as p_utils
from oslo_log import log as logging
from neutron._i18n import _
from neutron.common import exceptions as c_exc
from neutron.db import _resource_extend as resource_extend
from neutron.db import common_db_mixin
from neutron.objects import auto_allocate as auto_allocate_obj
from neutron.objects import base as base_obj
from neutron.objects import network as net_obj
from neutron.services.auto_allocate import exceptions
LOG = logging.getLogger(__name__)
CHECK_REQUIREMENTS = 'dry-run'
def _ensure_external_network_default_value_callback(
resource, event, trigger, **kwargs):
"""Ensure the is_default db field matches the create/update request."""
# TODO(boden): remove shim once all callbacks use payloads
if 'payload' in kwargs:
_request = kwargs['payload'].request_body
_context = kwargs['payload'].context
_network = kwargs['payload'].desired_state
_orig = kwargs['payload'].states[0]
else:
_request = kwargs['request']
_context = kwargs['context']
_network = kwargs['network']
_orig = kwargs.get('original_network')
@db_api.retry_if_session_inactive()
def _do_ensure_external_network_default_value_callback(
context, request, orig, network):
is_default = request.get(api_const.IS_DEFAULT)
if is_default is None:
return
if is_default:
# ensure only one default external network at any given time
pager = base_obj.Pager(limit=1)
objs = net_obj.ExternalNetwork.get_objects(context,
_pager=pager, is_default=True)
if objs:
if objs[0] and network['id'] != objs[0].network_id:
raise exceptions.DefaultExternalNetworkExists(
net_id=objs[0].network_id)
if orig and orig.get(api_const.IS_DEFAULT) == is_default:
return
network[api_const.IS_DEFAULT] = is_default
# Reflect the status of the is_default on the create/update request
obj = net_obj.ExternalNetwork.get_object(context,
network_id=network['id'])
if obj:
obj.is_default = is_default
obj.update()
_do_ensure_external_network_default_value_callback(
_context, _request, _orig, _network)
@resource_extend.has_resource_extenders
class AutoAllocatedTopologyMixin(common_db_mixin.CommonDbMixin):
def __new__(cls, *args, **kwargs):
# NOTE(kevinbenton): we subscribe on object construction because
# the tests blow away the callback manager for each run
new = super(AutoAllocatedTopologyMixin, cls).__new__(cls, *args,
**kwargs)
registry.subscribe(_ensure_external_network_default_value_callback,
resources.NETWORK, events.PRECOMMIT_UPDATE)
registry.subscribe(_ensure_external_network_default_value_callback,
resources.NETWORK, events.PRECOMMIT_CREATE)
return new
# TODO(armax): if a tenant modifies auto allocated resources under
# the hood the behavior of the get_auto_allocated_topology API is
# undetermined. Consider adding callbacks to deal with the following
# situations:
# - insert subnet -> plug router interface
# - delete router -> remove the entire topology
# - update subnet -> prevent operation
# - update router gateway -> prevent operation
# - ...
@property
def core_plugin(self):
if not getattr(self, '_core_plugin', None):
self._core_plugin = directory.get_plugin()
return self._core_plugin
@property
def l3_plugin(self):
if not getattr(self, '_l3_plugin', None):
self._l3_plugin = directory.get_plugin(constants.L3)
return self._l3_plugin
@staticmethod
@resource_extend.extends([net_def.COLLECTION_NAME])
def _extend_external_network_default(net_res, net_db):
"""Add is_default field to 'show' response."""
if net_db.external is not None:
net_res[api_const.IS_DEFAULT] = net_db.external.is_default
return net_res
def get_auto_allocated_topology(self, context, tenant_id, fields=None):
"""Return tenant's network associated to auto-allocated topology.
The topology will be provisioned upon return, if network is missing.
"""
fields = fields or []
tenant_id = self._validate(context, tenant_id)
if CHECK_REQUIREMENTS in fields:
# for dry-run requests, simply validates that subsequent
# requests can be fulfilled based on a set of requirements
# such as existence of default networks, pools, etc.
return self._check_requirements(context, tenant_id)
elif fields:
raise n_exc.BadRequest(resource='auto_allocate',
msg=_("Unrecognized field"))
# Check for an existent topology
network_id = self._get_auto_allocated_network(context, tenant_id)
if network_id:
return self._response(network_id, tenant_id, fields=fields)
# See if we indeed have an external network to connect to, otherwise
# we will fail fast
default_external_network = self._get_default_external_network(
context)
# If we reach this point, then we got some work to do!
network_id = self._build_topology(
context, tenant_id, default_external_network)
return self._response(network_id, tenant_id, fields=fields)
def delete_auto_allocated_topology(self, context, tenant_id):
tenant_id = self._validate(context, tenant_id)
topology = self._get_auto_allocated_topology(context, tenant_id)
if topology:
subnets = self.core_plugin.get_subnets(
context,
filters={'network_id': [topology['network_id']]})
self._cleanup(
context, network_id=topology['network_id'],
router_id=topology['router_id'], subnets=subnets)
def _build_topology(self, context, tenant_id, default_external_network):
"""Build the network topology and returns its network UUID."""
try:
subnets = self._provision_tenant_private_network(
context, tenant_id)
network_id = subnets[0]['network_id']
router = self._provision_external_connectivity(
context, default_external_network, subnets, tenant_id)
network_id = self._save(
context, tenant_id, network_id, router['id'], subnets)
return network_id
except exceptions.UnknownProvisioningError as e:
# Clean partially provisioned topologies, and reraise the
# error. If it can be retried, so be it.
LOG.error("Unknown error while provisioning topology for "
"tenant %(tenant_id)s. Reason: %(reason)s",
{'tenant_id': tenant_id, 'reason': e})
self._cleanup(
context, network_id=e.network_id,
router_id=e.router_id, subnets=e.subnets)
raise e.error
def _check_requirements(self, context, tenant_id):
"""Raise if requirements are not met."""
self._get_default_external_network(context)
try:
self._get_supported_subnetpools(context)
except n_exc.NotFound:
raise exceptions.AutoAllocationFailure(
reason=_("No default subnetpools defined"))
return {'id': 'dry-run=pass', 'tenant_id': tenant_id}
def _validate(self, context, tenant_id):
"""Validate and return the tenant to be associated to the topology."""
if tenant_id == 'None':
# NOTE(HenryG): the client might be sending us astray by
# passing no tenant; this is really meant to be the tenant
# issuing the request, therefore let's get it from the context
tenant_id = context.tenant_id
if not context.is_admin and tenant_id != context.tenant_id:
raise n_exc.NotAuthorized()
return tenant_id
<|fim▁hole|> """Return the auto allocated topology record if present or None."""
return auto_allocate_obj.AutoAllocatedTopology.get_object(
context, project_id=tenant_id)
def _get_auto_allocated_network(self, context, tenant_id):
"""Get the auto allocated network for the tenant."""
network = self._get_auto_allocated_topology(context, tenant_id)
if network:
return network['network_id']
@staticmethod
def _response(network_id, tenant_id, fields=None):
"""Build response for auto-allocated network."""
res = {
'id': network_id,
'tenant_id': tenant_id
}
return db_utils.resource_fields(res, fields)
def _get_default_external_network(self, context):
"""Get the default external network for the deployment."""
default_external_networks = net_obj.ExternalNetwork.get_objects(
context, is_default=True)
if not default_external_networks:
LOG.error("Unable to find default external network "
"for deployment, please create/assign one to "
"allow auto-allocation to work correctly.")
raise exceptions.AutoAllocationFailure(
reason=_("No default router:external network"))
if len(default_external_networks) > 1:
LOG.error("Multiple external default networks detected. "
"Network %s is true 'default'.",
default_external_networks[0]['network_id'])
return default_external_networks[0].network_id
def _get_supported_subnetpools(self, context):
"""Return the default subnet pools available."""
default_subnet_pools = [
self.core_plugin.get_default_subnetpool(
context, ver) for ver in (4, 6)
]
available_pools = [
s for s in default_subnet_pools if s
]
if not available_pools:
LOG.error("No default pools available")
raise n_exc.NotFound()
return available_pools
def _provision_tenant_private_network(self, context, tenant_id):
"""Create a tenant private network/subnets."""
network = None
try:
network_args = {
'name': 'auto_allocated_network',
'admin_state_up': False,
'tenant_id': tenant_id,
'shared': False
}
network = p_utils.create_network(
self.core_plugin, context, {'network': network_args})
subnets = []
for pool in self._get_supported_subnetpools(context):
subnet_args = {
'name': 'auto_allocated_subnet_v%s' % pool['ip_version'],
'network_id': network['id'],
'tenant_id': tenant_id,
'ip_version': pool['ip_version'],
'subnetpool_id': pool['id'],
}
subnets.append(p_utils.create_subnet(
self.core_plugin, context, {'subnet': subnet_args}))
return subnets
except (c_exc.SubnetAllocationError, ValueError,
n_exc.BadRequest, n_exc.NotFound) as e:
LOG.error("Unable to auto allocate topology for tenant "
"%(tenant_id)s due to missing or unmet "
"requirements. Reason: %(reason)s",
{'tenant_id': tenant_id, 'reason': e})
if network:
self._cleanup(context, network['id'])
raise exceptions.AutoAllocationFailure(
reason=_("Unable to provide tenant private network"))
except Exception as e:
network_id = network['id'] if network else None
raise exceptions.UnknownProvisioningError(e, network_id=network_id)
def _provision_external_connectivity(
self, context, default_external_network, subnets, tenant_id):
"""Uplink tenant subnet(s) to external network."""
router_args = {
'name': 'auto_allocated_router',
l3_apidef.EXTERNAL_GW_INFO: {
'network_id': default_external_network},
'tenant_id': tenant_id,
'admin_state_up': True
}
router = None
attached_subnets = []
try:
router = self.l3_plugin.create_router(
context, {'router': router_args})
for subnet in subnets:
self.l3_plugin.add_router_interface(
context, router['id'], {'subnet_id': subnet['id']})
attached_subnets.append(subnet)
return router
except n_exc.BadRequest as e:
LOG.error("Unable to auto allocate topology for tenant "
"%(tenant_id)s because of router errors. "
"Reason: %(reason)s",
{'tenant_id': tenant_id, 'reason': e})
router_id = router['id'] if router else None
self._cleanup(context,
network_id=subnets[0]['network_id'],
router_id=router_id, subnets=attached_subnets)
raise exceptions.AutoAllocationFailure(
reason=_("Unable to provide external connectivity"))
except Exception as e:
router_id = router['id'] if router else None
raise exceptions.UnknownProvisioningError(
e, network_id=subnets[0]['network_id'],
router_id=router_id, subnets=subnets)
def _save(self, context, tenant_id, network_id, router_id, subnets):
"""Save auto-allocated topology, or revert in case of DB errors."""
try:
auto_allocate_obj.AutoAllocatedTopology(
context, project_id=tenant_id, network_id=network_id,
router_id=router_id).create()
self.core_plugin.update_network(
context, network_id,
{'network': {'admin_state_up': True}})
except obj_exc.NeutronDbObjectDuplicateEntry:
LOG.debug("Multiple auto-allocated networks detected for "
"tenant %s. Attempting clean up for network %s "
"and router %s.",
tenant_id, network_id, router_id)
self._cleanup(
context, network_id=network_id,
router_id=router_id, subnets=subnets)
network_id = self._get_auto_allocated_network(context, tenant_id)
except Exception as e:
raise exceptions.UnknownProvisioningError(
e, network_id=network_id,
router_id=router_id, subnets=subnets)
return network_id
def _cleanup(self, context, network_id=None, router_id=None, subnets=None):
"""Clean up auto allocated resources."""
# Concurrent attempts to delete the topology may interleave and
# cause some operations to fail with NotFound exceptions. Rather
# than fail partially, the exceptions should be ignored and the
# cleanup should proceed uninterrupted.
if router_id:
for subnet in subnets or []:
ignore_notfound(
self.l3_plugin.remove_router_interface,
context, router_id, {'subnet_id': subnet['id']})
ignore_notfound(self.l3_plugin.delete_router, context, router_id)
if network_id:
ignore_notfound(
self.core_plugin.delete_network, context, network_id)
def ignore_notfound(func, *args, **kwargs):
"""Call the given function and pass if a `NotFound` exception is raised."""
try:
return func(*args, **kwargs)
except n_exc.NotFound:
pass<|fim▁end|> | def _get_auto_allocated_topology(self, context, tenant_id): |
<|file_name|>json.rs<|end_file_name|><|fim▁begin|>#![cfg(feature = "alloc")]
#[macro_use]
extern crate nom;
extern crate jemallocator;
#[global_allocator]
static ALLOC: jemallocator::Jemalloc = jemallocator::Jemalloc;
use nom::{Err, IResult, Offset, error::{VerboseError, VerboseErrorKind}};
use nom::{
character::complete::alphanumeric1 as alphanumeric,
bytes::complete::{take_while, tag},
multi::separated_listc,
branch::alt,
sequence::{preceded, terminated}, error::context
};
use nom::character::complete::char;
use nom::number::complete::recognize_float;
use nom::error::{ErrorKind,ParseError};
use std::str;
use std::iter::repeat;
use std::collections::HashMap;
#[derive(Debug, PartialEq)]
pub enum JsonValue {
Str(String),
Boolean(bool),
Num(f64),
Array(Vec<JsonValue>),
Object(HashMap<String, JsonValue>),
}
fn sp<'a, E: ParseError<&'a str>>(i: &'a str) ->IResult<&'a str, &'a str, E> {
let chars = " \t\r\n";
take_while(move |c| chars.contains(c))(i)
}
fn float<'a, E: ParseError<&'a str>>(i: &'a str) ->IResult<&'a str, f64, E> {
flat_map!(i, recognize_float, parse_to!(f64))
}
fn parse_str<'a, E: ParseError<&'a str>>(i: &'a str) ->IResult<&'a str, &'a str, E> {
escaped!(i, call!(alphanumeric), '\\', one_of!("\"n\\"))
}
fn string<'a, E: ParseError<&'a str>>(i: &'a str) ->IResult<&'a str, &'a str, E> {
//delimitedc(i, char('\"'), parse_str, char('\"'))
let (i, _) = char('\"')(i)?;
//context("string", |i| terminatedc(i, parse_str, char('\"')))(i)
context("string", terminated(parse_str, char('\"')))(i)
}
fn boolean<'a, E: ParseError<&'a str>>(input: &'a str) ->IResult<&'a str, bool, E> {
alt( (
|i| tag("false")(i).map(|(i,_)| (i, false)),
|i| tag("true")(i).map(|(i,_)| (i, true))
))(input)
/*
match tag::<&'static str, &'a str, E>("false")(i) {
Ok((i, _)) => Ok((i, false)),
Err(_) => tag("true")(i).map(|(i,_)| (i, true))
}
*/
}
fn array<'a, E: ParseError<&'a str>>(i: &'a str) ->IResult<&'a str, Vec<JsonValue>, E> {
let (i, _) = char('[')(i)?;
/*context(
"array",
|i| terminatedc(i,
|i| separated_listc(i, |i| precededc(i, sp, char(',')), value),
|i| precededc(i, sp, char(']')))
)(i)*/
context(
"array",
terminated(
|i| separated_listc(i, preceded(sp, char(',')), value),
preceded(sp, char(']')))
)(i)
}
fn key_value<'a, E: ParseError<&'a str>>(i: &'a str) ->IResult<&'a str, (&'a str, JsonValue), E> {
separated_pair!(i, preceded!(sp, string), preceded!(sp, char!(':')), value)
}
fn hash<'a, E: ParseError<&'a str>>(i: &'a str) ->IResult<&'a str, HashMap<String, JsonValue>, E> {
let (i, _) = char('{')(i)?;
context(
"map",
terminated(
|i| map!(i,
separated_list!(preceded!(sp, char!(',')), key_value),
|tuple_vec| tuple_vec
.into_iter()
.map(|(k, v)| (String::from(k), v))
.collect()
),
preceded(sp, char('}')))
)(i)
/*
map!(i,
delimited!(
char!('{'),
separated_list!(preceded!(sp, char!(',')), key_value),
preceded!(sp, char!('}'))
),
|tuple_vec| tuple_vec
.into_iter()
.map(|(k, v)| (String::from(k), v))
.collect()
)
*/
}
fn value<'a, E: ParseError<&'a str>>(i: &'a str) ->IResult<&'a str, JsonValue, E> {
preceded!(i,
sp,
alt!(
hash => { |h| JsonValue::Object(h) } |
array => { |v| JsonValue::Array(v) } |
string => { |s| JsonValue::Str(String::from(s)) } |
float => { |f| JsonValue::Num(f) } |
boolean => { |b| JsonValue::Boolean(b) }
))
}
fn root<'a, E: ParseError<&'a str>>(i: &'a str) ->IResult<&'a str, JsonValue, E> {
delimited!(i,
sp,
alt( (
|input| hash(input).map(|(i,h)| (i, JsonValue::Object(h))),
|input| array(input).map(|(i,v)| (i, JsonValue::Array(v)))
) ),
/*alt!(
hash => { |h| JsonValue::Object(h) } |
array => { |v| JsonValue::Array(v) }
),*/
not!(complete!(sp)))
}
fn convert_error(input: &str, e: VerboseError<&str>) -> String {
let lines: Vec<_> = input.lines().map(String::from).collect();
//println!("lines: {:#?}", lines);
let mut result = String::new();
for (i, (substring, kind)) in e.errors.iter().enumerate() {
let mut offset = input.offset(substring);
let mut line = 0;
let mut column = 0;
for (j,l) in lines.iter().enumerate() {
if offset <= l.len() {
line = j;
column = offset;
break;
} else {
offset = offset - l.len();
}
}
match kind {
VerboseErrorKind::Char(c) => {
result += &format!("{}: at line {}:\n", i, line);
result += &lines[line];
result += "\n";
if column > 0 {
result += &repeat(' ').take(column-1).collect::<String>();
}
result += "^\n";
result += &format!("expected '{}', found {}\n\n", c, substring.chars().next().unwrap());
},
VerboseErrorKind::Context(s) => {
result += &format!("{}: at line {}, in {}:\n", i, line, s);
result += &lines[line];
result += "\n";
if column > 0 {
result += &repeat(' ').take(column -1).collect::<String>();
}
result += "^\n\n";
}
_ => {}
}
}
result
}
fn main() {
let data = " { \"a\"\t: 42,<|fim▁hole|> }
} ";
println!("will try to parse:\n\n**********\n{}\n**********\n", data);
println!("basic errors - `root::<(&str, ErrorKind)>(data)`:\n{:#?}\n", root::<(&str, ErrorKind)>(data));
println!("parsed verbose: {:#?}", root::<VerboseError<&str>>(data));
match root::<VerboseError<&str>>(data) {
Err(Err::Error(e)) | Err(Err::Failure(e)) => {
println!("verbose errors - `root::<VerboseError>(data)`:\n{}", convert_error(data, e));
},
_ => panic!(),
}
}<|fim▁end|> | \"b\": [ \"x\", \"y\", 12 ] ,
\"c\": { 1\"hello\" : \"world\" |
<|file_name|>align.rs<|end_file_name|><|fim▁begin|>/* This Source Code Form is subject to the terms of the Mozilla Public
* License, v. 2.0. If a copy of the MPL was not distributed with this
* file, You can obtain one at https://mozilla.org/MPL/2.0/. */
//! Values for CSS Box Alignment properties
//!
//! https://drafts.csswg.org/css-align/
use crate::parser::{Parse, ParserContext};
use cssparser::Parser;
use std::fmt::{self, Write};
use style_traits::{CssWriter, KeywordsCollectFn, ParseError, SpecifiedValueInfo, ToCss};
bitflags! {
/// Constants shared by multiple CSS Box Alignment properties
#[derive(MallocSizeOf, ToComputedValue, ToResolvedValue, ToShmem)]
#[repr(C)]
pub struct AlignFlags: u8 {
// Enumeration stored in the lower 5 bits:
/// {align,justify}-{content,items,self}: 'auto'
const AUTO = 0;
/// 'normal'
const NORMAL = 1;
/// 'start'
const START = 2;
/// 'end'
const END = 3;
/// 'flex-start'
const FLEX_START = 4;
/// 'flex-end'
const FLEX_END = 5;
/// 'center'
const CENTER = 6;
/// 'left'
const LEFT = 7;
/// 'right'
const RIGHT = 8;
/// 'baseline'
const BASELINE = 9;
/// 'last-baseline'
const LAST_BASELINE = 10;
/// 'stretch'
const STRETCH = 11;
/// 'self-start'
const SELF_START = 12;
/// 'self-end'
const SELF_END = 13;
/// 'space-between'
const SPACE_BETWEEN = 14;
/// 'space-around'
const SPACE_AROUND = 15;
/// 'space-evenly'
const SPACE_EVENLY = 16;
// Additional flags stored in the upper bits:
/// 'legacy' (mutually exclusive w. SAFE & UNSAFE)
const LEGACY = 1 << 5;
/// 'safe'
const SAFE = 1 << 6;
/// 'unsafe' (mutually exclusive w. SAFE)
const UNSAFE = 1 << 7;
/// Mask for the additional flags above.
const FLAG_BITS = 0b11100000;
}
}
impl AlignFlags {
/// Returns the enumeration value stored in the lower 5 bits.
#[inline]
fn value(&self) -> Self {
*self & !AlignFlags::FLAG_BITS
}
}
impl ToCss for AlignFlags {
fn to_css<W>(&self, dest: &mut CssWriter<W>) -> fmt::Result
where
W: Write,
{
let extra_flags = *self & AlignFlags::FLAG_BITS;<|fim▁hole|> match extra_flags {
AlignFlags::LEGACY => {
dest.write_str("legacy")?;
if value.is_empty() {
return Ok(());
}
dest.write_char(' ')?;
},
AlignFlags::SAFE => dest.write_str("safe ")?,
AlignFlags::UNSAFE => dest.write_str("unsafe ")?,
_ => {
debug_assert_eq!(extra_flags, AlignFlags::empty());
},
}
dest.write_str(match value {
AlignFlags::AUTO => "auto",
AlignFlags::NORMAL => "normal",
AlignFlags::START => "start",
AlignFlags::END => "end",
AlignFlags::FLEX_START => "flex-start",
AlignFlags::FLEX_END => "flex-end",
AlignFlags::CENTER => "center",
AlignFlags::LEFT => "left",
AlignFlags::RIGHT => "right",
AlignFlags::BASELINE => "baseline",
AlignFlags::LAST_BASELINE => "last baseline",
AlignFlags::STRETCH => "stretch",
AlignFlags::SELF_START => "self-start",
AlignFlags::SELF_END => "self-end",
AlignFlags::SPACE_BETWEEN => "space-between",
AlignFlags::SPACE_AROUND => "space-around",
AlignFlags::SPACE_EVENLY => "space-evenly",
_ => unreachable!(),
})
}
}
/// An axis direction, either inline (for the `justify` properties) or block,
/// (for the `align` properties).
#[derive(Clone, Copy, PartialEq)]
pub enum AxisDirection {
/// Block direction.
Block,
/// Inline direction.
Inline,
}
/// Shared value for the `align-content` and `justify-content` properties.
///
/// <https://drafts.csswg.org/css-align/#content-distribution>
#[derive(
Clone,
Copy,
Debug,
Eq,
MallocSizeOf,
PartialEq,
ToComputedValue,
ToCss,
ToResolvedValue,
ToShmem,
)]
#[repr(C)]
#[cfg_attr(feature = "servo", derive(Deserialize, Serialize))]
pub struct ContentDistribution {
primary: AlignFlags,
// FIXME(https://github.com/w3c/csswg-drafts/issues/1002): This will need to
// accept fallback alignment, eventually.
}
impl ContentDistribution {
/// The initial value 'normal'
#[inline]
pub fn normal() -> Self {
Self::new(AlignFlags::NORMAL)
}
/// `start`
#[inline]
pub fn start() -> Self {
Self::new(AlignFlags::START)
}
/// The initial value 'normal'
#[inline]
pub fn new(primary: AlignFlags) -> Self {
Self { primary }
}
fn from_bits(bits: u16) -> Self {
Self {
primary: AlignFlags::from_bits_truncate(bits as u8),
}
}
fn as_bits(&self) -> u16 {
self.primary.bits() as u16
}
/// Returns whether this value is a <baseline-position>.
pub fn is_baseline_position(&self) -> bool {
matches!(
self.primary.value(),
AlignFlags::BASELINE | AlignFlags::LAST_BASELINE
)
}
/// The primary alignment
#[inline]
pub fn primary(self) -> AlignFlags {
self.primary
}
/// Parse a value for align-content / justify-content.
pub fn parse<'i, 't>(
input: &mut Parser<'i, 't>,
axis: AxisDirection,
) -> Result<Self, ParseError<'i>> {
// NOTE Please also update the `list_keywords` function below
// when this function is updated.
// Try to parse normal first
if input.try(|i| i.expect_ident_matching("normal")).is_ok() {
return Ok(ContentDistribution::normal());
}
// Parse <baseline-position>, but only on the block axis.
if axis == AxisDirection::Block {
if let Ok(value) = input.try(parse_baseline) {
return Ok(ContentDistribution::new(value));
}
}
// <content-distribution>
if let Ok(value) = input.try(parse_content_distribution) {
return Ok(ContentDistribution::new(value));
}
// <overflow-position>? <content-position>
let overflow_position = input
.try(parse_overflow_position)
.unwrap_or(AlignFlags::empty());
let content_position = try_match_ident_ignore_ascii_case! { input,
"start" => AlignFlags::START,
"end" => AlignFlags::END,
"flex-start" => AlignFlags::FLEX_START,
"flex-end" => AlignFlags::FLEX_END,
"center" => AlignFlags::CENTER,
"left" if axis == AxisDirection::Inline => AlignFlags::LEFT,
"right" if axis == AxisDirection::Inline => AlignFlags::RIGHT,
};
Ok(ContentDistribution::new(
content_position | overflow_position,
))
}
fn list_keywords(f: KeywordsCollectFn, axis: AxisDirection) {
f(&["normal"]);
if axis == AxisDirection::Block {
list_baseline_keywords(f);
}
list_content_distribution_keywords(f);
list_overflow_position_keywords(f);
f(&["start", "end", "flex-start", "flex-end", "center"]);
if axis == AxisDirection::Inline {
f(&["left", "right"]);
}
}
}
/// Value for the `align-content` property.
///
/// <https://drafts.csswg.org/css-align/#propdef-align-content>
#[derive(
Clone,
Copy,
Debug,
Eq,
MallocSizeOf,
PartialEq,
ToComputedValue,
ToCss,
ToResolvedValue,
ToShmem,
)]
#[repr(transparent)]
pub struct AlignContent(pub ContentDistribution);
impl Parse for AlignContent {
fn parse<'i, 't>(
_: &ParserContext,
input: &mut Parser<'i, 't>,
) -> Result<Self, ParseError<'i>> {
// NOTE Please also update `impl SpecifiedValueInfo` below when
// this function is updated.
Ok(AlignContent(ContentDistribution::parse(
input,
AxisDirection::Block,
)?))
}
}
impl SpecifiedValueInfo for AlignContent {
fn collect_completion_keywords(f: KeywordsCollectFn) {
ContentDistribution::list_keywords(f, AxisDirection::Block);
}
}
/// Value for the `justify-content` property.
///
/// <https://drafts.csswg.org/css-align/#propdef-justify-content>
#[derive(
Clone,
Copy,
Debug,
Eq,
MallocSizeOf,
PartialEq,
ToComputedValue,
ToCss,
ToResolvedValue,
ToShmem,
)]
#[repr(transparent)]
pub struct JustifyContent(pub ContentDistribution);
impl Parse for JustifyContent {
fn parse<'i, 't>(
_: &ParserContext,
input: &mut Parser<'i, 't>,
) -> Result<Self, ParseError<'i>> {
// NOTE Please also update `impl SpecifiedValueInfo` below when
// this function is updated.
Ok(JustifyContent(ContentDistribution::parse(
input,
AxisDirection::Inline,
)?))
}
}
impl SpecifiedValueInfo for JustifyContent {
fn collect_completion_keywords(f: KeywordsCollectFn) {
ContentDistribution::list_keywords(f, AxisDirection::Inline);
}
}
#[cfg(feature = "gecko")]
impl From<u16> for JustifyContent {
fn from(bits: u16) -> Self {
JustifyContent(ContentDistribution::from_bits(bits))
}
}
#[cfg(feature = "gecko")]
impl From<JustifyContent> for u16 {
fn from(v: JustifyContent) -> u16 {
v.0.as_bits()
}
}
/// <https://drafts.csswg.org/css-align/#self-alignment>
#[derive(
Clone,
Copy,
Debug,
Eq,
MallocSizeOf,
PartialEq,
ToComputedValue,
ToCss,
ToResolvedValue,
ToShmem,
)]
#[repr(transparent)]
pub struct SelfAlignment(pub AlignFlags);
impl SelfAlignment {
/// The initial value 'auto'
#[inline]
pub fn auto() -> Self {
SelfAlignment(AlignFlags::AUTO)
}
/// Returns whether this value is valid for both axis directions.
pub fn is_valid_on_both_axes(&self) -> bool {
match self.0.value() {
// left | right are only allowed on the inline axis.
AlignFlags::LEFT | AlignFlags::RIGHT => false,
_ => true,
}
}
/// Parse a self-alignment value on one of the axis.
pub fn parse<'i, 't>(
input: &mut Parser<'i, 't>,
axis: AxisDirection,
) -> Result<Self, ParseError<'i>> {
// NOTE Please also update the `list_keywords` function below
// when this function is updated.
// <baseline-position>
//
// It's weird that this accepts <baseline-position>, but not
// justify-content...
if let Ok(value) = input.try(parse_baseline) {
return Ok(SelfAlignment(value));
}
// auto | normal | stretch
if let Ok(value) = input.try(parse_auto_normal_stretch) {
return Ok(SelfAlignment(value));
}
// <overflow-position>? <self-position>
let overflow_position = input
.try(parse_overflow_position)
.unwrap_or(AlignFlags::empty());
let self_position = parse_self_position(input, axis)?;
Ok(SelfAlignment(overflow_position | self_position))
}
fn list_keywords(f: KeywordsCollectFn, axis: AxisDirection) {
list_baseline_keywords(f);
list_auto_normal_stretch(f);
list_overflow_position_keywords(f);
list_self_position_keywords(f, axis);
}
}
/// The specified value of the align-self property.
///
/// <https://drafts.csswg.org/css-align/#propdef-align-self>
#[derive(
Clone,
Copy,
Debug,
Eq,
MallocSizeOf,
PartialEq,
ToComputedValue,
ToCss,
ToResolvedValue,
ToShmem,
)]
#[repr(C)]
pub struct AlignSelf(pub SelfAlignment);
impl Parse for AlignSelf {
fn parse<'i, 't>(
_: &ParserContext,
input: &mut Parser<'i, 't>,
) -> Result<Self, ParseError<'i>> {
// NOTE Please also update `impl SpecifiedValueInfo` below when
// this function is updated.
Ok(AlignSelf(SelfAlignment::parse(
input,
AxisDirection::Block,
)?))
}
}
impl SpecifiedValueInfo for AlignSelf {
fn collect_completion_keywords(f: KeywordsCollectFn) {
SelfAlignment::list_keywords(f, AxisDirection::Block);
}
}
/// The specified value of the justify-self property.
///
/// <https://drafts.csswg.org/css-align/#propdef-justify-self>
#[derive(
Clone,
Copy,
Debug,
Eq,
MallocSizeOf,
PartialEq,
ToComputedValue,
ToCss,
ToResolvedValue,
ToShmem,
)]
#[repr(C)]
pub struct JustifySelf(pub SelfAlignment);
impl Parse for JustifySelf {
fn parse<'i, 't>(
_: &ParserContext,
input: &mut Parser<'i, 't>,
) -> Result<Self, ParseError<'i>> {
// NOTE Please also update `impl SpecifiedValueInfo` below when
// this function is updated.
Ok(JustifySelf(SelfAlignment::parse(
input,
AxisDirection::Inline,
)?))
}
}
impl SpecifiedValueInfo for JustifySelf {
fn collect_completion_keywords(f: KeywordsCollectFn) {
SelfAlignment::list_keywords(f, AxisDirection::Inline);
}
}
/// Value of the `align-items` property
///
/// <https://drafts.csswg.org/css-align/#propdef-align-items>
#[derive(
Clone,
Copy,
Debug,
Eq,
MallocSizeOf,
PartialEq,
ToComputedValue,
ToCss,
ToResolvedValue,
ToShmem,
)]
#[repr(C)]
pub struct AlignItems(pub AlignFlags);
impl AlignItems {
/// The initial value 'normal'
#[inline]
pub fn normal() -> Self {
AlignItems(AlignFlags::NORMAL)
}
}
impl Parse for AlignItems {
// normal | stretch | <baseline-position> |
// <overflow-position>? <self-position>
fn parse<'i, 't>(
_: &ParserContext,
input: &mut Parser<'i, 't>,
) -> Result<Self, ParseError<'i>> {
// NOTE Please also update `impl SpecifiedValueInfo` below when
// this function is updated.
// <baseline-position>
if let Ok(baseline) = input.try(parse_baseline) {
return Ok(AlignItems(baseline));
}
// normal | stretch
if let Ok(value) = input.try(parse_normal_stretch) {
return Ok(AlignItems(value));
}
// <overflow-position>? <self-position>
let overflow = input
.try(parse_overflow_position)
.unwrap_or(AlignFlags::empty());
let self_position = parse_self_position(input, AxisDirection::Block)?;
Ok(AlignItems(self_position | overflow))
}
}
impl SpecifiedValueInfo for AlignItems {
fn collect_completion_keywords(f: KeywordsCollectFn) {
list_baseline_keywords(f);
list_normal_stretch(f);
list_overflow_position_keywords(f);
list_self_position_keywords(f, AxisDirection::Block);
}
}
/// Value of the `justify-items` property
///
/// <https://drafts.csswg.org/css-align/#justify-items-property>
#[derive(Clone, Copy, Debug, Eq, MallocSizeOf, PartialEq, ToCss, ToShmem)]
#[repr(C)]
pub struct JustifyItems(pub AlignFlags);
impl JustifyItems {
/// The initial value 'legacy'
#[inline]
pub fn legacy() -> Self {
JustifyItems(AlignFlags::LEGACY)
}
/// The value 'normal'
#[inline]
pub fn normal() -> Self {
JustifyItems(AlignFlags::NORMAL)
}
}
impl Parse for JustifyItems {
fn parse<'i, 't>(
_: &ParserContext,
input: &mut Parser<'i, 't>,
) -> Result<Self, ParseError<'i>> {
// NOTE Please also update `impl SpecifiedValueInfo` below when
// this function is updated.
// <baseline-position>
//
// It's weird that this accepts <baseline-position>, but not
// justify-content...
if let Ok(baseline) = input.try(parse_baseline) {
return Ok(JustifyItems(baseline));
}
// normal | stretch
if let Ok(value) = input.try(parse_normal_stretch) {
return Ok(JustifyItems(value));
}
// legacy | [ legacy && [ left | right | center ] ]
if let Ok(value) = input.try(parse_legacy) {
return Ok(JustifyItems(value));
}
// <overflow-position>? <self-position>
let overflow = input
.try(parse_overflow_position)
.unwrap_or(AlignFlags::empty());
let self_position = parse_self_position(input, AxisDirection::Inline)?;
Ok(JustifyItems(overflow | self_position))
}
}
impl SpecifiedValueInfo for JustifyItems {
fn collect_completion_keywords(f: KeywordsCollectFn) {
list_baseline_keywords(f);
list_normal_stretch(f);
list_legacy_keywords(f);
list_overflow_position_keywords(f);
list_self_position_keywords(f, AxisDirection::Inline);
}
}
// auto | normal | stretch
fn parse_auto_normal_stretch<'i, 't>(
input: &mut Parser<'i, 't>,
) -> Result<AlignFlags, ParseError<'i>> {
// NOTE Please also update the `list_auto_normal_stretch` function
// below when this function is updated.
try_match_ident_ignore_ascii_case! { input,
"auto" => Ok(AlignFlags::AUTO),
"normal" => Ok(AlignFlags::NORMAL),
"stretch" => Ok(AlignFlags::STRETCH),
}
}
fn list_auto_normal_stretch(f: KeywordsCollectFn) {
f(&["auto", "normal", "stretch"]);
}
// normal | stretch
fn parse_normal_stretch<'i, 't>(input: &mut Parser<'i, 't>) -> Result<AlignFlags, ParseError<'i>> {
// NOTE Please also update the `list_normal_stretch` function below
// when this function is updated.
try_match_ident_ignore_ascii_case! { input,
"normal" => Ok(AlignFlags::NORMAL),
"stretch" => Ok(AlignFlags::STRETCH),
}
}
fn list_normal_stretch(f: KeywordsCollectFn) {
f(&["normal", "stretch"]);
}
// <baseline-position>
fn parse_baseline<'i, 't>(input: &mut Parser<'i, 't>) -> Result<AlignFlags, ParseError<'i>> {
// NOTE Please also update the `list_baseline_keywords` function
// below when this function is updated.
try_match_ident_ignore_ascii_case! { input,
"baseline" => Ok(AlignFlags::BASELINE),
"first" => {
input.expect_ident_matching("baseline")?;
Ok(AlignFlags::BASELINE)
},
"last" => {
input.expect_ident_matching("baseline")?;
Ok(AlignFlags::LAST_BASELINE)
},
}
}
fn list_baseline_keywords(f: KeywordsCollectFn) {
f(&["baseline", "first baseline", "last baseline"]);
}
// <content-distribution>
fn parse_content_distribution<'i, 't>(
input: &mut Parser<'i, 't>,
) -> Result<AlignFlags, ParseError<'i>> {
// NOTE Please also update the `list_content_distribution_keywords`
// function below when this function is updated.
try_match_ident_ignore_ascii_case! { input,
"stretch" => Ok(AlignFlags::STRETCH),
"space-between" => Ok(AlignFlags::SPACE_BETWEEN),
"space-around" => Ok(AlignFlags::SPACE_AROUND),
"space-evenly" => Ok(AlignFlags::SPACE_EVENLY),
}
}
fn list_content_distribution_keywords(f: KeywordsCollectFn) {
f(&["stretch", "space-between", "space-around", "space-evenly"]);
}
// <overflow-position>
fn parse_overflow_position<'i, 't>(
input: &mut Parser<'i, 't>,
) -> Result<AlignFlags, ParseError<'i>> {
// NOTE Please also update the `list_overflow_position_keywords`
// function below when this function is updated.
try_match_ident_ignore_ascii_case! { input,
"safe" => Ok(AlignFlags::SAFE),
"unsafe" => Ok(AlignFlags::UNSAFE),
}
}
fn list_overflow_position_keywords(f: KeywordsCollectFn) {
f(&["safe", "unsafe"]);
}
// <self-position> | left | right in the inline axis.
fn parse_self_position<'i, 't>(
input: &mut Parser<'i, 't>,
axis: AxisDirection,
) -> Result<AlignFlags, ParseError<'i>> {
// NOTE Please also update the `list_self_position_keywords`
// function below when this function is updated.
Ok(try_match_ident_ignore_ascii_case! { input,
"start" => AlignFlags::START,
"end" => AlignFlags::END,
"flex-start" => AlignFlags::FLEX_START,
"flex-end" => AlignFlags::FLEX_END,
"center" => AlignFlags::CENTER,
"self-start" => AlignFlags::SELF_START,
"self-end" => AlignFlags::SELF_END,
"left" if axis == AxisDirection::Inline => AlignFlags::LEFT,
"right" if axis == AxisDirection::Inline => AlignFlags::RIGHT,
})
}
fn list_self_position_keywords(f: KeywordsCollectFn, axis: AxisDirection) {
f(&[
"start",
"end",
"flex-start",
"flex-end",
"center",
"self-start",
"self-end",
]);
if axis == AxisDirection::Inline {
f(&["left", "right"]);
}
}
fn parse_left_right_center<'i, 't>(
input: &mut Parser<'i, 't>,
) -> Result<AlignFlags, ParseError<'i>> {
// NOTE Please also update the `list_legacy_keywords` function below
// when this function is updated.
Ok(try_match_ident_ignore_ascii_case! { input,
"left" => AlignFlags::LEFT,
"right" => AlignFlags::RIGHT,
"center" => AlignFlags::CENTER,
})
}
// legacy | [ legacy && [ left | right | center ] ]
fn parse_legacy<'i, 't>(input: &mut Parser<'i, 't>) -> Result<AlignFlags, ParseError<'i>> {
// NOTE Please also update the `list_legacy_keywords` function below
// when this function is updated.
let flags = try_match_ident_ignore_ascii_case! { input,
"legacy" => {
let flags = input.try(parse_left_right_center)
.unwrap_or(AlignFlags::empty());
return Ok(AlignFlags::LEGACY | flags)
},
"left" => AlignFlags::LEFT,
"right" => AlignFlags::RIGHT,
"center" => AlignFlags::CENTER,
};
input.expect_ident_matching("legacy")?;
Ok(AlignFlags::LEGACY | flags)
}
fn list_legacy_keywords(f: KeywordsCollectFn) {
f(&["legacy", "left", "right", "center"]);
}<|fim▁end|> | let value = self.value();
|
<|file_name|>pypng.py<|end_file_name|><|fim▁begin|># ----------------------------------------------------------------------------
# pyglet
# Copyright (c) 2006-2008 Alex Holkner
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions
# are met:
#
# * Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
# * Redistributions in binary form must reproduce the above copyright
# notice, this list of conditions and the following disclaimer in
# the documentation and/or other materials provided with the
# distribution.
# * Neither the name of pyglet nor the names of its
# contributors may be used to endorse or promote products
# derived from this software without specific prior written
# permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS
# FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE
# COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT,
# INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING,
# BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
# LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
# CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT
# LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN
# ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
# POSSIBILITY OF SUCH DAMAGE.
# ----------------------------------------------------------------------------
# png.py - PNG encoder in pure Python
# Copyright (C) 2006 Johann C. Rocholl <[email protected]>
# <ah> Modifications for pyglet by Alex Holkner <[email protected]>
#
# Permission is hereby granted, free of charge, to any person
# obtaining a copy of this software and associated documentation files
# (the "Software"), to deal in the Software without restriction,
# including without limitation the rights to use, copy, modify, merge,
# publish, distribute, sublicense, and/or sell copies of the Software,
# and to permit persons to whom the Software is furnished to do so,
# subject to the following conditions:
#
# The above copyright notice and this permission notice shall be
# included in all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
# EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
# MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS
# BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN
# ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN
# CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
# SOFTWARE.
#
# Contributors (alphabetical):
# Nicko van Someren <[email protected]>
#
# Changelog (recent first):
# 2006-06-17 Nicko: Reworked into a class, faster interlacing.
# 2006-06-17 Johann: Very simple prototype PNG decoder.
# 2006-06-17 Nicko: Test suite with various image generators.
# 2006-06-17 Nicko: Alpha-channel, grey-scale, 16-bit/plane support.
# 2006-06-15 Johann: Scanline iterator interface for large input files.
# 2006-06-09 Johann: Very simple prototype PNG encoder.
"""
Pure Python PNG Reader/Writer
This is an implementation of a subset of the PNG specification at
http://www.w3.org/TR/2003/REC-PNG-20031110 in pure Python. It reads
and writes PNG files with 8/16/24/32/48/64 bits per pixel (greyscale,
RGB, RGBA, with 8 or 16 bits per layer), with a number of options. For
help, type "import png; help(png)" in your python interpreter.
This file can also be used as a command-line utility to convert PNM
files to PNG. The interface is similar to that of the pnmtopng program
from the netpbm package. Type "python png.py --help" at the shell
prompt for usage and a list of options.
"""
__revision__ = '$Rev$'
__date__ = '$Date$'
__author__ = '$Author$'
import sys
import zlib
import struct
import math
from array import array
from pyglet.compat import asbytes
_adam7 = ((0, 0, 8, 8),
(4, 0, 8, 8),
(0, 4, 4, 8),
(2, 0, 4, 4),
(0, 2, 2, 4),
(1, 0, 2, 2),
(0, 1, 1, 2))
def interleave_planes(ipixels, apixels, ipsize, apsize):
"""
Interleave color planes, e.g. RGB + A = RGBA.
Return an array of pixels consisting of the ipsize bytes of data
from each pixel in ipixels followed by the apsize bytes of data
from each pixel in apixels, for an image of size width x height.
"""
itotal = len(ipixels)
atotal = len(apixels)
newtotal = itotal + atotal
newpsize = ipsize + apsize
# Set up the output buffer
out = array('B')
# It's annoying that there is no cheap way to set the array size :-(
out.extend(ipixels)
out.extend(apixels)
# Interleave in the pixel data
for i in range(ipsize):
out[i:newtotal:newpsize] = ipixels[i:itotal:ipsize]
for i in range(apsize):
out[i+ipsize:newtotal:newpsize] = apixels[i:atotal:apsize]
return out
class Error(Exception):
pass
class Writer:
"""
PNG encoder in pure Python.
"""
def __init__(self, width, height,
transparent=None,
background=None,
gamma=None,
greyscale=False,
has_alpha=False,
bytes_per_sample=1,
compression=None,
interlaced=False,
chunk_limit=2**20):
"""
Create a PNG encoder object.
Arguments:
width, height - size of the image in pixels
transparent - create a tRNS chunk
background - create a bKGD chunk
gamma - create a gAMA chunk
greyscale - input data is greyscale, not RGB
has_alpha - input data has alpha channel (RGBA)
bytes_per_sample - 8-bit or 16-bit input data
compression - zlib compression level (1-9)
chunk_limit - write multiple IDAT chunks to save memory
If specified, the transparent and background parameters must
be a tuple with three integer values for red, green, blue, or
a simple integer (or singleton tuple) for a greyscale image.
If specified, the gamma parameter must be a float value.
"""
if width <= 0 or height <= 0:
raise ValueError("width and height must be greater than zero")
if has_alpha and transparent is not None:
raise ValueError(
"transparent color not allowed with alpha channel")
if bytes_per_sample < 1 or bytes_per_sample > 2:
raise ValueError("bytes per sample must be 1 or 2")
if transparent is not None:
if greyscale:
if type(transparent) is not int:
raise ValueError(
"transparent color for greyscale must be integer")
else:
if not (len(transparent) == 3 and
type(transparent[0]) is int and
type(transparent[1]) is int and
type(transparent[2]) is int):
raise ValueError(
"transparent color must be a triple of integers")
if background is not None:
if greyscale:
if type(background) is not int:
raise ValueError(
"background color for greyscale must be integer")
else:
if not (len(background) == 3 and
type(background[0]) is int and
type(background[1]) is int and
type(background[2]) is int):
raise ValueError(
"background color must be a triple of integers")
self.width = width
self.height = height
self.transparent = transparent
self.background = background
self.gamma = gamma
self.greyscale = greyscale
self.has_alpha = has_alpha
self.bytes_per_sample = bytes_per_sample
self.compression = compression
self.chunk_limit = chunk_limit
self.interlaced = interlaced
if self.greyscale:
self.color_depth = 1
if self.has_alpha:
self.color_type = 4
self.psize = self.bytes_per_sample * 2
else:
self.color_type = 0
self.psize = self.bytes_per_sample
else:
self.color_depth = 3
if self.has_alpha:
self.color_type = 6
self.psize = self.bytes_per_sample * 4
else:
self.color_type = 2
self.psize = self.bytes_per_sample * 3
def write_chunk(self, outfile, tag, data):
"""
Write a PNG chunk to the output file, including length and checksum.
"""
# http://www.w3.org/TR/PNG/#5Chunk-layout
outfile.write(struct.pack("!I", len(data)))
outfile.write(tag)
outfile.write(data)
checksum = zlib.crc32(tag)
checksum = zlib.crc32(data, checksum)
# <ah> Avoid DeprecationWarning: struct integer overflow masking
# with Python2.5/Windows.
checksum = checksum & 0xffffffff
outfile.write(struct.pack("!I", checksum))
def write(self, outfile, scanlines):
"""
Write a PNG image to the output file.
"""
# http://www.w3.org/TR/PNG/#5PNG-file-signature
outfile.write(struct.pack("8B", 137, 80, 78, 71, 13, 10, 26, 10))
# http://www.w3.org/TR/PNG/#11IHDR
if self.interlaced:
interlaced = 1
else:
interlaced = 0
self.write_chunk(outfile, 'IHDR',
struct.pack("!2I5B", self.width, self.height,
self.bytes_per_sample * 8,
self.color_type, 0, 0, interlaced))
# http://www.w3.org/TR/PNG/#11tRNS
if self.transparent is not None:
if self.greyscale:
self.write_chunk(outfile, 'tRNS',
struct.pack("!1H", *self.transparent))
else:
self.write_chunk(outfile, 'tRNS',
struct.pack("!3H", *self.transparent))
# http://www.w3.org/TR/PNG/#11bKGD
if self.background is not None:
if self.greyscale:
self.write_chunk(outfile, 'bKGD',
struct.pack("!1H", *self.background))
else:
self.write_chunk(outfile, 'bKGD',
struct.pack("!3H", *self.background))
# http://www.w3.org/TR/PNG/#11gAMA
if self.gamma is not None:
self.write_chunk(outfile, 'gAMA',
struct.pack("!L", int(self.gamma * 100000)))
# http://www.w3.org/TR/PNG/#11IDAT
if self.compression is not None:
compressor = zlib.compressobj(self.compression)
else:
compressor = zlib.compressobj()
data = array('B')
for scanline in scanlines:
data.append(0)
data.extend(scanline)
if len(data) > self.chunk_limit:
compressed = compressor.compress(data.tostring())
if len(compressed):
# print >> sys.stderr, len(data), len(compressed)
self.write_chunk(outfile, 'IDAT', compressed)
data = array('B')
if len(data):
compressed = compressor.compress(data.tostring())
else:
compressed = ''
flushed = compressor.flush()
if len(compressed) or len(flushed):
# print >> sys.stderr, len(data), len(compressed), len(flushed)
self.write_chunk(outfile, 'IDAT', compressed + flushed)
# http://www.w3.org/TR/PNG/#11IEND
self.write_chunk(outfile, 'IEND', '')
def write_array(self, outfile, pixels):
"""
Encode a pixel array to PNG and write output file.
"""
if self.interlaced:
self.write(outfile, self.array_scanlines_interlace(pixels))
else:
self.write(outfile, self.array_scanlines(pixels))
def convert_ppm(self, ppmfile, outfile):
"""
Convert a PPM file containing raw pixel data into a PNG file
with the parameters set in the writer object.
"""
if self.interlaced:
pixels = array('B')
pixels.fromfile(ppmfile,
self.bytes_per_sample * self.color_depth *
self.width * self.height)
self.write(outfile, self.array_scanlines_interlace(pixels))
else:
self.write(outfile, self.file_scanlines(ppmfile))
def convert_ppm_and_pgm(self, ppmfile, pgmfile, outfile):
"""
Convert a PPM and PGM file containing raw pixel data into a
PNG outfile with the parameters set in the writer object.
"""
pixels = array('B')
pixels.fromfile(ppmfile,
self.bytes_per_sample * self.color_depth *
self.width * self.height)
apixels = array('B')
apixels.fromfile(pgmfile,
self.bytes_per_sample *
self.width * self.height)
pixels = interleave_planes(pixels, apixels,
self.bytes_per_sample * self.color_depth,
self.bytes_per_sample)
if self.interlaced:
self.write(outfile, self.array_scanlines_interlace(pixels))
else:
self.write(outfile, self.array_scanlines(pixels))
def file_scanlines(self, infile):
"""
Generator for scanlines from an input file.
"""
row_bytes = self.psize * self.width
for y in range(self.height):
scanline = array('B')
scanline.fromfile(infile, row_bytes)
yield scanline
def array_scanlines(self, pixels):
"""
Generator for scanlines from an array.
"""
row_bytes = self.width * self.psize
stop = 0
for y in range(self.height):
start = stop
stop = start + row_bytes
yield pixels[start:stop]
def old_array_scanlines_interlace(self, pixels):
"""
Generator for interlaced scanlines from an array.
http://www.w3.org/TR/PNG/#8InterlaceMethods
"""
row_bytes = self.psize * self.width
for xstart, ystart, xstep, ystep in _adam7:
for y in range(ystart, self.height, ystep):
if xstart < self.width:
if xstep == 1:
offset = y*row_bytes
yield pixels[offset:offset+row_bytes]
else:
row = array('B')
offset = y*row_bytes + xstart* self.psize
skip = self.psize * xstep
for x in range(xstart, self.width, xstep):
row.extend(pixels[offset:offset + self.psize])
offset += skip
yield row
def array_scanlines_interlace(self, pixels):
"""
Generator for interlaced scanlines from an array.
http://www.w3.org/TR/PNG/#8InterlaceMethods
"""
row_bytes = self.psize * self.width
for xstart, ystart, xstep, ystep in _adam7:
for y in range(ystart, self.height, ystep):
if xstart >= self.width:
continue
if xstep == 1:
offset = y * row_bytes
yield pixels[offset:offset+row_bytes]
else:
row = array('B')
# Note we want the ceiling of (self.width - xstart) / xtep
row_len = self.psize * (
(self.width - xstart + xstep - 1) / xstep)
# There's no easier way to set the length of an array
row.extend(pixels[0:row_len])
offset = y * row_bytes + xstart * self.psize
end_offset = (y+1) * row_bytes
skip = self.psize * xstep
for i in range(self.psize):
row[i:row_len:self.psize] = \
pixels[offset+i:end_offset:skip]
yield row
class _readable:
"""
A simple file-like interface for strings and arrays.
"""
def __init__(self, buf):
self.buf = buf
self.offset = 0
def read(self, n):
r = self.buf[offset:offset+n]
if isinstance(r, array):
r = r.tostring()
self.offset += n
return r
class Reader:
"""
PNG decoder in pure Python.
"""
def __init__(self, _guess=None, **kw):
"""
Create a PNG decoder object.
The constructor expects exactly one keyword argument. If you
supply a positional argument instead, it will guess the input
type. You can choose among the following arguments:
filename - name of PNG input file
file - object with a read() method
pixels - array or string with PNG data
"""
if ((_guess is not None and len(kw) != 0) or
(_guess is None and len(kw) != 1)):
raise TypeError("Reader() takes exactly 1 argument")
if _guess is not None:
if isinstance(_guess, array):
kw["pixels"] = _guess
elif isinstance(_guess, str):
kw["filename"] = _guess
elif isinstance(_guess, file):
kw["file"] = _guess
if "filename" in kw:
self.file = file(kw["filename"])
elif "file" in kw:
self.file = kw["file"]
elif "pixels" in kw:
self.file = _readable(kw["pixels"])
else:
raise TypeError("expecting filename, file or pixels array")
def read_chunk(self):
"""
Read a PNG chunk from the input file, return tag name and data.
"""
# http://www.w3.org/TR/PNG/#5Chunk-layout
try:
data_bytes, tag = struct.unpack('!I4s', self.file.read(8))
except struct.error:
raise ValueError('Chunk too short for header')
data = self.file.read(data_bytes)
if len(data) != data_bytes:
raise ValueError('Chunk %s too short for required %i data octets'
% (tag, data_bytes))
checksum = self.file.read(4)
if len(checksum) != 4:
raise ValueError('Chunk %s too short for checksum', tag)
verify = zlib.crc32(tag)
verify = zlib.crc32(data, verify)
# Whether the output from zlib.crc32 is signed or not varies
# according to hideous implementation details, see
# http://bugs.python.org/issue1202 .
# We coerce it to be positive here (in a way which works on
# Python 2.3 and older).
verify &= 2**32 - 1
verify = struct.pack('!I', verify)
if checksum != verify:
# print repr(checksum)
(a,) = struct.unpack('!I', checksum)
(b,) = struct.unpack('!I', verify)
raise ValueError("Checksum error in %s chunk: 0x%X != 0x%X"
% (tag, a, b))
return tag, data
def _reconstruct_sub(self, offset, xstep, ystep):
"""
Reverse sub filter.
"""
pixels = self.pixels
a_offset = offset
offset += self.psize * xstep
if xstep == 1:
for index in range(self.psize, self.row_bytes):
x = pixels[offset]
a = pixels[a_offset]
pixels[offset] = (x + a) & 0xff
offset += 1
a_offset += 1
else:
byte_step = self.psize * xstep
for index in range(byte_step, self.row_bytes, byte_step):
for i in range(self.psize):
x = pixels[offset + i]
a = pixels[a_offset + i]
pixels[offset + i] = (x + a) & 0xff
offset += self.psize * xstep
a_offset += self.psize * xstep
def _reconstruct_up(self, offset, xstep, ystep):
"""
Reverse up filter.
"""
pixels = self.pixels
b_offset = offset - (self.row_bytes * ystep)
if xstep == 1:
for index in range(self.row_bytes):
x = pixels[offset]
b = pixels[b_offset]
pixels[offset] = (x + b) & 0xff
offset += 1
b_offset += 1
else:
for index in range(0, self.row_bytes, xstep * self.psize):
for i in range(self.psize):
x = pixels[offset + i]
b = pixels[b_offset + i]
pixels[offset + i] = (x + b) & 0xff
offset += self.psize * xstep
b_offset += self.psize * xstep
def _reconstruct_average(self, offset, xstep, ystep):
"""
Reverse average filter.
"""
pixels = self.pixels
a_offset = offset - (self.psize * xstep)
b_offset = offset - (self.row_bytes * ystep)
if xstep == 1:
for index in range(self.row_bytes):
x = pixels[offset]
if index < self.psize:
a = 0
else:
a = pixels[a_offset]
if b_offset < 0:
b = 0
else:
b = pixels[b_offset]
pixels[offset] = (x + ((a + b) >> 1)) & 0xff
offset += 1
a_offset += 1
b_offset += 1
else:
for index in range(0, self.row_bytes, self.psize * xstep):
for i in range(self.psize):
x = pixels[offset+i]
if index < self.psize:
a = 0
else:
a = pixels[a_offset + i]
if b_offset < 0:
b = 0
else:
b = pixels[b_offset + i]
pixels[offset + i] = (x + ((a + b) >> 1)) & 0xff
offset += self.psize * xstep
a_offset += self.psize * xstep
b_offset += self.psize * xstep
def _reconstruct_paeth(self, offset, xstep, ystep):
"""
Reverse Paeth filter.
"""
pixels = self.pixels
a_offset = offset - (self.psize * xstep)
b_offset = offset - (self.row_bytes * ystep)
c_offset = b_offset - (self.psize * xstep)
# There's enough inside this loop that it's probably not worth
# optimising for xstep == 1
for index in range(0, self.row_bytes, self.psize * xstep):
for i in range(self.psize):
x = pixels[offset+i]
if index < self.psize:
a = c = 0
b = pixels[b_offset+i]
else:
a = pixels[a_offset+i]
b = pixels[b_offset+i]
c = pixels[c_offset+i]
p = a + b - c
pa = abs(p - a)
pb = abs(p - b)
pc = abs(p - c)
if pa <= pb and pa <= pc:
pr = a
elif pb <= pc:
pr = b
else:
pr = c
pixels[offset+i] = (x + pr) & 0xff
offset += self.psize * xstep
a_offset += self.psize * xstep
b_offset += self.psize * xstep
c_offset += self.psize * xstep
# N.B. PNG files with 'up', 'average' or 'paeth' filters on the
# first line of a pass are legal. The code above for 'average'
# deals with this case explicitly. For up we map to the null
# filter and for paeth we map to the sub filter.
def reconstruct_line(self, filter_type, first_line, offset, xstep, ystep):
# print >> sys.stderr, "Filter type %s, first_line=%s" % (
# filter_type, first_line)
filter_type += (first_line << 8)
if filter_type == 1 or filter_type == 0x101 or filter_type == 0x104:
self._reconstruct_sub(offset, xstep, ystep)
elif filter_type == 2:
self._reconstruct_up(offset, xstep, ystep)
elif filter_type == 3 or filter_type == 0x103:
self._reconstruct_average(offset, xstep, ystep)
elif filter_type == 4:
self._reconstruct_paeth(offset, xstep, ystep)
return
def deinterlace(self, scanlines):
# print >> sys.stderr, ("Reading interlaced, w=%s, r=%s, planes=%s," +
# " bpp=%s") % (self.width, self.height, self.planes, self.bps)
a = array('B')
self.pixels = a
# Make the array big enough
temp = scanlines[0:self.width*self.height*self.psize]
a.extend(temp)
source_offset = 0
for xstart, ystart, xstep, ystep in _adam7:
# print >> sys.stderr, "Adam7: start=%s,%s step=%s,%s" % (
# xstart, ystart, xstep, ystep)
filter_first_line = 1
for y in range(ystart, self.height, ystep):
if xstart >= self.width:
continue
filter_type = scanlines[source_offset]
source_offset += 1
if xstep == 1:
offset = y * self.row_bytes
a[offset:offset+self.row_bytes] = \
scanlines[source_offset:source_offset + self.row_bytes]
source_offset += self.row_bytes
else:
# Note we want the ceiling of (width - xstart) / xtep
row_len = self.psize * (
(self.width - xstart + xstep - 1) / xstep)
offset = y * self.row_bytes + xstart * self.psize
end_offset = (y+1) * self.row_bytes
skip = self.psize * xstep
for i in range(self.psize):
a[offset+i:end_offset:skip] = \
scanlines[source_offset + i:
source_offset + row_len:
self.psize]
source_offset += row_len
if filter_type:
self.reconstruct_line(filter_type, filter_first_line,
offset, xstep, ystep)
filter_first_line = 0
return a
def read_flat(self, scanlines):
a = array('B')
self.pixels = a
offset = 0
source_offset = 0
filter_first_line = 1
for y in range(self.height):
filter_type = scanlines[source_offset]
source_offset += 1
a.extend(scanlines[source_offset: source_offset + self.row_bytes])
if filter_type:
self.reconstruct_line(filter_type, filter_first_line,
offset, 1, 1)
filter_first_line = 0
offset += self.row_bytes
source_offset += self.row_bytes
return a
def read(self):
"""
Read a simple PNG file, return width, height, pixels and image metadata
This function is a very early prototype with limited flexibility
and excessive use of memory.
"""
signature = self.file.read(8)
if (signature != struct.pack("8B", 137, 80, 78, 71, 13, 10, 26, 10)):
raise Error("PNG file has invalid header")
compressed = []
image_metadata = {}
while True:
try:
tag, data = self.read_chunk()
except ValueError, e:
raise Error('Chunk error: ' + e.args[0])
# print >> sys.stderr, tag, len(data)
if tag == asbytes('IHDR'): # http://www.w3.org/TR/PNG/#11IHDR
(width, height, bits_per_sample, color_type,
compression_method, filter_method,
interlaced) = struct.unpack("!2I5B", data)
bps = bits_per_sample // 8
if bps == 0:
raise Error("unsupported pixel depth")
if bps > 2 or bits_per_sample != (bps * 8):
raise Error("invalid pixel depth")
if color_type == 0:
greyscale = True
has_alpha = False
planes = 1
elif color_type == 2:
greyscale = False
has_alpha = False
planes = 3
elif color_type == 4:
greyscale = True
has_alpha = True
planes = 2
elif color_type == 6:
greyscale = False
has_alpha = True
planes = 4
else:
raise Error("unknown PNG colour type %s" % color_type)
if compression_method != 0:
raise Error("unknown compression method")
if filter_method != 0:
raise Error("unknown filter method")
self.bps = bps
self.planes = planes
self.psize = bps * planes
self.width = width
self.height = height
self.row_bytes = width * self.psize
elif tag == asbytes('IDAT'): # http://www.w3.org/TR/PNG/#11IDAT
compressed.append(data)
elif tag == asbytes('bKGD'):
if greyscale:
image_metadata["background"] = struct.unpack("!1H", data)
else:
image_metadata["background"] = struct.unpack("!3H", data)
elif tag == asbytes('tRNS'):
if greyscale:
image_metadata["transparent"] = struct.unpack("!1H", data)
else:
image_metadata["transparent"] = struct.unpack("!3H", data)
elif tag == asbytes('gAMA'):
image_metadata["gamma"] = (
struct.unpack("!L", data)[0]) / 100000.0
elif tag == asbytes('IEND'): # http://www.w3.org/TR/PNG/#11IEND
break
scanlines = array('B', zlib.decompress(asbytes('').join(compressed)))
if interlaced:
pixels = self.deinterlace(scanlines)
else:
pixels = self.read_flat(scanlines)
image_metadata["greyscale"] = greyscale
image_metadata["has_alpha"] = has_alpha
image_metadata["bytes_per_sample"] = bps
image_metadata["interlaced"] = interlaced
return width, height, pixels, image_metadata
def test_suite(options):
"""
Run regression test and write PNG file to stdout.
"""
# Below is a big stack of test image generators
def test_gradient_horizontal_lr(x, y):
return x
def test_gradient_horizontal_rl(x, y):
return 1-x
def test_gradient_vertical_tb(x, y):
return y
def test_gradient_vertical_bt(x, y):
return 1-y
def test_radial_tl(x, y):
return max(1-math.sqrt(x*x+y*y), 0.0)
def test_radial_center(x, y):
return test_radial_tl(x-0.5, y-0.5)
def test_radial_tr(x, y):
return test_radial_tl(1-x, y)
def test_radial_bl(x, y):
return test_radial_tl(x, 1-y)
def test_radial_br(x, y):
return test_radial_tl(1-x, 1-y)
def test_stripe(x, n):
return 1.0*(int(x*n) & 1)
def test_stripe_h_2(x, y):
return test_stripe(x, 2)
def test_stripe_h_4(x, y):
return test_stripe(x, 4)
def test_stripe_h_10(x, y):
return test_stripe(x, 10)
def test_stripe_v_2(x, y):
return test_stripe(y, 2)
def test_stripe_v_4(x, y):
return test_stripe(y, 4)
def test_stripe_v_10(x, y):
return test_stripe(y, 10)
def test_stripe_lr_10(x, y):
return test_stripe(x+y, 10)
def test_stripe_rl_10(x, y):
return test_stripe(x-y, 10)
def test_checker(x, y, n):
return 1.0*((int(x*n) & 1) ^ (int(y*n) & 1))
def test_checker_8(x, y):
return test_checker(x, y, 8)
def test_checker_15(x, y):
return test_checker(x, y, 15)
def test_zero(x, y):
return 0
def test_one(x, y):
return 1
test_patterns = {
"GLR": test_gradient_horizontal_lr,
"GRL": test_gradient_horizontal_rl,
"GTB": test_gradient_vertical_tb,
"GBT": test_gradient_vertical_bt,
"RTL": test_radial_tl,
"RTR": test_radial_tr,
"RBL": test_radial_bl,
"RBR": test_radial_br,
"RCTR": test_radial_center,
"HS2": test_stripe_h_2,
"HS4": test_stripe_h_4,
"HS10": test_stripe_h_10,
"VS2": test_stripe_v_2,
"VS4": test_stripe_v_4,
"VS10": test_stripe_v_10,
"LRS": test_stripe_lr_10,
"RLS": test_stripe_rl_10,
"CK8": test_checker_8,
"CK15": test_checker_15,
"ZERO": test_zero,
"ONE": test_one,
}
def test_pattern(width, height, depth, pattern):
a = array('B')
fw = float(width)
fh = float(height)
pfun = test_patterns[pattern]
if depth == 1:
for y in range(height):
for x in range(width):
a.append(int(pfun(float(x)/fw, float(y)/fh) * 255))
elif depth == 2:
for y in range(height):
for x in range(width):
v = int(pfun(float(x)/fw, float(y)/fh) * 65535)
a.append(v >> 8)
a.append(v & 0xff)
return a
def test_rgba(size=256, depth=1,
red="GTB", green="GLR", blue="RTL", alpha=None):
r = test_pattern(size, size, depth, red)
g = test_pattern(size, size, depth, green)
b = test_pattern(size, size, depth, blue)
if alpha:
a = test_pattern(size, size, depth, alpha)
i = interleave_planes(r, g, depth, depth)
i = interleave_planes(i, b, 2 * depth, depth)
if alpha:
i = interleave_planes(i, a, 3 * depth, depth)
return i
# The body of test_suite()
size = 256
if options.test_size:
size = options.test_size
depth = 1
if options.test_deep:
depth = 2
kwargs = {}<|fim▁hole|> if options.test_green:
kwargs["green"] = options.test_green
if options.test_blue:
kwargs["blue"] = options.test_blue
if options.test_alpha:
kwargs["alpha"] = options.test_alpha
pixels = test_rgba(size, depth, **kwargs)
writer = Writer(size, size,
bytes_per_sample=depth,
transparent=options.transparent,
background=options.background,
gamma=options.gamma,
has_alpha=options.test_alpha,
compression=options.compression,
interlaced=options.interlace)
writer.write_array(sys.stdout, pixels)
def read_pnm_header(infile, supported='P6'):
"""
Read a PNM header, return width and height of the image in pixels.
"""
header = []
while len(header) < 4:
line = infile.readline()
sharp = line.find('#')
if sharp > -1:
line = line[:sharp]
header.extend(line.split())
if len(header) == 3 and header[0] == 'P4':
break # PBM doesn't have maxval
if header[0] not in supported:
raise NotImplementedError('file format %s not supported' % header[0])
if header[0] != 'P4' and header[3] != '255':
raise NotImplementedError('maxval %s not supported' % header[3])
return int(header[1]), int(header[2])
def color_triple(color):
"""
Convert a command line color value to a RGB triple of integers.
FIXME: Somewhere we need support for greyscale backgrounds etc.
"""
if color.startswith('#') and len(color) == 4:
return (int(color[1], 16),
int(color[2], 16),
int(color[3], 16))
if color.startswith('#') and len(color) == 7:
return (int(color[1:3], 16),
int(color[3:5], 16),
int(color[5:7], 16))
elif color.startswith('#') and len(color) == 13:
return (int(color[1:5], 16),
int(color[5:9], 16),
int(color[9:13], 16))
def _main():
"""
Run the PNG encoder with options from the command line.
"""
# Parse command line arguments
from optparse import OptionParser
version = '%prog ' + __revision__.strip('$').replace('Rev: ', 'r')
parser = OptionParser(version=version)
parser.set_usage("%prog [options] [pnmfile]")
parser.add_option("-i", "--interlace",
default=False, action="store_true",
help="create an interlaced PNG file (Adam7)")
parser.add_option("-t", "--transparent",
action="store", type="string", metavar="color",
help="mark the specified color as transparent")
parser.add_option("-b", "--background",
action="store", type="string", metavar="color",
help="save the specified background color")
parser.add_option("-a", "--alpha",
action="store", type="string", metavar="pgmfile",
help="alpha channel transparency (RGBA)")
parser.add_option("-g", "--gamma",
action="store", type="float", metavar="value",
help="save the specified gamma value")
parser.add_option("-c", "--compression",
action="store", type="int", metavar="level",
help="zlib compression level (0-9)")
parser.add_option("-T", "--test",
default=False, action="store_true",
help="create a test image")
parser.add_option("-R", "--test-red",
action="store", type="string", metavar="pattern",
help="test pattern for the red image layer")
parser.add_option("-G", "--test-green",
action="store", type="string", metavar="pattern",
help="test pattern for the green image layer")
parser.add_option("-B", "--test-blue",
action="store", type="string", metavar="pattern",
help="test pattern for the blue image layer")
parser.add_option("-A", "--test-alpha",
action="store", type="string", metavar="pattern",
help="test pattern for the alpha image layer")
parser.add_option("-D", "--test-deep",
default=False, action="store_true",
help="use test patterns with 16 bits per layer")
parser.add_option("-S", "--test-size",
action="store", type="int", metavar="size",
help="width and height of the test image")
(options, args) = parser.parse_args()
# Convert options
if options.transparent is not None:
options.transparent = color_triple(options.transparent)
if options.background is not None:
options.background = color_triple(options.background)
# Run regression tests
if options.test:
return test_suite(options)
# Prepare input and output files
if len(args) == 0:
ppmfilename = '-'
ppmfile = sys.stdin
elif len(args) == 1:
ppmfilename = args[0]
ppmfile = open(ppmfilename, 'rb')
else:
parser.error("more than one input file")
outfile = sys.stdout
# Encode PNM to PNG
width, height = read_pnm_header(ppmfile)
writer = Writer(width, height,
transparent=options.transparent,
background=options.background,
has_alpha=options.alpha is not None,
gamma=options.gamma,
compression=options.compression)
if options.alpha is not None:
pgmfile = open(options.alpha, 'rb')
awidth, aheight = read_pnm_header(pgmfile, 'P5')
if (awidth, aheight) != (width, height):
raise ValueError("alpha channel image size mismatch" +
" (%s has %sx%s but %s has %sx%s)"
% (ppmfilename, width, height,
options.alpha, awidth, aheight))
writer.convert_ppm_and_pgm(ppmfile, pgmfile, outfile,
interlace=options.interlace)
else:
writer.convert_ppm(ppmfile, outfile,
interlace=options.interlace)
if __name__ == '__main__':
_main()<|fim▁end|> | if options.test_red:
kwargs["red"] = options.test_red |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.