Columns: file_name (string, 3–137 chars), prefix (string, 0–918k), suffix (string, 0–962k), middle (string, 0–812k)
0010_auto_20200722_1738.py
# Generated by Django 3.0.8 on 2020-07-22 12:08
from django.db import migrations, models
class Migration(migrations.Migration):

    dependencies = [
        ('Home', '0009_auto_20200722_1734'),
    ]

    operations = [
        migrations.AlterField(
            model_name='student',
            name='branch',
            field=models.CharField(choices=[('CSE', 'CSE'), ('IT', 'IT'), ('EEE', 'EEE'), ('MECH', 'MECH'), ('ECE', 'ECE')], max_length=200, null=True),
        ),
        migrations.AlterField(
            model_name='student',
            name='rollno',
            field=models.CharField(max_length=200, primary_key=True, serialize=False),
        ),
    ]
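For context, a minimal sketch of the Student model these AlterField operations imply (a hypothetical reconstruction; the real model lives in the Home app's models.py):

# Hypothetical Home/models.py reconstructed from the migration above.
from django.db import models

class Student(models.Model):
    BRANCH_CHOICES = [('CSE', 'CSE'), ('IT', 'IT'), ('EEE', 'EEE'), ('MECH', 'MECH'), ('ECE', 'ECE')]
    rollno = models.CharField(max_length=200, primary_key=True)
    branch = models.CharField(choices=BRANCH_CHOICES, max_length=200, null=True)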
lingovn.py
# Auto generated by generator.py. Delete this line if you make modification.
from scrapy.spiders import Rule
from scrapy.linkextractors import LinkExtractor
XPATH = {
'name' : "//h1[@itemprop='name']",
'price' : "//div[@class='div-new-price']/span[@class='new-price']",
'category' : "//span[@class='item']/a[@itemprop='url']/span[@itemprop='title']",
'description' : "//div[@class='block-template-content']/div[@class='clearfix mt2x']",
'images' : "//div[@class='dsi-img full-cover ']/@data-image-hoverattribute",
'canonical' : "//link[@rel='canonical']/@href",
'base_url' : "",
'brand' : "//div[@class='infos prod-detail-brand']/a[@class='font-semibold brand-name']",
'in_stock' : "",
'guarantee' : "",
'promotion' : ""
}
name = 'lingo.vn'
allowed_domains = ['lingo.vn']
start_urls = ['http://lingo.vn/']
tracking_url = ''
sitemap_urls = ['']
sitemap_rules = [('', 'parse_item')]
sitemap_follow = ['']
rules = [
Rule(LinkExtractor(allow=[r'/[\w-]+-p\d+\.html$']), 'parse_item'),
Rule(LinkExtractor(allow=[r'/[\w-]+-c\d+/($|\?page=\d+$)']), 'parse'),
#Rule(LinkExtractor(), 'parse_item_and_links'),
]
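A minimal sketch (an assumption, not part of the generated spider) of how a parse_item callback would typically consume the XPATH map with Scrapy selectors:

# Hypothetical callback showing how the XPATH map is usually applied.
def parse_item(self, response):
    item = {'url': response.url}
    for field, xpath in XPATH.items():
        if xpath:  # skip fields whose XPath is left empty
            item[field] = response.xpath(xpath).get()
    return item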
ds.go
// Copyright 2015 The LUCI Authors.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
package txnBuf
import (
"context"
ds "go.chromium.org/luci/gae/service/datastore"
)
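// dsBuf wraps a raw datastore interface: the embedded ds.RawInterface
// supplies every method, and RunInTransaction below is overridden to add
// transaction buffering.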
type dsBuf struct {
ds.RawInterface
}
var _ ds.RawInterface = (*dsBuf)(nil)
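// Compile-time assertion that *dsBuf implements ds.RawInterface.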
func (d *dsBuf) RunInTransaction(f func(context.Context) error, opts *ds.TransactionOptions) error {
return doRunInTransaction(d.RawInterface, f, opts)
}
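// doRunInTransaction runs f in a transaction on the underlying RawInterface,
// wrapping the callback's context with the transaction buffer (withTxnBuf is
// defined elsewhere in this package).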
func doRunInTransaction(base ds.RawInterface, f func(context.Context) error, opts *ds.TransactionOptions) error {
return base.RunInTransaction(func(ctx context.Context) error {
return withTxnBuf(ctx, f, opts)
}, opts)
}
l2vpn_fxc_ac.pb.go
/*
Copyright 2019 Cisco Systems
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*/
// Code generated by protoc-gen-go. DO NOT EDIT.
// source: l2vpn_fxc_ac.proto
package cisco_ios_xr_l2vpn_oper_l2vpn_flexible_xconnect_services_flexible_xconnect_service_flexible_xconnect_service_acs_flexible_xconnect_service_ac
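// The package name is derived from the path of the Cisco IOS XR l2vpn
// operational YANG model (flexible xconnect service attachment circuits),
// which is why it is unusually long.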
import (
fmt "fmt"
proto "github.com/golang/protobuf/proto"
math "math"
)
// Reference imports to suppress errors if they are not otherwise used.
var _ = proto.Marshal
var _ = fmt.Errorf
var _ = math.Inf
// This is a compile-time assertion to ensure that this generated file
// is compatible with the proto package it is being compiled against.
// A compilation error at this line likely means your copy of the
// proto package needs to be updated.
const _ = proto.ProtoPackageIsVersion3 // please upgrade the proto package
type L2VpnFxcAc_KEYS struct {
Name string `protobuf:"bytes,1,opt,name=name,proto3" json:"name,omitempty"`
InterfaceName string `protobuf:"bytes,2,opt,name=interface_name,json=interfaceName,proto3" json:"interface_name,omitempty"`
XXX_NoUnkeyedLiteral struct{} `json:"-"`
XXX_unrecognized []byte `json:"-"`
XXX_sizecache int32 `json:"-"`
}
func (m *L2VpnFxcAc_KEYS) Reset() { *m = L2VpnFxcAc_KEYS{} }
func (m *L2VpnFxcAc_KEYS) String() string { return proto.CompactTextString(m) }
func (*L2VpnFxcAc_KEYS) ProtoMessage() {}
func (*L2VpnFxcAc_KEYS) Descriptor() ([]byte, []int) {
return fileDescriptor_212842bac8a1e033, []int{0}
}
func (m *L2VpnFxcAc_KEYS) XXX_Unmarshal(b []byte) error {
return xxx_messageInfo_L2VpnFxcAc_KEYS.Unmarshal(m, b)
}
func (m *L2VpnFxcAc_KEYS) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) {
return xxx_messageInfo_L2VpnFxcAc_KEYS.Marshal(b, m, deterministic)
}
func (m *L2VpnFxcAc_KEYS) XXX_Merge(src proto.Message) {
xxx_messageInfo_L2VpnFxcAc_KEYS.Merge(m, src)
}
func (m *L2VpnFxcAc_KEYS) XXX_Size() int {
return xxx_messageInfo_L2VpnFxcAc_KEYS.Size(m)
}
func (m *L2VpnFxcAc_KEYS) XXX_DiscardUnknown() {
xxx_messageInfo_L2VpnFxcAc_KEYS.DiscardUnknown(m)
}
var xxx_messageInfo_L2VpnFxcAc_KEYS proto.InternalMessageInfo
func (m *L2VpnFxcAc_KEYS) GetName() string {
if m != nil {
return m.Name
}
return ""
}
func (m *L2VpnFxcAc_KEYS) GetInterfaceName() string {
if m != nil {
return m.InterfaceName
}
return ""
}
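// Note: as with all protoc-gen-go output, these getters are nil-receiver
// safe, so callers can chain through optional submessages (for example
// m.GetAttachmentCircuit().GetInterface().GetName()) without nil checks.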
type L2VpnEthernet struct {
XconnectTags uint32 `protobuf:"varint,1,opt,name=xconnect_tags,json=xconnectTags,proto3" json:"xconnect_tags,omitempty"`
XXX_NoUnkeyedLiteral struct{} `json:"-"`
XXX_unrecognized []byte `json:"-"`
XXX_sizecache int32 `json:"-"`
}
func (m *L2VpnEthernet) Reset() { *m = L2VpnEthernet{} }
func (m *L2VpnEthernet) String() string { return proto.CompactTextString(m) }
func (*L2VpnEthernet) ProtoMessage() {}
func (*L2VpnEthernet) Descriptor() ([]byte, []int) {
return fileDescriptor_212842bac8a1e033, []int{1}
}
func (m *L2VpnEthernet) XXX_Unmarshal(b []byte) error {
return xxx_messageInfo_L2VpnEthernet.Unmarshal(m, b)
}
func (m *L2VpnEthernet) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) {
return xxx_messageInfo_L2VpnEthernet.Marshal(b, m, deterministic)
}
func (m *L2VpnEthernet) XXX_Merge(src proto.Message) {
xxx_messageInfo_L2VpnEthernet.Merge(m, src)
}
func (m *L2VpnEthernet) XXX_Size() int {
return xxx_messageInfo_L2VpnEthernet.Size(m)
}
func (m *L2VpnEthernet) XXX_DiscardUnknown() {
xxx_messageInfo_L2VpnEthernet.DiscardUnknown(m)
}
var xxx_messageInfo_L2VpnEthernet proto.InternalMessageInfo
func (m *L2VpnEthernet) GetXconnectTags() uint32 {
if m != nil {
return m.XconnectTags
}
return 0
}
type L2VpnEfpRange struct {
Lower uint32 `protobuf:"varint,1,opt,name=lower,proto3" json:"lower,omitempty"`
Upper uint32 `protobuf:"varint,2,opt,name=upper,proto3" json:"upper,omitempty"`
XXX_NoUnkeyedLiteral struct{} `json:"-"`
XXX_unrecognized []byte `json:"-"`
XXX_sizecache int32 `json:"-"`
}
func (m *L2VpnEfpRange) Reset() { *m = L2VpnEfpRange{} }
func (m *L2VpnEfpRange) String() string { return proto.CompactTextString(m) }
func (*L2VpnEfpRange) ProtoMessage() {}
func (*L2VpnEfpRange) Descriptor() ([]byte, []int) {
return fileDescriptor_212842bac8a1e033, []int{2}
}
func (m *L2VpnEfpRange) XXX_Unmarshal(b []byte) error {
return xxx_messageInfo_L2VpnEfpRange.Unmarshal(m, b)
}
func (m *L2VpnEfpRange) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) {
return xxx_messageInfo_L2VpnEfpRange.Marshal(b, m, deterministic)
}
func (m *L2VpnEfpRange) XXX_Merge(src proto.Message) {
xxx_messageInfo_L2VpnEfpRange.Merge(m, src)
}
func (m *L2VpnEfpRange) XXX_Size() int {
return xxx_messageInfo_L2VpnEfpRange.Size(m)
}
func (m *L2VpnEfpRange) XXX_DiscardUnknown() {
xxx_messageInfo_L2VpnEfpRange.DiscardUnknown(m)
}
var xxx_messageInfo_L2VpnEfpRange proto.InternalMessageInfo
func (m *L2VpnEfpRange) GetLower() uint32 {
if m != nil {
return m.Lower
}
return 0
}
func (m *L2VpnEfpRange) GetUpper() uint32 {
if m != nil {
return m.Upper
}
return 0
}
type L2VpnVlan struct {
XconnectTags uint32 `protobuf:"varint,1,opt,name=xconnect_tags,json=xconnectTags,proto3" json:"xconnect_tags,omitempty"`
VlanRewriteTag uint32 `protobuf:"varint,2,opt,name=vlan_rewrite_tag,json=vlanRewriteTag,proto3" json:"vlan_rewrite_tag,omitempty"`
SimpleEfp uint32 `protobuf:"varint,3,opt,name=simple_efp,json=simpleEfp,proto3" json:"simple_efp,omitempty"`
EncapsulationType uint32 `protobuf:"varint,4,opt,name=encapsulation_type,json=encapsulationType,proto3" json:"encapsulation_type,omitempty"`
OuterTag uint32 `protobuf:"varint,5,opt,name=outer_tag,json=outerTag,proto3" json:"outer_tag,omitempty"`
VlanRange []*L2VpnEfpRange `protobuf:"bytes,6,rep,name=vlan_range,json=vlanRange,proto3" json:"vlan_range,omitempty"`
RewriteTag []uint32 `protobuf:"varint,7,rep,packed,name=rewrite_tag,json=rewriteTag,proto3" json:"rewrite_tag,omitempty"`
XXX_NoUnkeyedLiteral struct{} `json:"-"`
XXX_unrecognized []byte `json:"-"`
XXX_sizecache int32 `json:"-"`
}
func (m *L2VpnVlan) Reset() { *m = L2VpnVlan{} }
func (m *L2VpnVlan) String() string { return proto.CompactTextString(m) }
func (*L2VpnVlan) ProtoMessage() {}
func (*L2VpnVlan) Descriptor() ([]byte, []int) {
return fileDescriptor_212842bac8a1e033, []int{3}
}
func (m *L2VpnVlan) XXX_Unmarshal(b []byte) error {
return xxx_messageInfo_L2VpnVlan.Unmarshal(m, b)
}
func (m *L2VpnVlan) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) {
return xxx_messageInfo_L2VpnVlan.Marshal(b, m, deterministic)
}
func (m *L2VpnVlan) XXX_Merge(src proto.Message) {
xxx_messageInfo_L2VpnVlan.Merge(m, src)
}
func (m *L2VpnVlan) XXX_Size() int {
return xxx_messageInfo_L2VpnVlan.Size(m)
}
func (m *L2VpnVlan) XXX_DiscardUnknown() {
xxx_messageInfo_L2VpnVlan.DiscardUnknown(m)
}
var xxx_messageInfo_L2VpnVlan proto.InternalMessageInfo
func (m *L2VpnVlan) GetXconnectTags() uint32 {
if m != nil {
return m.XconnectTags
}
return 0
}
func (m *L2VpnVlan) GetVlanRewriteTag() uint32 {
if m != nil {
return m.VlanRewriteTag
}
return 0
}
func (m *L2VpnVlan) GetSimpleEfp() uint32 {
if m != nil {
return m.SimpleEfp
}
return 0
}
func (m *L2VpnVlan) GetEncapsulationType() uint32 {
if m != nil {
return m.EncapsulationType
}
return 0
}
func (m *L2VpnVlan) GetOuterTag() uint32 {
if m != nil {
return m.OuterTag
}
return 0
}
func (m *L2VpnVlan) GetVlanRange() []*L2VpnEfpRange {
if m != nil {
return m.VlanRange
}
return nil
}
func (m *L2VpnVlan) GetRewriteTag() []uint32 {
if m != nil {
return m.RewriteTag
}
return nil
}
type L2VpnTdmOption struct {
PayloadBytes uint32 `protobuf:"varint,1,opt,name=payload_bytes,json=payloadBytes,proto3" json:"payload_bytes,omitempty"`
BitRate uint32 `protobuf:"varint,2,opt,name=bit_rate,json=bitRate,proto3" json:"bit_rate,omitempty"`
Rtp string `protobuf:"bytes,3,opt,name=rtp,proto3" json:"rtp,omitempty"`
TimestampMode string `protobuf:"bytes,4,opt,name=timestamp_mode,json=timestampMode,proto3" json:"timestamp_mode,omitempty"`
SignallingPackets uint32 `protobuf:"varint,5,opt,name=signalling_packets,json=signallingPackets,proto3" json:"signalling_packets,omitempty"`
Cas uint32 `protobuf:"varint,6,opt,name=cas,proto3" json:"cas,omitempty"`
RtpHeaderPayloadType uint32 `protobuf:"varint,7,opt,name=rtp_header_payload_type,json=rtpHeaderPayloadType,proto3" json:"rtp_header_payload_type,omitempty"`
TimestampClockFreq uint32 `protobuf:"varint,8,opt,name=timestamp_clock_freq,json=timestampClockFreq,proto3" json:"timestamp_clock_freq,omitempty"`
Ssrc uint32 `protobuf:"varint,9,opt,name=ssrc,proto3" json:"ssrc,omitempty"`
XXX_NoUnkeyedLiteral struct{} `json:"-"`
XXX_unrecognized []byte `json:"-"`
XXX_sizecache int32 `json:"-"`
}
func (m *L2VpnTdmOption) Reset() { *m = L2VpnTdmOption{} }
func (m *L2VpnTdmOption) String() string { return proto.CompactTextString(m) }
func (*L2VpnTdmOption) ProtoMessage() {}
func (*L2VpnTdmOption) Descriptor() ([]byte, []int) {
return fileDescriptor_212842bac8a1e033, []int{4}
}
func (m *L2VpnTdmOption) XXX_Unmarshal(b []byte) error {
return xxx_messageInfo_L2VpnTdmOption.Unmarshal(m, b)
}
func (m *L2VpnTdmOption) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) {
return xxx_messageInfo_L2VpnTdmOption.Marshal(b, m, deterministic)
}
func (m *L2VpnTdmOption) XXX_Merge(src proto.Message) {
xxx_messageInfo_L2VpnTdmOption.Merge(m, src)
}
func (m *L2VpnTdmOption) XXX_Size() int {
return xxx_messageInfo_L2VpnTdmOption.Size(m)
}
func (m *L2VpnTdmOption) XXX_DiscardUnknown() {
xxx_messageInfo_L2VpnTdmOption.DiscardUnknown(m)
}
var xxx_messageInfo_L2VpnTdmOption proto.InternalMessageInfo
func (m *L2VpnTdmOption) GetPayloadBytes() uint32 {
if m != nil {
return m.PayloadBytes
}
return 0
}
func (m *L2VpnTdmOption) GetBitRate() uint32 {
if m != nil {
return m.BitRate
}
return 0
}
func (m *L2VpnTdmOption) GetRtp() string {
if m != nil {
return m.Rtp
}
return ""
}
func (m *L2VpnTdmOption) GetTimestampMode() string {
if m != nil {
return m.TimestampMode
}
return ""
}
func (m *L2VpnTdmOption) GetSignallingPackets() uint32 {
if m != nil {
return m.SignallingPackets
}
return 0
}
func (m *L2VpnTdmOption) GetCas() uint32 {
if m != nil {
return m.Cas
}
return 0
}
func (m *L2VpnTdmOption) GetRtpHeaderPayloadType() uint32 {
if m != nil {
return m.RtpHeaderPayloadType
}
return 0
}
func (m *L2VpnTdmOption) GetTimestampClockFreq() uint32 {
if m != nil {
return m.TimestampClockFreq
}
return 0
}
func (m *L2VpnTdmOption) GetSsrc() uint32 {
if m != nil {
return m.Ssrc
}
return 0
}
type L2VpnTdm struct {
TimeslotGroup string `protobuf:"bytes,1,opt,name=timeslot_group,json=timeslotGroup,proto3" json:"timeslot_group,omitempty"`
TimeslotRate uint32 `protobuf:"varint,2,opt,name=timeslot_rate,json=timeslotRate,proto3" json:"timeslot_rate,omitempty"`
TdmMode string `protobuf:"bytes,3,opt,name=tdm_mode,json=tdmMode,proto3" json:"tdm_mode,omitempty"`
TdmOptions *L2VpnTdmOption `protobuf:"bytes,4,opt,name=tdm_options,json=tdmOptions,proto3" json:"tdm_options,omitempty"`
XXX_NoUnkeyedLiteral struct{} `json:"-"`
XXX_unrecognized []byte `json:"-"`
XXX_sizecache int32 `json:"-"`
}
func (m *L2VpnTdm) Reset() { *m = L2VpnTdm{} }
func (m *L2VpnTdm) String() string { return proto.CompactTextString(m) }
func (*L2VpnTdm) ProtoMessage() {}
func (*L2VpnTdm) Descriptor() ([]byte, []int) {
return fileDescriptor_212842bac8a1e033, []int{5}
}
func (m *L2VpnTdm) XXX_Unmarshal(b []byte) error {
return xxx_messageInfo_L2VpnTdm.Unmarshal(m, b)
}
func (m *L2VpnTdm) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) {
return xxx_messageInfo_L2VpnTdm.Marshal(b, m, deterministic)
}
func (m *L2VpnTdm) XXX_Merge(src proto.Message) {
xxx_messageInfo_L2VpnTdm.Merge(m, src)
}
func (m *L2VpnTdm) XXX_Size() int {
return xxx_messageInfo_L2VpnTdm.Size(m)
}
func (m *L2VpnTdm) XXX_DiscardUnknown() {
xxx_messageInfo_L2VpnTdm.DiscardUnknown(m)
}
var xxx_messageInfo_L2VpnTdm proto.InternalMessageInfo
func (m *L2VpnTdm) GetTimeslotGroup() string {
if m != nil {
return m.TimeslotGroup
}
return ""
}
func (m *L2VpnTdm) GetTimeslotRate() uint32 {
if m != nil {
return m.TimeslotRate
}
return 0
}
func (m *L2VpnTdm) GetTdmMode() string {
if m != nil {
return m.TdmMode
}
return ""
}
func (m *L2VpnTdm) GetTdmOptions() *L2VpnTdmOption {
if m != nil {
return m.TdmOptions
}
return nil
}
type L2VpnAtm struct {
MaximumNumberCellsPacked uint32 `protobuf:"varint,1,opt,name=maximum_number_cells_packed,json=maximumNumberCellsPacked,proto3" json:"maximum_number_cells_packed,omitempty"`
MaximumNumberCellsUnPacked uint32 `protobuf:"varint,2,opt,name=maximum_number_cells_un_packed,json=maximumNumberCellsUnPacked,proto3" json:"maximum_number_cells_un_packed,omitempty"`
AtmMode string `protobuf:"bytes,3,opt,name=atm_mode,json=atmMode,proto3" json:"atm_mode,omitempty"`
Vpi uint32 `protobuf:"varint,4,opt,name=vpi,proto3" json:"vpi,omitempty"`
Vci uint32 `protobuf:"varint,5,opt,name=vci,proto3" json:"vci,omitempty"`
XXX_NoUnkeyedLiteral struct{} `json:"-"`
XXX_unrecognized []byte `json:"-"`
XXX_sizecache int32 `json:"-"`
}
func (m *L2VpnAtm) Reset() { *m = L2VpnAtm{} }
func (m *L2VpnAtm) String() string { return proto.CompactTextString(m) }
func (*L2VpnAtm) ProtoMessage() {}
func (*L2VpnAtm) Descriptor() ([]byte, []int) {
return fileDescriptor_212842bac8a1e033, []int{6}
}
func (m *L2VpnAtm) XXX_Unmarshal(b []byte) error {
return xxx_messageInfo_L2VpnAtm.Unmarshal(m, b)
}
func (m *L2VpnAtm) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) {
return xxx_messageInfo_L2VpnAtm.Marshal(b, m, deterministic)
}
func (m *L2VpnAtm) XXX_Merge(src proto.Message) {
xxx_messageInfo_L2VpnAtm.Merge(m, src)
}
func (m *L2VpnAtm) XXX_Size() int {
return xxx_messageInfo_L2VpnAtm.Size(m)
}
func (m *L2VpnAtm) XXX_DiscardUnknown() {
xxx_messageInfo_L2VpnAtm.DiscardUnknown(m)
}
var xxx_messageInfo_L2VpnAtm proto.InternalMessageInfo
func (m *L2VpnAtm) GetMaximumNumberCellsPacked() uint32 {
if m != nil {
return m.MaximumNumberCellsPacked
}
return 0
}
func (m *L2VpnAtm) GetMaximumNumberCellsUnPacked() uint32 {
if m != nil {
return m.MaximumNumberCellsUnPacked
}
return 0
}
func (m *L2VpnAtm) GetAtmMode() string {
if m != nil {
return m.AtmMode
}
return ""
}
func (m *L2VpnAtm) GetVpi() uint32 {
if m != nil {
return m.Vpi
}
return 0
}
func (m *L2VpnAtm) GetVci() uint32 {
if m != nil {
return m.Vci
}
return 0
}
type L2VpnFr struct {
FrMode string `protobuf:"bytes,1,opt,name=fr_mode,json=frMode,proto3" json:"fr_mode,omitempty"`
Dlci uint32 `protobuf:"varint,2,opt,name=dlci,proto3" json:"dlci,omitempty"`
XXX_NoUnkeyedLiteral struct{} `json:"-"`
XXX_unrecognized []byte `json:"-"`
XXX_sizecache int32 `json:"-"`
}
func (m *L2VpnFr) Reset() { *m = L2VpnFr{} }
func (m *L2VpnFr) String() string { return proto.CompactTextString(m) }
func (*L2VpnFr) ProtoMessage() {}
func (*L2VpnFr) Descriptor() ([]byte, []int) {
return fileDescriptor_212842bac8a1e033, []int{7}
}
func (m *L2VpnFr) XXX_Unmarshal(b []byte) error {
return xxx_messageInfo_L2VpnFr.Unmarshal(m, b)
}
func (m *L2VpnFr) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) {
return xxx_messageInfo_L2VpnFr.Marshal(b, m, deterministic)
}
func (m *L2VpnFr) XXX_Merge(src proto.Message) {
xxx_messageInfo_L2VpnFr.Merge(m, src)
}
func (m *L2VpnFr) XXX_Size() int {
return xxx_messageInfo_L2VpnFr.Size(m)
}
func (m *L2VpnFr) XXX_DiscardUnknown() {
xxx_messageInfo_L2VpnFr.DiscardUnknown(m)
}
var xxx_messageInfo_L2VpnFr proto.InternalMessageInfo
func (m *L2VpnFr) GetFrMode() string {
if m != nil {
return m.FrMode
}
return ""
}
func (m *L2VpnFr) GetDlci() uint32 {
if m != nil {
return m.Dlci
}
return 0
}
type InterfacesItem struct {
Value string `protobuf:"bytes,1,opt,name=value,proto3" json:"value,omitempty"`
XXX_NoUnkeyedLiteral struct{} `json:"-"`
XXX_unrecognized []byte `json:"-"`
XXX_sizecache int32 `json:"-"`
}
func (m *InterfacesItem) Reset() { *m = InterfacesItem{} }
func (m *InterfacesItem) String() string { return proto.CompactTextString(m) }
func (*InterfacesItem) ProtoMessage() {}
func (*InterfacesItem) Descriptor() ([]byte, []int) {
return fileDescriptor_212842bac8a1e033, []int{8}
}
func (m *InterfacesItem) XXX_Unmarshal(b []byte) error {
return xxx_messageInfo_InterfacesItem.Unmarshal(m, b)
}
func (m *InterfacesItem) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) {
return xxx_messageInfo_InterfacesItem.Marshal(b, m, deterministic)
}
func (m *InterfacesItem) XXX_Merge(src proto.Message) {
xxx_messageInfo_InterfacesItem.Merge(m, src)
}
func (m *InterfacesItem) XXX_Size() int {
return xxx_messageInfo_InterfacesItem.Size(m)
}
func (m *InterfacesItem) XXX_DiscardUnknown() {
xxx_messageInfo_InterfacesItem.DiscardUnknown(m)
}
var xxx_messageInfo_InterfacesItem proto.InternalMessageInfo
func (m *InterfacesItem) GetValue() string {
if m != nil {
return m.Value
}
return ""
}
type ItemIflistIfl struct {
InterfaceListName string `protobuf:"bytes,1,opt,name=interface_list_name,json=interfaceListName,proto3" json:"interface_list_name,omitempty"`
InterfaceListId uint32 `protobuf:"varint,2,opt,name=interface_list_id,json=interfaceListId,proto3" json:"interface_list_id,omitempty"`
Interface []*InterfacesItem `protobuf:"bytes,3,rep,name=interface,proto3" json:"interface,omitempty"`
XXX_NoUnkeyedLiteral struct{} `json:"-"`
XXX_unrecognized []byte `json:"-"`
XXX_sizecache int32 `json:"-"`
}
func (m *ItemIflistIfl) Reset() { *m = ItemIflistIfl{} }
func (m *ItemIflistIfl) String() string { return proto.CompactTextString(m) }
func (*ItemIflistIfl) ProtoMessage() {}
func (*ItemIflistIfl) Descriptor() ([]byte, []int) {
return fileDescriptor_212842bac8a1e033, []int{9}
}
func (m *ItemIflistIfl) XXX_Unmarshal(b []byte) error {
return xxx_messageInfo_ItemIflistIfl.Unmarshal(m, b)
}
func (m *ItemIflistIfl) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) {
return xxx_messageInfo_ItemIflistIfl.Marshal(b, m, deterministic)
}
func (m *ItemIflistIfl) XXX_Merge(src proto.Message) {
xxx_messageInfo_ItemIflistIfl.Merge(m, src)
}
func (m *ItemIflistIfl) XXX_Size() int {
return xxx_messageInfo_ItemIflistIfl.Size(m)
}
func (m *ItemIflistIfl) XXX_DiscardUnknown() {
xxx_messageInfo_ItemIflistIfl.DiscardUnknown(m)
}
var xxx_messageInfo_ItemIflistIfl proto.InternalMessageInfo
func (m *ItemIflistIfl) GetInterfaceListName() string {
if m != nil {
return m.InterfaceListName
}
return ""
}
func (m *ItemIflistIfl) GetInterfaceListId() uint32 {
if m != nil {
return m.InterfaceListId
}
return 0
}
func (m *ItemIflistIfl) GetInterface() []*InterfacesItem {
if m != nil {
return m.Interface
}
return nil
}
type L2VpnPwhe struct {
IsValid bool `protobuf:"varint,1,opt,name=is_valid,json=isValid,proto3" json:"is_valid,omitempty"`
InterfaceList *ItemIflistIfl `protobuf:"bytes,2,opt,name=interface_list,json=interfaceList,proto3" json:"interface_list,omitempty"`
InternalLabel uint32 `protobuf:"varint,3,opt,name=internal_label,json=internalLabel,proto3" json:"internal_label,omitempty"`
XXX_NoUnkeyedLiteral struct{} `json:"-"`
XXX_unrecognized []byte `json:"-"`
XXX_sizecache int32 `json:"-"`
}
func (m *L2VpnPwhe) Reset() { *m = L2VpnPwhe{} }
func (m *L2VpnPwhe) String() string { return proto.CompactTextString(m) }
func (*L2VpnPwhe) ProtoMessage() {}
func (*L2VpnPwhe) Descriptor() ([]byte, []int) {
return fileDescriptor_212842bac8a1e033, []int{10}
}
func (m *L2VpnPwhe) XXX_Unmarshal(b []byte) error {
return xxx_messageInfo_L2VpnPwhe.Unmarshal(m, b)
}
func (m *L2VpnPwhe) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) {
return xxx_messageInfo_L2VpnPwhe.Marshal(b, m, deterministic)
}
func (m *L2VpnPwhe) XXX_Merge(src proto.Message) {
xxx_messageInfo_L2VpnPwhe.Merge(m, src)
}
func (m *L2VpnPwhe) XXX_Size() int {
return xxx_messageInfo_L2VpnPwhe.Size(m)
}
func (m *L2VpnPwhe) XXX_DiscardUnknown() {
xxx_messageInfo_L2VpnPwhe.DiscardUnknown(m)
}
var xxx_messageInfo_L2VpnPwhe proto.InternalMessageInfo
func (m *L2VpnPwhe) GetIsValid() bool {
if m != nil {
return m.IsValid
}
return false
}
func (m *L2VpnPwhe) GetInterfaceList() *ItemIflistIfl {
if m != nil {
return m.InterfaceList
}
return nil
}
func (m *L2VpnPwhe) GetInternalLabel() uint32 {
if m != nil {
return m.InternalLabel
}
return 0
}
type L2VpnInterfaceParams struct {
Type string `protobuf:"bytes,1,opt,name=type,proto3" json:"type,omitempty"`
Ethernet *L2VpnEthernet `protobuf:"bytes,2,opt,name=ethernet,proto3" json:"ethernet,omitempty"`
Vlan *L2VpnVlan `protobuf:"bytes,3,opt,name=vlan,proto3" json:"vlan,omitempty"`
Tdm *L2VpnTdm `protobuf:"bytes,4,opt,name=tdm,proto3" json:"tdm,omitempty"`
Atm *L2VpnAtm `protobuf:"bytes,5,opt,name=atm,proto3" json:"atm,omitempty"`
Fr *L2VpnFr `protobuf:"bytes,6,opt,name=fr,proto3" json:"fr,omitempty"`
PseudowireEther *L2VpnPwhe `protobuf:"bytes,7,opt,name=pseudowire_ether,json=pseudowireEther,proto3" json:"pseudowire_ether,omitempty"`
PseudowireIw *L2VpnPwhe `protobuf:"bytes,8,opt,name=pseudowire_iw,json=pseudowireIw,proto3" json:"pseudowire_iw,omitempty"`
XXX_NoUnkeyedLiteral struct{} `json:"-"`
XXX_unrecognized []byte `json:"-"`
XXX_sizecache int32 `json:"-"`
}
func (m *L2VpnInterfaceParams) Reset() { *m = L2VpnInterfaceParams{} }
func (m *L2VpnInterfaceParams) String() string { return proto.CompactTextString(m) }
func (*L2VpnInterfaceParams) ProtoMessage() {}
func (*L2VpnInterfaceParams) Descriptor() ([]byte, []int) {
return fileDescriptor_212842bac8a1e033, []int{11}
}
func (m *L2VpnInterfaceParams) XXX_Unmarshal(b []byte) error {
return xxx_messageInfo_L2VpnInterfaceParams.Unmarshal(m, b)
}
func (m *L2VpnInterfaceParams) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) {
return xxx_messageInfo_L2VpnInterfaceParams.Marshal(b, m, deterministic)
}
func (m *L2VpnInterfaceParams) XXX_Merge(src proto.Message) {
xxx_messageInfo_L2VpnInterfaceParams.Merge(m, src)
}
func (m *L2VpnInterfaceParams) XXX_Size() int {
return xxx_messageInfo_L2VpnInterfaceParams.Size(m)
}
func (m *L2VpnInterfaceParams) XXX_DiscardUnknown() {
xxx_messageInfo_L2VpnInterfaceParams.DiscardUnknown(m)
}
var xxx_messageInfo_L2VpnInterfaceParams proto.InternalMessageInfo
func (m *L2VpnInterfaceParams) GetType() string {
if m != nil {
return m.Type
}
return ""
}
func (m *L2VpnInterfaceParams) GetEthernet() *L2VpnEthernet {
if m != nil {
return m.Ethernet
}
return nil
}
func (m *L2VpnInterfaceParams) GetVlan() *L2VpnVlan {
if m != nil {
return m.Vlan
}
return nil
}
func (m *L2VpnInterfaceParams) GetTdm() *L2VpnTdm {
if m != nil {
return m.Tdm
}
return nil
}
func (m *L2VpnInterfaceParams) GetAtm() *L2VpnAtm {
if m != nil {
return m.Atm
}
return nil
}
func (m *L2VpnInterfaceParams) GetFr() *L2VpnFr {
if m != nil {
return m.Fr
}
return nil
}
func (m *L2VpnInterfaceParams) GetPseudowireEther() *L2VpnPwhe {
if m != nil {
return m.PseudowireEther
}
return nil
}
func (m *L2VpnInterfaceParams) GetPseudowireIw() *L2VpnPwhe {
if m != nil {
return m.PseudowireIw
}
return nil
}
type L2VpnInterface struct {
Name string `protobuf:"bytes,1,opt,name=name,proto3" json:"name,omitempty"`
Mtu uint32 `protobuf:"varint,2,opt,name=mtu,proto3" json:"mtu,omitempty"`
PayloadBytes uint32 `protobuf:"varint,3,opt,name=payload_bytes,json=payloadBytes,proto3" json:"payload_bytes,omitempty"`
Parameters *L2VpnInterfaceParams `protobuf:"bytes,4,opt,name=parameters,proto3" json:"parameters,omitempty"`
XXX_NoUnkeyedLiteral struct{} `json:"-"`
XXX_unrecognized []byte `json:"-"`
XXX_sizecache int32 `json:"-"`
}
func (m *L2VpnInterface) Reset() { *m = L2VpnInterface{} }
func (m *L2VpnInterface) String() string { return proto.CompactTextString(m) }
func (*L2VpnInterface) ProtoMessage() {}
func (*L2VpnInterface) Descriptor() ([]byte, []int) {
return fileDescriptor_212842bac8a1e033, []int{12}
}
func (m *L2VpnInterface) XXX_Unmarshal(b []byte) error {
return xxx_messageInfo_L2VpnInterface.Unmarshal(m, b)
}
func (m *L2VpnInterface) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) {
return xxx_messageInfo_L2VpnInterface.Marshal(b, m, deterministic)
}
func (m *L2VpnInterface) XXX_Merge(src proto.Message) {
xxx_messageInfo_L2VpnInterface.Merge(m, src)
}
func (m *L2VpnInterface) XXX_Size() int {
return xxx_messageInfo_L2VpnInterface.Size(m)
}
func (m *L2VpnInterface) XXX_DiscardUnknown() {
xxx_messageInfo_L2VpnInterface.DiscardUnknown(m)
}
var xxx_messageInfo_L2VpnInterface proto.InternalMessageInfo
func (m *L2VpnInterface) GetName() string {
if m != nil {
return m.Name
}
return ""
}
func (m *L2VpnInterface) GetMtu() uint32 {
if m != nil {
return m.Mtu
}
return 0
}
func (m *L2VpnInterface) GetPayloadBytes() uint32 {
if m != nil {
return m.PayloadBytes
}
return 0
}
func (m *L2VpnInterface) GetParameters() *L2VpnInterfaceParams {
if m != nil {
return m.Parameters
}
return nil
}
type L2FibStatsBagCounters struct {
PacketCounts uint64 `protobuf:"varint,1,opt,name=packet_counts,json=packetCounts,proto3" json:"packet_counts,omitempty"`
ByteCounts uint64 `protobuf:"varint,2,opt,name=byte_counts,json=byteCounts,proto3" json:"byte_counts,omitempty"`
XXX_NoUnkeyedLiteral struct{} `json:"-"`
XXX_unrecognized []byte `json:"-"`
XXX_sizecache int32 `json:"-"`
}
func (m *L2FibStatsBagCounters) Reset() { *m = L2FibStatsBagCounters{} }
func (m *L2FibStatsBagCounters) String() string { return proto.CompactTextString(m) }
func (*L2FibStatsBagCounters) ProtoMessage() {}
func (*L2FibStatsBagCounters) Descriptor() ([]byte, []int) {
return fileDescriptor_212842bac8a1e033, []int{13}
}
func (m *L2FibStatsBagCounters) XXX_Unmarshal(b []byte) error {
return xxx_messageInfo_L2FibStatsBagCounters.Unmarshal(m, b)
}
func (m *L2FibStatsBagCounters) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) {
return xxx_messageInfo_L2FibStatsBagCounters.Marshal(b, m, deterministic)
}
func (m *L2FibStatsBagCounters) XXX_Merge(src proto.Message) {
xxx_messageInfo_L2FibStatsBagCounters.Merge(m, src)
}
func (m *L2FibStatsBagCounters) XXX_Size() int {
return xxx_messageInfo_L2FibStatsBagCounters.Size(m)
}
func (m *L2FibStatsBagCounters) XXX_DiscardUnknown() {
xxx_messageInfo_L2FibStatsBagCounters.DiscardUnknown(m)
}
var xxx_messageInfo_L2FibStatsBagCounters proto.InternalMessageInfo
func (m *L2FibStatsBagCounters) GetPacketCounts() uint64 {
if m != nil {
return m.PacketCounts
}
return 0
}
func (m *L2FibStatsBagCounters) GetByteCounts() uint64 {
if m != nil {
return m.ByteCounts
}
return 0
}
type L2FibStatsBagImp struct {
ImpositionStat *L2FibStatsBagCounters `protobuf:"bytes,1,opt,name=imposition_stat,json=impositionStat,proto3" json:"imposition_stat,omitempty"`
ImpositionMtuDrop *L2FibStatsBagCounters `protobuf:"bytes,2,opt,name=imposition_mtu_drop,json=impositionMtuDrop,proto3" json:"imposition_mtu_drop,omitempty"`
ImpostionTailDrop *L2FibStatsBagCounters `protobuf:"bytes,3,opt,name=impostion_tail_drop,json=impostionTailDrop,proto3" json:"impostion_tail_drop,omitempty"`
L2FsbiDrop *L2FibStatsBagCounters `protobuf:"bytes,4,opt,name=l2fsbi_drop,json=l2fsbiDrop,proto3" json:"l2fsbi_drop,omitempty"`
Multicast *L2FibStatsBagCounters `protobuf:"bytes,5,opt,name=multicast,proto3" json:"multicast,omitempty"`
Broadcast *L2FibStatsBagCounters `protobuf:"bytes,6,opt,name=broadcast,proto3" json:"broadcast,omitempty"`
KnownUnicast *L2FibStatsBagCounters `protobuf:"bytes,7,opt,name=known_unicast,json=knownUnicast,proto3" json:"known_unicast,omitempty"`
UnknownUnicast *L2FibStatsBagCounters `protobuf:"bytes,8,opt,name=unknown_unicast,json=unknownUnicast,proto3" json:"unknown_unicast,omitempty"`
XXX_NoUnkeyedLiteral struct{} `json:"-"`
XXX_unrecognized []byte `json:"-"`
XXX_sizecache int32 `json:"-"`
}
func (m *L2FibStatsBagImp) Reset() { *m = L2FibStatsBagImp{} }
func (m *L2FibStatsBagImp) String() string { return proto.CompactTextString(m) }
func (*L2FibStatsBagImp) ProtoMessage() {}
func (*L2FibStatsBagImp) Descriptor() ([]byte, []int) {
return fileDescriptor_212842bac8a1e033, []int{14}
}
func (m *L2FibStatsBagImp) XXX_Unmarshal(b []byte) error {
return xxx_messageInfo_L2FibStatsBagImp.Unmarshal(m, b)
}
func (m *L2FibStatsBagImp) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) {
return xxx_messageInfo_L2FibStatsBagImp.Marshal(b, m, deterministic)
}
func (m *L2FibStatsBagImp) XXX_Merge(src proto.Message) {
xxx_messageInfo_L2FibStatsBagImp.Merge(m, src)
}
func (m *L2FibStatsBagImp) XXX_Size() int {
return xxx_messageInfo_L2FibStatsBagImp.Size(m)
}
func (m *L2FibStatsBagImp) XXX_DiscardUnknown() {
xxx_messageInfo_L2FibStatsBagImp.DiscardUnknown(m)
}
var xxx_messageInfo_L2FibStatsBagImp proto.InternalMessageInfo
func (m *L2FibStatsBagImp) GetImpositionStat() *L2FibStatsBagCounters {
if m != nil {
return m.ImpositionStat
}
return nil
}
func (m *L2FibStatsBagImp) GetImpositionMtuDrop() *L2FibStatsBagCounters {
if m != nil {
return m.ImpositionMtuDrop
}
return nil
}
func (m *L2FibStatsBagImp) GetImpostionTailDrop() *L2FibStatsBagCounters {
if m != nil {
return m.ImpostionTailDrop
}
return nil
}
func (m *L2FibStatsBagImp) GetL2FsbiDrop() *L2FibStatsBagCounters {
if m != nil {
return m.L2FsbiDrop
}
return nil
}
func (m *L2FibStatsBagImp) GetMulticast() *L2FibStatsBagCounters {
if m != nil {
return m.Multicast
}
return nil
}
func (m *L2FibStatsBagImp) GetBroadcast() *L2FibStatsBagCounters {
if m != nil {
return m.Broadcast
}
return nil
}
func (m *L2FibStatsBagImp) GetKnownUnicast() *L2FibStatsBagCounters {
if m != nil {
return m.KnownUnicast
}
return nil
}
func (m *L2FibStatsBagImp) GetUnknownUnicast() *L2FibStatsBagCounters {
if m != nil {
return m.UnknownUnicast
}
return nil
}
type L2FibStatsBagDisp struct {
DispositionStat *L2FibStatsBagCounters `protobuf:"bytes,1,opt,name=disposition_stat,json=dispositionStat,proto3" json:"disposition_stat,omitempty"`
DispositionMtuDrop *L2FibStatsBagCounters `protobuf:"bytes,2,opt,name=disposition_mtu_drop,json=dispositionMtuDrop,proto3" json:"disposition_mtu_drop,omitempty"`
DispositionTailDrop *L2FibStatsBagCounters `protobuf:"bytes,3,opt,name=disposition_tail_drop,json=dispositionTailDrop,proto3" json:"disposition_tail_drop,omitempty"`
MulticastDrop *L2FibStatsBagCounters `protobuf:"bytes,4,opt,name=multicast_drop,json=multicastDrop,proto3" json:"multicast_drop,omitempty"`
UnicastDrop *L2FibStatsBagCounters `protobuf:"bytes,5,opt,name=unicast_drop,json=unicastDrop,proto3" json:"unicast_drop,omitempty"`
BroadcastDrop *L2FibStatsBagCounters `protobuf:"bytes,6,opt,name=broadcast_drop,json=broadcastDrop,proto3" json:"broadcast_drop,omitempty"`
ReceivedDrops *L2FibStatsBagCounters `protobuf:"bytes,7,opt,name=received_drops,json=receivedDrops,proto3" json:"received_drops,omitempty"`
DaiDrop *L2FibStatsBagCounters `protobuf:"bytes,8,opt,name=dai_drop,json=daiDrop,proto3" json:"dai_drop,omitempty"`
IpsgDrop *L2FibStatsBagCounters `protobuf:"bytes,9,opt,name=ipsg_drop,json=ipsgDrop,proto3" json:"ipsg_drop,omitempty"`
DispositionOoODrops *L2FibStatsBagCounters `protobuf:"bytes,10,opt,name=disposition_oo_o_drops,json=dispositionOoODrops,proto3" json:"disposition_oo_o_drops,omitempty"`
DispositionP2MpStats *L2FibStatsBagCounters `protobuf:"bytes,11,opt,name=disposition_p2mp_stats,json=dispositionP2mpStats,proto3" json:"disposition_p2mp_stats,omitempty"`
KnownUnicast *L2FibStatsBagCounters `protobuf:"bytes,12,opt,name=known_unicast,json=knownUnicast,proto3" json:"known_unicast,omitempty"`
MacMove *L2FibStatsBagCounters `protobuf:"bytes,13,opt,name=mac_move,json=macMove,proto3" json:"mac_move,omitempty"`
XXX_NoUnkeyedLiteral struct{} `json:"-"`
XXX_unrecognized []byte `json:"-"`
XXX_sizecache int32 `json:"-"`
}
func (m *L2FibStatsBagDisp) Reset() { *m = L2FibStatsBagDisp{} }
func (m *L2FibStatsBagDisp) String() string { return proto.CompactTextString(m) }
func (*L2FibStatsBagDisp) ProtoMessage() {}
func (*L2FibStatsBagDisp) Descriptor() ([]byte, []int) {
return fileDescriptor_212842bac8a1e033, []int{15}
}
func (m *L2FibStatsBagDisp) XXX_Unmarshal(b []byte) error {
return xxx_messageInfo_L2FibStatsBagDisp.Unmarshal(m, b)
}
func (m *L2FibStatsBagDisp) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) {
return xxx_messageInfo_L2FibStatsBagDisp.Marshal(b, m, deterministic)
}
func (m *L2FibStatsBagDisp) XXX_Merge(src proto.Message) {
xxx_messageInfo_L2FibStatsBagDisp.Merge(m, src)
}
func (m *L2FibStatsBagDisp) XXX_Size() int {
return xxx_messageInfo_L2FibStatsBagDisp.Size(m)
}
func (m *L2FibStatsBagDisp) XXX_DiscardUnknown() {
xxx_messageInfo_L2FibStatsBagDisp.DiscardUnknown(m)
}
var xxx_messageInfo_L2FibStatsBagDisp proto.InternalMessageInfo
func (m *L2FibStatsBagDisp) GetDispositionStat() *L2FibStatsBagCounters {
if m != nil {
return m.DispositionStat
}
return nil
}
func (m *L2FibStatsBagDisp) GetDispositionMtuDrop() *L2FibStatsBagCounters {
if m != nil {
return m.DispositionMtuDrop
}
return nil
}
func (m *L2FibStatsBagDisp) GetDispositionTailDrop() *L2FibStatsBagCounters {
if m != nil {
return m.DispositionTailDrop
}
return nil
}
func (m *L2FibStatsBagDisp) GetMulticastDrop() *L2FibStatsBagCounters {
if m != nil {
return m.MulticastDrop
}
return nil
}
func (m *L2FibStatsBagDisp) GetUnicastDrop() *L2FibStatsBagCounters {
if m != nil {
return m.UnicastDrop
}
return nil
}
func (m *L2FibStatsBagDisp) GetBroadcastDrop() *L2FibStatsBagCounters {
if m != nil {
return m.BroadcastDrop
}
return nil
}
func (m *L2FibStatsBagDisp) GetReceivedDrops() *L2FibStatsBagCounters {
if m != nil {
return m.ReceivedDrops
}
return nil
}
func (m *L2FibStatsBagDisp) GetDaiDrop() *L2FibStatsBagCounters {
if m != nil {
return m.DaiDrop
}
return nil
}
func (m *L2FibStatsBagDisp) GetIpsgDrop() *L2FibStatsBagCounters {
if m != nil {
return m.IpsgDrop
}
return nil
}
func (m *L2FibStatsBagDisp) GetDispositionOoODrops() *L2FibStatsBagCounters {
if m != nil {
return m.DispositionOoODrops
}
return nil
}
func (m *L2FibStatsBagDisp) GetDispositionP2MpStats() *L2FibStatsBagCounters {
if m != nil {
return m.DispositionP2MpStats
}
return nil
}
func (m *L2FibStatsBagDisp) GetKnownUnicast() *L2FibStatsBagCounters {
if m != nil {
return m.KnownUnicast
}
return nil
}
func (m *L2FibStatsBagDisp) GetMacMove() *L2FibStatsBagCounters {
if m != nil {
return m.MacMove
}
return nil
}
type L2FibStatsBagSeqNum struct {
SequenceNumberSent uint32 `protobuf:"varint,1,opt,name=sequence_number_sent,json=sequenceNumberSent,proto3" json:"sequence_number_sent,omitempty"`
SequenceNumberExpected uint32 `protobuf:"varint,2,opt,name=sequence_number_expected,json=sequenceNumberExpected,proto3" json:"sequence_number_expected,omitempty"`
BypassedInboundSequencePacket uint64 `protobuf:"varint,3,opt,name=bypassed_inbound_sequence_packet,json=bypassedInboundSequencePacket,proto3" json:"bypassed_inbound_sequence_packet,omitempty"`
BypassedOutSequencePacket uint64 `protobuf:"varint,4,opt,name=bypassed_out_sequence_packet,json=bypassedOutSequencePacket,proto3" json:"bypassed_out_sequence_packet,omitempty"`
XXX_NoUnkeyedLiteral struct{} `json:"-"`
XXX_unrecognized []byte `json:"-"`
XXX_sizecache int32 `json:"-"`
}
func (m *L2FibStatsBagSeqNum) Reset() { *m = L2FibStatsBagSeqNum{} }
func (m *L2FibStatsBagSeqNum) String() string { return proto.CompactTextString(m) }
func (*L2FibStatsBagSeqNum) ProtoMessage() {}
func (*L2FibStatsBagSeqNum) Descriptor() ([]byte, []int) {
return fileDescriptor_212842bac8a1e033, []int{16}
}
func (m *L2FibStatsBagSeqNum) XXX_Unmarshal(b []byte) error {
return xxx_messageInfo_L2FibStatsBagSeqNum.Unmarshal(m, b)
}
func (m *L2FibStatsBagSeqNum) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) {
return xxx_messageInfo_L2FibStatsBagSeqNum.Marshal(b, m, deterministic)
}
func (m *L2FibStatsBagSeqNum) XXX_Merge(src proto.Message) {
xxx_messageInfo_L2FibStatsBagSeqNum.Merge(m, src)
}
func (m *L2FibStatsBagSeqNum) XXX_Size() int {
return xxx_messageInfo_L2FibStatsBagSeqNum.Size(m)
}
func (m *L2FibStatsBagSeqNum) XXX_DiscardUnknown() {
xxx_messageInfo_L2FibStatsBagSeqNum.DiscardUnknown(m)
}
var xxx_messageInfo_L2FibStatsBagSeqNum proto.InternalMessageInfo
func (m *L2FibStatsBagSeqNum) GetSequenceNumberSent() uint32 {
if m != nil {
return m.SequenceNumberSent
}
return 0
}
func (m *L2FibStatsBagSeqNum) GetSequenceNumberExpected() uint32 {
if m != nil {
return m.SequenceNumberExpected
}
return 0
}
func (m *L2FibStatsBagSeqNum) GetBypassedInboundSequencePacket() uint64 {
if m != nil {
return m.BypassedInboundSequencePacket
}
return 0
}
func (m *L2FibStatsBagSeqNum) GetBypassedOutSequencePacket() uint64 {
if m != nil {
return m.BypassedOutSequencePacket
}
return 0
}
type L2FibStatsBagForwarding struct {
ImpostionStats *L2FibStatsBagImp `protobuf:"bytes,1,opt,name=impostion_stats,json=impostionStats,proto3" json:"impostion_stats,omitempty"`
DispostionStats *L2FibStatsBagDisp `protobuf:"bytes,2,opt,name=dispostion_stats,json=dispostionStats,proto3" json:"dispostion_stats,omitempty"`
SequenceNumber *L2FibStatsBagSeqNum `protobuf:"bytes,3,opt,name=sequence_number,json=sequenceNumber,proto3" json:"sequence_number,omitempty"`
XXX_NoUnkeyedLiteral struct{} `json:"-"`
XXX_unrecognized []byte `json:"-"`
XXX_sizecache int32 `json:"-"`
}
func (m *L2FibStatsBagForwarding) Reset() { *m = L2FibStatsBagForwarding{} }
func (m *L2FibStatsBagForwarding) String() string { return proto.CompactTextString(m) }
func (*L2FibStatsBagForwarding) ProtoMessage() {}
func (*L2FibStatsBagForwarding) Descriptor() ([]byte, []int) {
return fileDescriptor_212842bac8a1e033, []int{17}
}
func (m *L2FibStatsBagForwarding) XXX_Unmarshal(b []byte) error {
return xxx_messageInfo_L2FibStatsBagForwarding.Unmarshal(m, b)
}
func (m *L2FibStatsBagForwarding) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) {
return xxx_messageInfo_L2FibStatsBagForwarding.Marshal(b, m, deterministic)
}
func (m *L2FibStatsBagForwarding) XXX_Merge(src proto.Message) {
xxx_messageInfo_L2FibStatsBagForwarding.Merge(m, src)
}
func (m *L2FibStatsBagForwarding) XXX_Size() int {
return xxx_messageInfo_L2FibStatsBagForwarding.Size(m)
}
func (m *L2FibStatsBagForwarding) XXX_DiscardUnknown() {
xxx_messageInfo_L2FibStatsBagForwarding.DiscardUnknown(m)
}
var xxx_messageInfo_L2FibStatsBagForwarding proto.InternalMessageInfo
func (m *L2FibStatsBagForwarding) GetImpostionStats() *L2FibStatsBagImp {
if m != nil {
return m.ImpostionStats
}
return nil
}
func (m *L2FibStatsBagForwarding) GetDispostionStats() *L2FibStatsBagDisp {
if m != nil {
return m.DispostionStats
}
return nil
}
func (m *L2FibStatsBagForwarding) GetSequenceNumber() *L2FibStatsBagSeqNum {
if m != nil {
return m.SequenceNumber
}
return nil
}
type L2VpnProtection struct {
ProtectionType string `protobuf:"bytes,1,opt,name=protection_type,json=protectionType,proto3" json:"protection_type,omitempty"`
ProtectionConfigured string `protobuf:"bytes,2,opt,name=protection_configured,json=protectionConfigured,proto3" json:"protection_configured,omitempty"`
ProtectionName string `protobuf:"bytes,3,opt,name=protection_name,json=protectionName,proto3" json:"protection_name,omitempty"`
ProtectedName string `protobuf:"bytes,4,opt,name=protected_name,json=protectedName,proto3" json:"protected_name,omitempty"`
Active bool `protobuf:"varint,5,opt,name=active,proto3" json:"active,omitempty"`
XXX_NoUnkeyedLiteral struct{} `json:"-"`
XXX_unrecognized []byte `json:"-"`
XXX_sizecache int32 `json:"-"`
}
func (m *L2VpnProtection) Reset() { *m = L2VpnProtection{} }
func (m *L2VpnProtection) String() string { return proto.CompactTextString(m) }
func (*L2VpnProtection) ProtoMessage() {}
func (*L2VpnProtection) Descriptor() ([]byte, []int) {
return fileDescriptor_212842bac8a1e033, []int{18}
}
func (m *L2VpnProtection) XXX_Unmarshal(b []byte) error {
return xxx_messageInfo_L2VpnProtection.Unmarshal(m, b)
}
func (m *L2VpnProtection) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) {
return xxx_messageInfo_L2VpnProtection.Marshal(b, m, deterministic)
}
func (m *L2VpnProtection) XXX_Merge(src proto.Message) {
xxx_messageInfo_L2VpnProtection.Merge(m, src)
}
func (m *L2VpnProtection) XXX_Size() int {
return xxx_messageInfo_L2VpnProtection.Size(m)
}
func (m *L2VpnProtection) XXX_DiscardUnknown() {
xxx_messageInfo_L2VpnProtection.DiscardUnknown(m)
}
var xxx_messageInfo_L2VpnProtection proto.InternalMessageInfo
func (m *L2VpnProtection) GetProtectionType() string {
if m != nil {
return m.ProtectionType
}
return ""
}
func (m *L2VpnProtection) GetProtectionConfigured() string {
if m != nil {
return m.ProtectionConfigured
}
return ""
}
func (m *L2VpnProtection) GetProtectionName() string {
if m != nil {
return m.ProtectionName
}
return ""
}
func (m *L2VpnProtection) GetProtectedName() string {
if m != nil {
return m.ProtectedName
}
return ""
}
func (m *L2VpnProtection) GetActive() bool {
if m != nil {
return m.Active
}
return false
}
type L2VpnAc struct {
Interface *L2VpnInterface `protobuf:"bytes,1,opt,name=interface,proto3" json:"interface,omitempty"`
State string `protobuf:"bytes,2,opt,name=state,proto3" json:"state,omitempty"`
Msti string `protobuf:"bytes,3,opt,name=msti,proto3" json:"msti,omitempty"`
InternalMsTi string `protobuf:"bytes,4,opt,name=internal_ms_ti,json=internalMsTi,proto3" json:"internal_ms_ti,omitempty"`
InterfaceHandle string `protobuf:"bytes,5,opt,name=interface_handle,json=interfaceHandle,proto3" json:"interface_handle,omitempty"`
NodeId string `protobuf:"bytes,6,opt,name=node_id,json=nodeId,proto3" json:"node_id,omitempty"`
XconnectId uint32 `protobuf:"varint,7,opt,name=xconnect_id,json=xconnectId,proto3" json:"xconnect_id,omitempty"`
MsTiMismatch bool `protobuf:"varint,8,opt,name=ms_ti_mismatch,json=msTiMismatch,proto3" json:"ms_ti_mismatch,omitempty"`
MtuMismatched bool `protobuf:"varint,9,opt,name=mtu_mismatched,json=mtuMismatched,proto3" json:"mtu_mismatched,omitempty"`
TdmMediaMismatched bool `protobuf:"varint,10,opt,name=tdm_media_mismatched,json=tdmMediaMismatched,proto3" json:"tdm_media_mismatched,omitempty"`
BviMacConflict bool `protobuf:"varint,11,opt,name=bvi_mac_conflict,json=bviMacConflict,proto3" json:"bvi_mac_conflict,omitempty"`
BviNoPortUp bool `protobuf:"varint,12,opt,name=bvi_no_port_up,json=bviNoPortUp,proto3" json:"bvi_no_port_up,omitempty"`
ControlWordMismatched bool `protobuf:"varint,13,opt,name=control_word_mismatched,json=controlWordMismatched,proto3" json:"control_word_mismatched,omitempty"`
EncapsulationMismatched bool `protobuf:"varint,14,opt,name=encapsulation_mismatched,json=encapsulationMismatched,proto3" json:"encapsulation_mismatched,omitempty"`
EncapsulationError string `protobuf:"bytes,15,opt,name=encapsulation_error,json=encapsulationError,proto3" json:"encapsulation_error,omitempty"`
Interworking string `protobuf:"bytes,16,opt,name=interworking,proto3" json:"interworking,omitempty"`
OutOfMemoryState uint32 `protobuf:"varint,17,opt,name=out_of_memory_state,json=outOfMemoryState,proto3" json:"out_of_memory_state,omitempty"`
Statistics *L2FibStatsBagForwarding `protobuf:"bytes,18,opt,name=statistics,proto3" json:"statistics,omitempty"`
MstiMismatchDown bool `protobuf:"varint,19,opt,name=msti_mismatch_down,json=mstiMismatchDown,proto3" json:"msti_mismatch_down,omitempty"`
MacLimitOperDown bool `protobuf:"varint,20,opt,name=mac_limit_oper_down,json=macLimitOperDown,proto3" json:"mac_limit_oper_down,omitempty"`
RedundancyGroupId uint32 `protobuf:"varint,21,opt,name=redundancy_group_id,json=redundancyGroupId,proto3" json:"redundancy_group_id,omitempty"`
RedundancyGroupState string `protobuf:"bytes,22,opt,name=redundancy_group_state,json=redundancyGroupState,proto3" json:"redundancy_group_state,omitempty"`
RedundancyObjectId uint64 `protobuf:"varint,23,opt,name=redundancy_object_id,json=redundancyObjectId,proto3" json:"redundancy_object_id,omitempty"`
L2VpnProtection *L2VpnProtection `protobuf:"bytes,24,opt,name=l2vpn_protection,json=l2vpnProtection,proto3" json:"l2vpn_protection,omitempty"`
IsAcPartiallyProgrammed bool `protobuf:"varint,25,opt,name=is_ac_partially_programmed,json=isAcPartiallyProgrammed,proto3" json:"is_ac_partially_programmed,omitempty"`
EvpnInternalLabel uint32 `protobuf:"varint,26,opt,name=evpn_internal_label,json=evpnInternalLabel,proto3" json:"evpn_internal_label,omitempty"`
XXX_NoUnkeyedLiteral struct{} `json:"-"`
XXX_unrecognized []byte `json:"-"`
XXX_sizecache int32 `json:"-"`
}
func (m *L2VpnAc) Reset() { *m = L2VpnAc{} }
func (m *L2VpnAc) String() string { return proto.CompactTextString(m) }
func (*L2VpnAc) ProtoMessage() {}
func (*L2VpnAc) Descriptor() ([]byte, []int) {
return fileDescriptor_212842bac8a1e033, []int{19}
}
func (m *L2VpnAc) XXX_Unmarshal(b []byte) error {
return xxx_messageInfo_L2VpnAc.Unmarshal(m, b)
}
func (m *L2VpnAc) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) {
return xxx_messageInfo_L2VpnAc.Marshal(b, m, deterministic)
}
func (m *L2VpnAc) XXX_Merge(src proto.Message) {
xxx_messageInfo_L2VpnAc.Merge(m, src)
}
func (m *L2VpnAc) XXX_Size() int {
return xxx_messageInfo_L2VpnAc.Size(m)
}
func (m *L2VpnAc) XXX_DiscardUnknown() {
xxx_messageInfo_L2VpnAc.DiscardUnknown(m)
}
var xxx_messageInfo_L2VpnAc proto.InternalMessageInfo
func (m *L2VpnAc) GetInterface() *L2VpnInterface {
if m != nil {
return m.Interface
}
return nil
}
func (m *L2VpnAc) GetState() string {
if m != nil {
return m.State
}
return ""
}
func (m *L2VpnAc) GetMsti() string {
if m != nil {
return m.Msti
}
return ""
}
func (m *L2VpnAc) GetInternalMsTi() string {
if m != nil {
return m.InternalMsTi
}
return ""
}
func (m *L2VpnAc) GetInterfaceHandle() string {
if m != nil {
return m.InterfaceHandle
}
return ""
}
func (m *L2VpnAc) GetNodeId() string {
if m != nil {
return m.NodeId
}
return ""
}
func (m *L2VpnAc) GetXconnectId() uint32 {
if m != nil {
return m.XconnectId
}
return 0
}
func (m *L2VpnAc) GetMsTiMismatch() bool {
if m != nil {
return m.MsTiMismatch
}
return false
}
func (m *L2VpnAc) GetMtuMismatched() bool {
if m != nil {
return m.MtuMismatched
}
return false
}
func (m *L2VpnAc) GetTdmMediaMismatched() bool {
if m != nil {
return m.TdmMediaMismatched
}
return false
}
func (m *L2VpnAc) GetBviMacConflict() bool {
if m != nil {
return m.BviMacConflict
}
return false
}
func (m *L2VpnAc) GetBviNoPortUp() bool {
if m != nil {
return m.BviNoPortUp
}
return false
}
func (m *L2VpnAc) GetControlWordMismatched() bool {
if m != nil {
return m.ControlWordMismatched
}
return false
}
func (m *L2VpnAc) GetEncapsulationMismatched() bool {
if m != nil {
return m.EncapsulationMismatched
}
return false
}
func (m *L2VpnAc) GetEncapsulationError() string {
if m != nil {
return m.EncapsulationError
}
return ""
}
func (m *L2VpnAc) GetInterworking() string {
if m != nil {
return m.Interworking
}
return ""
}
func (m *L2VpnAc) GetOutOfMemoryState() uint32 {
if m != nil {
return m.OutOfMemoryState
}
return 0
}
func (m *L2VpnAc) GetStatistics() *L2FibStatsBagForwarding {
if m != nil {
return m.Statistics
}
return nil
}
func (m *L2VpnAc) GetMstiMismatchDown() bool {
if m != nil {
return m.MstiMismatchDown
}
return false
}
func (m *L2VpnAc) GetMacLimitOperDown() bool {
if m != nil {
return m.MacLimitOperDown
}
return false
}
func (m *L2VpnAc) GetRedundancyGroupId() uint32 {
if m != nil {
return m.RedundancyGroupId
}
return 0
}
func (m *L2VpnAc) GetRedundancyGroupState() string {
if m != nil {
return m.RedundancyGroupState
}
return ""
}
func (m *L2VpnAc) GetRedundancyObjectId() uint64 {
if m != nil {
return m.RedundancyObjectId
}
return 0
}
func (m *L2VpnAc) GetL2VpnProtection() *L2VpnProtection {
if m != nil {
return m.L2VpnProtection
}
return nil
}
func (m *L2VpnAc) GetIsAcPartiallyProgrammed() bool {
if m != nil {
return m.IsAcPartiallyProgrammed
}
return false
}
func (m *L2VpnAc) GetEvpnInternalLabel() uint32 {
if m != nil {
return m.EvpnInternalLabel
}
return 0
}
type L2VpnFxcAc struct {
AttachmentCircuit *L2VpnAc `protobuf:"bytes,50,opt,name=attachment_circuit,json=attachmentCircuit,proto3" json:"attachment_circuit,omitempty"`
XXX_NoUnkeyedLiteral struct{} `json:"-"`
XXX_unrecognized []byte `json:"-"`
XXX_sizecache int32 `json:"-"`
}
func (m *L2VpnFxcAc) Reset() { *m = L2VpnFxcAc{} }
func (m *L2VpnFxcAc) String() string { return proto.CompactTextString(m) }
func (*L2VpnFxcAc) ProtoMessage() {}
func (*L2VpnFxcAc) Descriptor() ([]byte, []int) {
return fileDescriptor_212842bac8a1e033, []int{20}
}
func (m *L2VpnFxcAc) XXX_Unmarshal(b []byte) error {
return xxx_messageInfo_L2VpnFxcAc.Unmarshal(m, b)
}
func (m *L2VpnFxcAc) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) {
return xxx_messageInfo_L2VpnFxcAc.Marshal(b, m, deterministic)
}
func (m *L2VpnFxcAc) XXX_Merge(src proto.Message) {
xxx_messageInfo_L2VpnFxcAc.Merge(m, src)
}
func (m *L2VpnFxcAc) XXX_Size() int {
return xxx_messageInfo_L2VpnFxcAc.Size(m)
}
func (m *L2VpnFxcAc) XXX_DiscardUnknown() {
xxx_messageInfo_L2VpnFxcAc.DiscardUnknown(m)
}
var xxx_messageInfo_L2VpnFxcAc proto.InternalMessageInfo
func (m *L2VpnFxcAc) GetAttachmentCircuit() *L2VpnAc {
if m != nil {
return m.AttachmentCircuit
}
return nil
}
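// A minimal usage sketch (an assumption, not part of the generated file):
// decode a serialized row into the generated type and read nested fields
// through the nil-safe getters.
//
//	var row L2VpnFxcAc
//	if err := proto.Unmarshal(payload, &row); err != nil {
//		// handle the decode error
//	}
//	ifName := row.GetAttachmentCircuit().GetInterface().GetName()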
func init() { proto.RegisterFile("l2vpn_fxc_ac.proto", fileDescriptor_212842bac8a1e033) }
var fileDescriptor_212842bac8a1e033 = []byte{
// 2340 bytes of a gzipped FileDescriptorProto
0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0xd4, 0x5a, 0xdd, 0x6f, 0x24, 0x47,
0x11, 0xd7, 0x78, 0x7d, 0xf6, 0x6e, 0xad, 0xd7, 0x5e, 0xb7, 0x7d, 0xf6, 0xde, 0x85, 0x90, 0xd3,
0x06, 0x94, 0x03, 0x11, 0x13, 0x6d, 0x08, 0x1f, 0x42, 0x11, 0x22, 0xce, 0x91, 0x58, 0x9c, 0xcf,
0xd6, 0xdc, 0x1d, 0x88, 0xa7, 0xa6, 0x77, 0xa6, 0x77, 0xdd, 0xdc, 0xf4, 0xf4, 0x5c, 0x77, 0xcf,
0xae, 0xfd, 0x47, 0x20, 0x78, 0x0b, 0x3c, 0x00, 0x02, 0x14, 0x08, 0x8a, 0xe0, 0x91, 0x07, 0x24,
0x40, 0x11, 0xca, 0xa1, 0x10, 0x22, 0x88, 0x78, 0x39, 0x21, 0xe0, 0x6f, 0x41, 0xfd, 0x31, 0x1f,
0xbb, 0xe7, 0x20, 0x1e, 0x77, 0x9f, 0xb6, 0xa7, 0xaa, 0xba, 0xa7, 0x7e, 0x55, 0xdd, 0x55, 0x35,
0xdb, 0x05, 0x28, 0x19, 0x4c, 0xb2, 0x14, 0x8f, 0xce, 0x23, 0x4c, 0xa2, 0x83, 0x4c, 0x0a, 0x2d,
0xd0, 0x77, 0x83, 0x88, 0xa9, 0x48, 0x60, 0x26, 0x14, 0x3e, 0x97, 0xd8, 0x49, 0x88, 0x8c, 0xca,
0x03, 0x3b, 0x3c, 0x18, 0x25, 0xf4, 0x9c, 0x0d, 0x13, 0x8a, 0xcf, 0x23, 0x91, 0xa6, 0x34, 0xd2,
0x58, 0x51, 0x39, 0x61, 0x11, 0x55, 0x1f, 0xcd, 0xfa, 0x68, 0x0e, 0x26, 0x91, 0xfa, 0x9f, 0xdc,
0xfe, 0x1d, 0xd8, 0xae, 0x6b, 0x89, 0xbf, 0x7e, 0xeb, 0x5b, 0x77, 0x11, 0x82, 0xd5, 0x94, 0x70,
0xda, 0x0b, 0x6e, 0x04, 0x37, 0x5b, 0xa1, 0x1d, 0xa3, 0x4f, 0xc2, 0x26, 0x4b, 0x35, 0x95, 0x23,
0x12, 0x51, 0x6c, 0xb9, 0x2b, 0x96, 0xdb, 0x29, 0xa9, 0x77, 0x08, 0xa7, 0xfd, 0x97, 0x60, 0xd3,
0xad, 0x47, 0xf5, 0x19, 0x95, 0x29, 0xd5, 0xe8, 0x59, 0xe8, 0x94, 0x2f, 0xd6, 0x64, 0xac, 0xec,
0xaa, 0x9d, 0x70, 0xa3, 0x20, 0xde, 0x23, 0x63, 0xd5, 0x7f, 0x19, 0xb6, 0xfc, 0xb4, 0x51, 0x86,
0x25, 0x49, 0xc7, 0x14, 0xed, 0xc2, 0x95, 0x44, 0x4c, 0xa9, 0xf4, 0xf2, 0xee, 0xc1, 0x50, 0xf3,
0x2c, 0xa3, 0xd2, 0xbe, 0xbd, 0x13, 0xba, 0x87, 0xfe, 0x5b, 0x0d, 0x00, 0x37, 0x7f, 0x92, 0x90,
0xf4, 0xff, 0x7a, 0x25, 0xba, 0x09, 0x5d, 0x23, 0x8c, 0x25, 0x9d, 0x4a, 0xa6, 0xa9, 0x11, 0xf4,
0x8b, 0x6e, 0x1a, 0x7a, 0xe8, 0xc8, 0xf7, 0xc8, 0x18, 0x3d, 0x0d, 0xa0, 0x18, 0xcf, 0x12, 0x6a,
0xb4, 0xeb, 0x35, 0xac, 0x4c, 0xcb, 0x51, 0x6e, 0x8d, 0x32, 0xf4, 0x3c, 0x20, 0x9a, 0x46, 0x24,
0x53, 0x79, 0x42, 0x34, 0x13, 0x29, 0xd6, 0x17, 0x19, 0xed, 0xad, 0x5a, 0xb1, 0xed, 0x19, 0xce,
0xbd, 0x8b, 0x8c, 0xa2, 0xa7, 0xa0, 0x25, 0x72, 0x4d, 0xa5, 0x7d, 0xe1, 0x15, 0x2b, 0xd5, 0xb4,
0x04, 0xf3, 0xaa, 0x3f, 0x04, 0x00, 0x4e, 0x2b, 0x63, 0x83, 0xde, 0xda, 0x8d, 0xc6, 0xcd, 0xf6,
0xe0, 0xc7, 0xc1, 0xc1, 0x42, 0x6d, 0x9a, 0x83, 0x39, 0x57, 0x85, 0x2d, 0x6b, 0x30, 0xeb, 0xb5,
0x67, 0xa0, 0x5d, 0x37, 0xe8, 0xfa, 0x8d, 0xc6, 0xcd, 0x4e, 0x08, 0xb2, 0x34, 0x66, 0xff, 0xbd,
0x15, 0xe8, 0xba, 0xf9, 0x3a, 0xe6, 0x58, 0x64, 0xc6, 0x2e, 0xc6, 0x61, 0x19, 0xb9, 0x48, 0x04,
0x89, 0xf1, 0xf0, 0x42, 0xd3, 0xd2, 0x61, 0x9e, 0xf8, 0x8a, 0xa1, 0xa1, 0x6b, 0xd0, 0x1c, 0x32,
0x8d, 0x25, 0xd1, 0xd4, 0x3b, 0x6a, 0x7d, 0xc8, 0x74, 0x48, 0x34, 0x45, 0x5d, 0x68, 0x48, 0xed,
0x5c, 0xd3, 0x0a, 0xcd, 0xd0, 0x6c, 0x57, 0xcd, 0x38, 0x55, 0x9a, 0xf0, 0x0c, 0x73, 0x11, 0x3b,
0x87, 0xb4, 0xc2, 0x4e, 0x49, 0x3d, 0x16, 0x31, 0x35, 0xbe, 0x53, 0x6c, 0x9c, 0x92, 0x24, 0x61,
0xe9, 0x18, 0x67, 0x24, 0x7a, 0x40, 0xb5, 0xf2, 0x5e, 0xd9, 0xae, 0x38, 0xa7, 0x8e, 0x61, 0xde,
0x13, 0x11, 0xd5, 0x5b, 0xb3, 0x7c, 0x33, 0x44, 0x2f, 0xc1, 0xbe, 0xd4, 0x19, 0x3e, 0xa3, 0x24,
0xa6, 0x12, 0x17, 0x20, 0xec, 0x0e, 0x58, 0xb7, 0x52, 0xbb, 0x52, 0x67, 0xaf, 0x5b, 0xee, 0xa9,
0x63, 0xda, 0x4d, 0xf0, 0x02, 0xec, 0x56, 0xea, 0x45, 0x89, 0x88, 0x1e, 0xe0, 0x91, 0xa4, 0x0f,
0x7b, 0x4d, 0x3b, 0x07, 0x95, 0xbc, 0x43, 0xc3, 0xfa, 0x9a, 0xa4, 0x0f, 0xcd, 0x99, 0x54, 0x4a,
0x46, 0xbd, 0x96, 0x95, 0xb0, 0xe3, 0xfe, 0x6f, 0x57, 0xa0, 0x55, 0xda, 0xb2, 0x84, 0x9c, 0x08,
0x8d, 0xc7, 0x52, 0xe4, 0x99, 0x3f, 0xbf, 0x9d, 0x82, 0xfa, 0x9a, 0x21, 0x1a, 0x5b, 0x97, 0x62,
0x35, 0x5b, 0x6e, 0x14, 0x44, 0x6b, 0xd0, 0x6b, 0xd0, 0x34, 0xee, 0xb1, 0x86, 0x73, 0x56, 0x5d,
0xd7, 0x31, 0xb7, 0x26, 0x7b, 0x27, 0x80, 0x76, 0xe5, 0x3a, 0x65, 0xed, 0xda, 0x1e, 0xfc, 0x64,
0x31, 0xf7, 0x68, 0xa5, 0x68, 0x08, 0x3a, 0xe6, 0x27, 0x4e, 0xe7, 0xfe, 0x07, 0x41, 0x61, 0x38,
0xa2, 0x39, 0x7a, 0x19, 0x9e, 0xe2, 0xe4, 0x9c, 0xf1, 0x9c, 0xe3, 0x34, 0xe7, 0x43, 0x2a, 0x71,
0x44, 0x93, 0x44, 0xb9, 0xed, 0x10, 0xfb, 0xbd, 0xd8, 0xf3, 0x22, 0x77, 0xac, 0xc4, 0xa1, 0x11,
0xb0, 0xbb, 0x22, 0x46, 0xaf, 0xc0, 0xc7, 0x2f, 0x9d, 0x9e, 0xa7, 0xc5, 0x0a, 0xce, 0xc2, 0xd7,
0x9f, 0x5c, 0xe1, 0x7e, 0xea, 0xd7, 0xb8, 0x06, 0x4d, 0xa2, 0x67, 0xed, 0x4d, 0xb4, 0xb3, 0x77,
0x17, 0x1a, 0x93, 0x8c, 0xf9, 0x78, 0x62, 0x86, 0x96, 0x12, 0x31, 0xbf, 0x4b, 0xcd, 0xb0, 0xff,
0x05, 0x68, 0xfa, 0x28, 0x2e, 0xd1, 0x3e, 0xac, 0x8f, 0xa4, 0x5b, 0xc9, 0xf9, 0x7f, 0x6d, 0x24,
0xed, 0x42, 0x08, 0x56, 0xe3, 0x24, 0x62, 0x5e, 0x1b, 0x3b, 0xee, 0x3f, 0x07, 0x5b, 0x65, 0xfc,
0x56, 0x98, 0x69, 0xca, 0x4d, 0x84, 0x9d, 0x90, 0x24, 0x2f, 0x66, 0xbb, 0x87, 0xfe, 0xaf, 0x57,
0x60, 0xcb, 0xb0, 0x31, 0x1b, 0x25, 0x4c, 0x69, 0xf3, 0x83, 0x0e, 0x60, 0xa7, 0x4a, 0x09, 0x96,
0x5a, 0xcb, 0x1a, 0xdb, 0x25, 0xeb, 0x36, 0x53, 0xda, 0xe4, 0x06, 0xf4, 0x69, 0xd8, 0x9e, 0x93,
0x67, 0x85, 0x6d, 0xb6, 0x66, 0xa4, 0x8f, 0x62, 0xf4, 0xfb, 0x00, 0x5a, 0x25, 0xad, 0xd7, 0x58,
0xcc, 0x38, 0x38, 0x67, 0xba, 0xb0, 0xd2, 0xb8, 0xff, 0xc6, 0x4a, 0x91, 0x91, 0xb2, 0xe9, 0x99,
0x3d, 0x4f, 0x4c, 0xe1, 0x09, 0x49, 0x98, 0xdb, 0x4f, 0xcd, 0x70, 0x9d, 0xa9, 0x6f, 0x98, 0x47,
0xf4, 0x28, 0xa8, 0x67, 0x56, 0x63, 0x16, 0x6b, 0x93, 0x45, 0x84, 0x3b, 0xeb, 0xff, 0x5a, 0xea,
0x37, 0x4e, 0x2b, 0x2b, 0x84, 0x94, 0x24, 0x38, 0x21, 0x43, 0x9a, 0xf8, 0x54, 0xd9, 0x29, 0xa8,
0xb7, 0x0d, 0xb1, 0xff, 0x36, 0xc0, 0x9e, 0x83, 0x52, 0xa1, 0xce, 0x88, 0x24, 0x5c, 0x99, 0x1d,
0x6a, 0x23, 0xa7, 0xaf, 0x3b, 0xcc, 0x18, 0xfd, 0x2e, 0x80, 0x66, 0x51, 0x4b, 0x78, 0xc3, 0xfc,
0x68, 0x41, 0xf3, 0xa1, 0xd7, 0x32, 0x2c, 0xf5, 0x45, 0xbf, 0x09, 0x60, 0xd5, 0xe4, 0x46, 0x6b,
0x89, 0xf6, 0xe0, 0x87, 0x8b, 0xa9, 0xb8, 0xcd, 0xde, 0x56, 0x4f, 0xf4, 0x76, 0x00, 0x0d, 0x1d,
0x73, 0x1f, 0xd4, 0x7f, 0xb0, 0xb0, 0x41, 0x3d, 0x34, 0x5a, 0x5a, 0x6d, 0x89, 0xe6, 0x36, 0x12,
0x2e, 0xac, 0xb6, 0x44, 0xf3, 0xd0, 0x68, 0x89, 0xde, 0x0a, 0x60, 0x65, 0x24, 0x6d, 0xf1, 0xd0,
0x1e, 0xbc, 0xb1, 0x98, 0xca, 0x8e, 0x64, 0xb8, 0x32, 0x92, 0xe8, 0xdd, 0x00, 0xba, 0x99, 0xa2,
0x79, 0x2c, 0xa6, 0x4c, 0x52, 0xb7, 0xb3, 0x6d, 0x3d, 0xb3, 0xb0, 0x7b, 0xd8, 0x44, 0xd9, 0x70,
0xab, 0xd2, 0xf9, 0x96, 0x51, 0x19, 0xfd, 0x31, 0x80, 0x4e, 0x0d, 0x07, 0x9b, 0xda, 0x02, 0x6b,
0xb1, 0x41, 0x6c, 0x54, 0x0a, 0x1f, 0x4d, 0xfb, 0x6f, 0xae, 0x14, 0x5f, 0x46, 0x65, 0xb4, 0xbc,
0xf4, 0xf3, 0xac, 0x0b, 0x0d, 0xae, 0x73, 0x9f, 0x4d, 0xcd, 0xf0, 0xc9, 0x9a, 0xba, 0x71, 0x49,
0x4d, 0xfd, 0x28, 0x00, 0xb0, 0xc1, 0x97, 0x6a, 0x2a, 0x8b, 0x5a, 0xee, 0xcd, 0xc5, 0xb4, 0xce,
0x7c, 0xba, 0x08, 0x6b, 0x9a, 0xf7, 0xbf, 0x0d, 0xbd, 0x64, 0x30, 0x62, 0x43, 0xac, 0x34, 0xd1,
0x0a, 0x0f, 0xc9, 0x18, 0x47, 0x22, 0x37, 0x33, 0x94, 0xb3, 0x84, 0x29, 0xe0, 0x1d, 0xc9, 0x7d,
0x5d, 0xac, 0x1a, 0x4b, 0x18, 0xe2, 0xa1, 0xa5, 0x99, 0x0f, 0x17, 0x63, 0xa6, 0x42, 0x64, 0xc5,
0x8a, 0x80, 0x21, 0x39, 0x81, 0xfe, 0x4f, 0x3b, 0xb0, 0x33, 0xff, 0x0a, 0xc6, 0x33, 0xf4, 0x61,
0x00, 0x5b, 0x8c, 0x67, 0x42, 0x31, 0xfb, 0xf1, 0x67, 0x98, 0xf6, 0x05, 0xed, 0xc1, 0x2f, 0x17,
0xcf, 0x8e, 0x97, 0x5b, 0x28, 0xdc, 0xac, 0x00, 0xdc, 0xd5, 0x44, 0xa3, 0xc7, 0x01, 0xec, 0xd4,
0x30, 0x71, 0x9d, 0xe3, 0x58, 0x8a, 0xcc, 0xe7, 0xdf, 0xe5, 0xc1, 0xb5, 0x5d, 0x81, 0x38, 0xd6,
0xf9, 0xab, 0x52, 0x64, 0x15, 0x34, 0xf7, 0xa9, 0x4e, 0x58, 0xe2, 0xa0, 0x35, 0x96, 0x12, 0x9a,
0xfd, 0x57, 0x81, 0xb0, 0xc4, 0x42, 0x7b, 0x2f, 0x80, 0x76, 0x32, 0x18, 0xa9, 0x21, 0x73, 0x90,
0x56, 0x97, 0x0c, 0x12, 0x38, 0xe5, 0x2d, 0x96, 0x47, 0x01, 0xb4, 0x78, 0x9e, 0x68, 0x16, 0x11,
0xa5, 0x7d, 0x82, 0x5f, 0x1e, 0x24, 0x95, 0xea, 0x16, 0xc8, 0x50, 0x0a, 0x12, 0x5b, 0x20, 0x6b,
0xcb, 0x06, 0xa4, 0x54, 0x1d, 0x7d, 0x10, 0x40, 0xe7, 0x41, 0x2a, 0xa6, 0x29, 0xce, 0x53, 0xe7,
0x95, 0xf5, 0x25, 0x03, 0xb3, 0x61, 0xd5, 0xbf, 0xef, 0xb4, 0xb7, 0x71, 0x3b, 0x4f, 0x67, 0x11,
0x35, 0x97, 0x2d, 0x6e, 0x7b, 0x00, 0x1e, 0x53, 0xff, 0xc3, 0xab, 0xb0, 0x3b, 0x2f, 0x1c, 0x33,
0x95, 0xa1, 0x7f, 0x04, 0xd0, 0x35, 0x83, 0xa5, 0xce, 0x52, 0x5b, 0x35, 0x04, 0x36, 0x4d, 0xfd,
0x33, 0x80, 0xdd, 0x3a, 0xaa, 0xa5, 0xcd, 0x53, 0xa8, 0x86, 0xa2, 0x48, 0x54, 0xff, 0x0a, 0xe0,
0x6a, 0x1d, 0xdc, 0xf2, 0xa6, 0xaa, 0x9d, 0x1a, 0x8c, 0x32, 0x59, 0xfd, 0x2d, 0x80, 0xcd, 0x32,
0x4a, 0x2e, 0x67, 0xbe, 0xea, 0x94, 0xfa, 0x5b, 0x44, 0xef, 0x07, 0xb0, 0xe1, 0x03, 0x89, 0xc3,
0xb3, 0x6c, 0x59, 0xab, 0xed, 0xb5, 0x2f, 0xfd, 0x53, 0x06, 0x7f, 0x87, 0x67, 0xd9, 0x92, 0x57,
0xa7, 0xd4, 0xbf, 0x44, 0x24, 0x69, 0x44, 0xd9, 0x84, 0xc6, 0x16, 0x90, 0x5a, 0xba, 0x0c, 0xd6,
0x29, 0xf4, 0x37, 0x80, 0x14, 0xfa, 0x53, 0x00, 0xcd, 0x98, 0xf8, 0x6a, 0x6f, 0xd9, 0x72, 0xd7,
0x7a, 0x4c, 0x5c, 0xa9, 0xf7, 0x6e, 0x00, 0x2d, 0x96, 0xa9, 0xb1, 0x83, 0xd1, 0x5a, 0x32, 0x18,
0x4d, 0xa3, 0xba, 0xc5, 0xf1, 0xef, 0x00, 0xf6, 0xea, 0x01, 0x5b, 0x08, 0x2c, 0xfc, 0x3e, 0x83,
0x25, 0x8e, 0xd8, 0x27, 0xe2, 0xc4, 0xed, 0xb6, 0xff, 0xcc, 0xe1, 0xcb, 0x06, 0x3c, 0x73, 0x93,
0x7b, 0xed, 0x25, 0xc3, 0x57, 0xaf, 0x1a, 0x4e, 0x07, 0x3c, 0x33, 0xd5, 0x84, 0xba, 0xa4, 0xc2,
0xdd, 0x58, 0xea, 0x0a, 0xd7, 0x84, 0x07, 0x4e, 0x22, 0xcc, 0xc5, 0x84, 0xf6, 0x3a, 0xcb, 0x16,
0x1e, 0x38, 0x89, 0x8e, 0xc5, 0x84, 0xf6, 0xbf, 0xb7, 0x02, 0xfb, 0xf3, 0x52, 0x8a, 0x3e, 0xc4,
0x69, 0xce, 0xd1, 0x0b, 0xb0, 0xab, 0xe8, 0xc3, 0x9c, 0xa6, 0x11, 0x2d, 0xee, 0xde, 0x14, 0x4d,
0xb5, 0xbf, 0xb2, 0x43, 0x05, 0xcf, 0xdd, 0xb8, 0xdd, 0xa5, 0xa9, 0x46, 0x5f, 0x84, 0xde, 0xfc,
0x0c, 0x7a, 0x9e, 0xd1, 0x48, 0x97, 0xd7, 0x74, 0x7b, 0xb3, 0xb3, 0x6e, 0x79, 0x2e, 0x7a, 0x0d,
0x6e, 0x0c, 0x2f, 0x32, 0xa2, 0x14, 0x8d, 0x31, 0x4b, 0x87, 0x22, 0x4f, 0x63, 0x5c, 0x2e, 0xe5,
0xfe, 0x4a, 0xb2, 0x95, 0xd9, 0x6a, 0xf8, 0x74, 0x21, 0x77, 0xe4, 0xc4, 0xee, 0x7a, 0x29, 0x77,
0x8b, 0x8c, 0xbe, 0x02, 0x1f, 0x2b, 0x17, 0x12, 0xb9, 0x7e, 0x62, 0x91, 0x55, 0xbb, 0xc8, 0xb5,
0x42, 0xe6, 0x24, 0xd7, 0xb3, 0x0b, 0xf4, 0xbf, 0x7f, 0x05, 0xae, 0xcf, 0x5b, 0x64, 0x24, 0xe4,
0x94, 0xc8, 0x98, 0xa5, 0x63, 0xf4, 0xd7, 0xe2, 0x0f, 0xa9, 0xb2, 0xd2, 0x57, 0xbe, 0xd4, 0xff,
0xd9, 0xa2, 0x7b, 0x9f, 0xf1, 0xcc, 0xff, 0x17, 0x55, 0xd4, 0xf8, 0xca, 0xa4, 0x6d, 0xff, 0xe9,
0x52, 0xc3, 0xe3, 0x0a, 0xfc, 0x9f, 0x2f, 0x3a, 0x1e, 0xa3, 0x77, 0xf1, 0xd9, 0x52, 0x21, 0xfa,
0x7b, 0x00, 0x5b, 0x73, 0x9b, 0xd0, 0xd7, 0xf4, 0xbf, 0x58, 0x74, 0x40, 0xfe, 0xe0, 0x85, 0x9b,
0xb3, 0x87, 0xa4, 0xff, 0x38, 0x28, 0xba, 0x3a, 0x32, 0x29, 0x34, 0x8d, 0x6c, 0x57, 0xc7, 0x73,
0xb0, 0x55, 0x3d, 0xe1, 0xda, 0xcd, 0xde, 0x66, 0x45, 0xb6, 0xdd, 0x10, 0x2f, 0xc2, 0xd5, 0x9a,
0x60, 0x24, 0xd2, 0x11, 0x1b, 0xe7, 0xd2, 0x9f, 0xc8, 0x56, 0xb8, 0x5b, 0x31, 0x0f, 0x4b, 0xde,
0xdc, 0xea, 0xf6, 0x0f, 0xf1, 0xc6, 0xfc, 0xea, 0x77, 0x7c, 0xe7, 0x92, 0xa7, 0xd0, 0xd8, 0xc9,
0xf9, 0x56, 0x90, 0x92, 0x6a, 0xc5, 0xf6, 0x60, 0x8d, 0x44, 0x9a, 0x4d, 0xa8, 0xad, 0xdb, 0x9b,
0xa1, 0x7f, 0xea, 0xbf, 0xb3, 0x51, 0x5c, 0xae, 0x93, 0x68, 0xee, 0x5a, 0x3a, 0x58, 0xcc, 0x7b,
0xda, 0xb9, 0xbf, 0xcb, 0x6b, 0xd7, 0xd2, 0x68, 0x17, 0xae, 0x18, 0x67, 0x16, 0xcd, 0x5b, 0xee,
0x01, 0x21, 0x58, 0xe5, 0x4a, 0x33, 0x6f, 0x3f, 0x3b, 0x46, 0x9f, 0xa8, 0xdd, 0xe6, 0x72, 0x85,
0x35, 0xf3, 0x56, 0xdb, 0x28, 0xa8, 0xc7, 0xea, 0x1e, 0x43, 0x9f, 0x82, 0x6e, 0xf5, 0xb7, 0xfc,
0x19, 0x49, 0xe3, 0xc4, 0x99, 0xaf, 0x55, 0xbb, 0xd1, 0x7f, 0xdd, 0x92, 0xd1, 0x3e, 0xac, 0xa7,
0x22, 0xa6, 0x98, 0xc5, 0xf6, 0x43, 0xa2, 0x15, 0xae, 0x99, 0xc7, 0xa3, 0x18, 0x3d, 0x03, 0xed,
0x12, 0x05, 0x8b, 0x7d, 0xdb, 0x0c, 0x14, 0xa4, 0xa3, 0xd8, 0xa8, 0x62, 0x35, 0xc0, 0x9c, 0x29,
0x4e, 0x74, 0x74, 0x66, 0x6b, 0xdd, 0x66, 0xb8, 0xc1, 0xd5, 0x3d, 0x76, 0xec, 0x69, 0xc6, 0xcd,
0xe6, 0xfb, 0xbf, 0x90, 0xa1, 0xb1, 0x2d, 0x25, 0x9b, 0x61, 0x87, 0xeb, 0xfc, 0xb8, 0x24, 0xda,
0xce, 0x9b, 0x98, 0x63, 0x4e, 0x63, 0x46, 0xea, 0xc2, 0x60, 0x85, 0x91, 0x8e, 0xf9, 0xb1, 0x61,
0xd5, 0x66, 0xdc, 0x84, 0xee, 0x70, 0xc2, 0xb0, 0xc9, 0xa4, 0x66, 0x6b, 0x26, 0x2c, 0xd2, 0xb6,
0xe0, 0x69, 0x86, 0x9b, 0xc3, 0x09, 0x3b, 0x26, 0xd1, 0xa1, 0xa7, 0xa2, 0x67, 0xc1, 0x50, 0x70,
0x2a, 0x70, 0x26, 0xa4, 0xc6, 0x79, 0x66, 0x0b, 0x88, 0x66, 0xd8, 0x1e, 0x4e, 0xd8, 0x1d, 0x71,
0x2a, 0xa4, 0xbe, 0x9f, 0xa1, 0xcf, 0xc3, 0x7e, 0x24, 0x52, 0x2d, 0x45, 0x82, 0xa7, 0x42, 0xc6,
0x75, 0x1d, 0x3a, 0x56, 0xfa, 0xaa, 0x67, 0x7f, 0x53, 0xc8, 0xb8, 0xa6, 0xc6, 0x97, 0xa0, 0x37,
0xdb, 0x66, 0x56, 0x9b, 0xb8, 0x69, 0x27, 0xee, 0xcf, 0xf0, 0x6b, 0x53, 0x3f, 0x0b, 0x3b, 0xb3,
0x53, 0xa9, 0x94, 0x42, 0xf6, 0xb6, 0xac, 0x1b, 0x66, 0x9b, 0xd7, 0x6e, 0x19, 0x0e, 0xea, 0x83,
0x73, 0xf3, 0x54, 0xc8, 0x07, 0x2c, 0x1d, 0xf7, 0xba, 0x35, 0xd7, 0x7b, 0x1a, 0x7a, 0x1e, 0x76,
0x4c, 0xf6, 0x12, 0x23, 0xcc, 0x29, 0x17, 0xf2, 0x02, 0xbb, 0x8d, 0xb5, 0x6d, 0xdd, 0xd7, 0x15,
0xb9, 0x3e, 0x19, 0x1d, 0x5b, 0xc6, 0x5d, 0xbb, 0xc7, 0xfe, 0x12, 0x00, 0x18, 0x09, 0xa6, 0x34,
0x8b, 0x54, 0x0f, 0xd9, 0xa3, 0xf3, 0xab, 0x45, 0x8f, 0x77, 0x55, 0x5a, 0x0d, 0x6b, 0xda, 0xa3,
0xcf, 0x00, 0x32, 0x87, 0xa4, 0x74, 0x01, 0x8e, 0xc5, 0x34, 0xed, 0xed, 0x58, 0x2f, 0x74, 0x0d,
0xa7, 0x30, 0xfe, 0xab, 0x62, 0x9a, 0x1a, 0x4b, 0x99, 0xcd, 0x93, 0x30, 0xce, 0xb4, 0x05, 0xe7,
0xc4, 0x77, 0xbd, 0x38, 0x89, 0x6e, 0x1b, 0xce, 0x49, 0x46, 0xa5, 0x15, 0x3f, 0x80, 0x1d, 0x49,
0xe3, 0x3c, 0x8d, 0x49, 0x1a, 0x5d, 0xb8, 0x4e, 0x2e, 0x73, 0x2e, 0xae, 0xba, 0xa6, 0xb4, 0x8a,
0x65, 0xdb, 0xb9, 0x8e, 0x62, 0xf4, 0x39, 0xd8, 0x7b, 0x42, 0xde, 0xf9, 0x62, 0xcf, 0x85, 0xcf,
0xb9, 0x29, 0xce, 0x1f, 0x2f, 0x40, 0x8d, 0x8e, 0xc5, 0xf0, 0x3b, 0xfe, 0xf8, 0xed, 0xdb, 0xea,
0x03, 0x55, 0xbc, 0x13, 0xcb, 0x3a, 0x8a, 0xd1, 0xfb, 0x97, 0xc4, 0xf8, 0x5e, 0x6f, 0x91, 0xbb,
0xbf, 0x2a, 0x3d, 0x43, 0x77, 0x89, 0x7a, 0x5a, 0x25, 0xa7, 0x2f, 0xc3, 0x75, 0xa6, 0x30, 0x89,
0x70, 0x46, 0xa4, 0x66, 0x24, 0x49, 0x2e, 0x8c, 0xf8, 0x58, 0x12, 0xce, 0x69, 0xdc, 0xbb, 0xe6,
0x0e, 0x14, 0x53, 0x5f, 0x8d, 0x4e, 0x0b, 0xfe, 0x69, 0xc9, 0x36, 0x2e, 0xa2, 0x65, 0x8c, 0xad,
0xfa, 0x5d, 0xae, 0xfb, 0x9e, 0xcf, 0x49, 0x96, 0x1e, 0xcd, 0xf4, 0xbc, 0x3c, 0x0e, 0x60, 0xa3,
0xde, 0x66, 0x8b, 0xfe, 0x1c, 0x00, 0x22, 0x5a, 0x93, 0xe8, 0x8c, 0xd3, 0x54, 0xe3, 0x88, 0xc9,
0x28, 0x67, 0xba, 0x37, 0x58, 0xe4, 0xde, 0x00, 0x12, 0x85, 0xdb, 0x95, 0xce, 0x87, 0x4e, 0xe5,
0xe1, 0x9a, 0xed, 0x6b, 0x7e, 0xf1, 0xbf, 0x01, 0x00, 0x00, 0xff, 0xff, 0xfe, 0x5b, 0x1e, 0x56,
0xed, 0x2c, 0x00, 0x00,
}
| {
proto.RegisterType((*L2VpnFxcAc_KEYS)(nil), "cisco_ios_xr_l2vpn_oper.l2vpn.flexible_xconnect_services.flexible_xconnect_service.flexible_xconnect_service_acs.flexible_xconnect_service_ac.l2vpn_fxc_ac_KEYS")
proto.RegisterType((*L2VpnEthernet)(nil), "cisco_ios_xr_l2vpn_oper.l2vpn.flexible_xconnect_services.flexible_xconnect_service.flexible_xconnect_service_acs.flexible_xconnect_service_ac.l2vpn_ethernet")
proto.RegisterType((*L2VpnEfpRange)(nil), "cisco_ios_xr_l2vpn_oper.l2vpn.flexible_xconnect_services.flexible_xconnect_service.flexible_xconnect_service_acs.flexible_xconnect_service_ac.l2vpn_efp_range")
proto.RegisterType((*L2VpnVlan)(nil), "cisco_ios_xr_l2vpn_oper.l2vpn.flexible_xconnect_services.flexible_xconnect_service.flexible_xconnect_service_acs.flexible_xconnect_service_ac.l2vpn_vlan")
proto.RegisterType((*L2VpnTdmOption)(nil), "cisco_ios_xr_l2vpn_oper.l2vpn.flexible_xconnect_services.flexible_xconnect_service.flexible_xconnect_service_acs.flexible_xconnect_service_ac.l2vpn_tdm_option")
proto.RegisterType((*L2VpnTdm)(nil), "cisco_ios_xr_l2vpn_oper.l2vpn.flexible_xconnect_services.flexible_xconnect_service.flexible_xconnect_service_acs.flexible_xconnect_service_ac.l2vpn_tdm")
proto.RegisterType((*L2VpnAtm)(nil), "cisco_ios_xr_l2vpn_oper.l2vpn.flexible_xconnect_services.flexible_xconnect_service.flexible_xconnect_service_acs.flexible_xconnect_service_ac.l2vpn_atm")
proto.RegisterType((*L2VpnFr)(nil), "cisco_ios_xr_l2vpn_oper.l2vpn.flexible_xconnect_services.flexible_xconnect_service.flexible_xconnect_service_acs.flexible_xconnect_service_ac.l2vpn_fr")
proto.RegisterType((*InterfacesItem)(nil), "cisco_ios_xr_l2vpn_oper.l2vpn.flexible_xconnect_services.flexible_xconnect_service.flexible_xconnect_service_acs.flexible_xconnect_service_ac.interfaces_item")
proto.RegisterType((*ItemIflistIfl)(nil), "cisco_ios_xr_l2vpn_oper.l2vpn.flexible_xconnect_services.flexible_xconnect_service.flexible_xconnect_service_acs.flexible_xconnect_service_ac.item_iflist_ifl")
proto.RegisterType((*L2VpnPwhe)(nil), "cisco_ios_xr_l2vpn_oper.l2vpn.flexible_xconnect_services.flexible_xconnect_service.flexible_xconnect_service_acs.flexible_xconnect_service_ac.l2vpn_pwhe")
proto.RegisterType((*L2VpnInterfaceParams)(nil), "cisco_ios_xr_l2vpn_oper.l2vpn.flexible_xconnect_services.flexible_xconnect_service.flexible_xconnect_service_acs.flexible_xconnect_service_ac.l2vpn_interface_params")
proto.RegisterType((*L2VpnInterface)(nil), "cisco_ios_xr_l2vpn_oper.l2vpn.flexible_xconnect_services.flexible_xconnect_service.flexible_xconnect_service_acs.flexible_xconnect_service_ac.l2vpn_interface")
proto.RegisterType((*L2FibStatsBagCounters)(nil), "cisco_ios_xr_l2vpn_oper.l2vpn.flexible_xconnect_services.flexible_xconnect_service.flexible_xconnect_service_acs.flexible_xconnect_service_ac.l2fib_stats_bag_counters")
proto.RegisterType((*L2FibStatsBagImp)(nil), "cisco_ios_xr_l2vpn_oper.l2vpn.flexible_xconnect_services.flexible_xconnect_service.flexible_xconnect_service_acs.flexible_xconnect_service_ac.l2fib_stats_bag_imp")
proto.RegisterType((*L2FibStatsBagDisp)(nil), "cisco_ios_xr_l2vpn_oper.l2vpn.flexible_xconnect_services.flexible_xconnect_service.flexible_xconnect_service_acs.flexible_xconnect_service_ac.l2fib_stats_bag_disp")
proto.RegisterType((*L2FibStatsBagSeqNum)(nil), "cisco_ios_xr_l2vpn_oper.l2vpn.flexible_xconnect_services.flexible_xconnect_service.flexible_xconnect_service_acs.flexible_xconnect_service_ac.l2fib_stats_bag_seq_num")
proto.RegisterType((*L2FibStatsBagForwarding)(nil), "cisco_ios_xr_l2vpn_oper.l2vpn.flexible_xconnect_services.flexible_xconnect_service.flexible_xconnect_service_acs.flexible_xconnect_service_ac.l2fib_stats_bag_forwarding")
proto.RegisterType((*L2VpnProtection)(nil), "cisco_ios_xr_l2vpn_oper.l2vpn.flexible_xconnect_services.flexible_xconnect_service.flexible_xconnect_service_acs.flexible_xconnect_service_ac.l2vpn_protection")
proto.RegisterType((*L2VpnAc)(nil), "cisco_ios_xr_l2vpn_oper.l2vpn.flexible_xconnect_services.flexible_xconnect_service.flexible_xconnect_service_acs.flexible_xconnect_service_ac.l2vpn_ac")
proto.RegisterType((*L2VpnFxcAc)(nil), "cisco_ios_xr_l2vpn_oper.l2vpn.flexible_xconnect_services.flexible_xconnect_service.flexible_xconnect_service_acs.flexible_xconnect_service_ac.l2vpn_fxc_ac")
} |
auth.py | import uuid
from typing import Optional, Union
from datetime import datetime, timedelta
from fastapi import Request, HTTPException, status, Depends
from fastapi.security import OAuth2
from fastapi.openapi.models import OAuthFlows as OAuthFlowsModel
from fastapi.security.utils import get_authorization_scheme_param
from jose import JWTError, jwt
from . import schema, db
from .config import settings
class OAuth2PasswordBearerWithCookie(OAuth2):
|
oauth2_scheme = OAuth2PasswordBearerWithCookie(tokenUrl="/api/users/login")
async def authenticate_user(username: str, password: str) -> Optional[schema.User]:
user = await db.get_user(username)
if not user:
return None
    if user.password_hash is None or password != user.password_hash:  # NOTE: plaintext comparison; verify a salted hash (e.g. bcrypt) in production
return None
return user
def create_access_token(data: dict, expires_delta: Optional[timedelta] = None):
to_encode = data.copy()
if expires_delta:
expire = datetime.utcnow() + expires_delta
else:
expire = datetime.utcnow() + timedelta(minutes=15)
to_encode.update({"exp": expire})
encoded_jwt = jwt.encode(to_encode, settings.JWT_SECRET_KEY, algorithm=settings.JWT_ALGORITHM)
return encoded_jwt
def create_user_token(user: schema.User):
access_token_expires = timedelta(minutes=settings.JWT_ACCESS_TOKEN_EXPIRE_MINUTES)
access_token = create_access_token(data={"user_id": str(user.user_id)}, expires_delta=access_token_expires)
return access_token
async def get_current_user(token: str = Depends(oauth2_scheme)):
credentials_exception = HTTPException(
status_code=status.HTTP_401_UNAUTHORIZED,
detail="Could not validate credentials",
headers={"WWW-Authenticate": "Bearer"},
)
user_id: Union[str, uuid.UUID, None] = None
try:
payload = jwt.decode(token, settings.JWT_SECRET_KEY, algorithms=[settings.JWT_ALGORITHM]) # no "alg:none"
user_id = payload.get("user_id")
if user_id is None:
raise credentials_exception
except JWTError:
raise credentials_exception
user_id = uuid.UUID(user_id)
user = await db.get_user_uuid(uuid=user_id)
if user is None:
raise credentials_exception
return user
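# Illustrative only (not from the original module): a route guarded by the
# cookie scheme above; `router` is an assumed APIRouter defined elsewhere.
#
#   @router.get("/api/users/me")
#   async def read_me(user: schema.User = Depends(get_current_user)):
#       return user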
| def __init__(
self,
tokenUrl: str,
scheme_name: str = None,
scopes: dict = None,
auto_error: bool = True,
):
if not scopes:
scopes = {}
flows = OAuthFlowsModel(password={"tokenUrl": tokenUrl, "scopes": scopes})
super().__init__(flows=flows, scheme_name=scheme_name, auto_error=auto_error)
async def __call__(self, request: Request) -> Optional[str]:
authorization: Optional[str] = request.cookies.get("access_token")
if not authorization:
raise HTTPException(
status_code=status.HTTP_401_UNAUTHORIZED,
detail="No cookie",
headers={"WWW-Authenticate": "Bearer"},
)
scheme, param = get_authorization_scheme_param(authorization)
if scheme.lower() != "bearer":
raise HTTPException(
status_code=status.HTTP_401_UNAUTHORIZED,
detail="Not authenticated",
headers={"WWW-Authenticate": "Bearer"},
)
return param |
pe3.rs | #[doc = "Reader of register PE3"]
pub type R = crate::R<u8, super::PE3>;
#[doc = "Writer for register PE3"]
pub type W = crate::W<u8, super::PE3>;
#[doc = "Register PE3 `reset()`'s with value 0"]
impl crate::ResetValue for super::PE3 {
type Type = u8;
#[inline(always)]
fn reset_value() -> Self::Type {
0
}
}
#[doc = "Wakeup Pin Enable For LLWU_P8\n\nValue on reset: 0"]
#[derive(Clone, Copy, Debug, PartialEq)]
#[repr(u8)]
pub enum WUPE8_A {
#[doc = "0: External input pin disabled as wakeup input"]
_00 = 0,
#[doc = "1: External input pin enabled with rising edge detection"]
_01 = 1,
#[doc = "2: External input pin enabled with falling edge detection"]
_10 = 2,
#[doc = "3: External input pin enabled with any change detection"]
_11 = 3,
}
impl From<WUPE8_A> for u8 {
#[inline(always)]
fn from(variant: WUPE8_A) -> Self {
variant as _
}
}
#[doc = "Reader of field `WUPE8`"]
pub type WUPE8_R = crate::R<u8, WUPE8_A>;
impl WUPE8_R {
#[doc = r"Get enumerated values variant"]
#[inline(always)]
pub fn variant(&self) -> WUPE8_A {
match self.bits {
0 => WUPE8_A::_00,
1 => WUPE8_A::_01,
2 => WUPE8_A::_10,
3 => WUPE8_A::_11,
_ => unreachable!(),
}
}
#[doc = "Checks if the value of the field is `_00`"]
#[inline(always)]
pub fn is_00(&self) -> bool {
*self == WUPE8_A::_00
}
#[doc = "Checks if the value of the field is `_01`"]
#[inline(always)]
pub fn is_01(&self) -> bool {
*self == WUPE8_A::_01
}
#[doc = "Checks if the value of the field is `_10`"]
#[inline(always)]
pub fn is_10(&self) -> bool {
*self == WUPE8_A::_10
}
#[doc = "Checks if the value of the field is `_11`"]
#[inline(always)]
pub fn is_11(&self) -> bool {
*self == WUPE8_A::_11
}
}
#[doc = "Write proxy for field `WUPE8`"]
pub struct WUPE8_W<'a> {
w: &'a mut W,
}
impl<'a> WUPE8_W<'a> {
#[doc = r"Writes `variant` to the field"]
#[inline(always)]
pub fn variant(self, variant: WUPE8_A) -> &'a mut W {
{
self.bits(variant.into())
}
}
#[doc = "External input pin disabled as wakeup input"]
#[inline(always)]
pub fn _00(self) -> &'a mut W {
self.variant(WUPE8_A::_00)
}
#[doc = "External input pin enabled with rising edge detection"]
#[inline(always)]
pub fn _01(self) -> &'a mut W {
self.variant(WUPE8_A::_01)
}
#[doc = "External input pin enabled with falling edge detection"]
#[inline(always)]
pub fn _10(self) -> &'a mut W {
self.variant(WUPE8_A::_10)
}
#[doc = "External input pin enabled with any change detection"]
#[inline(always)]
pub fn _11(self) -> &'a mut W {
self.variant(WUPE8_A::_11)
}
#[doc = r"Writes raw bits to the field"]
#[inline(always)]
pub fn bits(self, value: u8) -> &'a mut W {
self.w.bits = (self.w.bits & !0x03) | ((value as u8) & 0x03);
self.w
}
}
#[doc = "Wakeup Pin Enable For LLWU_P9\n\nValue on reset: 0"]
#[derive(Clone, Copy, Debug, PartialEq)]
#[repr(u8)]
pub enum WUPE9_A {
#[doc = "0: External input pin disabled as wakeup input"]
_00 = 0,
#[doc = "1: External input pin enabled with rising edge detection"]
_01 = 1,
#[doc = "2: External input pin enabled with falling edge detection"]
_10 = 2,
#[doc = "3: External input pin enabled with any change detection"]
_11 = 3,
}
impl From<WUPE9_A> for u8 {
#[inline(always)]
fn from(variant: WUPE9_A) -> Self {
variant as _
}
}
#[doc = "Reader of field `WUPE9`"]
pub type WUPE9_R = crate::R<u8, WUPE9_A>;
impl WUPE9_R {
#[doc = r"Get enumerated values variant"]
#[inline(always)]
pub fn variant(&self) -> WUPE9_A {
match self.bits {
0 => WUPE9_A::_00,
1 => WUPE9_A::_01,
2 => WUPE9_A::_10,
3 => WUPE9_A::_11,
_ => unreachable!(),
}
}
#[doc = "Checks if the value of the field is `_00`"]
#[inline(always)]
pub fn is_00(&self) -> bool {
*self == WUPE9_A::_00
}
#[doc = "Checks if the value of the field is `_01`"]
#[inline(always)]
pub fn is_01(&self) -> bool {
*self == WUPE9_A::_01
}
#[doc = "Checks if the value of the field is `_10`"]
#[inline(always)]
pub fn is_10(&self) -> bool {
*self == WUPE9_A::_10
}
#[doc = "Checks if the value of the field is `_11`"]
#[inline(always)]
pub fn is_11(&self) -> bool {
*self == WUPE9_A::_11
}
}
#[doc = "Write proxy for field `WUPE9`"]
pub struct WUPE9_W<'a> {
w: &'a mut W,
}
impl<'a> WUPE9_W<'a> {
#[doc = r"Writes `variant` to the field"]
#[inline(always)]
pub fn variant(self, variant: WUPE9_A) -> &'a mut W {
{
self.bits(variant.into())
}
}
#[doc = "External input pin disabled as wakeup input"]
#[inline(always)]
pub fn _00(self) -> &'a mut W {
self.variant(WUPE9_A::_00)
}
#[doc = "External input pin enabled with rising edge detection"]
#[inline(always)]
pub fn _01(self) -> &'a mut W {
self.variant(WUPE9_A::_01)
}
#[doc = "External input pin enabled with falling edge detection"]
#[inline(always)]
pub fn _10(self) -> &'a mut W {
self.variant(WUPE9_A::_10)
}
#[doc = "External input pin enabled with any change detection"]
#[inline(always)]
pub fn _11(self) -> &'a mut W {
self.variant(WUPE9_A::_11)
}
#[doc = r"Writes raw bits to the field"]
#[inline(always)]
pub fn bits(self, value: u8) -> &'a mut W {
self.w.bits = (self.w.bits & !(0x03 << 2)) | (((value as u8) & 0x03) << 2);
self.w
}
}
#[doc = "Wakeup Pin Enable For LLWU_P10\n\nValue on reset: 0"]
#[derive(Clone, Copy, Debug, PartialEq)]
#[repr(u8)]
pub enum WUPE10_A {
#[doc = "0: External input pin disabled as wakeup input"]
_00 = 0,
#[doc = "1: External input pin enabled with rising edge detection"]
_01 = 1,
#[doc = "2: External input pin enabled with falling edge detection"]
_10 = 2,
#[doc = "3: External input pin enabled with any change detection"]
_11 = 3,
}
impl From<WUPE10_A> for u8 {
#[inline(always)]
fn from(variant: WUPE10_A) -> Self {
variant as _
}
}
#[doc = "Reader of field `WUPE10`"]
pub type WUPE10_R = crate::R<u8, WUPE10_A>;
impl WUPE10_R {
#[doc = r"Get enumerated values variant"]
#[inline(always)]
pub fn variant(&self) -> WUPE10_A {
match self.bits {
0 => WUPE10_A::_00,
1 => WUPE10_A::_01,
2 => WUPE10_A::_10,
3 => WUPE10_A::_11,
_ => unreachable!(),
}
}
#[doc = "Checks if the value of the field is `_00`"]
#[inline(always)]
pub fn is_00(&self) -> bool {
*self == WUPE10_A::_00
}
#[doc = "Checks if the value of the field is `_01`"]
#[inline(always)]
pub fn is_01(&self) -> bool {
*self == WUPE10_A::_01
}
#[doc = "Checks if the value of the field is `_10`"]
#[inline(always)]
pub fn is_10(&self) -> bool {
*self == WUPE10_A::_10
}
#[doc = "Checks if the value of the field is `_11`"]
#[inline(always)]
pub fn is_11(&self) -> bool {
*self == WUPE10_A::_11
}
}
#[doc = "Write proxy for field `WUPE10`"]
pub struct WUPE10_W<'a> {
w: &'a mut W,
}
impl<'a> WUPE10_W<'a> {
#[doc = r"Writes `variant` to the field"]
#[inline(always)]
pub fn variant(self, variant: WUPE10_A) -> &'a mut W {
{
self.bits(variant.into())
}
}
#[doc = "External input pin disabled as wakeup input"]
#[inline(always)]
pub fn _00(self) -> &'a mut W {
self.variant(WUPE10_A::_00)
}
#[doc = "External input pin enabled with rising edge detection"]
#[inline(always)]
pub fn _01(self) -> &'a mut W {
self.variant(WUPE10_A::_01)
}
#[doc = "External input pin enabled with falling edge detection"]
#[inline(always)]
pub fn _10(self) -> &'a mut W {
self.variant(WUPE10_A::_10)
}
#[doc = "External input pin enabled with any change detection"]
#[inline(always)]
pub fn _11(self) -> &'a mut W {
self.variant(WUPE10_A::_11)
}
#[doc = r"Writes raw bits to the field"]
#[inline(always)]
pub fn bits(self, value: u8) -> &'a mut W {
self.w.bits = (self.w.bits & !(0x03 << 4)) | (((value as u8) & 0x03) << 4);
self.w
}
}
#[doc = "Wakeup Pin Enable For LLWU_P11\n\nValue on reset: 0"]
#[derive(Clone, Copy, Debug, PartialEq)]
#[repr(u8)]
pub enum WUPE11_A {
#[doc = "0: External input pin disabled as wakeup input"]
_00 = 0,
#[doc = "1: External input pin enabled with rising edge detection"]
_01 = 1,
#[doc = "2: External input pin enabled with falling edge detection"]
_10 = 2,
#[doc = "3: External input pin enabled with any change detection"]
_11 = 3,
}
impl From<WUPE11_A> for u8 {
#[inline(always)]
fn | (variant: WUPE11_A) -> Self {
variant as _
}
}
#[doc = "Reader of field `WUPE11`"]
pub type WUPE11_R = crate::R<u8, WUPE11_A>;
impl WUPE11_R {
#[doc = r"Get enumerated values variant"]
#[inline(always)]
pub fn variant(&self) -> WUPE11_A {
match self.bits {
0 => WUPE11_A::_00,
1 => WUPE11_A::_01,
2 => WUPE11_A::_10,
3 => WUPE11_A::_11,
_ => unreachable!(),
}
}
#[doc = "Checks if the value of the field is `_00`"]
#[inline(always)]
pub fn is_00(&self) -> bool {
*self == WUPE11_A::_00
}
#[doc = "Checks if the value of the field is `_01`"]
#[inline(always)]
pub fn is_01(&self) -> bool {
*self == WUPE11_A::_01
}
#[doc = "Checks if the value of the field is `_10`"]
#[inline(always)]
pub fn is_10(&self) -> bool {
*self == WUPE11_A::_10
}
#[doc = "Checks if the value of the field is `_11`"]
#[inline(always)]
pub fn is_11(&self) -> bool {
*self == WUPE11_A::_11
}
}
#[doc = "Write proxy for field `WUPE11`"]
pub struct WUPE11_W<'a> {
w: &'a mut W,
}
impl<'a> WUPE11_W<'a> {
#[doc = r"Writes `variant` to the field"]
#[inline(always)]
pub fn variant(self, variant: WUPE11_A) -> &'a mut W {
{
self.bits(variant.into())
}
}
#[doc = "External input pin disabled as wakeup input"]
#[inline(always)]
pub fn _00(self) -> &'a mut W {
self.variant(WUPE11_A::_00)
}
#[doc = "External input pin enabled with rising edge detection"]
#[inline(always)]
pub fn _01(self) -> &'a mut W {
self.variant(WUPE11_A::_01)
}
#[doc = "External input pin enabled with falling edge detection"]
#[inline(always)]
pub fn _10(self) -> &'a mut W {
self.variant(WUPE11_A::_10)
}
#[doc = "External input pin enabled with any change detection"]
#[inline(always)]
pub fn _11(self) -> &'a mut W {
self.variant(WUPE11_A::_11)
}
#[doc = r"Writes raw bits to the field"]
#[inline(always)]
pub fn bits(self, value: u8) -> &'a mut W {
self.w.bits = (self.w.bits & !(0x03 << 6)) | (((value as u8) & 0x03) << 6);
self.w
}
}
impl R {
#[doc = "Bits 0:1 - Wakeup Pin Enable For LLWU_P8"]
#[inline(always)]
pub fn wupe8(&self) -> WUPE8_R {
WUPE8_R::new((self.bits & 0x03) as u8)
}
#[doc = "Bits 2:3 - Wakeup Pin Enable For LLWU_P9"]
#[inline(always)]
pub fn wupe9(&self) -> WUPE9_R {
WUPE9_R::new(((self.bits >> 2) & 0x03) as u8)
}
#[doc = "Bits 4:5 - Wakeup Pin Enable For LLWU_P10"]
#[inline(always)]
pub fn wupe10(&self) -> WUPE10_R {
WUPE10_R::new(((self.bits >> 4) & 0x03) as u8)
}
#[doc = "Bits 6:7 - Wakeup Pin Enable For LLWU_P11"]
#[inline(always)]
pub fn wupe11(&self) -> WUPE11_R {
WUPE11_R::new(((self.bits >> 6) & 0x03) as u8)
}
}
impl W {
#[doc = "Bits 0:1 - Wakeup Pin Enable For LLWU_P8"]
#[inline(always)]
pub fn wupe8(&mut self) -> WUPE8_W {
WUPE8_W { w: self }
}
#[doc = "Bits 2:3 - Wakeup Pin Enable For LLWU_P9"]
#[inline(always)]
pub fn wupe9(&mut self) -> WUPE9_W {
WUPE9_W { w: self }
}
#[doc = "Bits 4:5 - Wakeup Pin Enable For LLWU_P10"]
#[inline(always)]
pub fn wupe10(&mut self) -> WUPE10_W {
WUPE10_W { w: self }
}
#[doc = "Bits 6:7 - Wakeup Pin Enable For LLWU_P11"]
#[inline(always)]
pub fn wupe11(&mut self) -> WUPE11_W {
WUPE11_W { w: self }
}
}
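// Hedged usage sketch (assumes a `pe3` register handle from the generated
// peripheral API, exposing the usual svd2rust read/modify/write methods):
// enable rising-edge wakeup on LLWU_P8 and disable LLWU_P9.
//
//     pe3.modify(|_, w| w.wupe8()._01().wupe9()._00());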
| from |
data_structures.py | #!/usr/bin/env python3
from copy import deepcopy
class Vector2D:
def __init__(self, x, y):
self.x = x
self.y = y
def __truediv__(self, other):
answer = deepcopy(self)
answer.x = self.x / other
answer.y = self.y / other
return answer
def __sub__(self, other):
|
def __str__(self):
return "[{0}, {1}]".format(self.x, self.y)
def __eq__(self, other):
return self.x == other.x and self.y == other.y
@staticmethod
def from_list(data: list):
return Vector2D(data[0], data[1])
def as_list(self):
return list([self.x, self.y])
class Point2D(Vector2D):
pass
class Size2D(Vector2D):
pass
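# Quick illustrative examples (not part of the original module):
#   str(Vector2D(4.0, 2.0) / 2)           -> "[2.0, 1.0]"
#   str(Point2D(3, 4) - Point2D(1, 1))    -> "[2, 3]"
#   Vector2D.from_list([1, 2]).as_list()  -> [1, 2]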
| answer = deepcopy(self)
answer.x = self.x - other.x
answer.y = self.y - other.y
return answer |
index.js | import React from 'react';
import ReactDOM from 'react-dom';
import ApolloClient from 'apollo-boost';
import { ApolloProvider } from '@apollo/react-hooks';
import Typography from '@material-ui/core/Typography'; | import * as Sentry from '@sentry/browser';
Sentry.init({dsn: "https://[email protected]/5263557"});
const client = new ApolloClient({
// uri: 'http://localhost:3300/'
uri: 'https://rickandmortyapi.com/graphql/'
})
ReactDOM.render(
<React.StrictMode>
<ApolloProvider client={client}>
      <button onClick={() => { throw new Error("this") }}>Break the world</button>
<Typography variant="body2" color="textSecondary" component="p">
RICK AND MORTY
</Typography>
<Grid />
</ApolloProvider>
</React.StrictMode>,
document.getElementById('root')
);
// If you want your app to work offline and load faster, you can change
// unregister() to register() below. Note this comes with some pitfalls.
// Learn more about service workers: https://bit.ly/CRA-PWA
serviceWorker.unregister(); | import './index.css';
// import Board from './Board'
import Grid from './Grid'
import * as serviceWorker from './serviceWorker'; |
adaptor.py | # -*- coding: utf-8 -*-
"""
TencentBlueKing is pleased to support the open source community by making BlueKing - User Management (Bk-User) available.
Copyright (C) 2017-2021 THL A29 Limited, a Tencent company. All rights reserved.
Licensed under the MIT License (the "License"); you may not use this file except in compliance with the License.
You may obtain a copy of the License at http://opensource.org/licenses/MIT
Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on
an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the
specific language governing permissions and limitations under the License.
"""
import logging
from dataclasses import dataclass, field
from typing import Any, Dict, List, NamedTuple, Optional
from django.utils.encoding import force_str
from ldap3.utils import dn as dn_utils
from bkuser_core.categories.plugins.constants import DYNAMIC_FIELDS_SETTING_KEY
from bkuser_core.categories.plugins.ldap.models import LdapDepartment, LdapUserProfile
from bkuser_core.user_settings.loader import ConfigProvider
logger = logging.getLogger(__name__)
@dataclass
class ProfileFieldMapper:
"""从 ldap 对象属性中获取用户字段"""
config_loader: ConfigProvider
embed_fields = [
"username",
"display_name",
"email",
"telephone",
]
dynamic_fields: List = field(default_factory=list)
def __post_init__(self):
self.dynamic_fields_mapping = self.config_loader.get(DYNAMIC_FIELDS_SETTING_KEY)
self.dynamic_fields = list(self.dynamic_fields_mapping.keys()) if self.dynamic_fields_mapping else []
def get_value(
self, field_name: str, user_meta: Dict[str, List[bytes]], remain_raw: bool = False, dynamic_field: bool = False
) -> Any:
"""通过 field_name 从 ldap 数据中获取具体值"""
        # fetch the attribute value that corresponds to the custom (dynamic) field | if ldap_field_name not in self.dynamic_fields_mapping.values():
logger.info("no config[%s] in configs of dynamic_fields_mapping", field_name)
return ""
else:
            # look up the field name from the category settings
ldap_field_name = self.config_loader.get(field_name)
if not ldap_field_name:
logger.info("no config[%s] in configs of category", field_name)
return ""
        # 1. fetch the concrete value by the field name
if ldap_field_name not in user_meta or not user_meta[ldap_field_name]:
logger.info("field[%s] is missing in raw attributes of user data from ldap", field_name)
return ""
        # 2. fields like memberOf will return the raw list
if remain_raw:
return user_meta[ldap_field_name]
return force_str(user_meta[ldap_field_name][0])
def get_values(self, user_meta: Dict[str, List[bytes]]) -> Dict[str, Any]:
"""根据字段映射关系, 从 ldap 中获取 `field_name` 的值"""
values = {}
for field_name in self.embed_fields:
values.update({field_name: self.get_value(field_name, user_meta)})
return values
def get_dynamic_values(self, user_meta: Dict[str, List[bytes]]) -> Dict[str, Any]:
"""获取自定义字段 在ldap中的对应值"""
values = {}
if self.dynamic_fields:
values.update(
{
field_name: self.get_value(
field_name=self.dynamic_fields_mapping[field_name], user_meta=user_meta, dynamic_field=True
)
for field_name in self.dynamic_fields
}
)
return values
def get_user_attributes(self) -> list:
"""获取远端属性名列表"""
user_attributes = [self.config_loader[x] for x in self.embed_fields if self.config_loader.get(x)]
user_attributes.extend(
[self.dynamic_fields_mapping[x] for x in self.dynamic_fields if self.dynamic_fields_mapping.get(x)]
)
return user_attributes
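# Illustrative sketch (hypothetical settings, not from the real plugin):
# with category config {"username": "sAMAccountName"}, calling
# get_value("username", {"sAMAccountName": [b"jeff"]}) resolves the attribute
# name and returns "jeff"; a missing or empty attribute yields "".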
def user_adapter(
code: str, user_meta: Dict[str, Any], field_mapper: ProfileFieldMapper, restrict_types: List[str]
) -> LdapUserProfile:
groups = field_mapper.get_value("user_member_of", user_meta["raw_attributes"], True) or []
return LdapUserProfile(
**field_mapper.get_values(user_meta["raw_attributes"]),
code=code,
extras=field_mapper.get_dynamic_values(user_meta["raw_attributes"]),
        # TODO: finish the logic that converts departments
departments=[
            # by convention, everything in the dn except its first component is the user's department, hence the [1:]
list(reversed(parse_dn_value_list(user_meta["dn"], restrict_types)[1:])),
        # relations between the user and its user groups
*[list(reversed(parse_dn_value_list(force_str(group), restrict_types))) for group in groups],
],
)
def department_adapter(code: str, dept_meta: Dict, is_group: bool, restrict_types: List[str]) -> LdapDepartment:
dn = dept_meta["dn"]
dn_values = parse_dn_value_list(dn, restrict_types=restrict_types)
parent_dept: Optional[LdapDepartment] = None
for dept_name in reversed(dn_values):
parent_dept = LdapDepartment(
name=dept_name,
parent=parent_dept,
is_group=is_group,
)
    assert parent_dept is not None, "no department information could be extracted from the dn"
parent_dept.code = code
return parent_dept
class RDN(NamedTuple):
"""RelativeDistinguishedName"""
type: str
value: str
separator: str
def parse_dn_tree(dn: str, restrict_types: List[str] = None) -> List[RDN]:
"""A DN is a sequence of relative distinguished names (RDN) connected by commas, For examples:
we have a dn = "CN=Jeff Smith,OU=Sales,DC=Fabrikam,DC=COM", this method will parse the dn to:
>>> parse_dn_tree("CN=Jeff Smith,OU=Sales,DC=Fabrikam,DC=COM")
[RDN(type='CN', value='Jeff Smith', separator=','),
RDN(type='OU', value='Sales', separator=','),
RDN(type='DC', value='Fabrikam', separator=','),
RDN(type='DC', value='COM', separator='')]
    if restrict_types is provided, this method ignores attributes whose type is not in restrict_types. For example:
>>> parse_dn_tree("CN=Jeff Smith,OU=Sales,DC=Fabrikam,DC=COM", restrict_types=["DC"])
[RDN(type='DC', value='Fabrikam', separator=','), RDN(type='DC', value='COM', separator='')]
    Furthermore, restrict_types is case-insensitive: ["DC"], ["dc"] and ["Dc"] are treated identically.
>>> parse_dn_tree("CN=Jeff Smith,OU=Sales,DC=Fabrikam,DC=COM", restrict_types=["dc"])
[RDN(type='DC', value='Fabrikam', separator=','), RDN(type='DC', value='COM', separator='')]
See Also: https://docs.microsoft.com/en-us/previous-versions/windows/desktop/ldap/distinguished-names
"""
restrict_types = [type_.upper() for type_ in (restrict_types or [])]
items = dn_utils.parse_dn(dn, escape=True)
if restrict_types:
parts = [RDN(*i) for i in items if i[0].upper() in restrict_types]
else:
parts = [RDN(*i) for i in items]
return parts
def parse_dn_value_list(dn: str, restrict_types: List[str] = None) -> List[str]:
"""this method work like parse_dn_tree, be only return values of those attributes, For examples:
>>> parse_dn_value_list("CN=Jeff Smith,OU=Sales,DC=Fabrikam,DC=COM")
['Jeff Smith', 'Sales', 'Fabrikam', 'COM']
    if restrict_types is provided, this method ignores attributes whose type is not in restrict_types. For example:
>>> parse_dn_value_list("CN=Jeff Smith,OU=Sales,DC=Fabrikam,DC=COM", restrict_types=["DC"])
['Fabrikam', 'COM']
"""
tree = parse_dn_tree(dn, restrict_types)
parts = []
for part in tree:
parts.append(part.value)
return parts | if dynamic_field:
ldap_field_name = field_name |
as_operand.rs | // Copyright 2015 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
//! See docs in build/expr/mod.rs
use build::{BlockAnd, BlockAndExtension, Builder};
use build::expr::category::Category;
use hair::*;
use rustc::middle::region::CodeExtent;
use rustc::mir::*;
impl<'a, 'gcx, 'tcx> Builder<'a, 'gcx, 'tcx> {
/// Returns an operand suitable for use until the end of the current
/// scope expression.
///
/// The operand returned from this function will *not be valid* after
/// an ExprKind::Scope is passed, so please do *not* return it from
/// functions to avoid bad miscompiles.
pub fn as_local_operand<M>(&mut self, block: BasicBlock, expr: M)
-> BlockAnd<Operand<'tcx>>
where M: Mirror<'tcx, Output = Expr<'tcx>>
{
let topmost_scope = self.topmost_scope(); // FIXME(#6393)
self.as_operand(block, Some(topmost_scope), expr)
}
/// Compile `expr` into a value that can be used as an operand.
/// If `expr` is an lvalue like `x`, this will introduce a
/// temporary `tmp = x`, so that we capture the value of `x` at
/// this time.
///
/// The operand is known to be live until the end of `scope`.
pub fn as_operand<M>(&mut self,
block: BasicBlock,
scope: Option<CodeExtent<'tcx>>,
expr: M) -> BlockAnd<Operand<'tcx>>
where M: Mirror<'tcx, Output = Expr<'tcx>>
{
let expr = self.hir.mirror(expr);
self.expr_as_operand(block, scope, expr)
}
fn expr_as_operand(&mut self,
mut block: BasicBlock,
scope: Option<CodeExtent<'tcx>>,
expr: Expr<'tcx>)
-> BlockAnd<Operand<'tcx>> {
debug!("expr_as_operand(block={:?}, expr={:?})", block, expr);
let this = self;
if let ExprKind::Scope { extent, value } = expr.kind {
return this.in_scope(extent, block, |this| {
this.as_operand(block, scope, value) | debug!("expr_as_operand: category={:?} for={:?}", category, expr.kind);
match category {
Category::Constant => {
let constant = this.as_constant(expr);
block.and(Operand::Constant(box constant))
}
Category::Lvalue |
Category::Rvalue(..) => {
let operand =
unpack!(block = this.as_temp(block, scope, expr));
block.and(Operand::Consume(operand))
}
}
}
} | });
}
let category = Category::of(&expr.kind).unwrap(); |
DeleteProtection.ts | import { DeleteProtectionInput } from "../shapes/DeleteProtectionInput";
import { DeleteProtectionOutput } from "../shapes/DeleteProtectionOutput";
import { InternalErrorException } from "../shapes/InternalErrorException";
import { ResourceNotFoundException } from "../shapes/ResourceNotFoundException";
import { OptimisticLockException } from "../shapes/OptimisticLockException";
import { OperationModel as _Operation_ } from "@aws-sdk/types";
import { ServiceMetadata } from "../ServiceMetadata";
export const DeleteProtection: _Operation_ = {
metadata: ServiceMetadata,
name: "DeleteProtection",
http: {
method: "POST",
requestUri: "/"
},
input: {
shape: DeleteProtectionInput | output: {
shape: DeleteProtectionOutput
},
errors: [
{
shape: InternalErrorException
},
{
shape: ResourceNotFoundException
},
{
shape: OptimisticLockException
}
]
}; | }, |
odata_batch_request.py | import json
import re
from email import message_from_bytes
from email.message import Message
from office365.runtime.client_request import ClientRequest
from office365.runtime.http.http_method import HttpMethod
from office365.runtime.http.request_options import RequestOptions
from office365.runtime.queries.batch_query import BatchQuery, create_boundary
class ODataBatchRequest(ClientRequest):
def __init__(self, context):
super(ODataBatchRequest, self).__init__(context)
def build_request(self, query):
"""
:type query: office365.runtime.queries.client_query.BatchQuery
"""
url = "{0}$batch".format(self.context.service_root_url())
request = RequestOptions(url)
request.method = HttpMethod.Post
media_type = "multipart/mixed"
content_type = "; ".join([media_type, "boundary={0}".format(query.current_boundary)])
request.ensure_header('Content-Type', content_type)
request.data = self._prepare_payload(query).as_bytes()
return request
def process_response(self, response, query):
|
def _read_response(self, response):
"""Parses a multipart/mixed response body from from the position defined by the context.
:type response: requests.Response
"""
content_type = response.headers['Content-Type'].encode("ascii")
http_body = (
b"Content-Type: "
+ content_type
+ b"\r\n\r\n"
+ response.content
)
message = message_from_bytes(http_body) # type: Message
for raw_response in message.get_payload():
if raw_response.get_content_type() == "application/http":
yield self._deserialize_response(raw_response)
def _prepare_payload(self, query):
"""Serializes a batch request body.
:type query BatchQuery
"""
main_message = Message()
main_message.add_header("Content-Type", "multipart/mixed")
main_message.set_boundary(query.current_boundary)
if query.has_change_sets:
change_set_message = Message()
change_set_boundary = create_boundary("changeset_", True)
change_set_message.add_header("Content-Type", "multipart/mixed")
change_set_message.set_boundary(change_set_boundary)
for qry in query.change_sets:
request = qry.build_request()
message = self._serialize_request(request)
change_set_message.attach(message)
main_message.attach(change_set_message)
for qry in query.get_queries:
request = qry.build_request()
message = self._serialize_request(request)
main_message.attach(message)
return main_message
@staticmethod
def _normalize_headers(headers_raw):
headers = {}
for header_line in headers_raw:
k, v = header_line.split(":", 1)
headers[k] = v
return headers
def _deserialize_response(self, raw_response):
response = raw_response.get_payload(decode=True)
lines = list(filter(None, response.decode("utf-8").split("\r\n")))
response_status_regex = "^HTTP/1\\.\\d (\\d{3}) (.*)$"
status_result = re.match(response_status_regex, lines[0])
status_info = status_result.groups()
# validate for errors
if int(status_info[0]) >= 400:
raise ValueError(response)
if status_info[1] == "No Content" or len(lines) < 3:
headers_raw = lines[1:]
return {
"status": status_info,
"headers": self._normalize_headers(headers_raw),
"content": None
}
else:
*headers_raw, content = lines[1:]
content = json.loads(content)
return {
"status": status_info,
"headers": self._normalize_headers(headers_raw),
"content": content
}
@staticmethod
def _serialize_request(request):
"""Serializes a part of a batch request to a string. A part can be either a GET request or
a change set grouping several CUD (create, update, delete) requests.
:type request: RequestOptions
"""
eol = "\r\n"
method = request.method
if "X-HTTP-Method" in request.headers:
method = request.headers["X-HTTP-Method"]
lines = ["{method} {url} HTTP/1.1".format(method=method, url=request.url),
*[':'.join(h) for h in request.headers.items()]]
if request.data:
lines.append(eol)
lines.append(json.dumps(request.data))
buffer = eol + eol.join(lines) + eol
payload = buffer.encode('utf-8').lstrip()
message = Message()
message.add_header("Content-Type", "application/http")
message.add_header("Content-Transfer-Encoding", "binary")
message.set_payload(payload)
return message
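    # For reference, a part serialized above looks roughly like this on the
    # wire (illustrative only; hostname and headers are made up):
    #
    #   Content-Type: application/http
    #   Content-Transfer-Encoding: binary
    #
    #   GET https://contoso.sharepoint.com/_api/web HTTP/1.1
    #   Accept: application/json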
| """Parses an HTTP response.
:type response: requests.Response
:type query: office365.runtime.queries.client_query.BatchQuery
"""
content_id = 0
for response_info in self._read_response(response):
if response_info["content"] is not None:
qry = query.get(content_id)
self.context.pending_request().map_json(response_info["content"], qry.return_type)
content_id += 1 |
types.rs | //! Library-specific type definitions |
/// A simple type alias so as to DRY.
pub(crate) type Result<T> = std::result::Result<T, Box<dyn std::error::Error + Send + Sync>>; |
|
failpoints.rs | // Copyright 2017 PingCAP, Inc.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
#![recursion_limit = "100"]
#![feature(box_syntax)]
#![cfg_attr(feature = "no-fail", allow(dead_code))]
extern crate fail;
extern crate futures;
extern crate kvproto;
extern crate raft;
#[macro_use]
extern crate lazy_static;
#[macro_use]
extern crate log;
extern crate test_raftstore;
extern crate test_storage;
extern crate test_util;
extern crate tikv;
#[cfg(not(feature = "no-fail"))]
mod failpoints_cases;
use std::sync::*;
use std::thread;
use tikv::util::panic_hook;
lazy_static! {
/// Failpoints are global structs, hence rules set in different cases
/// may affect each other. So use a global lock to synchronize them.
static ref LOCK: Mutex<()> = {
test_util::setup_for_ci();
Mutex::new(())
};
}
fn setup<'a>() -> MutexGuard<'a, ()> {
// We don't want a failed test breaks others.
let guard = LOCK.lock().unwrap_or_else(|e| e.into_inner());
fail::teardown();
fail::setup();
guard
}
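// Hedged sketch of how a test case typically uses the guard (assumes a
// failpoint named "my_fail_point" is registered with `fail_point!` in the
// code under test):
//
//     let _g = setup();
//     fail::cfg("my_fail_point", "return").unwrap();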
#[test]
fn test_setup() | {
let _ = thread::spawn(move || {
panic_hook::mute();
let _g = setup();
panic!("Poison!");
}).join();
let _g = setup();
} |
|
auth_error.rs | use thiserror::Error;
/// Enum listing possible authentication error codes.
#[derive(Debug, Error)]
#[non_exhaustive]
pub enum AuthError {
    /// An error occurred when connecting to or using the database.
#[error("database error")]
DatabaseError(#[from] sqlx::Error),
    /// An error occurred with the Argon2id hashing implementation.
#[error("hashing error")]
HashingError,
/// If the request was invalid or malformed.
#[error("the request was invalid {0}")]
InvalidRequest(String),
    /// An error occurred when validating or generating a JWT.
#[error("invalid token")]
InvalidToken(#[from] jsonwebtoken::errors::Error),
/// If the username and password combination did not match when attempting to authenticate.
#[error("invalid username or password")]
InvalidUsernameOrPassword,
/// Any other, unknown error sources.
#[error("{0}")]
Unknown(#[source] Box<dyn std::error::Error + Sync + Send>),
/// If a registration was attempted, but the email address already exists in the database.
#[error("a user with the email {0} already exists")]
UserAlreadyExists(String),
}
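// Hedged sketch (illustrative, not from the original crate): the #[from]
// attributes above let `?` convert library errors into AuthError, e.g.
//
//     fn check(res: Result<(), jsonwebtoken::errors::Error>) -> Result<(), AuthError> {
//         Ok(res?)
//     }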
/// Conversion of an [`AuthError`] into a gRPC [`tonic::Status`].
#[allow(clippy::match_same_arms)]
impl From<AuthError> for tonic::Status {
    /// Maps each variant to an opaque `unknown` status carrying its debug representation.
fn | (auth_error: AuthError) -> Self {
match auth_error {
AuthError::DatabaseError(err) => Self::unknown(format!("{:?}", err)),
AuthError::HashingError => Self::unknown(format!("{:?}", auth_error)),
AuthError::InvalidRequest(err) => Self::unknown(format!("{:?}", err)),
AuthError::InvalidToken(err) => Self::unknown(format!("{:?}", err)),
AuthError::InvalidUsernameOrPassword => Self::unknown(format!("{:?}", auth_error)),
AuthError::Unknown(err) => Self::unknown(format!("{:?}", err)),
AuthError::UserAlreadyExists(err) => Self::unknown(format!("{:?}", err)),
}
}
}
| from |
fields.js | /*
* X3DOM JavaScript Library
* http://www.x3dom.org
*
* (C)2009 Fraunhofer IGD, Darmstadt, Germany
* Dual licensed under the MIT and GPL
*
* Based on code originally provided by
* Philip Taylor: http://philip.html5.org
*/
/** If used as a standalone lib, define some basics first. */
if (typeof x3dom === "undefined")
{
/**
* @namespace x3dom
*/
x3dom = {
extend: function(f) {
function | () {}
G.prototype = f.prototype || f;
return new G();
},
debug: {
logInfo: function(msg) { console.log(msg); },
logWarning: function(msg) { console.warn(msg); },
logError: function(msg) { console.error(msg); }
}
};
if (!Array.map) {
Array.map = function(array, fun, thisp) {
var len = array.length;
var res = [];
for (var i = 0; i < len; i++) {
if (i in array) {
res[i] = fun.call(thisp, array[i], i, array);
}
}
return res;
};
}
console.log("Using x3dom fields.js as standalone math and/or base types library.");
}
/**
* The x3dom.fields namespace.
* @namespace x3dom.fields
*/
x3dom.fields = {};
/** shortcut for convenience and speedup */
var VecMath = x3dom.fields;
/** Epsilon */
x3dom.fields.Eps = 0.000001;
///////////////////////////////////////////////////////////////////////////////
// Single-Field Definitions
///////////////////////////////////////////////////////////////////////////////
///////////////////////////////////////////////////////////////////////////////
/**
* Constructor. You must either specify all argument values or no argument. In the latter case, an identity matrix will be created.
*
* @class Represents a 4x4 matrix in row major format.
* @param {Number} [_00=1] - value at [0,0]
* @param {Number} [_01=0] - value at [0,1]
* @param {Number} [_02=0] - value at [0,2]
* @param {Number} [_03=0] - value at [0,3]
* @param {Number} [_10=0] - value at [1,0]
* @param {Number} [_11=1] - value at [1,1]
* @param {Number} [_12=0] - value at [1,2]
* @param {Number} [_13=0] - value at [1,3]
* @param {Number} [_20=0] - value at [2,0]
* @param {Number} [_21=0] - value at [2,1]
* @param {Number} [_22=1] - value at [2,2]
* @param {Number} [_23=0] - value at [2,3]
* @param {Number} [_30=0] - value at [3,0]
* @param {Number} [_31=0] - value at [3,1]
* @param {Number} [_32=0] - value at [3,2]
* @param {Number} [_33=1] - value at [3,3]
*/
x3dom.fields.SFMatrix4f = function( _00, _01, _02, _03,
_10, _11, _12, _13,
_20, _21, _22, _23,
_30, _31, _32, _33)
{
if (arguments.length === 0) {
this._00 = 1; this._01 = 0; this._02 = 0; this._03 = 0;
this._10 = 0; this._11 = 1; this._12 = 0; this._13 = 0;
this._20 = 0; this._21 = 0; this._22 = 1; this._23 = 0;
this._30 = 0; this._31 = 0; this._32 = 0; this._33 = 1;
}
else {
this._00 = _00; this._01 = _01; this._02 = _02; this._03 = _03;
this._10 = _10; this._11 = _11; this._12 = _12; this._13 = _13;
this._20 = _20; this._21 = _21; this._22 = _22; this._23 = _23;
this._30 = _30; this._31 = _31; this._32 = _32; this._33 = _33;
}
};
/**
* Returns the first column vector of the matrix.
* @returns {x3dom.fields.SFVec3f} the vector
*/
x3dom.fields.SFMatrix4f.prototype.e0 = function () {
var baseVec = new x3dom.fields.SFVec3f(this._00, this._10, this._20);
return baseVec.normalize();
};
/**
* Returns the second column vector of the matrix.
* @returns {x3dom.fields.SFVec3f} the vector
*/
x3dom.fields.SFMatrix4f.prototype.e1 = function () {
var baseVec = new x3dom.fields.SFVec3f(this._01, this._11, this._21);
return baseVec.normalize();
};
/**
* Returns the third column vector of the matrix.
* @returns {x3dom.fields.SFVec3f} the vector
*/
x3dom.fields.SFMatrix4f.prototype.e2 = function () {
var baseVec = new x3dom.fields.SFVec3f(this._02, this._12, this._22);
return baseVec.normalize();
};
/**
* Returns the fourth column vector of the matrix.
* @returns {x3dom.fields.SFVec3f} the vector
*/
x3dom.fields.SFMatrix4f.prototype.e3 = function () {
return new x3dom.fields.SFVec3f(this._03, this._13, this._23);
};
/**
* Returns a copy of the argument matrix.
* @param {x3dom.fields.SFMatrix4f} that - the matrix to copy
* @returns {x3dom.fields.SFMatrix4f} the copy
*/
x3dom.fields.SFMatrix4f.copy = function(that) {
return new x3dom.fields.SFMatrix4f(
that._00, that._01, that._02, that._03,
that._10, that._11, that._12, that._13,
that._20, that._21, that._22, that._23,
that._30, that._31, that._32, that._33
);
};
/**
* Returns a copy of the matrix.
* @returns {x3dom.fields.SFMatrix4f} the copy
*/
x3dom.fields.SFMatrix4f.prototype.copy = function() {
return x3dom.fields.SFMatrix4f.copy(this);
};
/**
* Returns a SFMatrix4f identity matrix.
* @returns {x3dom.fields.SFMatrix4f} the new identity matrix
*/
x3dom.fields.SFMatrix4f.identity = function () {
return new x3dom.fields.SFMatrix4f(
1, 0, 0, 0,
0, 1, 0, 0,
0, 0, 1, 0,
0, 0, 0, 1
);
};
/**
* Returns a new null matrix.
* @returns {x3dom.fields.SFMatrix4f} the new null matrix
*/
x3dom.fields.SFMatrix4f.zeroMatrix = function () {
return new x3dom.fields.SFMatrix4f(
0, 0, 0, 0,
0, 0, 0, 0,
0, 0, 0, 0,
0, 0, 0, 0
);
};
/**
* Returns a new translation matrix.
* @param {x3dom.fields.SFVec3f} vec - vector that describes the desired translation
* @returns {x3dom.fields.SFMatrix4f} the new identity matrix
*/
x3dom.fields.SFMatrix4f.translation = function (vec) {
return new x3dom.fields.SFMatrix4f(
1, 0, 0, vec.x,
0, 1, 0, vec.y,
0, 0, 1, vec.z,
0, 0, 0, 1
);
};
/**
 * Returns a new rotation matrix, rotating around the x axis.
 * @param {Number} a - angle in radians
* @returns {x3dom.fields.SFMatrix4f} the new rotation matrix
*/
x3dom.fields.SFMatrix4f.rotationX = function (a) {
var c = Math.cos(a);
var s = Math.sin(a);
return new x3dom.fields.SFMatrix4f(
1, 0, 0, 0,
0, c, -s, 0,
0, s, c, 0,
0, 0, 0, 1
);
};
/**
 * Returns a new rotation matrix, rotating around the y axis.
 * @param {Number} a - angle in radians
* @returns {x3dom.fields.SFMatrix4f} the new rotation matrix
*/
x3dom.fields.SFMatrix4f.rotationY = function (a) {
var c = Math.cos(a);
var s = Math.sin(a);
return new x3dom.fields.SFMatrix4f(
c, 0, s, 0,
0, 1, 0, 0,
-s, 0, c, 0,
0, 0, 0, 1
);
};
/**
 * Returns a new rotation matrix, rotating around the z axis.
 * @param {Number} a - angle in radians
* @returns {x3dom.fields.SFMatrix4f} the new rotation matrix
*/
x3dom.fields.SFMatrix4f.rotationZ = function (a) {
var c = Math.cos(a);
var s = Math.sin(a);
return new x3dom.fields.SFMatrix4f(
c, -s, 0, 0,
s, c, 0, 0,
0, 0, 1, 0,
0, 0, 0, 1
);
};
/**
* Returns a new scale matrix.
* @param {x3dom.fields.SFVec3f} vec - vector containing scale factors along the three main axes
* @returns {x3dom.fields.SFMatrix4f} the new scale matrix
*/
x3dom.fields.SFMatrix4f.scale = function (vec) {
return new x3dom.fields.SFMatrix4f(
vec.x, 0, 0, 0,
0, vec.y, 0, 0,
0, 0, vec.z, 0,
0, 0, 0, 1
);
};
/**
* Returns a new camera matrix, using the given "look at" parameters.
* @param {x3dom.fields.SFVec3f} from - eye point
* @param {x3dom.fields.SFVec3f} at - focus ("look at") point
* @param {x3dom.fields.SFVec3f} up - up vector
* @returns {x3dom.fields.SFMatrix4f} the new camera matrix
*/
x3dom.fields.SFMatrix4f.lookAt = function (from, at, up)
{
var view = from.subtract(at).normalize();
var right = up.normalize().cross(view).normalize();
// check if zero vector, i.e. linearly dependent
if (right.dot(right) < x3dom.fields.Eps) {
x3dom.debug.logWarning("View matrix is linearly dependent.");
return x3dom.fields.SFMatrix4f.translation(from);
}
var newUp = view.cross(right).normalize();
var tmp = x3dom.fields.SFMatrix4f.identity();
tmp.setValue(right, newUp, view, from);
return tmp;
};
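// Illustrative usage (values are arbitrary): build a camera matrix looking
// from (0, 0, 10) at the origin with +Y up.
//
//   var camToWorld = x3dom.fields.SFMatrix4f.lookAt(
//       new x3dom.fields.SFVec3f(0, 0, 10),
//       new x3dom.fields.SFVec3f(0, 0, 0),
//       new x3dom.fields.SFVec3f(0, 1, 0));
/**
 * Returns a new perspective projection matrix for the given view frustum.
 * @param {Number} left - left border of the near plane
 * @param {Number} right - right border of the near plane
 * @param {Number} bottom - bottom border of the near plane
 * @param {Number} top - top border of the near plane
 * @param {Number} near - near clipping distance
 * @param {Number} far - far clipping distance
 * @returns {x3dom.fields.SFMatrix4f} the new projection matrix
 */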
x3dom.fields.SFMatrix4f.perspectiveFrustum = function(left, right, bottom, top, near, far)
{
return new x3dom.fields.SFMatrix4f(
2*near/(right-left), 0, (right+left)/(right-left), 0,
0, 2*near/(top-bottom), (top+bottom)/(top-bottom), 0,
0, 0, -(far+near)/(far-near), -2*far*near/(far-near),
0, 0, -1, 0
);
};
/**
* Returns a new perspective projection matrix.
* @param {Number} fov - field-of-view angle in radians
* @param {Number} aspect - aspect ratio (width / height)
* @param {Number} near - near clipping distance
* @param {Number} far - far clipping distance
* @returns {x3dom.fields.SFMatrix4f} the new projection matrix
*/
x3dom.fields.SFMatrix4f.perspective = function(fov, aspect, near, far)
{
var f = 1 / Math.tan(fov / 2);
return new x3dom.fields.SFMatrix4f(
f/aspect, 0, 0, 0,
0, f, 0, 0,
0, 0, (near+far)/(near-far), 2*near*far/(near-far),
0, 0, -1, 0
);
};
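// Illustrative usage: the field of view is in radians, e.g. a 45° vertical
// fov at a 16:9 aspect ratio.
//
//   var proj = x3dom.fields.SFMatrix4f.perspective(Math.PI / 4, 16 / 9, 0.1, 1000);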
/**
* Returns a new orthogonal projection matrix.
* @param {Number} left - left border value of the view area
* @param {Number} right - right border value of the view area
* @param {Number} bottom - bottom border value of the view area
* @param {Number} top - top border value of the view area
* @param {Number} near - near clipping distance
* @param {Number} far - far clipping distance
* @param {Number} [aspect=1.0] - desired aspect ratio (width / height) of the projected image
* @returns {x3dom.fields.SFMatrix4f} the new projection matrix
*/
x3dom.fields.SFMatrix4f.ortho = function(left, right, bottom, top, near, far, aspect)
{
var rl = (right - left) / 2; // hs
var tb = (top - bottom) / 2; // vs
var fn = far - near;
if (aspect === undefined)
aspect = 1.0;
if (aspect < (rl / tb))
tb = rl / aspect;
else
rl = tb * aspect;
left = -rl;
right = rl;
bottom = -tb;
top = tb;
rl *= 2;
tb *= 2;
return new x3dom.fields.SFMatrix4f(
2 / rl, 0, 0, -(right+left) / rl,
0, 2 / tb, 0, -(top+bottom) / tb,
0, 0, -2 / fn, -(far+near) / fn,
0, 0, 0, 1
);
};
/**
 * Sets the translation components of a homogeneous transform matrix.
* @param {x3dom.fields.SFVec3f} vec - the translation vector
*/
x3dom.fields.SFMatrix4f.prototype.setTranslate = function (vec) {
this._03 = vec.x;
this._13 = vec.y;
this._23 = vec.z;
};
/**
 * Sets the scale components of a homogeneous transform matrix.
* @param {x3dom.fields.SFVec3f} vec - vector containing scale factors along the three main axes
*/
x3dom.fields.SFMatrix4f.prototype.setScale = function (vec) {
this._00 = vec.x;
this._11 = vec.y;
this._22 = vec.z;
};
/**
 * Sets the rotation components of a homogeneous transform matrix.
* @param {x3dom.fields.Quaternion} quat - quaternion that describes the rotation
*/
x3dom.fields.SFMatrix4f.prototype.setRotate = function (quat) {
var xx = quat.x * quat.x;
var xy = quat.x * quat.y;
var xz = quat.x * quat.z;
var yy = quat.y * quat.y;
var yz = quat.y * quat.z;
var zz = quat.z * quat.z;
var wx = quat.w * quat.x;
var wy = quat.w * quat.y;
var wz = quat.w * quat.z;
this._00 = 1 - 2 * (yy + zz); this._01 = 2 * (xy - wz); this._02 = 2 * (xz + wy);
this._10 = 2 * (xy + wz); this._11 = 1 - 2 * (xx + zz); this._12 = 2 * (yz - wx);
this._20 = 2 * (xz - wy); this._21 = 2 * (yz + wx); this._22 = 1 - 2 * (xx + yy);
};
/**
 * Creates a new rotation matrix from an axis-angle string representation
 * of the form "x y z angle" (angle in radians), with components separated
 * by whitespace and/or commas.
 * @param {String} str - string to parse
 * @return {x3dom.fields.SFMatrix4f} the new rotation matrix
*/
x3dom.fields.SFMatrix4f.parseRotation = function (str) {
var m = /^([+\-]?\d*\.*\d*[eE]?[+\-]?\d*?)\s*,?\s*([+\-]?\d*\.*\d*[eE]?[+\-]?\d*?)\s*,?\s*([+\-]?\d*\.*\d*[eE]?[+\-]?\d*?)\s*,?\s*([+\-]?\d*\.*\d*[eE]?[+\-]?\d*?)$/.exec(str);
var x = +m[1], y = +m[2], z = +m[3], a = +m[4];
var d = Math.sqrt(x*x + y*y + z*z);
if (d === 0) {
x = 1; y = z = 0;
} else {
x /= d; y /= d; z /= d;
}
var c = Math.cos(a);
var s = Math.sin(a);
var t = 1 - c;
return new x3dom.fields.SFMatrix4f(
t*x*x+c, t*x*y+s*z, t*x*z-s*y, 0,
t*x*y-s*z, t*y*y+c, t*y*z+s*x, 0,
t*x*z+s*y, t*y*z-s*x, t*z*z+c, 0,
0, 0, 0, 1
).transpose();
};
/**
 * Creates a new matrix from an X3D-conformant string representation;
 * also accepts CSS "matrix(...)" strings with 6 or 16 values.
 * @param {String} str - string to parse
 * @return {x3dom.fields.SFMatrix4f} the new matrix
*/
x3dom.fields.SFMatrix4f.parse = function (str) {
var needTranspose = false;
var val = /matrix.*\((.+)\)/;
if (val.exec(str)) {
str = RegExp.$1;
needTranspose = true;
}
    // Array.map generics are non-standard (Firefox-only);
    // use standard Array.prototype.map instead
    var arr = str.split(/[,\s]+/).map(function (n) { return +n; });
if (arr.length >= 16)
{
if (!needTranspose) {
return new x3dom.fields.SFMatrix4f(
arr[0], arr[1], arr[2], arr[3],
arr[4], arr[5], arr[6], arr[7],
arr[8], arr[9], arr[10], arr[11],
arr[12], arr[13], arr[14], arr[15]
);
}
else {
return new x3dom.fields.SFMatrix4f(
arr[0], arr[4], arr[8], arr[12],
arr[1], arr[5], arr[9], arr[13],
arr[2], arr[6], arr[10], arr[14],
arr[3], arr[7], arr[11], arr[15]
);
}
}
else if (arr.length === 6) {
return new x3dom.fields.SFMatrix4f(
arr[0], arr[1], 0, arr[4],
arr[2], arr[3], 0, arr[5],
0, 0, 1, 0,
0, 0, 0, 1
);
}
else {
x3dom.debug.logWarning("SFMatrix4f - can't parse string: " + str);
return x3dom.fields.SFMatrix4f.identity();
}
};
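// Usage sketch (illustrative only, not part of the library): both plain
// whitespace/comma-separated strings and CSS-style strings are accepted.
//   var a = x3dom.fields.SFMatrix4f.parse("1 0 0 0, 0 1 0 0, 0 0 1 0, 0 0 0 1");
//   var b = x3dom.fields.SFMatrix4f.parse("matrix(1, 0, 0, 1, 10, 20)");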
/**
* Returns the result of multiplying this matrix with the given one, using "post-multiplication" / "right multiply".
* @param {x3dom.fields.SFMatrix4f} that - matrix to multiply with this one
* @return {x3dom.fields.SFMatrix4f} resulting matrix
*/
x3dom.fields.SFMatrix4f.prototype.mult = function (that) {
return new x3dom.fields.SFMatrix4f(
this._00*that._00+this._01*that._10+this._02*that._20+this._03*that._30,
this._00*that._01+this._01*that._11+this._02*that._21+this._03*that._31,
this._00*that._02+this._01*that._12+this._02*that._22+this._03*that._32,
this._00*that._03+this._01*that._13+this._02*that._23+this._03*that._33,
this._10*that._00+this._11*that._10+this._12*that._20+this._13*that._30,
this._10*that._01+this._11*that._11+this._12*that._21+this._13*that._31,
this._10*that._02+this._11*that._12+this._12*that._22+this._13*that._32,
this._10*that._03+this._11*that._13+this._12*that._23+this._13*that._33,
this._20*that._00+this._21*that._10+this._22*that._20+this._23*that._30,
this._20*that._01+this._21*that._11+this._22*that._21+this._23*that._31,
this._20*that._02+this._21*that._12+this._22*that._22+this._23*that._32,
this._20*that._03+this._21*that._13+this._22*that._23+this._23*that._33,
this._30*that._00+this._31*that._10+this._32*that._20+this._33*that._30,
this._30*that._01+this._31*that._11+this._32*that._21+this._33*that._31,
this._30*that._02+this._31*that._12+this._32*that._22+this._33*that._32,
this._30*that._03+this._31*that._13+this._32*that._23+this._33*that._33
);
};
/**
 * Transforms a given 3D point, using this matrix as a homogeneous transform matrix
* (ignores projection part of matrix for speedup in standard cases).
* @param {x3dom.fields.SFVec3f} vec - point to transform
* @return {x3dom.fields.SFVec3f} resulting point
*/
x3dom.fields.SFMatrix4f.prototype.multMatrixPnt = function (vec) {
return new x3dom.fields.SFVec3f(
this._00*vec.x + this._01*vec.y + this._02*vec.z + this._03,
this._10*vec.x + this._11*vec.y + this._12*vec.z + this._13,
this._20*vec.x + this._21*vec.y + this._22*vec.z + this._23
);
};
/**
 * Transforms a given 3D vector, using this matrix as a homogeneous transform matrix.
* @param {x3dom.fields.SFVec3f} vec - vector to transform
* @return {x3dom.fields.SFVec3f} resulting vector
*/
x3dom.fields.SFMatrix4f.prototype.multMatrixVec = function (vec) {
return new x3dom.fields.SFVec3f(
this._00*vec.x + this._01*vec.y + this._02*vec.z,
this._10*vec.x + this._11*vec.y + this._12*vec.z,
this._20*vec.x + this._21*vec.y + this._22*vec.z
);
};
/**
* Transforms a given 3D point, using this matrix as a transform matrix
* (also includes projection part of matrix - required for e.g. modelview-projection matrix).
 * The resulting point is normalized by its w component.
* @param {x3dom.fields.SFVec3f} vec - point to transform
* @return {x3dom.fields.SFVec3f} resulting point
*/
x3dom.fields.SFMatrix4f.prototype.multFullMatrixPnt = function (vec) {
var w = this._30*vec.x + this._31*vec.y + this._32*vec.z + this._33;
if (w) { w = 1.0 / w; }
return new x3dom.fields.SFVec3f(
(this._00*vec.x + this._01*vec.y + this._02*vec.z + this._03) * w,
(this._10*vec.x + this._11*vec.y + this._12*vec.z + this._13) * w,
(this._20*vec.x + this._21*vec.y + this._22*vec.z + this._23) * w
);
};
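// Usage sketch (illustrative only, not part of the library; proj, view and
// model are example matrices): project a world-space point to normalized
// device coordinates with a combined modelview-projection matrix.
//   var mvp = proj.mult(view).mult(model);
//   var ndc = mvp.multFullMatrixPnt(new x3dom.fields.SFVec3f(1, 2, 3));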
/**
 * Transforms a given plane by this matrix. The plane is given in Hesse
 * normal form as an SFVec4f (x, y, z hold the normal, w the distance);
 * the normal is transformed with the inverse transpose of this matrix.
 * @param {x3dom.fields.SFVec4f} plane - plane to transform
 * @return {x3dom.fields.SFVec4f} resulting plane
 */
x3dom.fields.SFMatrix4f.prototype.multMatrixPlane = function (plane) {
var normal = new x3dom.fields.SFVec3f(plane.x, plane.y, plane.z);
var memberPnt = normal.multiply(-plane.w);
memberPnt = this.multMatrixPnt(memberPnt);
var invTranspose = this.inverse().transpose();
normal = invTranspose.multMatrixVec(normal);
var d = -normal.dot(memberPnt);
return new x3dom.fields.SFVec4f(normal.x, normal.y, normal.z, d);
};
/**
* Returns a transposed version of this matrix.
* @return {x3dom.fields.SFMatrix4f} resulting matrix
*/
x3dom.fields.SFMatrix4f.prototype.transpose = function () {
return new x3dom.fields.SFMatrix4f(
this._00, this._10, this._20, this._30,
this._01, this._11, this._21, this._31,
this._02, this._12, this._22, this._32,
this._03, this._13, this._23, this._33
);
};
/**
* Returns a negated version of this matrix.
* @return {x3dom.fields.SFMatrix4f} resulting matrix
*/
x3dom.fields.SFMatrix4f.prototype.negate = function () {
return new x3dom.fields.SFMatrix4f(
-this._00, -this._01, -this._02, -this._03,
-this._10, -this._11, -this._12, -this._13,
-this._20, -this._21, -this._22, -this._23,
-this._30, -this._31, -this._32, -this._33
);
};
/**
* Returns a scaled version of this matrix.
* @param {Number} s - scale factor
* @return {x3dom.fields.SFMatrix4f} resulting matrix
*/
x3dom.fields.SFMatrix4f.prototype.multiply = function (s) {
return new x3dom.fields.SFMatrix4f(
s*this._00, s*this._01, s*this._02, s*this._03,
s*this._10, s*this._11, s*this._12, s*this._13,
s*this._20, s*this._21, s*this._22, s*this._23,
s*this._30, s*this._31, s*this._32, s*this._33
);
};
/**
* Returns the result of adding the given matrix to this matrix.
* @param {x3dom.fields.SFMatrix4f} that - the other matrix
* @return {x3dom.fields.SFMatrix4f} resulting matrix
*/
x3dom.fields.SFMatrix4f.prototype.add = function (that) {
return new x3dom.fields.SFMatrix4f(
this._00+that._00, this._01+that._01, this._02+that._02, this._03+that._03,
this._10+that._10, this._11+that._11, this._12+that._12, this._13+that._13,
this._20+that._20, this._21+that._21, this._22+that._22, this._23+that._23,
this._30+that._30, this._31+that._31, this._32+that._32, this._33+that._33
);
};
/**
* Returns the result of adding the given matrix to this matrix, using an additional scale factor for the argument matrix.
* @param {x3dom.fields.SFMatrix4f} that - the other matrix
* @param {Number} s - the scale factor
* @return {x3dom.fields.SFMatrix4f} resulting matrix
*/
x3dom.fields.SFMatrix4f.prototype.addScaled = function (that, s) {
return new x3dom.fields.SFMatrix4f(
this._00+s*that._00, this._01+s*that._01, this._02+s*that._02, this._03+s*that._03,
this._10+s*that._10, this._11+s*that._11, this._12+s*that._12, this._13+s*that._13,
this._20+s*that._20, this._21+s*that._21, this._22+s*that._22, this._23+s*that._23,
this._30+s*that._30, this._31+s*that._31, this._32+s*that._32, this._33+s*that._33
);
};
/**
* Fills the values of this matrix with the values of the other one.
* @param {x3dom.fields.SFMatrix4f} that - the other matrix
*/
x3dom.fields.SFMatrix4f.prototype.setValues = function (that) {
this._00 = that._00; this._01 = that._01; this._02 = that._02; this._03 = that._03;
this._10 = that._10; this._11 = that._11; this._12 = that._12; this._13 = that._13;
this._20 = that._20; this._21 = that._21; this._22 = that._22; this._23 = that._23;
this._30 = that._30; this._31 = that._31; this._32 = that._32; this._33 = that._33;
};
/**
* Fills the upper left 3x3 or 3x4 values of this matrix, using the given (three or four) column vectors.
* @param {x3dom.fields.SFVec3f} v1 - first column vector
* @param {x3dom.fields.SFVec3f} v2 - second column vector
* @param {x3dom.fields.SFVec3f} v3 - third column vector
* @param {x3dom.fields.SFVec3f} [v4=undefined] - fourth column vector
*/
x3dom.fields.SFMatrix4f.prototype.setValue = function (v1, v2, v3, v4) {
this._00 = v1.x; this._01 = v2.x; this._02 = v3.x;
this._10 = v1.y; this._11 = v2.y; this._12 = v3.y;
this._20 = v1.z; this._21 = v2.z; this._22 = v3.z;
this._30 = 0; this._31 = 0; this._32 = 0;
if (arguments.length > 3) {
this._03 = v4.x;
this._13 = v4.y;
this._23 = v4.z;
this._33 = 1;
}
};
/**
* Fills the values of this matrix, using the given array.
 * @param {Number[]} a - array whose first 16 values, in column-major order, initialize the matrix
*/
x3dom.fields.SFMatrix4f.prototype.setFromArray = function (a) {
this._00 = a[0]; this._01 = a[4]; this._02 = a[ 8]; this._03 = a[12];
this._10 = a[1]; this._11 = a[5]; this._12 = a[ 9]; this._13 = a[13];
this._20 = a[2]; this._21 = a[6]; this._22 = a[10]; this._23 = a[14];
this._30 = a[3]; this._31 = a[7]; this._32 = a[11]; this._33 = a[15];
};
/**
* Returns a column major version of this matrix, packed into a single array.
* @returns {Number[]} resulting array of 16 values
*/
x3dom.fields.SFMatrix4f.prototype.toGL = function () {
return [
this._00, this._10, this._20, this._30,
this._01, this._11, this._21, this._31,
this._02, this._12, this._22, this._32,
this._03, this._13, this._23, this._33
];
};
/**
* Returns the value of this matrix at a given position.
* @param {Number} i - row index (starting with 0)
* @param {Number} j - column index (starting with 0)
* @returns {Number} the value
*/
x3dom.fields.SFMatrix4f.prototype.at = function (i, j) {
var field = "_" + i + j;
return this[field];
};
/**
* Computes the square root of the matrix, assuming that its determinant is greater than zero.
 * @return {x3dom.fields.SFMatrix4f} a matrix containing the result
*/
x3dom.fields.SFMatrix4f.prototype.sqrt = function () {
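    // Denman-Beavers-style coupled iteration (assumed from the code
    // structure): result converges to sqrt(A) while Y converges to
    // inv(sqrt(A)); the determinant-based factor g rescales both
    // iterates for numerical stability.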
var Y = x3dom.fields.SFMatrix4f.identity();
var result = x3dom.fields.SFMatrix4f.copy(this);
for (var i=0; i<6; i++)
{
var iX = result.inverse();
var iY = (i == 0) ? x3dom.fields.SFMatrix4f.identity() : Y.inverse();
var rd = result.det(), yd = Y.det();
var g = Math.abs( Math.pow(rd * yd, -0.125) );
var ig = 1.0 / g;
result = result.multiply(g);
result = result.addScaled(iY, ig);
result = result.multiply(0.5);
Y = Y.multiply(g);
Y = Y.addScaled(iX, ig);
Y = Y.multiply(0.5);
}
return result;
};
/**
* Returns the largest absolute value of all entries in the matrix.
* This is only a helper for calculating log and not the usual Infinity-norm for matrices.
* @returns {Number} the largest absolute value
*/
x3dom.fields.SFMatrix4f.prototype.normInfinity = function () {
var t = 0, m = 0;
if ((t = Math.abs(this._00)) > m) {
m = t;
}
if ((t = Math.abs(this._01)) > m) {
m = t;
}
if ((t = Math.abs(this._02)) > m) {
m = t;
}
if ((t = Math.abs(this._03)) > m) {
m = t;
}
if ((t = Math.abs(this._10)) > m) {
m = t;
}
if ((t = Math.abs(this._11)) > m) {
m = t;
}
if ((t = Math.abs(this._12)) > m) {
m = t;
}
if ((t = Math.abs(this._13)) > m) {
m = t;
}
if ((t = Math.abs(this._20)) > m) {
m = t;
}
if ((t = Math.abs(this._21)) > m) {
m = t;
}
if ((t = Math.abs(this._22)) > m) {
m = t;
}
if ((t = Math.abs(this._23)) > m) {
m = t;
}
if ((t = Math.abs(this._30)) > m) {
m = t;
}
if ((t = Math.abs(this._31)) > m) {
m = t;
}
if ((t = Math.abs(this._32)) > m) {
m = t;
}
if ((t = Math.abs(this._33)) > m) {
m = t;
}
return m;
};
/**
* Returns the 1-norm of the upper left 3x3 part of this matrix.
* The 1-norm is also known as maximum absolute column sum norm.
* @returns {Number} the resulting number
*/
x3dom.fields.SFMatrix4f.prototype.norm1_3x3 = function() {
var max = Math.abs(this._00) +
Math.abs(this._10) +
Math.abs(this._20);
var t = 0;
if ((t = Math.abs(this._01) +
Math.abs(this._11) +
Math.abs(this._21)) > max) {
max = t;
}
if ((t = Math.abs(this._02) +
Math.abs(this._12) +
Math.abs(this._22)) > max) {
max = t;
}
return max;
};
/**
* Returns the infinity-norm of the upper left 3x3 part of this matrix.
* The infinity-norm is also known as maximum absolute row sum norm.
* @returns {Number} the resulting number
*/
x3dom.fields.SFMatrix4f.prototype.normInf_3x3 = function() {
var max = Math.abs(this._00) +
Math.abs(this._01) +
Math.abs(this._02);
var t = 0;
if ((t = Math.abs(this._10) +
Math.abs(this._11) +
Math.abs(this._12)) > max) {
max = t;
}
if ((t = Math.abs(this._20) +
Math.abs(this._21) +
Math.abs(this._22)) > max) {
max = t;
}
return max;
};
/**
* Computes the transposed adjoint of the upper left 3x3 part of this matrix,
* and stores it in the upper left part of a new 4x4 identity matrix.
* @returns {x3dom.fields.SFMatrix4f} the resulting matrix
*/
x3dom.fields.SFMatrix4f.prototype.adjointT_3x3 = function () {
var result = x3dom.fields.SFMatrix4f.identity();
result._00 = this._11 * this._22 - this._12 * this._21;
result._01 = this._12 * this._20 - this._10 * this._22;
result._02 = this._10 * this._21 - this._11 * this._20;
result._10 = this._21 * this._02 - this._22 * this._01;
result._11 = this._22 * this._00 - this._20 * this._02;
result._12 = this._20 * this._01 - this._21 * this._00;
result._20 = this._01 * this._12 - this._02 * this._11;
result._21 = this._02 * this._10 - this._00 * this._12;
result._22 = this._00 * this._11 - this._01 * this._10;
return result;
};
/**
* Checks whether this matrix equals another matrix.
* @param {x3dom.fields.SFMatrix4f} that - the other matrix
* @returns {Boolean}
*/
x3dom.fields.SFMatrix4f.prototype.equals = function (that) {
var eps = 0.000000000001;
return Math.abs(this._00-that._00) < eps && Math.abs(this._01-that._01) < eps &&
Math.abs(this._02-that._02) < eps && Math.abs(this._03-that._03) < eps &&
Math.abs(this._10-that._10) < eps && Math.abs(this._11-that._11) < eps &&
Math.abs(this._12-that._12) < eps && Math.abs(this._13-that._13) < eps &&
Math.abs(this._20-that._20) < eps && Math.abs(this._21-that._21) < eps &&
Math.abs(this._22-that._22) < eps && Math.abs(this._23-that._23) < eps &&
Math.abs(this._30-that._30) < eps && Math.abs(this._31-that._31) < eps &&
Math.abs(this._32-that._32) < eps && Math.abs(this._33-that._33) < eps;
};
/**
* Decomposes the matrix into a translation, rotation, scale,
* and scale orientation. Any projection information is discarded.
* The decomposition depends upon choice of center point for rotation and scaling,
* which is optional as the last parameter.
* @param {x3dom.fields.SFVec3f} translation - 3D vector to be filled with the translation values
* @param {x3dom.fields.Quaternion} rotation - quaternion to be filled with the rotation values
* @param {x3dom.fields.SFVec3f} scaleFactor - 3D vector to be filled with the scale factors
* @param {x3dom.fields.Quaternion} scaleOrientation - rotation (quaternion) to be applied before scaling
* @param {x3dom.fields.SFVec3f} [center=undefined] - center point for rotation and scaling, if not origin
*/
x3dom.fields.SFMatrix4f.prototype.getTransform = function(
translation, rotation, scaleFactor, scaleOrientation, center)
{
var m = null;
if (arguments.length > 4) {
m = x3dom.fields.SFMatrix4f.translation(center.negate());
m = m.mult(this);
var c = x3dom.fields.SFMatrix4f.translation(center);
m = m.mult(c);
}
else {
m = x3dom.fields.SFMatrix4f.copy(this);
}
var flip = m.decompose(translation, rotation, scaleFactor, scaleOrientation);
scaleFactor.setValues(scaleFactor.multiply(flip));
};
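// Usage sketch (illustrative only, not part of the library; mat is an
// example matrix): pull translation, rotation and scale out of a transform.
//   var t  = new x3dom.fields.SFVec3f(),  s  = new x3dom.fields.SFVec3f();
//   var r  = new x3dom.fields.Quaternion(), so = new x3dom.fields.Quaternion();
//   mat.getTransform(t, r, s, so);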
/**
* Computes the decomposition of the given 4x4 affine matrix M as M = T F R SO S SO^t,
* where T is a translation matrix, F is +/- I (a reflection), R is a rotation matrix,
* SO is a rotation matrix and S is a (nonuniform) scale matrix.
* @param {x3dom.fields.SFVec3f} t - 3D vector to be filled with the translation values
* @param {x3dom.fields.Quaternion} r - quaternion to be filled with the rotation values
* @param {x3dom.fields.SFVec3f} s - 3D vector to be filled with the scale factors
* @param {x3dom.fields.Quaternion} so - rotation (quaternion) to be applied before scaling
* @returns {Number} signum of determinant of the transposed adjoint upper 3x3 matrix
*/
x3dom.fields.SFMatrix4f.prototype.decompose = function(t, r, s, so)
{
var A = x3dom.fields.SFMatrix4f.copy(this);
var Q = x3dom.fields.SFMatrix4f.identity(),
S = x3dom.fields.SFMatrix4f.identity(),
SO = x3dom.fields.SFMatrix4f.identity();
t.x = A._03;
t.y = A._13;
t.z = A._23;
A._03 = 0.0;
A._13 = 0.0;
A._23 = 0.0;
A._30 = 0.0;
A._31 = 0.0;
A._32 = 0.0;
var det = A.polarDecompose(Q, S);
var f = 1.0;
if (det < 0.0) {
Q = Q.negate();
f = -1.0;
}
r.setValue(Q);
S.spectralDecompose(SO, s);
so.setValue(SO);
return f;
};
/**
* Performs a polar decomposition of this matrix A into two matrices Q and S, so that A = QS
 * @param {x3dom.fields.SFMatrix4f} Q - first resulting matrix (orthogonal factor)
 * @param {x3dom.fields.SFMatrix4f} S - second resulting matrix (symmetric factor)
* @returns {Number} determinant of the transposed adjoint upper 3x3 matrix
*/
x3dom.fields.SFMatrix4f.prototype.polarDecompose = function(Q, S)
{
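    // Newton iteration with norm- and determinant-based acceleration
    // (Higham-style, assumed from the code structure): Mk converges to
    // the transpose of the orthogonal factor Q.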
var TOL = 0.000000000001;
var Mk = this.transpose();
var Ek = x3dom.fields.SFMatrix4f.identity();
var Mk_one = Mk.norm1_3x3();
var Mk_inf = Mk.normInf_3x3();
var MkAdjT;
var MkAdjT_one, MkAdjT_inf;
var Ek_one, Mk_det;
do
{
// compute transpose of adjoint
MkAdjT = Mk.adjointT_3x3();
// Mk_det = det(Mk) -- computed from the adjoint
Mk_det = Mk._00 * MkAdjT._00 +
Mk._01 * MkAdjT._01 +
Mk._02 * MkAdjT._02;
//TODO: should this be a close to zero test ?
if (Mk_det == 0.0)
{
x3dom.debug.logWarning("polarDecompose: Mk_det == 0.0");
break;
}
MkAdjT_one = MkAdjT.norm1_3x3();
MkAdjT_inf = MkAdjT.normInf_3x3();
// compute update factors
var gamma = Math.sqrt( Math.sqrt((MkAdjT_one * MkAdjT_inf) /
(Mk_one * Mk_inf)) / Math.abs(Mk_det) );
var g1 = 0.5 * gamma;
var g2 = 0.5 / (gamma * Mk_det);
Ek.setValues(Mk);
Mk = Mk.multiply (g1); // this does:
Mk = Mk.addScaled(MkAdjT, g2); // Mk = g1 * Mk + g2 * MkAdjT
Ek = Ek.addScaled(Mk, -1.0); // Ek -= Mk;
Ek_one = Ek.norm1_3x3();
Mk_one = Mk.norm1_3x3();
Mk_inf = Mk.normInf_3x3();
} while (Ek_one > (Mk_one * TOL));
Q.setValues(Mk.transpose());
S.setValues(Mk.mult(this));
for (var i = 0; i < 3; ++i)
{
for (var j = i; j < 3; ++j)
{
            // average symmetric entries once into a temporary; reusing the
            // already updated entry in the second assignment would leave S
            // slightly asymmetric
            var sym = 0.5 * (S['_'+j+i] + S['_'+i+j]);
            S['_'+j+i] = sym;
            S['_'+i+j] = sym;
}
}
return Mk_det;
};
/**
* Performs a spectral decomposition of this matrix.
* @param {x3dom.fields.SFMatrix4f} SO - resulting matrix
* @param {x3dom.fields.SFVec3f} k - resulting vector
*/
x3dom.fields.SFMatrix4f.prototype.spectralDecompose = function(SO, k)
{
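    // cyclic Jacobi eigenvalue iteration (assumed from the code structure):
    // rotates away the off-diagonal entries of the symmetric 3x3 part;
    // SO accumulates the eigenvectors, k receives the eigenvalues.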
var next = [1, 2, 0];
var maxIterations = 20;
var diag = [this._00, this._11, this._22];
var offDiag = [this._12, this._20, this._01];
for (var iter = 0; iter < maxIterations; ++iter)
{
var sm = Math.abs(offDiag[0]) + Math.abs(offDiag[1]) + Math.abs(offDiag[2]);
if (sm == 0) {
break;
}
for (var i = 2; i >= 0; --i)
{
var p = next[i];
var q = next[p];
var absOffDiag = Math.abs(offDiag[i]);
var g = 100.0 * absOffDiag;
if (absOffDiag > 0.0)
{
var t = 0, h = diag[q] - diag[p];
var absh = Math.abs(h);
if (absh + g == absh)
{
t = offDiag[i] / h;
}
else
{
var theta = 0.5 * h / offDiag[i];
t = 1.0 / (Math.abs(theta) + Math.sqrt(theta * theta + 1.0));
t = theta < 0.0 ? -t : t;
}
var c = 1.0 / Math.sqrt(t * t + 1.0);
var s = t * c;
var tau = s / (c + 1.0);
var ta = t * offDiag[i];
offDiag[i] = 0.0;
diag[p] -= ta;
diag[q] += ta;
var offDiagq = offDiag[q];
offDiag[q] -= s * (offDiag[p] + tau * offDiagq);
offDiag[p] += s * (offDiagq - tau * offDiag[p]);
for (var j = 2; j >= 0; --j)
{
var a = SO['_'+j+p];
var b = SO['_'+j+q];
SO['_'+j+p] -= s * (b + tau * a);
SO['_'+j+q] += s * (a - tau * b);
}
}
}
}
k.x = diag[0];
k.y = diag[1];
k.z = diag[2];
};
/**
* Computes the logarithm of this matrix, assuming that its determinant is greater than zero.
* @returns {x3dom.fields.SFMatrix4f} log of matrix
*/
x3dom.fields.SFMatrix4f.prototype.log = function () {
var maxiter = 12;
var eps = 1e-12;
var A = x3dom.fields.SFMatrix4f.copy(this),
Z = x3dom.fields.SFMatrix4f.copy(this);
// Take repeated square roots to reduce spectral radius
Z._00 -= 1;
Z._11 -= 1;
Z._22 -= 1;
Z._33 -= 1;
var k = 0;
while (Z.normInfinity() > 0.5)
{
A = A.sqrt();
Z.setValues(A);
Z._00 -= 1;
Z._11 -= 1;
Z._22 -= 1;
Z._33 -= 1;
k++;
}
A._00 -= 1;
A._11 -= 1;
A._22 -= 1;
A._33 -= 1;
A = A.negate();
Z.setValues(A);
var result = x3dom.fields.SFMatrix4f.copy(A);
var i = 1;
while (Z.normInfinity() > eps && i < maxiter)
{
Z = Z.mult(A);
i++;
result = result.addScaled(Z, 1.0 / i);
}
return result.multiply( -(1 << k) );
};
/**
* Computes the exponential of this matrix.
* @returns {x3dom.fields.SFMatrix4f} exp of matrix
*/
x3dom.fields.SFMatrix4f.prototype.exp = function () {
var q = 6;
var A = x3dom.fields.SFMatrix4f.copy(this),
D = x3dom.fields.SFMatrix4f.identity(),
N = x3dom.fields.SFMatrix4f.identity(),
result = x3dom.fields.SFMatrix4f.identity();
var k = 0, c = 1.0;
    // scaling step: pick j so that ||A|| / 2^j is small enough for the Pade
    // approximation; ln(2) ~ 0.693, so this computes 1 + floor(log2(||A||))
    var j = 1.0 + Math.floor(Math.log(A.normInfinity()) / 0.693);
if (j < 0) {
j = 0;
}
A = A.multiply(1.0 / (1 << j));
for (k = 1; k <= q; k++)
{
c *= (q - k + 1) / (k * (2 * q - k + 1));
result = A.mult(result);
N = N.addScaled(result, c);
if (k % 2) {
D = D.addScaled(result, -c);
}
else {
D = D.addScaled(result, c);
}
}
result = D.inverse().mult(N);
for (k = 0; k < j; k++)
{
result = result.mult(result);
}
return result;
};
/**
* Computes a determinant for a 3x3 matrix m, given as values in row major order.
* @param {Number} a1 - value of m at (0,0)
* @param {Number} a2 - value of m at (0,1)
* @param {Number} a3 - value of m at (0,2)
* @param {Number} b1 - value of m at (1,0)
* @param {Number} b2 - value of m at (1,1)
* @param {Number} b3 - value of m at (1,2)
* @param {Number} c1 - value of m at (2,0)
* @param {Number} c2 - value of m at (2,1)
* @param {Number} c3 - value of m at (2,2)
* @returns {Number} determinant
*/
x3dom.fields.SFMatrix4f.prototype.det3 = function (a1, a2, a3, b1, b2, b3, c1, c2, c3) {
return ((a1 * b2 * c3) + (a2 * b3 * c1) + (a3 * b1 * c2) -
(a1 * b3 * c2) - (a2 * b1 * c3) - (a3 * b2 * c1));
};
/**
* Computes the determinant of this matrix.
* @returns {Number} determinant
*/
x3dom.fields.SFMatrix4f.prototype.det = function () {
var a1 = this._00;
var b1 = this._10;
var c1 = this._20;
var d1 = this._30;
var a2 = this._01;
var b2 = this._11;
var c2 = this._21;
var d2 = this._31;
var a3 = this._02;
var b3 = this._12;
var c3 = this._22;
var d3 = this._32;
var a4 = this._03;
var b4 = this._13;
var c4 = this._23;
var d4 = this._33;
return (a1 * this.det3(b2, b3, b4, c2, c3, c4, d2, d3, d4) -
b1 * this.det3(a2, a3, a4, c2, c3, c4, d2, d3, d4) +
c1 * this.det3(a2, a3, a4, b2, b3, b4, d2, d3, d4) -
d1 * this.det3(a2, a3, a4, b2, b3, b4, c2, c3, c4));
};
/**
* Computes the inverse of this matrix, given that it is not singular.
* @returns {x3dom.fields.SFMatrix4f}
*/
x3dom.fields.SFMatrix4f.prototype.inverse = function () {
var a1 = this._00;
var b1 = this._10;
var c1 = this._20;
var d1 = this._30;
var a2 = this._01;
var b2 = this._11;
var c2 = this._21;
var d2 = this._31;
var a3 = this._02;
var b3 = this._12;
var c3 = this._22;
var d3 = this._32;
var a4 = this._03;
var b4 = this._13;
var c4 = this._23;
var d4 = this._33;
var rDet = this.det();
//if (Math.abs(rDet) < 1e-30)
if (rDet == 0)
{
x3dom.debug.logWarning("Invert matrix: singular matrix, no inverse!");
return x3dom.fields.SFMatrix4f.identity();
}
rDet = 1.0 / rDet;
return new x3dom.fields.SFMatrix4f(
+this.det3(b2, b3, b4, c2, c3, c4, d2, d3, d4) * rDet,
-this.det3(a2, a3, a4, c2, c3, c4, d2, d3, d4) * rDet,
+this.det3(a2, a3, a4, b2, b3, b4, d2, d3, d4) * rDet,
-this.det3(a2, a3, a4, b2, b3, b4, c2, c3, c4) * rDet,
-this.det3(b1, b3, b4, c1, c3, c4, d1, d3, d4) * rDet,
+this.det3(a1, a3, a4, c1, c3, c4, d1, d3, d4) * rDet,
-this.det3(a1, a3, a4, b1, b3, b4, d1, d3, d4) * rDet,
+this.det3(a1, a3, a4, b1, b3, b4, c1, c3, c4) * rDet,
+this.det3(b1, b2, b4, c1, c2, c4, d1, d2, d4) * rDet,
-this.det3(a1, a2, a4, c1, c2, c4, d1, d2, d4) * rDet,
+this.det3(a1, a2, a4, b1, b2, b4, d1, d2, d4) * rDet,
-this.det3(a1, a2, a4, b1, b2, b4, c1, c2, c4) * rDet,
-this.det3(b1, b2, b3, c1, c2, c3, d1, d2, d3) * rDet,
+this.det3(a1, a2, a3, c1, c2, c3, d1, d2, d3) * rDet,
-this.det3(a1, a2, a3, b1, b2, b3, d1, d2, d3) * rDet,
+this.det3(a1, a2, a3, b1, b2, b3, c1, c2, c3) * rDet
);
};
/**
 * Returns an array of 2*3 = 6 Euler angles (in radians), assuming that this is a rotation matrix.
 * The first three and the last three values are alternative sets of the three Euler angles,
 * where each of the two cases leads to the same resulting rotation.
* @returns {Number[]}
*/
x3dom.fields.SFMatrix4f.prototype.getEulerAngles = function() {
var theta_1, theta_2, theta;
var phi_1, phi_2, phi;
var psi_1, psi_2, psi;
var cos_theta_1, cos_theta_2;
if ( Math.abs((Math.abs(this._20) - 1.0)) > 0.0001) {
theta_1 = -Math.asin(this._20);
theta_2 = Math.PI - theta_1;
cos_theta_1 = Math.cos(theta_1);
cos_theta_2 = Math.cos(theta_2);
psi_1 = Math.atan2(this._21 / cos_theta_1, this._22 / cos_theta_1);
psi_2 = Math.atan2(this._21 / cos_theta_2, this._22 / cos_theta_2);
phi_1 = Math.atan2(this._10 / cos_theta_1, this._00 / cos_theta_1);
phi_2 = Math.atan2(this._10 / cos_theta_2, this._00 / cos_theta_2);
return [psi_1, theta_1, phi_1,
psi_2, theta_2, phi_2];
}
else {
phi = 0;
if (this._20 == -1.0) {
theta = Math.PI / 2.0;
psi = phi + Math.atan2(this._01, this._02);
}
else {
theta = -(Math.PI / 2.0);
psi = -phi + Math.atan2(-this._01, -this._02);
}
return [psi, theta, phi,
psi, theta, phi];
}
};
/**
* Converts this matrix to a string representation, where all entries are separated by commas,
* and where rows are additionally separated by linebreaks.
* @returns {String}
*/
x3dom.fields.SFMatrix4f.prototype.toString = function () {
return '\n' +
this._00.toFixed(6)+', '+this._01.toFixed(6)+', '+
this._02.toFixed(6)+', '+this._03.toFixed(6)+', \n'+
this._10.toFixed(6)+', '+this._11.toFixed(6)+', '+
this._12.toFixed(6)+', '+this._13.toFixed(6)+', \n'+
this._20.toFixed(6)+', '+this._21.toFixed(6)+', '+
this._22.toFixed(6)+', '+this._23.toFixed(6)+', \n'+
this._30.toFixed(6)+', '+this._31.toFixed(6)+', '+
this._32.toFixed(6)+', '+this._33.toFixed(6);
};
/**
* Fills the values of this matrix from a string, where the entries are separated
* by commas and given in column-major order.
* @param {String} str - the string representation
*/
x3dom.fields.SFMatrix4f.prototype.setValueByStr = function(str) {
var needTranspose = false;
var val = /matrix.*\((.+)\)/;
// check if matrix is set via CSS string
if (val.exec(str)) {
str = RegExp.$1;
needTranspose = true;
}
    // use standard Array.prototype.map (Array.map generics are Firefox-only)
    var arr = str.split(/[,\s]+/).map(function (n) { return +n; });
if (arr.length >= 16)
{
if (!needTranspose) {
this._00 = arr[0]; this._01 = arr[1]; this._02 = arr[2]; this._03 = arr[3];
this._10 = arr[4]; this._11 = arr[5]; this._12 = arr[6]; this._13 = arr[7];
this._20 = arr[8]; this._21 = arr[9]; this._22 = arr[10]; this._23 = arr[11];
this._30 = arr[12]; this._31 = arr[13]; this._32 = arr[14]; this._33 = arr[15];
}
else {
this._00 = arr[0]; this._01 = arr[4]; this._02 = arr[8]; this._03 = arr[12];
this._10 = arr[1]; this._11 = arr[5]; this._12 = arr[9]; this._13 = arr[13];
this._20 = arr[2]; this._21 = arr[6]; this._22 = arr[10]; this._23 = arr[14];
this._30 = arr[3]; this._31 = arr[7]; this._32 = arr[11]; this._33 = arr[15];
}
}
else if (arr.length === 6) {
this._00 = arr[0]; this._01 = arr[1]; this._02 = 0; this._03 = arr[4];
this._10 = arr[2]; this._11 = arr[3]; this._12 = 0; this._13 = arr[5];
this._20 = 0; this._21 = 0; this._22 = 1; this._23 = 0;
this._30 = 0; this._31 = 0; this._32 = 0; this._33 = 1;
}
else {
x3dom.debug.logWarning("SFMatrix4f - can't parse string: " + str);
}
return this;
};
///////////////////////////////////////////////////////////////////////////////
/** SFVec2f constructor.
@class Represents a SFVec2f
*/
x3dom.fields.SFVec2f = function(x, y) {
if (arguments.length === 0) {
this.x = 0;
this.y = 0;
}
else {
this.x = x;
this.y = y;
}
};
x3dom.fields.SFVec2f.copy = function(v) {
return new x3dom.fields.SFVec2f(v.x, v.y);
};
x3dom.fields.SFVec2f.parse = function (str) {
var m = /^\s*([+\-]?\d*\.*\d*[eE]?[+\-]?\d*?)\s*,?\s*([+\-]?\d*\.*\d*[eE]?[+\-]?\d*?)\s*$/.exec(str);
return new x3dom.fields.SFVec2f(+m[1], +m[2]);
};
x3dom.fields.SFVec2f.prototype.copy = function() {
return x3dom.fields.SFVec2f.copy(this);
};
x3dom.fields.SFVec2f.prototype.setValues = function (that) {
this.x = that.x;
this.y = that.y;
};
x3dom.fields.SFVec2f.prototype.at = function (i) {
switch(i) {
case 0: return this.x;
case 1: return this.y;
default: return this.x;
}
};
x3dom.fields.SFVec2f.prototype.add = function (that) {
return new x3dom.fields.SFVec2f(this.x+that.x, this.y+that.y);
};
x3dom.fields.SFVec2f.prototype.subtract = function (that) {
return new x3dom.fields.SFVec2f(this.x-that.x, this.y-that.y);
};
x3dom.fields.SFVec2f.prototype.negate = function () {
return new x3dom.fields.SFVec2f(-this.x, -this.y);
};
x3dom.fields.SFVec2f.prototype.dot = function (that) {
return this.x * that.x + this.y * that.y;
};
x3dom.fields.SFVec2f.prototype.reflect = function (n) {
var d2 = this.dot(n)*2;
return new x3dom.fields.SFVec2f(this.x-d2*n.x, this.y-d2*n.y);
};
x3dom.fields.SFVec2f.prototype.normalize = function() {
var n = this.length();
if (n) { n = 1.0 / n; }
return new x3dom.fields.SFVec2f(this.x*n, this.y*n);
};
x3dom.fields.SFVec2f.prototype.multComponents = function (that) {
return new x3dom.fields.SFVec2f(this.x*that.x, this.y*that.y);
};
x3dom.fields.SFVec2f.prototype.multiply = function (n) {
return new x3dom.fields.SFVec2f(this.x*n, this.y*n);
};
x3dom.fields.SFVec2f.prototype.divide = function (n) {
var denom = n ? (1.0 / n) : 1.0;
return new x3dom.fields.SFVec2f(this.x*denom, this.y*denom);
};
x3dom.fields.SFVec2f.prototype.equals = function (that, eps) {
return Math.abs(this.x - that.x) < eps &&
Math.abs(this.y - that.y) < eps;
};
x3dom.fields.SFVec2f.prototype.length = function() {
return Math.sqrt((this.x*this.x) + (this.y*this.y));
};
x3dom.fields.SFVec2f.prototype.toGL = function () {
return [ this.x, this.y ];
};
x3dom.fields.SFVec2f.prototype.toString = function () {
return this.x + " " + this.y;
};
x3dom.fields.SFVec2f.prototype.setValueByStr = function(str) {
var m = /^\s*([+\-]?\d*\.*\d*[eE]?[+\-]?\d*?)\s*,?\s*([+\-]?\d*\.*\d*[eE]?[+\-]?\d*?)\s*$/.exec(str);
this.x = +m[1];
this.y = +m[2];
return this;
};
///////////////////////////////////////////////////////////////////////////////
/** SFVec3f constructor.
@class Represents a SFVec3f
*/
x3dom.fields.SFVec3f = function(x, y, z) {
if (arguments.length === 0) {
this.x = 0;
this.y = 0;
this.z = 0;
}
else {
this.x = x;
this.y = y;
this.z = z;
}
};
x3dom.fields.SFVec3f.NullVector = new x3dom.fields.SFVec3f(0, 0, 0);
x3dom.fields.SFVec3f.OneVector = new x3dom.fields.SFVec3f(1, 1, 1);
x3dom.fields.SFVec3f.copy = function(v) {
return new x3dom.fields.SFVec3f(v.x, v.y, v.z);
};
x3dom.fields.SFVec3f.MIN = function() {
return new x3dom.fields.SFVec3f(-Number.MAX_VALUE, -Number.MAX_VALUE, -Number.MAX_VALUE);
};
x3dom.fields.SFVec3f.MAX = function() {
return new x3dom.fields.SFVec3f(Number.MAX_VALUE, Number.MAX_VALUE, Number.MAX_VALUE);
};
x3dom.fields.SFVec3f.parse = function (str) {
try {
var m = /^\s*([+\-]?\d*\.*\d*[eE]?[+\-]?\d*?)\s*,?\s*([+\-]?\d*\.*\d*[eE]?[+\-]?\d*?)\s*,?\s*([+\-]?\d*\.*\d*[eE]?[+\-]?\d*?)\s*$/.exec(str);
return new x3dom.fields.SFVec3f(+m[1], +m[2], +m[3]);
}
catch (e) {
// allow automatic type conversion as is convenient for shaders
var c = x3dom.fields.SFColor.colorParse(str);
return new x3dom.fields.SFVec3f(c.r, c.g, c.b);
}
};
x3dom.fields.SFVec3f.prototype.copy = function() {
return x3dom.fields.SFVec3f.copy(this);
};
x3dom.fields.SFVec3f.prototype.setValues = function (that) {
this.x = that.x;
this.y = that.y;
this.z = that.z;
};
x3dom.fields.SFVec3f.prototype.at = function (i) {
switch(i) {
case 0: return this.x;
case 1: return this.y;
case 2: return this.z;
default: return this.x;
}
};
x3dom.fields.SFVec3f.prototype.add = function (that) {
return new x3dom.fields.SFVec3f(this.x + that.x, this.y + that.y, this.z + that.z);
};
x3dom.fields.SFVec3f.prototype.addScaled = function (that, s) {
return new x3dom.fields.SFVec3f(this.x + s*that.x, this.y + s*that.y, this.z + s*that.z);
};
x3dom.fields.SFVec3f.prototype.subtract = function (that) {
return new x3dom.fields.SFVec3f(this.x - that.x, this.y - that.y, this.z - that.z);
};
x3dom.fields.SFVec3f.prototype.negate = function () {
return new x3dom.fields.SFVec3f(-this.x, -this.y, -this.z);
};
x3dom.fields.SFVec3f.prototype.dot = function (that) {
return (this.x*that.x + this.y*that.y + this.z*that.z);
};
x3dom.fields.SFVec3f.prototype.cross = function (that) {
return new x3dom.fields.SFVec3f( this.y*that.z - this.z*that.y,
this.z*that.x - this.x*that.z,
this.x*that.y - this.y*that.x );
};
x3dom.fields.SFVec3f.prototype.reflect = function (n) {
var d2 = this.dot(n)*2;
return new x3dom.fields.SFVec3f(this.x - d2*n.x, this.y - d2*n.y, this.z - d2*n.z);
};
x3dom.fields.SFVec3f.prototype.length = function() {
return Math.sqrt((this.x*this.x) + (this.y*this.y) + (this.z*this.z));
};
x3dom.fields.SFVec3f.prototype.normalize = function() {
var n = this.length();
if (n) { n = 1.0 / n; }
return new x3dom.fields.SFVec3f(this.x*n, this.y*n, this.z*n);
};
x3dom.fields.SFVec3f.prototype.multComponents = function (that) {
return new x3dom.fields.SFVec3f(this.x*that.x, this.y*that.y, this.z*that.z);
};
x3dom.fields.SFVec3f.prototype.multiply = function (n) {
return new x3dom.fields.SFVec3f(this.x*n, this.y*n, this.z*n);
};
x3dom.fields.SFVec3f.prototype.divide = function (n) {
var denom = n ? (1.0 / n) : 1.0;
return new x3dom.fields.SFVec3f(this.x*denom, this.y*denom, this.z*denom);
};
x3dom.fields.SFVec3f.prototype.equals = function (that, eps) {
return Math.abs(this.x - that.x) < eps &&
Math.abs(this.y - that.y) < eps &&
Math.abs(this.z - that.z) < eps;
};
x3dom.fields.SFVec3f.prototype.toGL = function () {
return [ this.x, this.y, this.z ];
};
x3dom.fields.SFVec3f.prototype.toString = function () {
return this.x + " " + this.y + " " + this.z;
};
x3dom.fields.SFVec3f.prototype.setValueByStr = function(str) {
try {
var m = /^\s*([+\-]?\d*\.*\d*[eE]?[+\-]?\d*?)\s*,?\s*([+\-]?\d*\.*\d*[eE]?[+\-]?\d*?)\s*,?\s*([+\-]?\d*\.*\d*[eE]?[+\-]?\d*?)\s*$/.exec(str);
this.x = +m[1];
this.y = +m[2];
this.z = +m[3];
}
catch (e) {
// allow automatic type conversion as is convenient for shaders
var c = x3dom.fields.SFColor.colorParse(str);
this.x = c.r;
this.y = c.g;
this.z = c.b;
}
return this;
};
///////////////////////////////////////////////////////////////////////////////
/** SFVec4f constructor.
@class Represents a SFVec4f
*/
x3dom.fields.SFVec4f = function(x, y, z, w) {
if (arguments.length === 0) {
this.x = 0;
this.y = 0;
this.z = 0;
this.w = 0;
}
else {
this.x = x;
this.y = y;
this.z = z;
this.w = w;
}
};
x3dom.fields.SFVec4f.copy = function(v) {
return new x3dom.fields.SFVec4f(v.x, v.y, v.z, v.w);
};
x3dom.fields.SFVec4f.prototype.copy = function() {
    return x3dom.fields.SFVec4f.copy(this);
};
x3dom.fields.SFVec4f.parse = function (str) {
var m = /^\s*([+\-]?\d*\.*\d*[eE]?[+\-]?\d*?)\s*,?\s*([+\-]?\d*\.*\d*[eE]?[+\-]?\d*?)\s*,?\s*([+\-]?\d*\.*\d*[eE]?[+\-]?\d*?)\s*,?\s*([+\-]?\d*\.*\d*[eE]?[+\-]?\d*?)\s*$/.exec(str);
return new x3dom.fields.SFVec4f(+m[1], +m[2], +m[3], +m[4]);
};
x3dom.fields.SFVec4f.prototype.setValueByStr = function(str) {
var m = /^\s*([+\-]?\d*\.*\d*[eE]?[+\-]?\d*?)\s*,?\s*([+\-]?\d*\.*\d*[eE]?[+\-]?\d*?)\s*,?\s*([+\-]?\d*\.*\d*[eE]?[+\-]?\d*?)\s*,?\s*([+\-]?\d*\.*\d*[eE]?[+\-]?\d*?)\s*$/.exec(str);
this.x = +m[1];
this.y = +m[2];
this.z = +m[3];
this.w = +m[4];
return this;
};
x3dom.fields.SFVec4f.prototype.toGL = function () {
return [ this.x, this.y, this.z, this.w ];
};
x3dom.fields.SFVec4f.prototype.toString = function () {
return this.x + " " + this.y + " " + this.z + " " + this.w;
};
///////////////////////////////////////////////////////////////////////////////
/** Quaternion constructor.
@class Represents a Quaternion
*/
x3dom.fields.Quaternion = function(x, y, z, w) {
if (arguments.length === 0) {
this.x = 0;
this.y = 0;
this.z = 0;
this.w = 1;
}
else {
this.x = x;
this.y = y;
this.z = z;
this.w = w;
}
};
x3dom.fields.Quaternion.copy = function(v) {
return new x3dom.fields.Quaternion(v.x, v.y, v.z, v.w);
};
x3dom.fields.Quaternion.prototype.multiply = function (that) {
return new x3dom.fields.Quaternion(
this.w*that.x + this.x*that.w + this.y*that.z - this.z*that.y,
this.w*that.y + this.y*that.w + this.z*that.x - this.x*that.z,
this.w*that.z + this.z*that.w + this.x*that.y - this.y*that.x,
this.w*that.w - this.x*that.x - this.y*that.y - this.z*that.z
);
};
x3dom.fields.Quaternion.parseAxisAngle = function (str) {
var m = /^\s*([+\-]?\d*\.*\d*[eE]?[+\-]?\d*?)\s*,?\s*([+\-]?\d*\.*\d*[eE]?[+\-]?\d*?)\s*,?\s*([+\-]?\d*\.*\d*[eE]?[+\-]?\d*?)\s*,?\s*([+\-]?\d*\.*\d*[eE]?[+\-]?\d*?)\s*$/.exec(str);
return x3dom.fields.Quaternion.axisAngle(new x3dom.fields.SFVec3f(+m[1], +m[2], +m[3]), +m[4]);
};
x3dom.fields.Quaternion.axisAngle = function (axis, a) {
var t = axis.length();
if (t > x3dom.fields.Eps)
{
var s = Math.sin(a/2) / t;
var c = Math.cos(a/2);
return new x3dom.fields.Quaternion(axis.x*s, axis.y*s, axis.z*s, c);
}
else
{
return new x3dom.fields.Quaternion(0, 0, 0, 1);
}
};
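// Usage sketch (illustrative only, not part of the library): a quarter
// turn around the y-axis.
//   var q = x3dom.fields.Quaternion.axisAngle(
//       new x3dom.fields.SFVec3f(0, 1, 0), Math.PI / 2);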
x3dom.fields.Quaternion.prototype.copy = function() {
return x3dom.fields.Quaternion.copy(this);
};
x3dom.fields.Quaternion.prototype.toMatrix = function () {
var xx = this.x * this.x;
var xy = this.x * this.y;
var xz = this.x * this.z;
var yy = this.y * this.y;
var yz = this.y * this.z;
var zz = this.z * this.z;
var wx = this.w * this.x;
var wy = this.w * this.y;
var wz = this.w * this.z;
return new x3dom.fields.SFMatrix4f(
1 - 2 * (yy + zz), 2 * (xy - wz), 2 * (xz + wy), 0,
2 * (xy + wz), 1 - 2 * (xx + zz), 2 * (yz - wx), 0,
2 * (xz - wy), 2 * (yz + wx), 1 - 2 * (xx + yy), 0,
0, 0, 0, 1
);
};
x3dom.fields.Quaternion.prototype.toAxisAngle = function()
{
var x = 0, y = 0, z = 0;
var s = 0, a = 0;
var that = this;
if ( this.w > 1 )
{
        // Quaternion.normalize is a prototype method, not a static one
        that = this.normalize( this );
}
a = 2 * Math.acos( that.w );
s = Math.sqrt( 1 - that.w * that.w );
    if ( s === 0 ) // could also test s < x3dom.fields.Eps
{
x = that.x;
y = that.y;
z = that.z;
}
else
{
x = that.x / s;
y = that.y / s;
z = that.z / s;
}
return [ new x3dom.fields.SFVec3f(x,y,z), a ];
};
x3dom.fields.Quaternion.prototype.angle = function()
{
return 2 * Math.acos(this.w);
};
x3dom.fields.Quaternion.prototype.setValue = function(matrix)
{
var tr, s = 1;
var qt = [0, 0, 0];
var i = 0, j = 0, k = 0;
var nxt = [1, 2, 0];
tr = matrix._00 + matrix._11 + matrix._22;
if (tr > 0.0)
{
s = Math.sqrt(tr + 1.0);
this.w = s * 0.5;
s = 0.5 / s;
this.x = (matrix._21 - matrix._12) * s;
this.y = (matrix._02 - matrix._20) * s;
this.z = (matrix._10 - matrix._01) * s;
}
else
{
if (matrix._11 > matrix._00) {
i = 1;
}
else {
i = 0;
}
if (matrix._22 > matrix.at(i, i)) {
i = 2;
}
j = nxt[i];
k = nxt[j];
s = Math.sqrt(matrix.at(i, i) - (matrix.at(j, j) + matrix.at(k, k)) + 1.0);
qt[i] = s * 0.5;
s = 0.5 / s;
this.w = (matrix.at(k, j) - matrix.at(j, k)) * s;
qt[j] = (matrix.at(j, i) + matrix.at(i, j)) * s;
qt[k] = (matrix.at(k, i) + matrix.at(i, k)) * s;
this.x = qt[0];
this.y = qt[1];
this.z = qt[2];
}
if (this.w > 1.0 || this.w < -1.0)
{
var errThreshold = 1 + (x3dom.fields.Eps * 100);
if (this.w > errThreshold || this.w < -errThreshold)
{
            // numerical noise can push |w| noticeably beyond 1; warn for
            // gross errors (ported from OpenSG, incl. the famous MatToQuat bug)
x3dom.debug.logInfo("MatToQuat: BUG: |quat[4]| (" + this.w +") >> 1.0 !");
}
if (this.w > 1.0) {
this.w = 1.0;
}
else {
this.w = -1.0;
}
}
};
x3dom.fields.Quaternion.prototype.setFromEuler = function (alpha, beta, gamma) {
var sx = Math.sin(alpha * 0.5);
var cx = Math.cos(alpha * 0.5);
var sy = Math.sin(beta * 0.5);
var cy = Math.cos(beta * 0.5);
var sz = Math.sin(gamma * 0.5);
var cz = Math.cos(gamma * 0.5);
this.x = (sx * cy * cz) - (cx * sy * sz);
this.y = (cx * sy * cz) + (sx * cy * sz);
this.z = (cx * cy * sz) - (sx * sy * cz);
this.w = (cx * cy * cz) + (sx * sy * sz);
};
x3dom.fields.Quaternion.prototype.dot = function (that) {
return this.x*that.x + this.y*that.y + this.z*that.z + this.w*that.w;
};
x3dom.fields.Quaternion.prototype.add = function (that) {
return new x3dom.fields.Quaternion(this.x + that.x, this.y + that.y, this.z + that.z, this.w + that.w);
};
x3dom.fields.Quaternion.prototype.subtract = function (that) {
return new x3dom.fields.Quaternion(this.x - that.x, this.y - that.y, this.z - that.z, this.w - that.w);
};
x3dom.fields.Quaternion.prototype.setValues = function (that) {
this.x = that.x;
this.y = that.y;
this.z = that.z;
this.w = that.w;
};
x3dom.fields.Quaternion.prototype.equals = function (that, eps) {
return (this.dot(that) >= 1.0 - eps);
};
x3dom.fields.Quaternion.prototype.multScalar = function (s) {
return new x3dom.fields.Quaternion(this.x*s, this.y*s, this.z*s, this.w*s);
};
x3dom.fields.Quaternion.prototype.normalize = function (that) {
    if (that === undefined) { that = this; } // normalize against itself by default
    var d2 = this.dot(that);
var id = 1.0;
if (d2) { id = 1.0 / Math.sqrt(d2); }
return new x3dom.fields.Quaternion(this.x*id, this.y*id, this.z*id, this.w*id);
};
x3dom.fields.Quaternion.prototype.negate = function() {
return new x3dom.fields.Quaternion(-this.x, -this.y, -this.z, -this.w);
};
x3dom.fields.Quaternion.prototype.inverse = function () {
return new x3dom.fields.Quaternion(-this.x, -this.y, -this.z, this.w);
};
x3dom.fields.Quaternion.prototype.slerp = function (that, t) {
// calculate the cosine
var cosom = this.dot(that);
var rot1;
// adjust signs if necessary
if (cosom < 0.0)
{
cosom = -cosom;
rot1 = that.negate();
}
else
{
rot1 = new x3dom.fields.Quaternion(that.x, that.y, that.z, that.w);
}
// calculate interpolating coeffs
var scalerot0, scalerot1;
if ((1.0 - cosom) > 0.00001)
{
// standard case
var omega = Math.acos(cosom);
var sinom = Math.sin(omega);
scalerot0 = Math.sin((1.0 - t) * omega) / sinom;
scalerot1 = Math.sin(t * omega) / sinom;
}
else
{
// rot0 and rot1 very close - just do linear interp.
scalerot0 = 1.0 - t;
scalerot1 = t;
}
// build the new quaternion
return this.multScalar(scalerot0).add(rot1.multScalar(scalerot1));
};
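// Usage sketch (illustrative only, not part of the library; q0 and q1 are
// example unit quaternions): interpolate halfway between two orientations.
//   var qMid = q0.slerp(q1, 0.5);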
x3dom.fields.Quaternion.rotateFromTo = function (fromVec, toVec) {
var from = fromVec.normalize();
var to = toVec.normalize();
var cost = from.dot(to);
// check for degeneracies
if (cost > 0.99999)
{
// vectors are parallel
return new x3dom.fields.Quaternion(0, 0, 0, 1);
}
else if (cost < -0.99999)
{
// vectors are opposite
// find an axis to rotate around, which should be
// perpendicular to the original axis
// Try cross product with (1,0,0) first, if that's one of our
// original vectors then try (0,1,0).
var cAxis = new x3dom.fields.SFVec3f(1, 0, 0);
var tmp = from.cross(cAxis);
if (tmp.length() < 0.00001)
{
cAxis.x = 0;
cAxis.y = 1;
cAxis.z = 0;
tmp = from.cross(cAxis);
}
tmp = tmp.normalize();
return x3dom.fields.Quaternion.axisAngle(tmp, Math.PI);
}
var axis = fromVec.cross(toVec);
axis = axis.normalize();
// use half-angle formulae
// sin^2 t = ( 1 - cos (2t) ) / 2
var s = Math.sqrt(0.5 * (1.0 - cost));
axis = axis.multiply(s);
// scale the axis by the sine of half the rotation angle to get
// the normalized quaternion
// cos^2 t = ( 1 + cos (2t) ) / 2
// w part is cosine of half the rotation angle
s = Math.sqrt(0.5 * (1.0 + cost));
return new x3dom.fields.Quaternion(axis.x, axis.y, axis.z, s);
};
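// Usage sketch (illustrative only, not part of the library): the rotation
// that maps the x-axis onto the y-axis.
//   var q = x3dom.fields.Quaternion.rotateFromTo(
//       new x3dom.fields.SFVec3f(1, 0, 0),
//       new x3dom.fields.SFVec3f(0, 1, 0));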
x3dom.fields.Quaternion.prototype.toGL = function () {
var val = this.toAxisAngle();
return [ val[0].x, val[0].y, val[0].z, val[1] ];
};
x3dom.fields.Quaternion.prototype.toString = function () {
return this.x + " " + this.y + " " + this.z + ", " + this.w;
};
x3dom.fields.Quaternion.prototype.setValueByStr = function(str) {
var m = /^\s*([+\-]?\d*\.*\d*[eE]?[+\-]?\d*?)\s*,?\s*([+\-]?\d*\.*\d*[eE]?[+\-]?\d*?)\s*,?\s*([+\-]?\d*\.*\d*[eE]?[+\-]?\d*?)\s*,?\s*([+\-]?\d*\.*\d*[eE]?[+\-]?\d*?)\s*$/.exec(str);
var quat = x3dom.fields.Quaternion.axisAngle(new x3dom.fields.SFVec3f(+m[1], +m[2], +m[3]), +m[4]);
this.x = quat.x;
this.y = quat.y;
this.z = quat.z;
this.w = quat.w;
return this;
};
///////////////////////////////////////////////////////////////////////////////
/** SFColor constructor.
@class Represents a SFColor
*/
x3dom.fields.SFColor = function(r, g, b) {
if (arguments.length === 0) {
this.r = 0;
this.g = 0;
this.b = 0;
}
else {
this.r = r;
this.g = g;
this.b = b;
}
};
x3dom.fields.SFColor.parse = function(str) {
try {
var m = /^\s*([+\-]?\d*\.*\d*[eE]?[+\-]?\d*?)\s*,?\s*([+\-]?\d*\.*\d*[eE]?[+\-]?\d*?)\s*,?\s*([+\-]?\d*\.*\d*[eE]?[+\-]?\d*?)\s*$/.exec(str);
return new x3dom.fields.SFColor( +m[1], +m[2], +m[3] );
}
catch (e) {
return x3dom.fields.SFColor.colorParse(str);
}
};
x3dom.fields.SFColor.copy = function(that) {
return new x3dom.fields.SFColor(that.r, that.g, that.b);
};
x3dom.fields.SFColor.prototype.copy = function() {
return x3dom.fields.SFColor.copy(this);
};
x3dom.fields.SFColor.prototype.setHSV = function (h, s, v) {
x3dom.debug.logWarning("SFColor.setHSV() NYI");
};
x3dom.fields.SFColor.prototype.getHSV = function () {
var h = 0, s = 0, v = 0;
x3dom.debug.logWarning("SFColor.getHSV() NYI");
return [ h, s, v ];
};
x3dom.fields.SFColor.prototype.setValues = function (color) {
this.r = color.r;
this.g = color.g;
this.b = color.b;
};
x3dom.fields.SFColor.prototype.equals = function (that, eps) {
return Math.abs(this.r - that.r) < eps &&
Math.abs(this.g - that.g) < eps &&
Math.abs(this.b - that.b) < eps;
};
x3dom.fields.SFColor.prototype.add = function (that) {
return new x3dom.fields.SFColor(this.r + that.r, this.g + that.g, this.b + that.b);
};
x3dom.fields.SFColor.prototype.subtract = function (that) {
return new x3dom.fields.SFColor(this.r - that.r, this.g - that.g, this.b - that.b);
};
x3dom.fields.SFColor.prototype.multiply = function (n) {
return new x3dom.fields.SFColor(this.r*n, this.g*n, this.b*n);
};
x3dom.fields.SFColor.prototype.toGL = function () {
return [ this.r, this.g, this.b ];
};
x3dom.fields.SFColor.prototype.toString = function() {
return this.r + " " + this.g + " " + this.b;
};
x3dom.fields.SFColor.prototype.setValueByStr = function(str) {
try {
var m = /^\s*([+\-]?\d*\.*\d*[eE]?[+\-]?\d*?)\s*,?\s*([+\-]?\d*\.*\d*[eE]?[+\-]?\d*?)\s*,?\s*([+\-]?\d*\.*\d*[eE]?[+\-]?\d*?)\s*$/.exec(str);
this.r = +m[1];
this.g = +m[2];
this.b = +m[3];
}
catch (e) {
var c = x3dom.fields.SFColor.colorParse(str);
this.r = c.r;
this.g = c.g;
this.b = c.b;
}
return this;
};
x3dom.fields.SFColor.colorParse = function(color) {
var red = 0, green = 0, blue = 0;
// definition of css color names
var color_names = {
aliceblue: 'f0f8ff', antiquewhite: 'faebd7', aqua: '00ffff',
aquamarine: '7fffd4', azure: 'f0ffff', beige: 'f5f5dc',
bisque: 'ffe4c4', black: '000000', blanchedalmond: 'ffebcd',
blue: '0000ff', blueviolet: '8a2be2', brown: 'a52a2a',
burlywood: 'deb887', cadetblue: '5f9ea0', chartreuse: '7fff00',
chocolate: 'd2691e', coral: 'ff7f50', cornflowerblue: '6495ed',
cornsilk: 'fff8dc', crimson: 'dc143c', cyan: '00ffff',
darkblue: '00008b', darkcyan: '008b8b', darkgoldenrod: 'b8860b',
darkgray: 'a9a9a9', darkgreen: '006400', darkkhaki: 'bdb76b',
darkmagenta: '8b008b', darkolivegreen: '556b2f',darkorange: 'ff8c00',
darkorchid: '9932cc', darkred: '8b0000', darksalmon: 'e9967a',
darkseagreen: '8fbc8f', darkslateblue: '483d8b',darkslategray: '2f4f4f',
darkturquoise: '00ced1',darkviolet: '9400d3', deeppink: 'ff1493',
deepskyblue: '00bfff', dimgray: '696969', dodgerblue: '1e90ff',
feldspar: 'd19275', firebrick: 'b22222', floralwhite: 'fffaf0',
forestgreen: '228b22', fuchsia: 'ff00ff', gainsboro: 'dcdcdc',
ghostwhite: 'f8f8ff', gold: 'ffd700', goldenrod: 'daa520',
gray: '808080', green: '008000', greenyellow: 'adff2f',
honeydew: 'f0fff0', hotpink: 'ff69b4', indianred : 'cd5c5c',
indigo : '4b0082', ivory: 'fffff0', khaki: 'f0e68c',
lavender: 'e6e6fa', lavenderblush: 'fff0f5',lawngreen: '7cfc00',
lemonchiffon: 'fffacd', lightblue: 'add8e6', lightcoral: 'f08080',
lightcyan: 'e0ffff', lightgoldenrodyellow: 'fafad2', lightgrey: 'd3d3d3',
lightgreen: '90ee90', lightpink: 'ffb6c1', lightsalmon: 'ffa07a',
lightseagreen: '20b2aa',lightskyblue: '87cefa', lightslateblue: '8470ff',
lightslategray: '778899',lightsteelblue: 'b0c4de',lightyellow: 'ffffe0',
lime: '00ff00', limegreen: '32cd32', linen: 'faf0e6',
magenta: 'ff00ff', maroon: '800000', mediumaquamarine: '66cdaa',
mediumblue: '0000cd', mediumorchid: 'ba55d3', mediumpurple: '9370d8',
mediumseagreen: '3cb371',mediumslateblue: '7b68ee', mediumspringgreen: '00fa9a',
mediumturquoise: '48d1cc',mediumvioletred: 'c71585',midnightblue: '191970',
mintcream: 'f5fffa', mistyrose: 'ffe4e1', moccasin: 'ffe4b5',
navajowhite: 'ffdead', navy: '000080', oldlace: 'fdf5e6',
olive: '808000', olivedrab: '6b8e23', orange: 'ffa500',
orangered: 'ff4500', orchid: 'da70d6', palegoldenrod: 'eee8aa',
palegreen: '98fb98', paleturquoise: 'afeeee',palevioletred: 'd87093',
papayawhip: 'ffefd5', peachpuff: 'ffdab9', peru: 'cd853f',
pink: 'ffc0cb', plum: 'dda0dd', powderblue: 'b0e0e6',
purple: '800080', red: 'ff0000', rosybrown: 'bc8f8f',
royalblue: '4169e1', saddlebrown: '8b4513', salmon: 'fa8072',
sandybrown: 'f4a460', seagreen: '2e8b57', seashell: 'fff5ee',
sienna: 'a0522d', silver: 'c0c0c0', skyblue: '87ceeb',
slateblue: '6a5acd', slategray: '708090', snow: 'fffafa',
springgreen: '00ff7f', steelblue: '4682b4', tan: 'd2b48c',
teal: '008080', thistle: 'd8bfd8', tomato: 'ff6347',
turquoise: '40e0d0', violet: 'ee82ee', violetred: 'd02090',
wheat: 'f5deb3', white: 'ffffff', whitesmoke: 'f5f5f5',
yellow: 'ffff00', yellowgreen: '9acd32'
};
if (color_names[color]) {
// first check if color is given as colorname
color = "#" + color_names[color];
}
if (color.substr && color.substr(0,1) === "#") {
color = color.substr(1);
var len = color.length;
if (len === 6) {
red = parseInt("0x"+color.substr(0,2), 16) / 255.0;
green = parseInt("0x"+color.substr(2,2), 16) / 255.0;
blue = parseInt("0x"+color.substr(4,2), 16) / 255.0;
}
else if (len === 3) {
red = parseInt("0x"+color.substr(0,1), 16) / 15.0;
green = parseInt("0x"+color.substr(1,1), 16) / 15.0;
blue = parseInt("0x"+color.substr(2,1), 16) / 15.0;
}
}
return new x3dom.fields.SFColor( red, green, blue );
};
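// Usage sketch (illustrative only, not part of the library): CSS color
// names and hex strings both yield normalized [0, 1] channel values.
//   var c1 = x3dom.fields.SFColor.colorParse("hotpink");
//   var c2 = x3dom.fields.SFColor.colorParse("#ff69b4"); // same color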
///////////////////////////////////////////////////////////////////////////////
/** SFColorRGBA constructor.
@class Represents a SFColorRGBA
*/
x3dom.fields.SFColorRGBA = function(r, g, b, a) {
if (arguments.length === 0) {
this.r = 0;
this.g = 0;
this.b = 0;
this.a = 1;
}
else {
this.r = r;
this.g = g;
this.b = b;
this.a = a;
}
};
x3dom.fields.SFColorRGBA.parse = function(str) {
try {
var m = /^([+\-]?\d*\.*\d*[eE]?[+\-]?\d*?)\s*,?\s*([+\-]?\d*\.*\d*[eE]?[+\-]?\d*?)\s*,?\s*([+\-]?\d*\.*\d*[eE]?[+\-]?\d*?)\s*,?\s*([+\-]?\d*\.*\d*[eE]?[+\-]?\d*?)$/.exec(str);
return new x3dom.fields.SFColorRGBA( +m[1], +m[2], +m[3], +m[4] );
}
catch (e) {
return x3dom.fields.SFColorRGBA.colorParse(str);
}
};
x3dom.fields.SFColorRGBA.copy = function(that) {
return new x3dom.fields.SFColorRGBA(that.r, that.g, that.b, that.a);
};
x3dom.fields.SFColorRGBA.prototype.copy = function() {
return x3dom.fields.SFColorRGBA.copy(this);
};
x3dom.fields.SFColorRGBA.prototype.setValues = function (color) {
this.r = color.r;
this.g = color.g;
this.b = color.b;
this.a = color.a;
};
x3dom.fields.SFColorRGBA.prototype.equals = function (that, eps) {
return Math.abs(this.r - that.r) < eps &&
Math.abs(this.g - that.g) < eps &&
Math.abs(this.b - that.b) < eps &&
Math.abs(this.a - that.a) < eps;
};
x3dom.fields.SFColorRGBA.prototype.toGL = function () {
return [ this.r, this.g, this.b, this.a ];
};
x3dom.fields.SFColorRGBA.prototype.toString = function() {
return this.r + " " + this.g + " " + this.b + " " + this.a;
};
x3dom.fields.SFColorRGBA.prototype.setValueByStr = function(str) {
try {
var m = /^([+\-]?\d*\.*\d*[eE]?[+\-]?\d*?)\s*,?\s*([+\-]?\d*\.*\d*[eE]?[+\-]?\d*?)\s*,?\s*([+\-]?\d*\.*\d*[eE]?[+\-]?\d*?)\s*,?\s*([+\-]?\d*\.*\d*[eE]?[+\-]?\d*?)$/.exec(str);
this.r = +m[1];
this.g = +m[2];
this.b = +m[3];
this.a = +m[4];
}
catch (e) {
var c = x3dom.fields.SFColorRGBA.colorParse(str);
this.r = c.r;
this.g = c.g;
this.b = c.b;
this.a = c.a;
}
return this;
};
x3dom.fields.SFColorRGBA.prototype.toUint = function() {
return ((Math.round(this.r * 255) << 24) |
(Math.round(this.g * 255) << 16) |
(Math.round(this.b * 255) << 8) |
Math.round(this.a * 255)) >>> 0;
};
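// Usage sketch (illustrative only, not part of the library): pack an
// opaque red into a single unsigned 32-bit RGBA value.
//   var rgba = new x3dom.fields.SFColorRGBA(1, 0, 0, 1).toUint(); // 0xFF0000FF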
x3dom.fields.SFColorRGBA.colorParse = function(color) {
var red = 0, green = 0, blue = 0, alpha = 0;
// definition of css color names
var color_names = {
aliceblue: 'f0f8ff', antiquewhite: 'faebd7', aqua: '00ffff',
aquamarine: '7fffd4', azure: 'f0ffff', beige: 'f5f5dc',
bisque: 'ffe4c4', black: '000000', blanchedalmond: 'ffebcd',
blue: '0000ff', blueviolet: '8a2be2', brown: 'a52a2a',
burlywood: 'deb887', cadetblue: '5f9ea0', chartreuse: '7fff00',
chocolate: 'd2691e', coral: 'ff7f50', cornflowerblue: '6495ed',
cornsilk: 'fff8dc', crimson: 'dc143c', cyan: '00ffff',
darkblue: '00008b', darkcyan: '008b8b', darkgoldenrod: 'b8860b',
darkgray: 'a9a9a9', darkgreen: '006400', darkkhaki: 'bdb76b',
darkmagenta: '8b008b', darkolivegreen: '556b2f',darkorange: 'ff8c00',
darkorchid: '9932cc', darkred: '8b0000', darksalmon: 'e9967a',
darkseagreen: '8fbc8f', darkslateblue: '483d8b',darkslategray: '2f4f4f',
darkturquoise: '00ced1',darkviolet: '9400d3', deeppink: 'ff1493',
deepskyblue: '00bfff', dimgray: '696969', dodgerblue: '1e90ff',
feldspar: 'd19275', firebrick: 'b22222', floralwhite: 'fffaf0',
forestgreen: '228b22', fuchsia: 'ff00ff', gainsboro: 'dcdcdc',
ghostwhite: 'f8f8ff', gold: 'ffd700', goldenrod: 'daa520',
gray: '808080', green: '008000', greenyellow: 'adff2f',
honeydew: 'f0fff0', hotpink: 'ff69b4', indianred : 'cd5c5c',
indigo : '4b0082', ivory: 'fffff0', khaki: 'f0e68c',
lavender: 'e6e6fa', lavenderblush: 'fff0f5',lawngreen: '7cfc00',
lemonchiffon: 'fffacd', lightblue: 'add8e6', lightcoral: 'f08080',
lightcyan: 'e0ffff', lightgoldenrodyellow: 'fafad2', lightgrey: 'd3d3d3',
lightgreen: '90ee90', lightpink: 'ffb6c1', lightsalmon: 'ffa07a',
lightseagreen: '20b2aa',lightskyblue: '87cefa', lightslateblue: '8470ff',
lightslategray: '778899',lightsteelblue: 'b0c4de',lightyellow: 'ffffe0',
lime: '00ff00', limegreen: '32cd32', linen: 'faf0e6',
magenta: 'ff00ff', maroon: '800000', mediumaquamarine: '66cdaa',
mediumblue: '0000cd', mediumorchid: 'ba55d3', mediumpurple: '9370d8',
mediumseagreen: '3cb371',mediumslateblue: '7b68ee', mediumspringgreen: '00fa9a',
mediumturquoise: '48d1cc',mediumvioletred: 'c71585',midnightblue: '191970',
mintcream: 'f5fffa', mistyrose: 'ffe4e1', moccasin: 'ffe4b5',
navajowhite: 'ffdead', navy: '000080', oldlace: 'fdf5e6',
olive: '808000', olivedrab: '6b8e23', orange: 'ffa500',
orangered: 'ff4500', orchid: 'da70d6', palegoldenrod: 'eee8aa',
palegreen: '98fb98', paleturquoise: 'afeeee',palevioletred: 'd87093',
papayawhip: 'ffefd5', peachpuff: 'ffdab9', peru: 'cd853f',
pink: 'ffc0cb', plum: 'dda0dd', powderblue: 'b0e0e6',
purple: '800080', red: 'ff0000', rosybrown: 'bc8f8f',
royalblue: '4169e1', saddlebrown: '8b4513', salmon: 'fa8072',
sandybrown: 'f4a460', seagreen: '2e8b57', seashell: 'fff5ee',
sienna: 'a0522d', silver: 'c0c0c0', skyblue: '87ceeb',
slateblue: '6a5acd', slategray: '708090', snow: 'fffafa',
springgreen: '00ff7f', steelblue: '4682b4', tan: 'd2b48c',
teal: '008080', thistle: 'd8bfd8', tomato: 'ff6347',
turquoise: '40e0d0', violet: 'ee82ee', violetred: 'd02090',
wheat: 'f5deb3', white: 'ffffff', whitesmoke: 'f5f5f5',
yellow: 'ffff00', yellowgreen: '9acd32'
};
if (color_names[color]) {
// first check if color is given as colorname
color = "#" + color_names[color] + "ff";
}
if (color.substr && color.substr(0,1) === "#") {
color = color.substr(1);
var len = color.length;
if (len === 8) {
red = parseInt("0x"+color.substr(0,2), 16) / 255.0;
green = parseInt("0x"+color.substr(2,2), 16) / 255.0;
blue = parseInt("0x"+color.substr(4,2), 16) / 255.0;
alpha = parseInt("0x"+color.substr(6,2), 16) / 255.0;
}
else if (len === 6) {
red = parseInt("0x"+color.substr(0,2), 16) / 255.0;
green = parseInt("0x"+color.substr(2,2), 16) / 255.0;
blue = parseInt("0x"+color.substr(4,2), 16) / 255.0;
alpha = 1.0;
}
else if (len === 4) {
red = parseInt("0x"+color.substr(0,1), 16) / 15.0;
green = parseInt("0x"+color.substr(1,1), 16) / 15.0;
blue = parseInt("0x"+color.substr(2,1), 16) / 15.0;
alpha = parseInt("0x"+color.substr(3,1), 16) / 15.0;
}
else if (len === 3) {
red = parseInt("0x"+color.substr(0,1), 16) / 15.0;
green = parseInt("0x"+color.substr(1,1), 16) / 15.0;
blue = parseInt("0x"+color.substr(2,1), 16) / 15.0;
alpha = 1.0;
}
}
return new x3dom.fields.SFColorRGBA( red, green, blue, alpha );
};
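// Illustrative inputs for colorParse (assumed, not exhaustive):
//   x3dom.fields.SFColorRGBA.colorParse("red");        // 1 0 0 1 (name table, "ff" alpha appended)
//   x3dom.fields.SFColorRGBA.colorParse("#ff000080");  // 1 0 0 ~0.502
// Strings matching neither form fall through as 0 0 0 0.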
///////////////////////////////////////////////////////////////////////////////
/** SFImage constructor.
@class Represents an SFImage
*/
x3dom.fields.SFImage = function(w, h, c, arr) {
if (arguments.length === 0 || !(arr && arr.map)) {
this.width = 0;
this.height = 0;
this.comp = 0;
this.array = [];
}
else {
this.width = w;
this.height = h;
this.comp = c;
        this.array = [];
        var that = this.array;
arr.map( function(v) { that.push(v); }, this.array );
}
};
x3dom.fields.SFImage.parse = function(str) {
var img = new x3dom.fields.SFImage();
img.setValueByStr(str);
return img;
};
x3dom.fields.SFImage.copy = function(that) {
var destination = new x3dom.fields.SFImage();
destination.width = that.width;
destination.height = that.height;
destination.comp = that.comp;
//use instead slice?
//destination.array = that.array.slice();
destination.setPixels(that.getPixels());
return destination;
};
x3dom.fields.SFImage.prototype.copy = function() {
return x3dom.fields.SFImage.copy(this);
};
x3dom.fields.SFImage.prototype.setValueByStr = function(str) {
var mc = str.match(/(\w+)/g);
    var n = mc ? mc.length : 0;
var c2 = 0;
var hex = "0123456789ABCDEF";
this.array = [];
if (n > 2) {
this.width = +mc[0];
this.height = +mc[1];
this.comp = +mc[2];
c2 = 2 * this.comp;
} else {
this.width = 0;
this.height = 0;
this.comp = 0;
return;
}
var len, i;
for (i=3; i<n; i++) {
var r, g, b, a;
if (!mc[i].substr) {
continue;
}
if (mc[i].substr(1,1).toLowerCase() !== "x") {
// Maybe optimize by directly parsing value!
var inp = parseInt(mc[i], 10);
if (this.comp === 1) {
r = inp;
this.array.push( r );
}
else if (this.comp === 2) {
r = inp >> 8 & 255;
g = inp & 255;
this.array.push( r, g );
}
else if (this.comp === 3) {
r = inp >> 16 & 255;
g = inp >> 8 & 255;
b = inp & 255;
this.array.push( r, g, b );
}
else if (this.comp === 4) {
r = inp >> 24 & 255;
g = inp >> 16 & 255;
b = inp >> 8 & 255;
a = inp & 255;
this.array.push( r, g, b, a );
}
}
else if (mc[i].substr(1,1).toLowerCase() === "x") {
mc[i] = mc[i].substr(2);
len = mc[i].length;
if (len === c2) {
if (this.comp === 1) {
r = parseInt("0x"+mc[i].substr(0,2), 16);
this.array.push( r );
}
else if (this.comp === 2) {
r = parseInt("0x"+mc[i].substr(0,2), 16);
g = parseInt("0x"+mc[i].substr(2,2), 16);
this.array.push( r, g );
}
else if (this.comp === 3) {
r = parseInt("0x"+mc[i].substr(0,2), 16);
g = parseInt("0x"+mc[i].substr(2,2), 16);
b = parseInt("0x"+mc[i].substr(4,2), 16);
this.array.push( r, g, b );
}
else if (this.comp === 4) {
r = parseInt("0x"+mc[i].substr(0,2), 16);
g = parseInt("0x"+mc[i].substr(2,2), 16);
b = parseInt("0x"+mc[i].substr(4,2), 16);
a = parseInt("0x"+mc[i].substr(6,2), 16);
this.array.push( r, g, b, a );
}
}
}
}
};
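// Sketch of the accepted SFImage string format (example values assumed):
//   "1 2 3 0xFF0000 0x00FF00" describes a 1x2 RGB image; each pixel is either a
//   decimal int or a hex word with 2*comp digits, split into per-channel bytes.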
x3dom.fields.SFImage.prototype.setPixel = function(x, y, color) {
var startIdx = (y * this.width + x) * this.comp;
if (this.comp === 1 && startIdx < this.array.length) {
this.array[startIdx] = color.r * 255;
}
else if (this.comp === 2 && (startIdx+1) < this.array.length) {
this.array[startIdx ] = color.r * 255;
this.array[startIdx+1] = color.g * 255;
}
else if (this.comp === 3 && (startIdx+2) < this.array.length) {
this.array[startIdx ] = color.r * 255;
this.array[startIdx+1] = color.g * 255;
this.array[startIdx+2] = color.b * 255;
}
else if (this.comp === 4 && (startIdx+3) < this.array.length) {
this.array[startIdx ] = color.r * 255;
this.array[startIdx+1] = color.g * 255;
this.array[startIdx+2] = color.b * 255;
this.array[startIdx+3] = color.a * 255;
}
};
x3dom.fields.SFImage.prototype.getPixel = function(x, y) {
var startIdx = (y * this.width + x) * this.comp;
if (this.comp === 1 && startIdx < this.array.length) {
return new x3dom.fields.SFColorRGBA(this.array[startIdx] / 255,
0,
0,
1);
}
else if (this.comp === 2 && (startIdx+1) < this.array.length) {
return new x3dom.fields.SFColorRGBA(this.array[startIdx] / 255,
this.array[startIdx+1] / 255,
0,
1);
}
else if (this.comp === 3 && (startIdx+2) < this.array.length) {
return new x3dom.fields.SFColorRGBA(this.array[startIdx] / 255,
this.array[startIdx+1] / 255,
this.array[startIdx+2] / 255,
1);
}
else if (this.comp === 4 && (startIdx+3) < this.array.length) {
return new x3dom.fields.SFColorRGBA(this.array[startIdx] / 255,
this.array[startIdx+1] / 255,
this.array[startIdx+2] / 255,
this.array[startIdx+3] / 255);
}
};
x3dom.fields.SFImage.prototype.setPixels = function(pixels) {
var i, idx = 0;
if (this.comp === 1) {
for(i=0; i<pixels.length; i++) {
this.array[idx++] = pixels[i].r * 255;
}
}
else if (this.comp === 2) {
for(i=0; i<pixels.length; i++) {
this.array[idx++] = pixels[i].r * 255;
this.array[idx++] = pixels[i].g * 255;
}
}
else if (this.comp === 3) {
for(i=0; i<pixels.length; i++) {
this.array[idx++] = pixels[i].r * 255;
this.array[idx++] = pixels[i].g * 255;
this.array[idx++] = pixels[i].b * 255;
}
}
else if (this.comp === 4) {
for(i=0; i<pixels.length; i++) {
this.array[idx++] = pixels[i].r * 255;
this.array[idx++] = pixels[i].g * 255;
this.array[idx++] = pixels[i].b * 255;
this.array[idx++] = pixels[i].a * 255;
}
}
};
x3dom.fields.SFImage.prototype.getPixels = function() {
var i;
var pixels = [];
if (this.comp === 1) {
for (i=0; i<this.array.length; i+=this.comp){
pixels.push(new x3dom.fields.SFColorRGBA(this.array[i] / 255,
0,
0,
1));
}
}
else if (this.comp === 2) {
for (i=0; i<this.array.length; i+=this.comp) {
pixels.push(new x3dom.fields.SFColorRGBA(this.array[i ] / 255,
this.array[i + 1] / 255,
0,
1));
}
}
else if (this.comp === 3) {
for (i=0; i<this.array.length; i+=this.comp) {
pixels.push(new x3dom.fields.SFColorRGBA(this.array[i ] / 255,
this.array[i + 1] / 255,
this.array[i + 2] / 255,
1));
}
}
else if (this.comp === 4) {
for (i=0; i<this.array.length; i+=this.comp) {
pixels.push(new x3dom.fields.SFColorRGBA(this.array[i ] / 255,
this.array[i + 1] / 255,
this.array[i + 2] / 255,
this.array[i + 3] / 255));
}
}
return pixels;
};
x3dom.fields.SFImage.prototype.toGL = function() {
var a = [];
Array.map( this.array, function(c) {
a.push(c);
});
return a;
};
///////////////////////////////////////////////////////////////////////////////
// Multi-Field Definitions
///////////////////////////////////////////////////////////////////////////////
///////////////////////////////////////////////////////////////////////////////
/** MFColor constructor.
@class Represents a MFColor
*/
x3dom.fields.MFColor = function(colorArray) {
if (colorArray) {
var that = this;
colorArray.map( function(c) { that.push(c); }, this );
}
};
x3dom.fields.MFColor.copy = function(colorArray) {
var destination = new x3dom.fields.MFColor();
colorArray.map( function(v) { destination.push(v.copy()); }, this );
return destination;
};
x3dom.fields.MFColor.prototype = x3dom.extend([]);
x3dom.fields.MFColor.parse = function(str) {
var mc = str.match(/([+\-0-9eE\.]+)/g);
var colors = [];
for (var i=0, n=mc?mc.length:0; i<n; i+=3) {
colors.push( new x3dom.fields.SFColor(+mc[i+0], +mc[i+1], +mc[i+2]) );
}
return new x3dom.fields.MFColor( colors );
};
x3dom.fields.MFColor.prototype.copy = function() {
return x3dom.fields.MFColor.copy(this);
};
x3dom.fields.MFColor.prototype.setValueByStr = function(str) {
this.length = 0;
var mc = str.match(/([+\-0-9eE\.]+)/g);
for (var i=0, n=mc?mc.length:0; i<n; i+=3) {
this.push( new x3dom.fields.SFColor(+mc[i+0], +mc[i+1], +mc[i+2]) );
}
};
x3dom.fields.MFColor.prototype.toGL = function() {
var a = [];
Array.map( this, function(c) {
a.push(c.r);
a.push(c.g);
a.push(c.b);
});
return a;
};
///////////////////////////////////////////////////////////////////////////////
/** MFColorRGBA constructor.
@class Represents a MFColorRGBA
*/
x3dom.fields.MFColorRGBA = function(colorArray) {
if (colorArray) {
var that = this;
colorArray.map( function(c) { that.push(c); }, this );
}
};
x3dom.fields.MFColorRGBA.copy = function(colorArray) {
var destination = new x3dom.fields.MFColorRGBA();
colorArray.map( function(v) { destination.push(v.copy()); }, this );
return destination;
};
x3dom.fields.MFColorRGBA.prototype = x3dom.extend([]);
x3dom.fields.MFColorRGBA.parse = function(str) {
var mc = str.match(/([+\-0-9eE\.]+)/g);
var colors = [];
for (var i=0, n=mc?mc.length:0; i<n; i+=4) {
colors.push( new x3dom.fields.SFColorRGBA(+mc[i+0], +mc[i+1], +mc[i+2], +mc[i+3]) );
}
return new x3dom.fields.MFColorRGBA( colors );
};
x3dom.fields.MFColorRGBA.prototype.copy = function() {
return x3dom.fields.MFColorRGBA.copy(this);
};
x3dom.fields.MFColorRGBA.prototype.setValueByStr = function(str) {
this.length = 0;
var mc = str.match(/([+\-0-9eE\.]+)/g);
for (var i=0, n=mc?mc.length:0; i<n; i+=4) {
this.push( new x3dom.fields.SFColorRGBA(+mc[i+0], +mc[i+1], +mc[i+2], +mc[i+3]) );
}
};
x3dom.fields.MFColorRGBA.prototype.toGL = function() {
var a = [];
Array.map( this, function(c) {
a.push(c.r);
a.push(c.g);
a.push(c.b);
a.push(c.a);
});
return a;
};
///////////////////////////////////////////////////////////////////////////////
/** MFRotation constructor.
@class Represents a MFRotation
*/
x3dom.fields.MFRotation = function(rotArray) {
if (rotArray) {
var that = this;
rotArray.map( function(v) { that.push(v); }, this );
}
};
x3dom.fields.MFRotation.prototype = x3dom.extend([]);
x3dom.fields.MFRotation.copy = function(rotationArray) {
var destination = new x3dom.fields.MFRotation();
rotationArray.map( function(v) { destination.push(v.copy()); }, this );
return destination;
};
x3dom.fields.MFRotation.prototype.copy = function() {
return x3dom.fields.MFRotation.copy(this);
};
x3dom.fields.MFRotation.parse = function(str) {
var mc = str.match(/([+\-0-9eE\.]+)/g);
var vecs = [];
for (var i=0, n=mc?mc.length:0; i<n; i+=4) {
vecs.push( x3dom.fields.Quaternion.axisAngle(new x3dom.fields.SFVec3f(+mc[i+0], +mc[i+1], +mc[i+2]), +mc[i+3]) );
}
// holds the quaternion representation as needed by interpolators etc.
return new x3dom.fields.MFRotation( vecs );
};
x3dom.fields.MFRotation.prototype.setValueByStr = function(str) {
this.length = 0;
var mc = str.match(/([+\-0-9eE\.]+)/g);
for (var i=0, n=mc?mc.length:0; i<n; i+=4) {
this.push( x3dom.fields.Quaternion.axisAngle(new x3dom.fields.SFVec3f(+mc[i+0], +mc[i+1], +mc[i+2]), +mc[i+3]) );
}
};
x3dom.fields.MFRotation.prototype.toGL = function() {
var a = [];
Array.map( this, function(c) {
var val = c.toAxisAngle();
a.push(val[0].x);
a.push(val[0].y);
a.push(val[0].z);
a.push(val[1]);
});
return a;
};
///////////////////////////////////////////////////////////////////////////////
/** MFVec3f constructor.
@class Represents a MFVec3f
*/
x3dom.fields.MFVec3f = function(vec3Array) {
if (vec3Array) {
var that = this;
vec3Array.map( function(v) { that.push(v); }, this );
}
};
x3dom.fields.MFVec3f.prototype = x3dom.extend([]);
x3dom.fields.MFVec3f.copy = function(vec3Array) {
var destination = new x3dom.fields.MFVec3f();
vec3Array.map( function(v) { destination.push(v.copy()); }, this );
return destination;
};
x3dom.fields.MFVec3f.parse = function(str) {
var mc = str.match(/([+\-0-9eE\.]+)/g);
var vecs = [];
for (var i=0, n=mc?mc.length:0; i<n; i+=3) {
vecs.push( new x3dom.fields.SFVec3f(+mc[i+0], +mc[i+1], +mc[i+2]) );
}
return new x3dom.fields.MFVec3f( vecs );
};
x3dom.fields.MFVec3f.prototype.copy = function()
{
    return x3dom.fields.MFVec3f.copy(this);
};
x3dom.fields.MFVec3f.prototype.setValueByStr = function(str) {
this.length = 0;
var mc = str.match(/([+\-0-9eE\.]+)/g);
for (var i=0, n=mc?mc.length:0; i<n; i+=3) {
this.push( new x3dom.fields.SFVec3f(+mc[i+0], +mc[i+1], +mc[i+2]) );
}
};
x3dom.fields.MFVec3f.prototype.toGL = function() {
var a = [];
Array.map( this, function(c) {
a.push(c.x);
a.push(c.y);
a.push(c.z);
});
return a;
};
///////////////////////////////////////////////////////////////////////////////
/** MFVec2f constructor.
@class Represents a MFVec2f
*/
x3dom.fields.MFVec2f = function(vec2Array) {
if (vec2Array) {
var that = this;
vec2Array.map( function(v) { that.push(v); }, this );
}
};
x3dom.fields.MFVec2f.prototype = x3dom.extend([]);
x3dom.fields.MFVec2f.copy = function(vec2Array) {
var destination = new x3dom.fields.MFVec2f();
vec2Array.map( function(v) { destination.push(v.copy()); }, this );
return destination;
};
x3dom.fields.MFVec2f.parse = function(str) {
var mc = str.match(/([+\-0-9eE\.]+)/g);
var vecs = [];
for (var i=0, n=mc?mc.length:0; i<n; i+=2) {
vecs.push( new x3dom.fields.SFVec2f(+mc[i+0], +mc[i+1]) );
}
return new x3dom.fields.MFVec2f( vecs );
};
x3dom.fields.MFVec2f.prototype.copy = function() {
return x3dom.fields.MFVec2f.copy(this);
};
x3dom.fields.MFVec2f.prototype.setValueByStr = function(str) {
this.length = 0;
var mc = str.match(/([+\-0-9eE\.]+)/g);
for (var i=0, n=mc?mc.length:0; i<n; i+=2) {
this.push( new x3dom.fields.SFVec2f(+mc[i+0], +mc[i+1]) );
}
};
x3dom.fields.MFVec2f.prototype.toGL = function() {
var a = [];
Array.map( this, function(v) {
a.push(v.x);
a.push(v.y);
});
return a;
};
///////////////////////////////////////////////////////////////////////////////
/** MFInt32 constructor.
@class Represents a MFInt32
*/
x3dom.fields.MFInt32 = function(array) {
if (array) {
var that = this;
array.map( function(v) { that.push(v); }, this );
}
};
x3dom.fields.MFInt32.prototype = x3dom.extend([]);
x3dom.fields.MFInt32.copy = function(intArray) {
var destination = new x3dom.fields.MFInt32();
intArray.map( function(v) { destination.push(v); }, this );
return destination;
};
x3dom.fields.MFInt32.parse = function(str) {
var mc = str.match(/([+\-]?\d+\s*){1},?\s*/g);
var vals = [];
for (var i=0, n=mc?mc.length:0; i<n; ++i) {
vals.push( parseInt(mc[i], 10) );
}
return new x3dom.fields.MFInt32( vals );
};
x3dom.fields.MFInt32.prototype.copy = function() {
return x3dom.fields.MFInt32.copy(this);
};
x3dom.fields.MFInt32.prototype.setValueByStr = function(str) {
this.length = 0;
var mc = str.match(/([+\-]?\d+\s*){1},?\s*/g);
for (var i=0, n=mc?mc.length:0; i<n; ++i) {
this.push( parseInt(mc[i], 10) );
}
};
x3dom.fields.MFInt32.prototype.toGL = function() {
var a = [];
Array.map( this, function(v) {
a.push(v);
});
return a;
};
///////////////////////////////////////////////////////////////////////////////
/** MFFloat constructor.
@class Represents a MFFloat
*/
x3dom.fields.MFFloat = function(array) {
if (array) {
var that = this;
array.map( function(v) { that.push(v); }, this );
}
};
x3dom.fields.MFFloat.prototype = x3dom.extend([]);
x3dom.fields.MFFloat.copy = function(floatArray) {
var destination = new x3dom.fields.MFFloat();
floatArray.map( function(v) { destination.push(v); }, this );
return destination;
};
x3dom.fields.MFFloat.parse = function(str) {
var mc = str.match(/([+\-0-9eE\.]+)/g);
var vals = [];
for (var i=0, n=mc?mc.length:0; i<n; i++) {
vals.push( +mc[i] );
}
return new x3dom.fields.MFFloat( vals );
};
x3dom.fields.MFFloat.prototype.copy = function() {
return x3dom.fields.MFFloat.copy(this);
};
x3dom.fields.MFFloat.prototype.setValueByStr = function(str) {
this.length = 0;
var mc = str.match(/([+\-0-9eE\.]+)/g);
for (var i=0, n=mc?mc.length:0; i<n; i++) {
this.push( +mc[i] );
}
};
x3dom.fields.MFFloat.prototype.toGL = function() {
var a = [];
Array.map( this, function(v) {
a.push(v);
});
return a;
};
///////////////////////////////////////////////////////////////////////////////
/** MFBoolean constructor.
@class Represents a MFBoolean
*/
x3dom.fields.MFBoolean = function(array) {
if (array) {
var that = this;
array.map( function(v) { that.push(v); }, this );
}
};
x3dom.fields.MFBoolean.prototype = x3dom.extend([]);
x3dom.fields.MFBoolean.copy = function(boolArray) {
var destination = new x3dom.fields.MFBoolean();
boolArray.map( function(v) { destination.push(v); }, this );
return destination;
};
x3dom.fields.MFBoolean.parse = function(str) {
var mc = str.match(/(true|false|1|0)/ig);
var vals = [];
for (var i=0, n=mc?mc.length:0; i<n; i++) {
vals.push( (mc[i] == '1' || mc[i].toLowerCase() == 'true') );
}
return new x3dom.fields.MFBoolean( vals );
};
x3dom.fields.MFBoolean.prototype.copy = function() {
return x3dom.fields.MFBoolean.copy(this);
};
x3dom.fields.MFBoolean.prototype.setValueByStr = function(str) {
this.length = 0;
var mc = str.match(/(true|false|1|0)/ig);
for (var i=0, n=mc?mc.length:0; i<n; i++) {
this.push( (mc[i] == '1' || mc[i].toLowerCase() == 'true') );
}
};
x3dom.fields.MFBoolean.prototype.toGL = function() {
var a = [];
Array.map( this, function(v) {
a.push(v ? 1 : 0);
});
return a;
};
///////////////////////////////////////////////////////////////////////////////
/** MFString constructor.
@class Represents a MFString
*/
x3dom.fields.MFString = function(strArray) {
if (strArray && strArray.map) {
var that = this;
strArray.map( function(v) { that.push(v); }, this );
}
};
x3dom.fields.MFString.prototype = x3dom.extend([]);
x3dom.fields.MFString.copy = function(stringArray) {
var destination = new x3dom.fields.MFString();
stringArray.map( function(v) { destination.push(v); }, this );
return destination;
};
x3dom.fields.MFString.parse = function(str) {
var arr = [];
// ignore leading whitespace?
if (str.length && str[0] == '"') {
var m, re = /"((?:[^\\"]|\\\\|\\")*)"/g;
while ((m = re.exec(str))) {
            var s = m[1].replace(/\\([\\"])/g, "$1");
if (s !== undefined) {
arr.push(s);
}
}
}
else {
arr.push(str);
}
return new x3dom.fields.MFString( arr );
};
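// Illustrative parse inputs (assumed):
//   x3dom.fields.MFString.parse('"foo" "say \\"hi\\""');  // ["foo", 'say "hi"']
//   x3dom.fields.MFString.parse('bare');                  // ["bare"] (unquoted fallback)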
x3dom.fields.MFString.prototype.copy = function() {
return x3dom.fields.MFString.copy(this);
};
x3dom.fields.MFString.prototype.setValueByStr = function(str) {
this.length = 0;
// ignore leading whitespace?
if (str.length && str[0] == '"') {
var m, re = /"((?:[^\\"]|\\\\|\\")*)"/g;
while ((m = re.exec(str))) {
            var s = m[1].replace(/\\([\\"])/g, "$1");
if (s !== undefined) {
this.push(s);
}
}
}
else {
this.push(str);
}
return this;
};
x3dom.fields.MFString.prototype.toString = function () {
var str = "";
for (var i=0, n=this.length; i<n; i++) {
str = str + this[i] + " ";
}
return str;
};
///////////////////////////////////////////////////////////////////////////////
// Single-/Multi-Field Node Definitions
///////////////////////////////////////////////////////////////////////////////
///////////////////////////////////////////////////////////////////////////////
/** SFNode constructor.
@class Represents a SFNode
*/
x3dom.fields.SFNode = function(type) {
this.type = type;
this.node = null;
};
x3dom.fields.SFNode.prototype.hasLink = function(node) {
return (node ? (this.node === node) : this.node);
};
x3dom.fields.SFNode.prototype.addLink = function(node) {
this.node = node;
return true;
};
x3dom.fields.SFNode.prototype.rmLink = function(node) {
if (this.node === node) {
this.node = null;
return true;
}
else {
return false;
}
};
///////////////////////////////////////////////////////////////////////////////
/** MFNode constructor.
@class Represents a MFNode
*/
x3dom.fields.MFNode = function(type) {
this.type = type;
this.nodes = [];
};
x3dom.fields.MFNode.prototype.hasLink = function(node) {
if (node) {
for (var i = 0, n = this.nodes.length; i < n; i++) {
if (this.nodes[i] === node) {
return true;
}
}
}
else {
        return (this.nodes.length > 0);
}
return false;
};
x3dom.fields.MFNode.prototype.addLink = function(node) {
this.nodes.push (node);
return true;
};
x3dom.fields.MFNode.prototype.rmLink = function(node) {
for (var i = 0, n = this.nodes.length; i < n; i++) {
if (this.nodes[i] === node) {
this.nodes.splice(i,1);
return true;
}
}
return false;
};
x3dom.fields.MFNode.prototype.length = function() {
return this.nodes.length;
};
///////////////////////////////////////////////////////////////////////////////
// Math Helper Class Definitions
///////////////////////////////////////////////////////////////////////////////
///////////////////////////////////////////////////////////////////////////////
/**
* Line constructor.
* @param {SFVec3f} pos - anchor point of the line
* @param {SFVec3f} dir - direction of the line, must be normalized
* @class Represents a Line (as internal helper).
 * A line has an origin and a vector that describes a direction; it is infinite in both directions.
*/
x3dom.fields.Line = function(pos, dir)
{
if (arguments.length === 0)
{
this.pos = new x3dom.fields.SFVec3f(0, 0, 0);
this.dir = new x3dom.fields.SFVec3f(0, 0, 1);
}
    else
    {
        this.pos = x3dom.fields.SFVec3f.copy(pos);
        this.dir = x3dom.fields.SFVec3f.copy(dir);
    }
};
/**
* For a given point, this function returns the closest point on this line.
* @param p {x3dom.fields.SFVec3f} - the point
* @returns {x3dom.fields.SFVec3f} the closest point
*/
x3dom.fields.Line.prototype.closestPoint = function(p)
{
var distVec = p.subtract(this.pos);
//project the distance vector on the line
var projDist = distVec.dot(this.dir);
return this.pos.add(this.dir.multiply(projDist));
};
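// Worked example (assumed values): for the line through the origin along +X,
//   new x3dom.fields.Line(new x3dom.fields.SFVec3f(0, 0, 0),
//                         new x3dom.fields.SFVec3f(1, 0, 0))
//       .closestPoint(new x3dom.fields.SFVec3f(2, 3, 0));  // -> (2, 0, 0)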
/**
* For a given point, this function returns the distance to the closest point on this line.
* @param p {x3dom.fields.SFVec3f} - the point
* @returns {Number} the distance to the closest point
*/
x3dom.fields.Line.prototype.shortestDistance = function(p)
{
var distVec = p.subtract(this.pos);
//project the distance vector on the line
var projDist = distVec.dot(this.dir);
//subtract the projected distance vector, to obtain the part that is orthogonal to this line
return distVec.subtract(this.dir.multiply(projDist)).length();
};
///////////////////////////////////////////////////////////////////////////////
/**
* Ray constructor.
* @param {SFVec3f} pos - anchor point of the ray
* @param {SFVec3f} dir - direction of the ray, must be normalized
* @class Represents a Ray (as internal helper).
 * A ray is a special line that extends in only one direction from its origin.
*/
x3dom.fields.Ray = function(pos, dir)
{
if (arguments.length === 0)
{
this.pos = new x3dom.fields.SFVec3f(0, 0, 0);
this.dir = new x3dom.fields.SFVec3f(0, 0, 1);
}
else
{
this.pos = new x3dom.fields.SFVec3f(pos.x, pos.y, pos.z);
var n = dir.length();
if (n) { n = 1.0 / n; }
this.dir = new x3dom.fields.SFVec3f(dir.x*n, dir.y*n, dir.z*n);
}
this.enter = 0;
this.exit = 0;
this.hitObject = null;
this.hitPoint = {};
this.dist = Number.MAX_VALUE;
};
x3dom.fields.Ray.prototype.toString = function () {
return 'Ray: [' + this.pos.toString() + '; ' + this.dir.toString() + ']';
};
/**
* Intersects this ray with a plane, defined by the given anchor point and normal.
* The result returned is the point of intersection, if any. If no point of intersection exists, null is returned.
 * Null is also returned in case there is an infinite number of solutions (i.e., if the ray lies within the plane).
*
* @param p {x3dom.fields.SFVec3f} - anchor point
* @param n {x3dom.fields.SFVec3f} - plane normal
* @returns {x3dom.fields.SFVec3f} the point of intersection, can be null
*/
x3dom.fields.Ray.prototype.intersectPlane = function(p, n)
{
var result = null;
var alpha; //ray parameter, should be computed
var nDotDir = n.dot(this.dir);
//if the ray hits the plane, the plane normal and ray direction must be facing each other
if (nDotDir < 0.0)
{
alpha = (p.dot(n) - this.pos.dot(n)) / nDotDir;
result = this.pos.addScaled(this.dir, alpha);
}
return result;
};
/** Intersects this ray with the box volume given by low and high. */
x3dom.fields.Ray.prototype.intersect = function(low, high)
{
var isect = 0.0;
var out = Number.MAX_VALUE;
var r, te, tl;
if (this.dir.x > x3dom.fields.Eps)
{
r = 1.0 / this.dir.x;
te = (low.x - this.pos.x) * r;
tl = (high.x - this.pos.x) * r;
if (tl < out){
out = tl;
}
if (te > isect){
isect = te;
}
}
else if (this.dir.x < -x3dom.fields.Eps)
{
r = 1.0 / this.dir.x;
te = (high.x - this.pos.x) * r;
tl = (low.x - this.pos.x) * r;
if (tl < out){
out = tl;
}
if (te > isect) {
isect = te;
}
}
else if (this.pos.x < low.x || this.pos.x > high.x)
{
return false;
}
if (this.dir.y > x3dom.fields.Eps)
{
r = 1.0 / this.dir.y;
te = (low.y - this.pos.y) * r;
tl = (high.y - this.pos.y) * r;
if (tl < out){
out = tl;
}
if (te > isect) {
isect = te;
}
if (isect-out >= x3dom.fields.Eps) {
return false;
}
}
else if (this.dir.y < -x3dom.fields.Eps)
{
r = 1.0 / this.dir.y;
te = (high.y - this.pos.y) * r;
tl = (low.y - this.pos.y) * r;
if (tl < out){
out = tl;
}
if (te > isect) {
isect = te;
}
if (isect-out >= x3dom.fields.Eps) {
return false;
}
}
else if (this.pos.y < low.y || this.pos.y > high.y)
{
return false;
}
if (this.dir.z > x3dom.fields.Eps)
{
r = 1.0 / this.dir.z;
te = (low.z - this.pos.z) * r;
tl = (high.z - this.pos.z) * r;
if (tl < out) {
out = tl;
}
if (te > isect) {
isect = te;
}
}
else if (this.dir.z < -x3dom.fields.Eps)
{
r = 1.0 / this.dir.z;
te = (high.z - this.pos.z) * r;
tl = (low.z - this.pos.z) * r;
if (tl < out) {
out = tl;
}
if (te > isect) {
isect = te;
}
}
else if (this.pos.z < low.z || this.pos.z > high.z)
{
return false;
}
this.enter = isect;
this.exit = out;
return (isect-out < x3dom.fields.Eps);
};
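// The test above is the classic slab method: per axis, te/tl are the ray
// parameters where the ray enters/leaves the [low, high] interval; the box is
// hit iff the largest entry (this.enter) stays below the smallest exit (this.exit).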
///////////////////////////////////////////////////////////////////////////////
/** BoxVolume constructor.
@class Represents a box volume (as internal helper).
*/
x3dom.fields.BoxVolume = function(min, max)
{
if (arguments.length < 2) {
this.min = new x3dom.fields.SFVec3f(0, 0, 0);
this.max = new x3dom.fields.SFVec3f(0, 0, 0);
this.valid = false;
}
else {
// compiler enforced type check for min/max would be nice
this.min = x3dom.fields.SFVec3f.copy(min);
this.max = x3dom.fields.SFVec3f.copy(max);
this.valid = true;
}
this.updateInternals();
};
x3dom.fields.BoxVolume.prototype.getScalarValue = function()
{
var extent = this.max.subtract(this.min);
return (extent.x*extent.y*extent.z);
};
x3dom.fields.BoxVolume.copy = function(other)
{
return new x3dom.fields.BoxVolume(other.min, other.max);
};
x3dom.fields.BoxVolume.prototype.updateInternals = function()
{
this.radialVec = this.max.subtract(this.min).multiply(0.5);
this.center = this.min.add(this.radialVec);
this.diameter = 2 * this.radialVec.length();
};
x3dom.fields.BoxVolume.prototype.setBounds = function(min, max)
{
this.min.setValues(min);
this.max.setValues(max);
this.updateInternals();
this.valid = true;
};
x3dom.fields.BoxVolume.prototype.setBoundsByCenterSize = function(center, size)
{
var halfSize = size.multiply(0.5);
this.min = center.subtract(halfSize);
this.max = center.add(halfSize);
this.updateInternals();
this.valid = true;
};
x3dom.fields.BoxVolume.prototype.extendBounds = function(min, max)
{
if (this.valid)
{
if (this.min.x > min.x) { this.min.x = min.x; }
if (this.min.y > min.y) { this.min.y = min.y; }
if (this.min.z > min.z) { this.min.z = min.z; }
if (this.max.x < max.x) { this.max.x = max.x; }
if (this.max.y < max.y) { this.max.y = max.y; }
if (this.max.z < max.z) { this.max.z = max.z; }
this.updateInternals();
}
else
{
this.setBounds(min, max);
}
};
x3dom.fields.BoxVolume.prototype.getBounds = function(min, max)
{
min.setValues(this.min);
max.setValues(this.max);
};
x3dom.fields.BoxVolume.prototype.getRadialVec = function()
{
return this.radialVec;
};
x3dom.fields.BoxVolume.prototype.invalidate = function()
{
this.valid = false;
this.min = new x3dom.fields.SFVec3f(0, 0, 0);
this.max = new x3dom.fields.SFVec3f(0, 0, 0);
};
x3dom.fields.BoxVolume.prototype.isValid = function()
{
return this.valid;
};
x3dom.fields.BoxVolume.prototype.getCenter = function()
{
return this.center;
};
x3dom.fields.BoxVolume.prototype.getDiameter = function()
{
return this.diameter;
};
x3dom.fields.BoxVolume.prototype.transform = function(m)
{
var xmin, ymin, zmin;
var xmax, ymax, zmax;
xmin = xmax = m._03;
ymin = ymax = m._13;
zmin = zmax = m._23;
// calculate xmin and xmax of new transformed BBox
var a = this.max.x * m._00;
var b = this.min.x * m._00;
if (a >= b) {
xmax += a;
xmin += b;
}
else {
xmax += b;
xmin += a;
}
a = this.max.y * m._01;
b = this.min.y * m._01;
if (a >= b) {
xmax += a;
xmin += b;
}
else {
xmax += b;
xmin += a;
}
a = this.max.z * m._02;
b = this.min.z * m._02;
if (a >= b) {
xmax += a;
xmin += b;
}
else {
xmax += b;
xmin += a;
}
// calculate ymin and ymax of new transformed BBox
a = this.max.x * m._10;
b = this.min.x * m._10;
if (a >= b) {
ymax += a;
ymin += b;
}
else {
ymax += b;
ymin += a;
}
a = this.max.y * m._11;
b = this.min.y * m._11;
if (a >= b) {
ymax += a;
ymin += b;
}
else {
ymax += b;
ymin += a;
}
a = this.max.z * m._12;
b = this.min.z * m._12;
if (a >= b) {
ymax += a;
ymin += b;
}
else {
ymax += b;
ymin += a;
}
// calculate zmin and zmax of new transformed BBox
a = this.max.x * m._20;
b = this.min.x * m._20;
if (a >= b) {
zmax += a;
zmin += b;
}
else {
zmax += b;
zmin += a;
}
a = this.max.y * m._21;
b = this.min.y * m._21;
if (a >= b) {
zmax += a;
zmin += b;
}
else {
zmax += b;
zmin += a;
}
a = this.max.z * m._22;
b = this.min.z * m._22;
if (a >= b) {
zmax += a;
zmin += b;
}
else {
zmax += b;
zmin += a;
}
this.min.x = xmin;
this.min.y = ymin;
this.min.z = zmin;
this.max.x = xmax;
this.max.y = ymax;
this.max.z = zmax;
this.updateInternals();
};
x3dom.fields.BoxVolume.prototype.transformFrom = function(m, other)
{
var xmin, ymin, zmin;
var xmax, ymax, zmax;
xmin = xmax = m._03;
ymin = ymax = m._13;
zmin = zmax = m._23;
// calculate xmin and xmax of new transformed BBox
var a = other.max.x * m._00;
var b = other.min.x * m._00;
if (a >= b) {
xmax += a;
xmin += b;
}
else {
xmax += b;
xmin += a;
}
a = other.max.y * m._01;
b = other.min.y * m._01;
if (a >= b) {
xmax += a;
xmin += b;
}
else {
xmax += b;
xmin += a;
}
a = other.max.z * m._02;
b = other.min.z * m._02;
if (a >= b) {
xmax += a;
xmin += b;
}
else {
xmax += b;
xmin += a;
}
// calculate ymin and ymax of new transformed BBox
a = other.max.x * m._10;
b = other.min.x * m._10;
if (a >= b) {
ymax += a;
ymin += b;
}
else {
ymax += b;
ymin += a;
}
a = other.max.y * m._11;
b = other.min.y * m._11;
if (a >= b) {
ymax += a;
ymin += b;
}
else {
ymax += b;
ymin += a;
}
a = other.max.z * m._12;
b = other.min.z * m._12;
if (a >= b) {
ymax += a;
ymin += b;
}
else {
ymax += b;
ymin += a;
}
// calculate zmin and zmax of new transformed BBox
a = other.max.x * m._20;
b = other.min.x * m._20;
if (a >= b) {
zmax += a;
zmin += b;
}
else {
zmax += b;
zmin += a;
}
a = other.max.y * m._21;
b = other.min.y * m._21;
if (a >= b) {
zmax += a;
zmin += b;
}
else {
zmax += b;
zmin += a;
}
a = other.max.z * m._22;
b = other.min.z * m._22;
if (a >= b) {
zmax += a;
zmin += b;
}
else {
zmax += b;
zmin += a;
}
this.min.x = xmin;
this.min.y = ymin;
this.min.z = zmin;
this.max.x = xmax;
this.max.y = ymax;
this.max.z = zmax;
this.updateInternals();
this.valid = true;
};
///////////////////////////////////////////////////////////////////////////////
/** FrustumVolume constructor.
@class Represents a frustum (as internal helper).
*/
x3dom.fields.FrustumVolume = function(clipMat)
{
this.planeNormals = [];
this.planeDistances = [];
this.directionIndex = [];
if (arguments.length === 0) {
return;
}
var planeEquation = [];
for (var i=0; i<6; i++) {
this.planeNormals[i] = new x3dom.fields.SFVec3f(0, 0, 0);
this.planeDistances[i] = 0;
this.directionIndex[i] = 0;
planeEquation[i] = new x3dom.fields.SFVec4f(0, 0, 0, 0);
}
planeEquation[0].x = clipMat._30 - clipMat._00;
planeEquation[0].y = clipMat._31 - clipMat._01;
planeEquation[0].z = clipMat._32 - clipMat._02;
planeEquation[0].w = clipMat._33 - clipMat._03;
planeEquation[1].x = clipMat._30 + clipMat._00;
planeEquation[1].y = clipMat._31 + clipMat._01;
planeEquation[1].z = clipMat._32 + clipMat._02;
planeEquation[1].w = clipMat._33 + clipMat._03;
planeEquation[2].x = clipMat._30 + clipMat._10;
planeEquation[2].y = clipMat._31 + clipMat._11;
planeEquation[2].z = clipMat._32 + clipMat._12;
planeEquation[2].w = clipMat._33 + clipMat._13;
planeEquation[3].x = clipMat._30 - clipMat._10;
planeEquation[3].y = clipMat._31 - clipMat._11;
planeEquation[3].z = clipMat._32 - clipMat._12;
planeEquation[3].w = clipMat._33 - clipMat._13;
planeEquation[4].x = clipMat._30 + clipMat._20;
planeEquation[4].y = clipMat._31 + clipMat._21;
planeEquation[4].z = clipMat._32 + clipMat._22;
planeEquation[4].w = clipMat._33 + clipMat._23;
planeEquation[5].x = clipMat._30 - clipMat._20;
planeEquation[5].y = clipMat._31 - clipMat._21;
planeEquation[5].z = clipMat._32 - clipMat._22;
planeEquation[5].w = clipMat._33 - clipMat._23;
for (i=0; i<6; i++) {
var vectorLength = Math.sqrt(planeEquation[i].x * planeEquation[i].x +
planeEquation[i].y * planeEquation[i].y +
planeEquation[i].z * planeEquation[i].z);
planeEquation[i].x /= vectorLength;
planeEquation[i].y /= vectorLength;
planeEquation[i].z /= vectorLength;
planeEquation[i].w /= -vectorLength;
}
var updateDirectionIndex = function(normalVec) {
var ind = 0;
if (normalVec.x > 0) ind |= 1;
if (normalVec.y > 0) ind |= 2;
if (normalVec.z > 0) ind |= 4;
return ind;
};
// right
this.planeNormals[3].setValues(planeEquation[0]);
this.planeDistances[3] = planeEquation[0].w;
this.directionIndex[3] = updateDirectionIndex(this.planeNormals[3]);
// left
this.planeNormals[2].setValues(planeEquation[1]);
this.planeDistances[2] = planeEquation[1].w;
this.directionIndex[2] = updateDirectionIndex(this.planeNormals[2]);
// bottom
this.planeNormals[5].setValues(planeEquation[2]);
this.planeDistances[5] = planeEquation[2].w;
this.directionIndex[5] = updateDirectionIndex(this.planeNormals[5]);
// top
this.planeNormals[4].setValues(planeEquation[3]);
this.planeDistances[4] = planeEquation[3].w;
this.directionIndex[4] = updateDirectionIndex(this.planeNormals[4]);
// near
this.planeNormals[0].setValues(planeEquation[4]);
this.planeDistances[0] = planeEquation[4].w;
this.directionIndex[0] = updateDirectionIndex(this.planeNormals[0]);
// far
this.planeNormals[1].setValues(planeEquation[5]);
this.planeDistances[1] = planeEquation[5].w;
this.directionIndex[1] = updateDirectionIndex(this.planeNormals[1]);
};
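// The six planes above are extracted from the combined "clip" matrix with the
// standard Gribb/Hartmann method (fourth row plus or minus one other row, then
// normalized). directionIndex caches which box corner lies farthest along each
// plane normal, so intersect() only needs to test one corner per plane.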
/** Check the volume against the frustum. */
x3dom.fields.FrustumVolume.prototype.intersect = function(vol, planeMask)
{
if (this.planeNormals.length < 6) {
x3dom.debug.logWarning("FrustumVolume not initialized!");
return false;
}
var that = this;
var min = vol.min, max = vol.max;
var setDirectionIndexPoint = function(index) {
var pnt = new x3dom.fields.SFVec3f(0, 0, 0);
if (index & 1) { pnt.x = min.x; }
else { pnt.x = max.x; }
if (index & 2) { pnt.y = min.y; }
else { pnt.y = max.y; }
if (index & 4) { pnt.z = min.z; }
else { pnt.z = max.z; }
return pnt;
};
//Check if the point is in the halfspace
var pntIsInHalfSpace = function(i, pnt) {
var s = that.planeNormals[i].dot(pnt) - that.planeDistances[i];
return (s >= 0);
};
//Check if the box formed by min/max is fully inside the halfspace
var isInHalfSpace = function(i) {
var p = setDirectionIndexPoint(that.directionIndex[i]);
return pntIsInHalfSpace(i, p);
};
//Check if the box formed by min/max is fully outside the halfspace
var isOutHalfSpace = function(i) {
var p = setDirectionIndexPoint(that.directionIndex[i] ^ 7);
return !pntIsInHalfSpace(i, p);
};
//Check each point of the box to the 6 planes
var mask = 1;
if (planeMask < 0) planeMask = 0;
for (var i=0; i<6; i++, mask<<=1) {
if ((planeMask & mask) != 0)
continue;
if (isOutHalfSpace(i))
return -1;
if (isInHalfSpace(i))
planeMask |= mask;
}
return planeMask;
};
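// intersect() returns -1 if the box is fully outside some active frustum plane;
// otherwise it returns planeMask with a bit set for every plane the box is fully
// inside of, so nested boxes can skip re-testing those planes.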
value_stack.py | # encoding: utf-8
"""A self maintained value stack."""
from __future__ import annotations
import dataclasses
import enum
import functools
import inspect
import sys
from copy import copy
from dis import Instruction
from types import FrameType
from typing import Optional
try:
    from typing import TYPE_CHECKING, Literal
except ImportError:
    from typing import TYPE_CHECKING
    from typing_extensions import Literal
from . import utils
from .basis import (
Symbol,
Binding,
Mutation,
Deletion,
JumpBackToLoopStart,
ExceptionInfo,
)
from .block_stack import BlockStack, BlockType, Block
if TYPE_CHECKING:
from .frame import Snapshot
class ValueStackException(Exception):
pass
# Sometimes we need to put a _placeholder on TOS because we don't care about its value,
# like LOAD_CONST. We convert it to [] when putting it on the stack.
_placeholder = None
class _NullClass:
def __repr__(self):
return "NULL"
# The NULL value pushed by BEGIN_FINALLY, WITH_CLEANUP_FINISH, LOAD_METHOD.
NULL = _NullClass()
def emit_event(f):
"""Decorator used to denote that a handler emits at least one event.
It is used for:
1. Documentation purposes.
2. In case there's a need to determine whether a handler emits any event.
"""
@functools.wraps(f)
def inner(*args, **kwargs):
return f(*args, **kwargs)
inner.emit_event = True
return inner
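# Usage sketch: handlers below such as _STORE_NAME_handler are decorated with
# @emit_event and return an EventInfo; getattr(handler, "emit_event", False) can
# then be used to tell event-emitting handlers apart.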
@dataclasses.dataclass
class EventInfo:
type: Literal[Binding, Mutation, Deletion, JumpBackToLoopStart]
target: Symbol = None
sources: set[Symbol] = dataclasses.field(default_factory=set)
jump_target: int = None
class Why(enum.Enum):
UNINITIALIZED = 0
NOT = 1 # No error
EXCEPTION = 2 # Exception occurred
RETURN = 3 # 'return' statement
BREAK = 4 # 'break' statement
CONTINUE = 5 # 'continue' statement
YIELD = 6 # 'yield' operator
SILENCED = 8 # Exception silenced by 'with'
class BaseValueStack:
"""Class that simulates the a frame's value stack.
This class contains instr handlers that are the same across different versions.
"""
def __init__(self):
self.stack = []
self.block_stack = BlockStack()
self.last_exception: Optional[ExceptionInfo] = None
self.return_value = _placeholder
self.handler_signature_cache: dict[str, set[str]] = {} # keyed by opname.
self.snapshot = None
def update_snapshot(self, mutated_identifier: str, new_snapshot: Snapshot):
"""Updates snapshot after an identifier has been mutated.
e.g. `a`'s value is pushed to value stack, then `a` is mutated. We need to
update the snapshot bound to `a`'s symbol so that later doing tracing,
we can get the correct predecessor event of `a`, which is the mutation
event.
Note that Binding event does not change the object on value stack, so no need
to update.
"""
for item in self.stack:
for symbol in item:
if symbol.name == mutated_identifier:
symbol.snapshot = new_snapshot
def emit_event_and_update_stack(
self,
instr: Instruction,
frame: FrameType,
jumped: bool,
exc_info: Optional[ExceptionInfo],
snapshot: Snapshot,
) -> Optional[EventInfo]:
"""Given a instruction, emits EventInfo if any, and updates the stack.
Args:
instr: current instruction.
jumped: whether jump just happened.
frame: current frame.
exc_info: implicitly raised exception if any, or None.
snapshot: frame state snapshot.
"""
self.snapshot = snapshot
opname = instr.opname
if opname.startswith("BINARY") or opname.startswith("INPLACE"):
# Binary operations are all the same.
handler = self._BINARY_operation_handler
else:
try:
handler = getattr(self, f"_{opname}_handler")
except AttributeError:
raise AttributeError(f"Please add\ndef _{opname}_handler(self, instr):")
# Pass arguments on demand.
try:
parameters = self.handler_signature_cache[opname]
except KeyError:
parameters = set(inspect.signature(handler).parameters)
self.handler_signature_cache[opname] = parameters
# noinspection PyArgumentList
return handler(
*[
arg
for param_name, arg in {
"instr": instr,
"jumped": jumped,
"frame": frame,
"exc_info": exc_info,
}.items()
if param_name in parameters
]
)
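    # Example of the on-demand argument passing above: a handler declared as
    # _LOAD_FAST_handler(self, instr, frame, exc_info) receives exactly
    # (instr, frame, exc_info), following the dict's insertion order, while
    # _POP_TOP_handler(self) receives no arguments at all.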
@property
def stack_level(self):
return len(self.stack)
@property
def tos(self):
return self._tos(0)
@property
def tos1(self):
return self._tos(1)
@property
def tos2(self):
return self._tos(2)
def _tos(self, n):
"""Returns the i-th element on the stack. Stack keeps unchanged."""
index = -1 - n
try:
return self.stack[index]
except IndexError:
raise ValueStackException(
f"Value stack should at least have {-index} elements",
f", but only has {len(self.stack)}.",
)
def _push(self, *values):
"""Pushes values onto the simulated value stack.
This method will automatically convert single value to a list. _placeholder will
be converted to an empty list, so that it never exists on the value stack.
Str is converted to Symbol.
"""
for value in values:
if value is _placeholder:
value = []
elif isinstance(value, str): # For representing identifiers.
value = [Symbol(name=value, snapshot=self.snapshot)]
elif isinstance(value, list):
for index, item in enumerate(value):
if isinstance(item, str):
value[index] = Symbol(item, snapshot=self.snapshot)
else: # Already a Symbol.
# Why copy? Because the symbols on the stack might be modified
# later in the update_snapshot method. If we don't copy, a
# symbol that's already been popped out of the stack will be
# affected by the change (if it has the same name with the
# modified symbol on the stack). A copy will make symbols
# isolated from each other.
value[index] = copy(value[index])
# For NULL or int used by block related handlers, keep the original value.
self.stack.append(value)
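    # Illustrative conversions performed by _push (assumed values):
    #   self._push("a")           # stack gains [Symbol("a")]
    #   self._push(_placeholder)  # stack gains []
    #   self._push(["a", sym])    # str is wrapped in a Symbol, existing Symbols are copied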
def _pop(self, n=1):
"""Pops and returns n item from stack."""
try:
if n == 1:
return self.stack.pop()
return [self.stack.pop() for _ in range(n)]
except IndexError:
raise ValueStackException("Value stack should have tos but is empty.")
def _pop_n_push_one(self, n):
"""Pops n elements from TOS, and pushes one to TOS.
        The pushed element is expected to originate from the popped elements.
"""
elements = []
for _ in range(n):
tos = self._pop()
if isinstance(tos, list):
# Flattens identifiers in TOS, leave out others (NULL, int).
elements.extend(tos)
self._push(elements)
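    # E.g. BUILD_TUPLE with arg=2 ends up calling _pop_n_push_one(2): both popped
    # symbol lists are flattened into one list, so the new tuple keeps pointing
    # at its source identifiers.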
def _pop_one_push_n(self, n):
"""Pops one elements from TOS, and pushes n elements to TOS.
        The pushed elements are expected to originate from the popped element.
"""
tos = self._pop()
for _ in range(n):
self._push(tos)
def _push_block(self, b_type: BlockType):
self.block_stack.push(Block(b_level=self.stack_level, b_type=b_type))
def _pop_block(self):
return self.block_stack.pop()
def _unwind_block(self, b: Block):
while self.stack_level > b.b_level:
self._pop()
def _instruction_successfully_executed(
self, exc_info: Optional[ExceptionInfo], opname: str
) -> bool:
"""Returns true if there's no exception, otherwise false."""
if exc_info:
sys.stdout.buffer.write(
f"⚠️ Exception happened in {opname}\n".encode("utf-8")
)
self._store_exception(exc_info)
return False
return True
def _POP_TOP_handler(self):
self._pop()
def _ROT_TWO_handler(self):
tos, tos1 = self._pop(2)
self._push(tos)
self._push(tos1)
def _DUP_TOP_handler(self):
self._push(self.tos)
def _DUP_TOP_TWO_handler(self):
tos1, tos = self.tos1, self.tos
self._push(tos1)
self._push(tos)
def _ROT_THREE_handler(self):
self.stack[-3], self.stack[-2], self.stack[-1] = (
self.tos,
self.tos2,
self.tos1,
)
def _UNARY_POSITIVE_handler(self, instr):
pass
def _UNARY_NEGATIVE_handler(self, instr):
pass
def _UNARY_NOT_handler(self, instr):
pass
def _UNARY_INVERT_handler(self, instr):
pass
def _BINARY_operation_handler(self, exc_info):
tos, tos1 = self._pop(2)
if self._instruction_successfully_executed(exc_info, "BINARY op"):
self._push(utils.flatten(tos, tos1))
@emit_event
def _STORE_SUBSCR_handler(self, exc_info):
tos, tos1, tos2 = self._pop(3)
if self._instruction_successfully_executed(exc_info, "STORE_SUBSCR"):
            # We used to `assert len(tos1) == 1`, but in certain cases, like
# os.environ["foo"] = "2", tos1 is [].
if tos1:
return EventInfo(
type=Mutation, target=tos1[0], sources=set(tos + tos1 + tos2)
)
# noinspection DuplicatedCode
@emit_event
def _DELETE_SUBSCR_handler(self, exc_info):
tos, tos1 = self._pop(2)
assert len(tos1) == 1
if self._instruction_successfully_executed(exc_info, "DELETE_SUBSCR"):
return EventInfo(type=Mutation, target=tos1[0], sources=set(tos + tos1))
def _YIELD_VALUE_handler(self):
"""
As of now, YIELD_VALUE is not handled specially. In the future, we may add a
Yield event and treat it specially in the trace graph.
"""
self._pop()
# When the __next__ method is called on a generator, and the execution resumes
# from where yield left off, None or the argument of gen.send() is put onto
# the value stack. YIELD_VALUE is always followed by a POP_TOP, which then pops
# this value.
# See https://github.com/python/cpython/blob/master/Objects/genobject.c#L197
# and https://www.cnblogs.com/coder2012/p/4990834.html for a code walk through.
self._push(_placeholder)
def _YIELD_FROM_handler(self):
self._pop()
def _SETUP_ANNOTATIONS_handler(self):
pass
def _IMPORT_STAR_handler(self):
# It's impossible to know what names are loaded, and we don't really care.
self._pop()
@emit_event
def _STORE_NAME_handler(self, instr):
binding = EventInfo(
type=Binding, target=Symbol(instr.argval), sources=set(self.tos)
)
self._pop()
return binding
@emit_event
def _DELETE_NAME_handler(self, instr, exc_info):
if self._instruction_successfully_executed(exc_info, "DELETE_NAME"):
return EventInfo(type=Deletion, target=Symbol(instr.argrepr))
def _UNPACK_SEQUENCE_handler(self, instr, exc_info):
seq = self._pop()
if self._instruction_successfully_executed(exc_info, "UNPACK_SEQUENCE"):
for _ in range(instr.arg):
self._push(seq)
def _UNPACK_EX_handler(self, instr, exc_info):
assert instr.arg <= 65535 # At most one extended arg.
higher_byte, lower_byte = instr.arg >> 8, instr.arg & 0x00FF
number_of_receivers = lower_byte + 1 + higher_byte
seq = self._pop()
if self._instruction_successfully_executed(exc_info, "UNPACK_EX"):
for _ in range(number_of_receivers):
self._push(seq)
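    # UNPACK_EX arg encoding (per the CPython dis docs): the low byte counts
    # values before the starred target and the high byte counts values after it.
    # E.g. `a, *b, c = seq` has arg == 0x0101, hence 1 + 1 + 1 == 3 receivers.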
@emit_event
def _STORE_ATTR_handler(self, exc_info):
tos, tos1 = self._pop(2)
assert len(tos) == 1
if self._instruction_successfully_executed(exc_info, "STORE_ATTR"):
return EventInfo(type=Mutation, target=tos[0], sources=set(tos + tos1))
@emit_event
def _DELETE_ATTR_handler(self, exc_info):
tos = self._pop()
assert len(tos) == 1
if self._instruction_successfully_executed(exc_info, "DELETE_ATTR"):
return EventInfo(type=Mutation, target=tos[0], sources=set(tos))
@emit_event
def _STORE_GLOBAL_handler(self, instr):
return self._STORE_NAME_handler(instr)
@emit_event
def _DELETE_GLOBAL_handler(self, instr, exc_info):
if self._instruction_successfully_executed(exc_info, "DELETE_GLOBAL"):
return EventInfo(type=Deletion, target=Symbol(instr.argrepr))
def _BUILD_TUPLE_handler(self, instr):
self._pop_n_push_one(instr.arg)
def _BUILD_LIST_handler(self, instr):
self._BUILD_TUPLE_handler(instr)
def _BUILD_SET_handler(self, instr, exc_info):
items = self._pop(instr.arg)
if self._instruction_successfully_executed(exc_info, "BUILD_SET"):
self._push(utils.flatten(items))
def _BUILD_MAP_handler(self, instr, exc_info):
items = self._pop(instr.arg * 2)
if self._instruction_successfully_executed(exc_info, "BUILD_MAP"):
self._push(utils.flatten(items))
def _BUILD_CONST_KEY_MAP_handler(self, instr):
self._pop_n_push_one(instr.arg + 1)
def _BUILD_STRING_handler(self, instr):
self._pop_n_push_one(instr.arg)
def _BUILD_TUPLE_UNPACK_handler(self, instr, exc_info):
items = self._pop(instr.arg)
if self._instruction_successfully_executed(exc_info, "BUILD_TUPLE_UNPACK"):
self._push(utils.flatten(items))
def _BUILD_TUPLE_UNPACK_WITH_CALL_handler(self, instr, exc_info):
items = self._pop(instr.arg)
if self._instruction_successfully_executed(
exc_info, "BUILD_TUPLE_UNPACK_WITH_CALL"
):
self._push(utils.flatten(items))
def _BUILD_LIST_UNPACK_handler(self, instr, exc_info):
items = self._pop(instr.arg)
if self._instruction_successfully_executed(exc_info, "BUILD_LIST_UNPACK"):
self._push(utils.flatten(items))
def _BUILD_SET_UNPACK_handler(self, instr, exc_info):
items = self._pop(instr.arg)
if self._instruction_successfully_executed(exc_info, "BUILD_SET_UNPACK"):
self._push(utils.flatten(items))
def _BUILD_MAP_UNPACK_handler(self, instr, exc_info):
items = self._pop(instr.arg)
if self._instruction_successfully_executed(exc_info, "BUILD_MAP_UNPACK"):
self._push(utils.flatten(items))
def _BUILD_MAP_UNPACK_WITH_CALL_handler(self, instr, exc_info):
items = self._pop(instr.arg)
if self._instruction_successfully_executed(
exc_info, "BUILD_MAP_UNPACK_WITH_CALL"
):
self._push(utils.flatten(items))
def _LOAD_ATTR_handler(self, exc_info):
"""Event the behavior of LOAD_ATTR.
The effect of LOAD_ATTR is: Replaces TOS with getattr(TOS, co_names[namei]).
However, this will make back tracing hard, because it eliminates the information
about the source object, where the attribute originates from.
Example: a = b.x
0 LOAD_NAME 0 (b)
2 LOAD_ATTR 1 (x)
4 STORE_NAME 2 (a)
6 LOAD_CONST 0 (None)
8 RETURN_VALUE
Tos was 'b' (in our customized version of LOAD_NAME), then LOAD_ATTR replaces it
        with the value of `b.x`. This causes 'b' to be lost from the value stack, but we
need it to know that b caused a to change.
Example: a.x.y = 1
0 LOAD_CONST 0 (1)
2 LOAD_NAME 0 (a)
4 LOAD_ATTR 1 (x)
6 STORE_ATTR 2 (y)
8 LOAD_CONST 1 (None)
10 RETURN_VALUE
Tos was 'a', then LOAD_ATTR replaces it with the value of `a.x`. This caused 'a'
        to be lost from the value stack, but we need it to know that a's value has changed.
        In both examples, we can see that the value of the attribute doesn't mean much
to us, so it's fine if we don't store it. But the "name" of the source object is
vital, so we need to keep it. Thus, the handler just does nothing.
"""
self._instruction_successfully_executed(exc_info, "LOAD_ATTR")
def _COMPARE_OP_handler(self, exc_info):
return self._BINARY_operation_handler(exc_info)
def _IMPORT_NAME_handler(self, exc_info):
self._pop(2)
if self._instruction_successfully_executed(exc_info, "IMPORT_NAME"):
self._push(_placeholder)
def _IMPORT_FROM_handler(self, exc_info):
if self._instruction_successfully_executed(exc_info, "IMPORT_FROM"):
self._push(_placeholder)
def _LOAD_CONST_handler(self):
self._push(_placeholder)
def _LOAD_NAME_handler(self, instr, frame, exc_info):
if self._instruction_successfully_executed(exc_info, "LOAD_NAME"):
self._push(self._fetch_value_for_load_instruction(instr.argrepr, frame))
def _LOAD_GLOBAL_handler(self, instr, frame, exc_info):
if self._instruction_successfully_executed(exc_info, "LOAD_GLOBAL"):
self._push(self._fetch_value_for_load_instruction(instr.argrepr, frame))
def _LOAD_FAST_handler(self, instr, frame, exc_info):
if self._instruction_successfully_executed(exc_info, "LOAD_FAST"):
self._push(self._fetch_value_for_load_instruction(instr.argrepr, frame))
def _fetch_value_for_load_instruction(self, name, frame):
"""Transforms the value to be loaded onto value stack based on their types.
The rules are:
1. If the value is an exception class or instance, use the real value
2. If the value is a built-in object, or tracer, or module, ignore it and stores
a placeholder instead.
3. Others, most likely a variable from user's code, stores the identifier.
"""
val = utils.get_value_from_frame(name, frame)
if utils.is_exception(val):
# Keeps exceptions as they are so that they can be identified.
return val
if utils.should_ignore_event(target=name, value=val, frame=frame):
return []
return name
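    # Sketch of the three rules (frame contents assumed):
    #   _fetch_value_for_load_instruction("e", frame)      # e = ValueError() -> the instance
    #   _fetch_value_for_load_instruction("print", frame)  # builtin -> [] (ignored)
    #   _fetch_value_for_load_instruction("x", frame)      # user variable -> "x"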
@emit_event
def _STORE_FAST_handler(self, instr):
return self._STORE_NAME_handler(instr)
def _LOAD_CLOSURE_handler(self, instr, frame, exc_info):
if self._instruction_successfully_executed(exc_info, "LOAD_CLOSURE"):
# It is possible that the name does not exist in the frame. Example:
#
            # class Bar(Foo):  # LOAD_CLOSURE, but the name `Bar` does not exist in the frame yet.
# def __init__(self):
# super(Bar, self).__init__()
#
# In this case, we ignore the value cause it doesn't matter.
try:
value = self._fetch_value_for_load_instruction(instr.argrepr, frame)
except AssertionError:
value = []
self._push(value)
def _LOAD_DEREF_handler(self, instr, frame, exc_info):
if self._instruction_successfully_executed(exc_info, "LOAD_DEREF"):
self._push(self._fetch_value_for_load_instruction(instr.argrepr, frame))
@emit_event
def _STORE_DEREF_handler(self, instr):
return self._STORE_NAME_handler(instr)
@emit_event
def _DELETE_DEREF_handler(self, instr, exc_info):
if self._instruction_successfully_executed(exc_info, "DELETE_DEREF"):
return EventInfo(type=Deletion, target=Symbol(instr.argrepr))
@emit_event
def _DELETE_FAST_handler(self, instr, exc_info):
if self._instruction_successfully_executed(exc_info, "DELETE_FAST"):
            return EventInfo(type=Deletion, target=Symbol(instr.argrepr))
    def _LOAD_METHOD_handler(self, exc_info):
if self._instruction_successfully_executed(exc_info, "LOAD_METHOD"):
# NULL should be pushed if method lookup failed, but this would lead to an
# exception anyway, and should be very rare, so ignoring it.
# See https://docs.python.org/3/library/dis.html#opcode-LOAD_METHOD.
self._push(self.tos)
def _push_arguments_or_exception(self, callable_obj, args):
if utils.is_exception_class(callable_obj):
# In `raise IndexError()`
# We need to make sure the result of `IndexError()` is an exception inst,
# so that _do_raise sees the correct value type.
self._push(callable_obj())
else:
# Return value is a list containing the callable and all arguments
self._push(utils.flatten(callable_obj, args))
def _CALL_FUNCTION_handler(self, instr, exc_info):
args = self._pop(instr.arg)
callable_obj = self._pop()
if self._instruction_successfully_executed(exc_info, "CALL_FUNCTION"):
if utils.is_exception(args):
args = (args,)
self._push_arguments_or_exception(callable_obj, args)
def _CALL_FUNCTION_KW_handler(self, instr: Instruction, exc_info):
args_num = instr.arg
_ = self._pop() # A tuple of keyword argument names.
args = self._pop(args_num)
callable_obj = self._pop()
if self._instruction_successfully_executed(exc_info, "CALL_FUNCTION_KW"):
if utils.is_exception(args):
args = (args,)
self._push_arguments_or_exception(callable_obj, args)
def _CALL_FUNCTION_EX_handler(self, instr, exc_info):
kwargs = self._pop() if (instr.arg & 0x01) else []
args = self._pop()
args.extend(kwargs)
callable_obj = self._pop()
if self._instruction_successfully_executed(exc_info, "CALL_FUNCTION_EX"):
if utils.is_exception(args):
args = (args,)
self._push_arguments_or_exception(callable_obj, args)
@emit_event
def _CALL_METHOD_handler(self, instr, exc_info):
args = self._pop(instr.arg)
inst_or_callable = self._pop()
method_or_null = self._pop() # method or NULL
if self._instruction_successfully_executed(exc_info, "CALL_METHOD"):
if utils.is_exception(args):
args = (args,)
self._push(utils.flatten(inst_or_callable, method_or_null, *args))
# The real callable can be omitted for various reasons.
# See the _fetch_value_for_load method.
if not inst_or_callable:
return
# Actually, there could be multiple identifiers in inst_or_callable, but right
# now we'll assume there's just one, and improve it as part of fine-grained
# symbol tracing (main feature of version 3).
return EventInfo(
type=Mutation,
target=inst_or_callable[0],
sources=set(utils.flatten(args, inst_or_callable)),
)
def _MAKE_FUNCTION_handler(self, instr):
function_obj = []
function_obj.extend(self._pop()) # qualified_name
function_obj.extend(self._pop()) # code_obj
if instr.argval & 0x08:
function_obj.extend(self._pop()) # closure
if instr.argval & 0x04:
function_obj.extend(self._pop()) # annotations
if instr.argval & 0x02:
function_obj.extend(self._pop()) # kwargs defaults
if instr.argval & 0x01:
function_obj.extend(self._pop()) # args defaults
self._push(function_obj)
def _BUILD_SLICE_handler(self, instr):
if instr.arg == 2:
self._pop_n_push_one(2)
elif instr.arg == 3:
self._pop_n_push_one(3)
def _EXTENDED_ARG_handler(self, instr):
# Instruction.arg already contains the final value of arg, so this is a no op.
pass
def _FORMAT_VALUE_handler(self, instr, exc_info):
# See https://git.io/JvjTg to learn what this opcode is doing.
elements = []
if (instr.arg & 0x04) == 0x04:
elements.extend(self._pop())
elements.extend(self._pop())
if self._instruction_successfully_executed(exc_info, "FORMAT_VALUE"):
self._push(elements)
def _JUMP_FORWARD_handler(self, instr, jumped):
pass
@emit_event
def _POP_JUMP_IF_TRUE_handler(self, instr, jumped):
self._pop()
if jumped:
return self._return_jump_back_event_if_exists(instr)
@emit_event
def _POP_JUMP_IF_FALSE_handler(self, instr, jumped):
self._pop()
if jumped:
return self._return_jump_back_event_if_exists(instr)
@emit_event
def _JUMP_IF_TRUE_OR_POP_handler(self, instr, jumped):
if not jumped:
self._pop()
else:
return self._return_jump_back_event_if_exists(instr)
@emit_event
def _JUMP_IF_FALSE_OR_POP_handler(self, instr, jumped):
if not jumped:
self._pop()
else:
return self._return_jump_back_event_if_exists(instr)
@emit_event
def _JUMP_ABSOLUTE_handler(self, instr):
return self._return_jump_back_event_if_exists(instr)
@emit_event
def _GET_ITER_handler(self, instr, exc_info):
if self._instruction_successfully_executed(exc_info, "GET_ITER"):
return self._return_jump_back_event_if_exists(instr)
@emit_event
def _GET_YIELD_FROM_ITER_handler(self, instr):
"""Since the handling of generators is ad-hoc, for now we didn't handle
exceptions. We'll add it as part of a more well-thought-out generator
implementation.
"""
return self._return_jump_back_event_if_exists(instr)
@emit_event
def _FOR_ITER_handler(self, instr, jumped, exc_info: ExceptionInfo):
# If it's StopIteration, we assume it's OK.
if exc_info is None or exc_info.type is StopIteration:
if jumped:
self._pop()
else:
self._push(self.tos)
return self._return_jump_back_event_if_exists(instr)
else:
self._instruction_successfully_executed(exc_info, "FOR_ITER")
def _LOAD_BUILD_CLASS_handler(self):
self._push(_placeholder) # builtins.__build_class__()
def _SETUP_WITH_handler(self, exc_info):
if self._instruction_successfully_executed(exc_info, "SETUP_WITH"):
enter_func = self.tos
            # We ignore the operation that replaces the context manager on tos
            # with __exit__, because it is a no-op in our stack.
self._push_block(BlockType.SETUP_FINALLY)
self._push(enter_func) # The return value of __enter__()
def _return_jump_back_event_if_exists(self, instr):
jump_target = utils.get_jump_target_or_none(instr)
if jump_target is not None and jump_target < instr.offset:
return EventInfo(type=JumpBackToLoopStart, jump_target=jump_target)
def _unwind_except_handler(self, b: Block):
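        # An EXCEPT_HANDLER block keeps an extra (type, value, traceback)
        # triple on the stack, hence the `b_level + 3` bound below.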
assert self.stack_level >= b.b_level + 3
while self.stack_level > b.b_level + 3:
self._pop()
exc_type = self._pop()
value = self._pop()
tb = self._pop()
self.last_exception = ExceptionInfo(type=exc_type, value=value, traceback=tb)
def _do_raise(self, exc, cause) -> bool:
# See https://github.com/nedbat/byterun/blob/master/byterun/pyvm2.py#L806
if exc is None: # reraise
exc_type, val, tb = (
self.last_exception.type,
self.last_exception.value,
self.last_exception.traceback,
)
return exc_type is not None
elif type(exc) == type:
# As in `raise ValueError`
exc_type = exc
val = exc() # Make an instance.
elif isinstance(exc, BaseException):
# As in `raise ValueError('foo')`
val = exc
exc_type = type(exc)
else:
return False # error
# If you reach this point, you're guaranteed that
# val is a valid exception instance and exc_type is its class.
# Now do a similar thing for the cause, if present.
if cause:
if type(cause) == type:
cause = cause()
elif not isinstance(cause, BaseException):
return False
val.__cause__ = cause
self.last_exception = ExceptionInfo(
type=exc_type, value=val, traceback=val.__traceback__
)
return False
class Py37ValueStack(BaseValueStack):
"""Value stack for Python 3.7."""
def __init__(self):
self.why = Why.UNINITIALIZED
super().__init__()
def _store_exception(self, exc_info: ExceptionInfo):
"""When an exception is raised implicitly (aka not by calling `raise`), use
        this method to propagate it as self.last_exception.
"""
self.last_exception = exc_info
self.why = Why.EXCEPTION
self._fast_block_end()
def _WITH_CLEANUP_FINISH_handler(self):
# For __exit__, returning a true value from this method will cause the with
# statement to suppress the exception and continue execution with the statement
# immediately following the with statement. Otherwise the exception continues
# propagating after this method has finished executing.
#
# res represents the return value, but there's no way CB can know its value.
        # So we just assume res is true whenever there is an exception: CB does
        # not support unhandled exceptions, so it's safe to assume that if an
        # exception is raised in `with`, it is properly handled, e.g.
# with pytest.raises(TypeError)
res = self._pop()
exc = self._pop()
if res and utils.is_exception(exc):
self._push(Why.SILENCED)
def _RETURN_VALUE_handler(self):
self.return_value = self._pop()
self.why = Why.RETURN
self._fast_block_end()
def _YIELD_VALUE_handler(self):
super()._YIELD_VALUE_handler()
self.why = Why.YIELD
def _YIELD_FROM_handler(self):
super()._YIELD_FROM_handler()
self.why = Why.YIELD
def _SETUP_LOOP_handler(self):
self._push_block(BlockType.SETUP_LOOP)
def _SETUP_EXCEPT_handler(self):
self._push_block(BlockType.SETUP_EXCEPT)
def _SETUP_FINALLY_handler(self):
self._push_block(BlockType.SETUP_FINALLY)
def _POP_BLOCK_handler(self):
self._unwind_block(self._pop_block())
def _BREAK_LOOP_handler(self):
self.why = Why.BREAK
self._fast_block_end()
def _CONTINUE_LOOP_handler(self, instr):
self.return_value = instr.arg
assert self.return_value is not NULL
self.why = Why.CONTINUE
self._fast_block_end()
def _POP_EXCEPT_handler(self):
block = self._pop_block()
assert block.b_type is BlockType.EXCEPT_HANDLER
self._unwind_except_handler(block)
def _RAISE_VARARGS_handler(self, instr):
        # Since FrameLogger.handle_exception excludes implicit exceptions raised
        # by executing RAISE_VARARGS and RERAISE, Cyberbrain can't handle
        # exceptions caused by `raise 1` (a TypeError). But since this should be
        # super rare, it should be fine.
cause = exc = None
if instr.arg == 2:
cause, exc = self._pop(2)
elif instr.arg == 1:
exc = self._pop()
# In CPython's source code, it uses the result of _do_raise to decide whether to
# raise an exception, then execute exception_unwind. Our value stack doesn't
# need to actually raise an exception. If _do_raise returns false, it breaks
# out of the switch clause, then jumps to label "error", which is above
# _fast_block_end. So _fast_block_end will be executed anyway.
self._do_raise(exc, cause)
self.why = Why.EXCEPTION
self._fast_block_end()
# noinspection DuplicatedCode
def _WITH_CLEANUP_START_handler(self, exc_info):
exc = self.tos
exit_func: any
if not exc: # Checks if tos is None, which in our stack, is []
exit_func = self.stack.pop(-2)
elif isinstance(exc, Why):
if exc in {Why.RETURN, Why.CONTINUE}:
exit_func = self.stack.pop(-2) # why, ret_val, __exit__
else:
exit_func = self.stack.pop(-1) # why, __exit__
elif utils.is_exception_class(exc):
w, v, u = self._pop(3)
tp2, exc2, tb2 = self._pop(3)
exit_func = self._pop()
self._push(tp2, exc2, tb2)
self._push(None)
self._push(w, v, u)
block = self.block_stack.tos
assert block.b_type == BlockType.EXCEPT_HANDLER
block.b_level -= 1
else:
assert False, f"Unrecognized type: {exc}"
if self._instruction_successfully_executed(exc_info, "WITH_CLEANUP_START"):
self._push(exc)
self._push(exit_func)
def _END_FINALLY_handler(self, instr):
status = self._pop()
if isinstance(status, Why):
self.why = status
assert self.why not in {Why.YIELD, Why.EXCEPTION}
if self.why in {Why.RETURN, Why.CONTINUE}:
self.return_value = self._pop()
if self.why is Why.SILENCED:
block = self._pop_block()
assert block.b_type is BlockType.EXCEPT_HANDLER
self._unwind_except_handler(block)
self.why = Why.NOT
return
self._fast_block_end()
elif utils.is_exception_class(status):
exc_type = status
value = self._pop()
tb = self._pop()
self.last_exception = ExceptionInfo(
type=exc_type, value=value, traceback=tb
)
self.why = Why.EXCEPTION
self._fast_block_end()
assert status is not None
def _fast_block_end(self):
assert self.why is not Why.NOT
while self.block_stack.is_not_empty():
block = self.block_stack.tos
assert self.why is not Why.YIELD
if block.b_type is BlockType.SETUP_LOOP and self.why is Why.CONTINUE:
self.why = Why.NOT
break
self.block_stack.pop()
if block.b_type is BlockType.EXCEPT_HANDLER:
self._unwind_except_handler(block)
continue
self._unwind_block(block)
if block.b_type is BlockType.SETUP_LOOP and self.why is Why.BREAK:
self.why = Why.NOT
break
if self.why is Why.EXCEPTION and (
block.b_type in {BlockType.SETUP_EXCEPT, BlockType.SETUP_FINALLY}
):
self._push_block(BlockType.EXCEPT_HANDLER)
self._push(self.last_exception.traceback)
self._push(self.last_exception.value)
if self.last_exception.type is not NULL:
self._push(self.last_exception.type)
else:
self._push(None)
exc_type, value, tb = (
self.last_exception.type,
self.last_exception.value,
self.last_exception.traceback,
)
# PyErr_NormalizeException is ignored, add it if needed.
self._push(tb, value, exc_type)
self.why = Why.NOT
break
if block.b_type is BlockType.SETUP_FINALLY:
if self.why in {Why.RETURN, Why.CONTINUE}:
self._push(self.return_value)
self._push(self.why)
self.why = Why.NOT
break
class Py38ValueStack(Py37ValueStack):
"""Value stack for Python 3.8.
    Note that this class inherits from Py37ValueStack, and not GeneralValueStack.
    This allows us to only override the methods that have changed in 3.8.
"""
def _store_exception(self, exc_info: ExceptionInfo):
"""When an exception is raised implicitly (aka not by calling `raise`), use
        this method to propagate it as self.last_exception.
TODO: Every instruction handler that may raise exceptions should call this
method.
"""
self.last_exception = exc_info
self._exception_unwind()
def _WITH_CLEANUP_FINISH_handler(self):
# For __exit__, returning a true value from this method will cause the with
# statement to suppress the exception and continue execution with the statement
# immediately following the with statement. Otherwise the exception continues
# propagating after this method has finished executing.
#
# res represents the return value, but there's no way CB can know its value.
        # So we just assume res is true whenever there is an exception: CB does
        # not support unhandled exceptions, so it's safe to assume that if an
        # exception is raised in `with`, it is properly handled, e.g.
# with pytest.raises(TypeError)
res = self._pop()
exc = self._pop()
if res and utils.is_exception(exc):
block = self.block_stack.pop()
assert block.b_type == BlockType.EXCEPT_HANDLER
self._unwind_except_handler(block)
self._push(NULL)
def _RETURN_VALUE_handler(self):
self.return_value = self._pop()
# TODO: add exit_returning
def _SETUP_FINALLY_handler(self):
self._push_block(b_type=BlockType.SETUP_FINALLY)
def _RAISE_VARARGS_handler(self, instr):
cause = exc = None
if instr.arg == 2:
cause, exc = self._pop(2)
elif instr.arg == 1:
exc = self._pop()
# In CPython's source code, it uses the result of _do_raise to decide whether to
# raise an exception, then execute exception_unwind. Our value stack doesn't
# need to actually raise an exception. If _do_raise returns false, it breaks
# out of the switch clause, then jumps to label "error", which is above
# _exception_unwind. So _exception_unwind will be executed anyway.
self._do_raise(exc, cause)
self._exception_unwind()
def _POP_EXCEPT_handler(self):
block = self._pop_block()
assert block.b_type == BlockType.EXCEPT_HANDLER
assert block.b_level + 3 <= self.stack_level <= block.b_level + 4
exc_type = self._pop()
value = self._pop()
tb = self._pop()
self.last_exception = ExceptionInfo(type=exc_type, value=value, traceback=tb)
def _POP_FINALLY_handler(self, instr):
preserve_tos = instr.arg
if preserve_tos:
res = self._pop()
if self.tos is NULL or isinstance(self.tos, int):
_ = self._pop()
else:
_, _, _ = self._pop(3)
block = self._pop_block()
assert block.b_type is BlockType.EXCEPT_HANDLER
assert self.stack_level == block.b_level + 3
exc_type, value, tb = self._pop(3)
self.last_exception = ExceptionInfo(
type=exc_type, value=value, traceback=tb
)
if preserve_tos:
self._push(res)
def _POP_BLOCK_handler(self):
self._pop_block()
def _BEGIN_FINALLY_handler(self):
self._push(NULL)
def _END_FINALLY_handler(self, instr):
if self.tos is NULL or isinstance(self.tos, int):
self._pop()
elif utils.is_exception_class(self.tos):
exc_type = self._pop()
value = self._pop()
tb = self._pop()
self.last_exception = ExceptionInfo(
type=exc_type, value=value, traceback=tb
)
self._exception_unwind()
else:
raise ValueStackException(f"TOS has wrong value: {self.tos}")
def _WITH_CLEANUP_START_handler(self, exc_info):
exc = self.tos
if self.tos == NULL: # Pushed by BEGIN_FINALLY
exit_func = self.stack.pop(-2)
else:
w, v, u = self._pop(3)
tp2, exc2, tb2 = self._pop(3)
exit_func = self._pop()
self._push(tp2, exc2, tb2)
self._push(None)
self._push(w, v, u)
block = self.block_stack.tos
assert block.b_type == BlockType.EXCEPT_HANDLER
block.b_level -= 1
if self._instruction_successfully_executed(exc_info, "WITH_CLEANUP_START"):
self._push(exc)
self._push(exit_func)
def _exception_unwind(self):
while self.block_stack.is_not_empty():
block = self.block_stack.pop()
if block.b_type is BlockType.EXCEPT_HANDLER:
self._unwind_except_handler(block)
continue
self._unwind_block(block)
if block.b_type is BlockType.SETUP_FINALLY:
self._push_block(b_type=BlockType.EXCEPT_HANDLER)
exc_type, value, tb = (
self.last_exception.type,
self.last_exception.value,
self.last_exception.traceback,
)
self._push(tb, value, exc_type)
self._push(tb, value, exc_type)
break # goto main_loop.
class Py39ValueStack(Py38ValueStack):
def _JUMP_IF_NOT_EXC_MATCH_handler(self):
self._pop(2)
def _CONTAINS_OP_handler(self, exc_info):
self._BINARY_operation_handler(exc_info)
def _IS_OP_handler(self, exc_info):
self._BINARY_operation_handler(exc_info)
def _LOAD_ASSERTION_ERROR_handler(self):
self._push(AssertionError())
def _LIST_TO_TUPLE_handler(self, instr):
pass
def _LIST_EXTEND_handler(self, exc_info):
# list.extend(TOS1[-i], TOS), which essentially merges tos and tos1.
items = self._pop(2)
if self._instruction_successfully_executed(exc_info, "LIST_EXTEND"):
self._push(utils.flatten(items))
def _SET_UPDATE_handler(self, exc_info):
        # set.update(TOS1[-i], TOS), which essentially merges tos and tos1.
items = self._pop(2)
if self._instruction_successfully_executed(exc_info, "SET_UPDATE"):
self._push(utils.flatten(items))
def _DICT_UPDATE_handler(self, exc_info):
        # dict.update(TOS1[-i], TOS), which essentially merges tos and tos1.
items = self._pop(2)
if self._instruction_successfully_executed(exc_info, "DICT_UPDATE"):
self._push(utils.flatten(items))
def _DICT_MERGE_handler(self, exc_info):
items = self._pop(2)
if self._instruction_successfully_executed(exc_info, "DICT_MERGE"):
self._push(utils.flatten(items))
def _RERAISE_handler(self):
exc_type = self._pop()
value = self._pop()
tb = self._pop()
assert utils.is_exception_class(exc_type)
self.last_exception = ExceptionInfo(type=exc_type, value=value, traceback=tb)
self._exception_unwind()
def _WITH_EXCEPT_START_handler(self):
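        # In CPython 3.9 the __exit__ callable sits seven entries below TOS,
        # underneath two 3-item exception infos, hence the -7 index.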
exit_func = self.stack[-7]
self._push(exit_func)
def create_value_stack():
version_info = sys.version_info[:2]
if version_info == (3, 7):
return Py37ValueStack()
elif version_info == (3, 8):
return Py38ValueStack()
elif version_info == (3, 9):
return Py39ValueStack()
else:
raise Exception(f"Unsupported Python version: {sys.version}")
| rn EventInfo(type=Deletion, target=Symbol(instr.argrepr))
|
nilcheck_test.go | package main
import (
"go/ast"
"go/parser"
"go/token"
"testing"
)
func TestWalk(t *testing.T) {
for _, test := range []struct {
input string
// nilerrors []string
}{
{
input: `
package main
func main() {
var f func()
f()
}
`,
},
} {
fset := token.NewFileSet() // positions are relative to fset
f, err := parser.ParseFile(fset, "", test.input, 0)
if err != nil {
t.Fatalf("err: %s, failed to parse: %s", err, test.input)
}
v := newVisitor(fset, test.input)
ast.Walk(v, f)
t.Logf("%s\n\n%s", test.input, v.idents)
for _, errLine := range v.uncheckedNil {
t.Logf("Pos: %s, ErrLine: %s\n", errLine.pos, errLine.line)
}
//ast.Print(fset, f)
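		// This unconditional Fatalf makes `go test` always print the t.Logf output above.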
t.Fatalf("fatal")
}
}
policy.rs | // Copyright 2019 The Fuchsia Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
use crate::{
common::{App, CheckOptions, CheckTiming, ProtocolState, UpdateCheckSchedule},
installer::Plan,
request_builder::RequestParams,
time::{ComplexTime, TimeSource},
};
use futures::future::BoxFuture;
#[cfg(test)]
mod mock;
#[cfg(test)]
pub use mock::MockPolicyEngine;
mod stub;
pub use stub::StubPolicy;
pub use stub::StubPolicyEngine;
/// Data about the local system that's needed to fulfill Policy questions
#[derive(Clone, Debug)]
pub struct PolicyData {
/// The current time at the start of the update
pub current_time: ComplexTime,
}
impl PolicyData {
/// Create and return a new builder for PolicyData.
pub fn builder() -> PolicyDataBuilder {
PolicyDataBuilder::default()
}
}
/// The PolicyDataBuilder uses the typestate pattern. The builder cannot be built until the time
/// has been specified (which changes the type of the builder).
#[derive(Debug, Default)]
pub struct PolicyDataBuilder;
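// Typestate flow (exercised by the tests at the bottom of this file):
//   PolicyData::builder()                 -> PolicyDataBuilder
//     .time(t) or .use_timesource(&src)   -> PolicyDataBuilderWithTime
//     .build()                            -> PolicyData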
/// The PolicyDataBuilder, once it has time set.
pub struct PolicyDataBuilderWithTime {
current_time: ComplexTime,
}
impl PolicyDataBuilder {
/// Use a `TimeSource` to set the `current_time`.
pub fn use_timesource<T: TimeSource>(self, timesource: &T) -> PolicyDataBuilderWithTime {
PolicyDataBuilderWithTime { current_time: timesource.now() }
}
/// Set the `current_time` explicitly from a given ComplexTime.
pub fn time(self, current_time: ComplexTime) -> PolicyDataBuilderWithTime {
PolicyDataBuilderWithTime { current_time }
}
}
/// These are the operations that can be performed once the time has been set.
impl PolicyDataBuilderWithTime {
/// Construct the PolicyData
pub fn build(self) -> PolicyData {
PolicyData { current_time: self.current_time }
}
}
/// Reasons why a check can/cannot be performed at this time
#[derive(Clone, Debug, PartialEq)]
pub enum CheckDecision {
/// positive responses
Ok(RequestParams),
/// but with caveats:
OkUpdateDeferred(RequestParams),
/// negative responses
TooSoon,
ThrottledByPolicy,
DeniedByPolicy,
}
#[cfg(test)]
impl Default for CheckDecision {
fn default() -> Self {
CheckDecision::Ok(RequestParams::default())
}
}
/// Reasons why an update can/cannot be performed at this time
#[derive(Clone, Debug, PartialEq)]
pub enum UpdateDecision {
/// Update can be performed.
Ok,
/// Update is deferred by Policy.
DeferredByPolicy,
/// Update is rejected by Policy.
DeniedByPolicy,
}
#[cfg(test)]
impl Default for UpdateDecision {
fn default() -> Self {
UpdateDecision::Ok
}
}
/// The policy implementation itself
pub trait Policy {
type UpdatePolicyData;
type RebootPolicyData;
type UpdateCanStartPolicyData;
/// When should the next update happen?
fn compute_next_update_time(
policy_data: &Self::UpdatePolicyData,
apps: &[App],
scheduling: &UpdateCheckSchedule,
protocol_state: &ProtocolState,
) -> CheckTiming;
/// Given the current State, and the current PolicyData, is an update check
/// allowed at this time. A CheckDecision is used to return the reasoning, as in
/// some cases, instead of an update check, the SM will instead notify Omaha that
/// it would perform an update, but instead just tell the device whether or not
/// an update is available.
fn update_check_allowed(
policy_data: &Self::UpdatePolicyData,
apps: &[App],
scheduling: &UpdateCheckSchedule,
protocol_state: &ProtocolState,
check_options: &CheckOptions,
) -> CheckDecision;
/// Given the current State, the current PolicyData, can the proposed InstallPlan
/// be executed at this time.
fn update_can_start(
policy_data: &Self::UpdateCanStartPolicyData,
proposed_install_plan: &impl Plan,
) -> UpdateDecision;
/// Given the current PolicyData, is reboot allowed right now.
fn reboot_allowed(policy_data: &Self::RebootPolicyData, check_options: &CheckOptions) -> bool;
/// Given the InstallPlan, is reboot needed after update has been installed.
fn reboot_needed(install_plan: &impl Plan) -> bool;
}
pub trait PolicyEngine {
type TimeSource: TimeSource + Clone;
type InstallResult;
/// Provides the time source used by the PolicyEngine to the state machine.
fn time_source(&self) -> &Self::TimeSource;
/// When should the next update happen?
fn compute_next_update_time(
&mut self,
apps: &[App],
scheduling: &UpdateCheckSchedule,
protocol_state: &ProtocolState,
) -> BoxFuture<'_, CheckTiming>;
/// Given the context provided by State, does the Policy allow an update check to
/// happen at this time? This should be consistent with the compute_next_update_time
/// so that during background updates, the result of compute_next_update_time will
/// result in a CheckDecision::Ok() value from this function.
fn update_check_allowed(
&mut self,
apps: &[App],
scheduling: &UpdateCheckSchedule,
protocol_state: &ProtocolState,
check_options: &CheckOptions,
) -> BoxFuture<'_, CheckDecision>;
/// Given the current State, the current PolicyData, can the proposed InstallPlan
/// be executed at this time.
fn update_can_start<'p>(
&mut self,
proposed_install_plan: &'p impl Plan,
) -> BoxFuture<'p, UpdateDecision>;
/// Is reboot allowed right now.
fn reboot_allowed(
&mut self,
check_options: &CheckOptions,
install_result: &Self::InstallResult,
) -> BoxFuture<'_, bool>;
/// Given the InstallPlan, is reboot needed after update has been installed.
fn reboot_needed(&mut self, install_plan: &impl Plan) -> BoxFuture<'_, bool>;
}
#[cfg(test)]
mod test {
use super::*;
use crate::time::MockTimeSource;
#[test]
pub fn test_policy_data_builder_with_system_time() {
let current_time = MockTimeSource::new_from_now().now();
let policy_data = PolicyData::builder().time(current_time).build();
assert_eq!(policy_data.current_time, current_time);
}
#[test]
pub fn test_policy_data_builder_with_clock() {
let source = MockTimeSource::new_from_now();
let current_time = source.now();
let policy_data = PolicyData::builder().use_timesource(&source).build();
assert_eq!(policy_data.current_time, current_time);
}
}
zip_installer.go | // Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
// or more contributor license agreements. Licensed under the Elastic License;
// you may not use this file except in compliance with the Elastic License.
package zip
import (
"archive/zip"
"fmt"
"os"
"os/exec"
"path/filepath"
"github.com/elastic/beats/v7/x-pack/agent/pkg/agent/errors"
"github.com/elastic/beats/v7/x-pack/agent/pkg/artifact"
)
const (
// powershellCmdTemplate uses elevated execution policy to avoid failure in case script execution is disabled on the system
powershellCmdTemplate = `set-executionpolicy unrestricted; cd %s; .\install-service-%s.ps1`
)
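// With installDir `C:\install` and programName `filebeat`, the template above
// renders to (illustrative values):
//   set-executionpolicy unrestricted; cd C:\install; .\install-service-filebeat.ps1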
// Installer or zip packages
type Installer struct {
config *artifact.Config
}
// NewInstaller creates an installer able to install zip packages
func NewInstaller(config *artifact.Config) (*Installer, error) {
return &Installer{
config: config,
}, nil
}
// Install performs installation of program in a specific version.
// It expects package to be already downloaded.
func (i *Installer) Install(programName, version, installDir string) error {
artifactPath, err := artifact.GetArtifactPath(programName, version, i.config.OS(), i.config.Arch(), i.config.TargetDirectory)
if err != nil {
return err
}
if err := i.unzip(artifactPath, programName, version); err != nil {
return err
}
rootDir, err := i.getRootDir(artifactPath)
if err != nil {
return err
}
	// If the root directory is not the same as the desired directory, rename it,
	// e.g. when it contains `-windows-` or `-SNAPSHOT-`.
if rootDir != installDir {
if err := os.Rename(rootDir, installDir); err != nil {
return errors.New(err, errors.TypeFilesystem, errors.M(errors.MetaKeyPath, installDir))
}
}
return i.runInstall(programName, version, installDir)
}
func (i *Installer) unzip(artifactPath, programName, version string) error {
if _, err := os.Stat(artifactPath); err != nil {
return errors.New(fmt.Sprintf("artifact for '%s' version '%s' could not be found at '%s'", programName, version, artifactPath), errors.TypeFilesystem, errors.M(errors.MetaKeyPath, artifactPath))
}
powershellArg := fmt.Sprintf("Expand-Archive -LiteralPath \"%s\" -DestinationPath \"%s\"", artifactPath, i.config.InstallPath)
installCmd := exec.Command("powershell", "-command", powershellArg)
return installCmd.Run()
}
func (i *Installer) runInstall(programName, version, installPath string) error {
powershellCmd := fmt.Sprintf(powershellCmdTemplate, installPath, programName)
installCmd := exec.Command("powershell", "-command", powershellCmd)
return installCmd.Run()
}
// getRootDir retrieves the root directory from the zip archive.
func (i *Installer) getRootDir(zipPath string) (dir string, err error) {
defer func() {
if dir != "" {
dir = filepath.Join(i.config.InstallPath, dir)
}
}()
zipReader, err := zip.OpenReader(zipPath)
if err != nil {
return "", err
}
defer zipReader.Close()
var rootDir string
for _, f := range zipReader.File {
		if filepath.Base(f.Name) == filepath.Dir(f.Name) {
			return f.Name, nil
		}
if currentDir := filepath.Dir(f.Name); rootDir == "" || len(currentDir) < len(rootDir) {
rootDir = currentDir
}
}
return rootDir, nil
}
__init__.py | """Support for Timers."""
from datetime import timedelta
import logging
import voluptuous as vol
from homeassistant.const import CONF_ICON, CONF_NAME, SERVICE_RELOAD
import homeassistant.helpers.config_validation as cv
from homeassistant.helpers.entity_component import EntityComponent
from homeassistant.helpers.event import async_track_point_in_utc_time
from homeassistant.helpers.restore_state import RestoreEntity
import homeassistant.helpers.service
import homeassistant.util.dt as dt_util
_LOGGER = logging.getLogger(__name__)
DOMAIN = "timer"
ENTITY_ID_FORMAT = DOMAIN + ".{}"
DEFAULT_DURATION = timedelta(0)
ATTR_DURATION = "duration"
ATTR_REMAINING = "remaining"
CONF_DURATION = "duration"
STATUS_IDLE = "idle"
STATUS_ACTIVE = "active"
STATUS_PAUSED = "paused"
EVENT_TIMER_FINISHED = "timer.finished"
EVENT_TIMER_CANCELLED = "timer.cancelled"
EVENT_TIMER_STARTED = "timer.started"
EVENT_TIMER_RESTARTED = "timer.restarted"
EVENT_TIMER_PAUSED = "timer.paused"
SERVICE_START = "start"
SERVICE_PAUSE = "pause"
SERVICE_CANCEL = "cancel"
SERVICE_FINISH = "finish"
def _none_to_empty_dict(value):
if value is None:
return {}
return value
CONFIG_SCHEMA = vol.Schema(
{
        DOMAIN: cv.schema_with_slug_keys(
            vol.All(
                _none_to_empty_dict,
{
vol.Optional(CONF_NAME): cv.string,
vol.Optional(CONF_ICON): cv.icon,
vol.Optional(
CONF_DURATION, default=DEFAULT_DURATION
): cv.time_period,
},
)
)
},
extra=vol.ALLOW_EXTRA,
)
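# An example configuration.yaml entry accepted by the schema above
# (illustrative values):
#
# timer:
#   laundry:
#     name: Laundry
#     duration: "00:01:00"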
RELOAD_SERVICE_SCHEMA = vol.Schema({})
async def async_setup(hass, config):
"""Set up a timer."""
component = EntityComponent(_LOGGER, DOMAIN, hass)
entities = await _async_process_config(hass, config)
async def reload_service_handler(service_call):
"""Remove all input booleans and load new ones from config."""
conf = await component.async_prepare_reload()
if conf is None:
return
new_entities = await _async_process_config(hass, conf)
if new_entities:
await component.async_add_entities(new_entities)
homeassistant.helpers.service.async_register_admin_service(
hass,
DOMAIN,
SERVICE_RELOAD,
reload_service_handler,
schema=RELOAD_SERVICE_SCHEMA,
)
component.async_register_entity_service(
SERVICE_START,
{vol.Optional(ATTR_DURATION, default=DEFAULT_DURATION): cv.time_period},
"async_start",
)
component.async_register_entity_service(SERVICE_PAUSE, {}, "async_pause")
component.async_register_entity_service(SERVICE_CANCEL, {}, "async_cancel")
component.async_register_entity_service(SERVICE_FINISH, {}, "async_finish")
if entities:
await component.async_add_entities(entities)
return True
async def _async_process_config(hass, config):
"""Process config and create list of entities."""
entities = []
for object_id, cfg in config[DOMAIN].items():
if not cfg:
cfg = {}
name = cfg.get(CONF_NAME)
icon = cfg.get(CONF_ICON)
duration = cfg[CONF_DURATION]
entities.append(Timer(hass, object_id, name, icon, duration))
return entities
class Timer(RestoreEntity):
"""Representation of a timer."""
def __init__(self, hass, object_id, name, icon, duration):
"""Initialize a timer."""
self.entity_id = ENTITY_ID_FORMAT.format(object_id)
self._name = name
self._state = STATUS_IDLE
self._duration = duration
self._remaining = self._duration
self._icon = icon
self._hass = hass
self._end = None
self._listener = None
@property
def should_poll(self):
"""If entity should be polled."""
return False
@property
def name(self):
"""Return name of the timer."""
return self._name
@property
def icon(self):
"""Return the icon to be used for this entity."""
return self._icon
@property
def state(self):
"""Return the current value of the timer."""
return self._state
@property
def state_attributes(self):
"""Return the state attributes."""
return {
ATTR_DURATION: str(self._duration),
ATTR_REMAINING: str(self._remaining),
}
async def async_added_to_hass(self):
"""Call when entity is about to be added to Home Assistant."""
# If not None, we got an initial value.
if self._state is not None:
return
state = await self.async_get_last_state()
        self._state = state.state if state else STATUS_IDLE
async def async_start(self, duration):
"""Start a timer."""
if self._listener:
self._listener()
self._listener = None
newduration = None
if duration:
newduration = duration
event = EVENT_TIMER_STARTED
if self._state == STATUS_PAUSED:
event = EVENT_TIMER_RESTARTED
self._state = STATUS_ACTIVE
start = dt_util.utcnow()
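        # A paused timer keeps its remaining time, so resume from it unless a
        # new duration was passed explicitly to the start service call.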
if self._remaining and newduration is None:
self._end = start + self._remaining
else:
if newduration:
self._duration = newduration
self._remaining = newduration
else:
self._remaining = self._duration
self._end = start + self._duration
self._hass.bus.async_fire(event, {"entity_id": self.entity_id})
self._listener = async_track_point_in_utc_time(
self._hass, self.async_finished, self._end
)
await self.async_update_ha_state()
async def async_pause(self):
"""Pause a timer."""
if self._listener is None:
return
self._listener()
self._listener = None
self._remaining = self._end - dt_util.utcnow()
self._state = STATUS_PAUSED
self._end = None
self._hass.bus.async_fire(EVENT_TIMER_PAUSED, {"entity_id": self.entity_id})
await self.async_update_ha_state()
async def async_cancel(self):
"""Cancel a timer."""
if self._listener:
self._listener()
self._listener = None
self._state = STATUS_IDLE
self._end = None
self._remaining = timedelta()
self._hass.bus.async_fire(EVENT_TIMER_CANCELLED, {"entity_id": self.entity_id})
await self.async_update_ha_state()
async def async_finish(self):
"""Reset and updates the states, fire finished event."""
if self._state != STATUS_ACTIVE:
return
self._listener = None
self._state = STATUS_IDLE
self._remaining = timedelta()
self._hass.bus.async_fire(EVENT_TIMER_FINISHED, {"entity_id": self.entity_id})
await self.async_update_ha_state()
async def async_finished(self, time):
"""Reset and updates the states, fire finished event."""
if self._state != STATUS_ACTIVE:
return
self._listener = None
self._state = STATUS_IDLE
self._remaining = timedelta()
self._hass.bus.async_fire(EVENT_TIMER_FINISHED, {"entity_id": self.entity_id})
        await self.async_update_ha_state()
characterisation.py | # -*- coding: utf-8 -*-
"""
Characterisation Plotting
=========================
Defines the characterisation plotting objects:
- :func:`colour.plotting.plot_single_colour_checker`
- :func:`colour.plotting.plot_multi_colour_checkers`
"""
from __future__ import division
import numpy as np
from colour.models import xyY_to_XYZ
from colour.plotting import (
CONSTANTS_COLOUR_STYLE, ColourSwatch, XYZ_to_plotting_colourspace, artist,
filter_colour_checkers, plot_multi_colour_swatches, override_style, render)
__author__ = 'Colour Developers'
__copyright__ = 'Copyright (C) 2013-2020 - Colour Developers'
__license__ = 'New BSD License - https://opensource.org/licenses/BSD-3-Clause'
__maintainer__ = 'Colour Developers'
__email__ = '[email protected]'
__status__ = 'Production'
__all__ = ['plot_single_colour_checker', 'plot_multi_colour_checkers']
@override_style(
**{
'axes.grid': False,
'xtick.bottom': False,
'ytick.left': False,
'xtick.labelbottom': False,
'ytick.labelleft': False,
})
def plot_single_colour_checker(
colour_checker='ColorChecker24 - After November 2014', **kwargs):
"""
Plots given colour checker.
Parameters
----------
colour_checker : unicode or ColourChecker, optional
        Colour checker to plot. ``colour_checker`` can be of any type or form
supported by the
:func:`colour.plotting.filter_colour_checkers` definition.
Other Parameters
----------------
\\**kwargs : dict, optional
{:func:`colour.plotting.artist`,
:func:`colour.plotting.plot_multi_colour_swatches`,
:func:`colour.plotting.render`},
Please refer to the documentation of the previously listed definitions.
Returns
-------
tuple
Current figure and axes.
Examples
--------
>>> plot_single_colour_checker('ColorChecker 2005') # doctest: +ELLIPSIS
(<Figure size ... with 1 Axes>, <...AxesSubplot...>)
.. image:: ../_static/Plotting_Plot_Single_Colour_Checker.png
:align: center
:alt: plot_single_colour_checker
"""
return plot_multi_colour_checkers([colour_checker], **kwargs)
@override_style(
**{
'axes.grid': False,
'xtick.bottom': False,
'ytick.left': False,
'xtick.labelbottom': False,
'ytick.labelleft': False,
})
def plot_multi_colour_checkers(colour_checkers, **kwargs):
    """
Plots and compares given colour checkers.
Parameters
----------
colour_checkers : unicode or ColourChecker or array_like
        Colour checkers to plot; the count must be less than or equal to 2.
``colour_checkers`` elements can be of any type or form supported by
the :func:`colour.plotting.filter_colour_checkers` definition.
Other Parameters
----------------
\\**kwargs : dict, optional
{:func:`colour.plotting.artist`,
:func:`colour.plotting.plot_multi_colour_swatches`,
:func:`colour.plotting.render`},
Please refer to the documentation of the previously listed definitions.
Returns
-------
tuple
Current figure and axes.
Examples
--------
>>> plot_multi_colour_checkers(['ColorChecker 1976', 'ColorChecker 2005'])
... # doctest: +ELLIPSIS
(<Figure size ... with 1 Axes>, <...AxesSubplot...>)
.. image:: ../_static/Plotting_Plot_Multi_Colour_Checkers.png
:align: center
:alt: plot_multi_colour_checkers
"""
assert len(colour_checkers) <= 2, (
'Only two colour checkers can be compared at a time!')
colour_checkers = filter_colour_checkers(colour_checkers).values()
_figure, axes = artist(**kwargs)
compare_swatches = len(colour_checkers) == 2
colour_swatches = []
colour_checker_names = []
for colour_checker in colour_checkers:
colour_checker_names.append(colour_checker.name)
for label, xyY in colour_checker.data.items():
XYZ = xyY_to_XYZ(xyY)
RGB = XYZ_to_plotting_colourspace(XYZ, colour_checker.illuminant)
colour_swatches.append(
ColourSwatch(label.title(), np.clip(np.ravel(RGB), 0, 1)))
if compare_swatches:
colour_swatches = [
swatch
for pairs in zip(colour_swatches[0:len(colour_swatches) // 2],
colour_swatches[len(colour_swatches) // 2:])
for swatch in pairs
]
background_colour = '0.1'
width = height = 1.0
spacing = 0.25
columns = 6
settings = {
'axes': axes,
'width': width,
'height': height,
'spacing': spacing,
'columns': columns,
'direction': '-y',
'text_kwargs': {
'size': 8
},
'background_colour': background_colour,
'compare_swatches': 'Stacked' if compare_swatches else None,
}
settings.update(kwargs)
settings['standalone'] = False
plot_multi_colour_swatches(colour_swatches, **settings)
axes.text(
0.5,
0.005,
'{0} - {1} - Colour Rendition Chart'.format(
', '.join(colour_checker_names),
CONSTANTS_COLOUR_STYLE.colour.colourspace.name),
transform=axes.transAxes,
color=CONSTANTS_COLOUR_STYLE.colour.bright,
ha='center',
va='bottom')
settings.update({
'axes': axes,
'standalone': True,
'title': ', '.join(colour_checker_names),
})
    return render(**settings)
|
items.py | # -*- coding: utf-8 -*-
# Define here the models for your scraped items
#
# See documentation in:
# http://doc.scrapy.org/en/latest/topics/items.html
import scrapy
class AKCItem(scrapy.Item):
image_urls = scrapy.Field()
images = scrapy.Field()
breed = scrapy.Field()
link = scrapy.Field()
desc = scrapy.Field()
thumb = scrapy.Field()
class WikiItem(scrapy.Item):
image_urls = scrapy.Field()
images = scrapy.Field()
breed = scrapy.Field()
link = scrapy.Field()
    desc = scrapy.Field()
77.54b545fc.js | (window.webpackJsonp=window.webpackJsonp||[]).push([[77],{1297:function(t,i,s){"use strict";s(796)},1619:function(t,i,s){"use strict";s.r(i);var e={name:"bridRotate",data:function(){return{}}},n=(s(1297),s(12)),a=Object(n.a)(e,(function(){var t=this.$createElement;this._self._c;return this._m(0)}),[function(){var t=this.$createElement,i=this._self._c||t;return i("div",[i("div",{staticClass:"bird"},[i("div",{staticClass:"big"}),this._v(" "),i("div",{staticClass:"middle"}),this._v(" "),i("div",{staticClass:"small"})])])}],!1,null,"e9a76560",null);i.default=a.exports},796:function(t,i,s){}}]); |
lib.rs | //! Parser for Gmsh mesh files using the MSH file format version 4.1
//!
//! The library supports parsing ASCII and binary encoded MSH files adhering to the MSH file format
//! version 4.1 as specified in the [Gmsh documention](http://gmsh.info/doc/texinfo/gmsh.html#MSH-file-format).
//!
//! ```
//! use std::error::Error;
//! use std::fs;
//!
//! fn main() -> Result<(), Box<dyn Error>> {
//! // Try to read and parse a MSH file
//! let msh_bytes = fs::read("tests/data/sphere_coarse.msh")?;
//! let parser_result = mshio::parse_msh_bytes(msh_bytes.as_slice());
//!
//! // Note that the a parser error cannot be propagated directly using the ?-operator, as it
//! // contains a reference into the u8 slice where the error occurred.
//! let msh = parser_result.map_err(|e| format!("Error while parsing:\n{}", e))?;
//! assert_eq!(msh.total_element_count(), 891);
//!
//! Ok(())
//! }
//! ```
//!
//! If parsing was successful, the [`parse_msh_bytes`](fn.parse_msh_bytes.html) function returns a
//! [`MshFile`](mshfile/struct.MshFile.html) instance. The structure of `MshFile` closely mirrors
//! the MSH format specification. For example the `MeshData` associated to a `MshFile` may contain an
//! optional [`Elements`](mshfile/struct.Elements.html) section. This `Elements` section can contain
//! an arbitray number of [`ElementBlock`](mshfile/struct.ElementBlock.html) instances, where each
//! `ElementBlock` only contains elements of the same type and dimension.
//!
//! Currently, only the following sections of MSH files are actually parsed: `Entities`, `Nodes`,
//! `Elements`. All other sections are silently ignored, if they follow the pattern of being
//! delimited by `$SectionName` and `$EndSectionName` (in accordance to the MSH format specification).
//!
//! Note that the actual values are not checked for consistency beyond what is defined in the MSH format specification.
//! This means, that a parsed element may refer to node indices that are not present in the node section (if the MSH file already contains
//! such an inconsistency). In the future, utility functions may be added to check this.
//!
//! Although the `MshFile` struct and all related structs are generic over their value types,
//! the `parse_msh_bytes` function enforces the usage of `u64`, `i32` and `f64` as output value types
//! corresponding to the MSH input value types `size_t`, `int` and `double`
//! (of course `size_t` values will still be parsed as having the size specified in the file header).
//! We did not encounter MSH files using different types (e.g. 64 bit integers or 32 bit floats) and therefore cannot test it.
//! In addition, the MSH format specification does not specify the size of the float and integer types.
//! If the user desires narrowing conversions, they should be performed manually after parsing the file.
//!
//! Note that when loading collections of elements/nodes and other entities, the parser checks if
//! the number of these objects can be represented in the system's `usize` type. If this is not the
//! case it returns an error as they cannot be stored in a `Vec` in this case.
//!
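//! As a minimal sketch of inspecting the parsed data (only the fields that this file
//! itself constructs are assumed: `data.entities`, `data.nodes`, `data.elements`):
//!
//! ```ignore
//! // `msh` is the MshFile returned by parse_msh_bytes above.
//! if msh.data.nodes.is_some() {
//!     println!("the file contains a node section");
//! }
//! ```
//!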
use std::convert::{TryFrom, TryInto};
use nom::bytes::complete::tag;
use nom::character::complete::{alpha0, char};
use nom::combinator::peek;
use nom::sequence::{delimited, preceded, terminated};
use nom::IResult;
/// Error handling components of the parser
#[allow(unused)]
pub mod error;
/// Contains all types that are used to represent the structure of parsed MSH files
///
/// The central type is [`MshFile`](struct.MshFile.html) which contains the whole structure of the
/// parsed mesh.
pub mod mshfile;
/// Parser utility functions used by this MSH parser (may be private in the future)
pub mod parsers;
/// Error type returned by the MSH parser if parsing fails without panic
pub use error::MshParserError;
/// Re-exports all types that are used to represent the structure of an MSH file
pub use mshfile::*;
use crate::error::{make_error, MapMshError, MshParserErrorKind};
use error::{always_error, context};
use parsers::{br, take_sp};
use parsers::{
parse_element_section, parse_entity_section, parse_header_section, parse_node_section,
};
// TODO: Error instead of panic on num_parser construction if size of the data type is not supported
// TODO: Reconsider naming of the MshUsizeT etc. and num parser trait names, make them consistent
// TODO: Doc strings for the new num_parser trait interface
// TODO: Make section parsers generic over data types (i.e. don't mandate f64, u64, i32)
// TODO: Unify element and node section parsing
// (e.g. a single section parser, then per section type one header and one content parser)
// TODO: Unify entity parsing (currently, point parsers and the curve/surface/volume parsers are separate)
// TODO: Implement parser for physical groups
// TODO: Log in the MeshData struct which unknown sections were ignored
// TODO: Add more .context() calls/more specialized errors
// TODO: Replace remaining unimplemented!/expect calls with errors
// TODO: Test the float values parsed from a binary MSH file
// TODO: Add tests of errors in node section
// TODO: Add tests of errors in entity section
// TODO: Add tests that try to parse a mesh with u64 indices to u32
/// Try to parse a MshFile from a slice of bytes
///
/// The input can be the content of an ASCII or binary encoded MSH file of file format version 4.1.
impl<'a> TryFrom<&'a [u8]> for MshFile<u64, i32, f64> {
type Error = MshParserError<&'a [u8]>;
fn try_from(value: &'a [u8]) -> Result<Self, Self::Error> {
match private_parse_msh_bytes(value) {
Ok((_, file)) => Ok(file),
Err(e) => Err(e.into()),
}
}
}
/// Try to parse a [`MshFile`](mshfile/struct.MshFile.html) from a slice of bytes
///
/// The input can be the content of an ASCII or binary encoded MSH file of file format version 4.1.
pub fn parse_msh_bytes<'a>(
input: &'a [u8],
) -> Result<MshFile<u64, i32, f64>, MshParserError<&'a [u8]>> {
    input.try_into()
}
fn private_parse_msh_bytes<'a>(
input: &'a [u8],
) -> IResult<&'a [u8], MshFile<u64, i32, f64>, MshParserError<&'a [u8]>> {
let (input, (header, parsers)) = context(
"MSH file header section",
parsers::parse_delimited_block(
terminated(tag("$MeshFormat"), br),
terminated(tag("$EndMeshFormat"), br),
context("MSH format header content", parse_header_section),
),
)(input)?;
// Closure to detect a line with a section start tag
let section_detected = |start_tag, input| {
peek::<_, _, (), _>(delimited(take_sp, tag(start_tag), br))(input).is_ok()
};
// Macro to apply a parser to a section delimited by start and end tags
macro_rules! parse_section {
($start_tag:expr, $end_tag:expr, $parser:expr, $input:expr) => {{
delimited(
delimited(take_sp, tag($start_tag), br),
$parser,
delimited(take_sp, tag($end_tag), take_sp),
)($input)
}};
}
let mut entity_sections = Vec::new();
let mut node_sections = Vec::new();
let mut element_sections = Vec::new();
let mut input = input;
// Loop over all sections of the mesh file
while !parsers::eof::<_, ()>(input).is_ok() {
// Check for entity section
if section_detected("$Entities", input) {
let (input_, entities) = parse_section!(
"$Entities",
"$EndEntities",
|i| context("entity section", parse_entity_section(&parsers))(i),
input
)?;
entity_sections.push(entities);
input = input_;
}
// Check for node section
else if section_detected("$Nodes", input) {
let (input_, nodes) = parse_section!(
"$Nodes",
"$EndNodes",
|i| context("node section", parse_node_section(&parsers))(i),
input
)?;
node_sections.push(nodes);
input = input_;
}
// Check for elements section
else if section_detected("$Elements", input) {
let (input_, elements) = parse_section!(
"$Elements",
"$EndElements",
|i| context("element section", parse_element_section(&parsers))(i),
input
)?;
element_sections.push(elements);
input = input_;
}
// Check for unknown section (gets ignored)
else if let Ok((input_, section_header)) =
peek::<_, _, (), _>(preceded(take_sp, delimited(char('$'), alpha0, br)))(input)
{
let section_header = String::from_utf8_lossy(section_header);
let section_start_tag = format!("${}", section_header);
let section_end_tag = format!("$End{}", section_header);
let (input_, _) = parsers::delimited_block(
delimited(take_sp, tag(§ion_start_tag[..]), br),
delimited(take_sp, tag(§ion_end_tag[..]), take_sp),
)(input_)?;
input = input_;
}
// Check for invalid lines
else {
return always_error(MshParserErrorKind::InvalidSectionHeader)(input);
}
}
// TODO: Replace the unimplemented! calls with errors
let entities = match entity_sections.len() {
1 => Some(entity_sections.remove(0)),
0 => None,
_ => {
return Err(make_error(input, MshParserErrorKind::Unimplemented)
.with_context(input, "Multiple entity sections found in the MSH file, this cannot be handled at the moment."))
}
};
let nodes = match node_sections.len() {
1 => Some(node_sections.remove(0)),
0 => None,
_ => return Err(make_error(input, MshParserErrorKind::Unimplemented)
.with_context(input, "Multiple node sections found in the MSH file, this cannot be handled at the moment.")),
};
let elements = match element_sections.len() {
1 => Some(element_sections.remove(0)),
0 => None,
_ => return Err(make_error(input, MshParserErrorKind::Unimplemented)
.with_context(input, "Multiple element sections found in the MSH file, this cannot be handled at the moment.")),
};
Ok((
input,
MshFile {
header,
data: MshData {
entities,
nodes,
elements,
},
},
))
}
buffer.rs | use super::alloc::{allocate, deallocate, reallocate, Allocator};
use super::capacity::{CapacityPolicy, DefaultCapacityPolicy};
use super::collection::Collection;
use super::deque::Deque;
use super::list::{List, ListBase, ListMut, ListMutBase};
use super::os::OSAllocator;
use super::ptr::{copy, get, get_mut, read, write};
use core::marker::PhantomData;
#[derive(List, ListMut)]
pub struct Buffer<T, P: CapacityPolicy = DefaultCapacityPolicy, A: Allocator = OSAllocator> {
front: isize,
back: isize,
capacity: isize,
data: *mut T,
allocator: A,
_policy: PhantomData<P>,
}
impl<T, P: CapacityPolicy, A: Allocator + Default> Buffer<T, P, A> {
pub fn new() -> Self {
Self::new_with_capacity(0)
}
pub fn new_with_capacity(capacity: isize) -> Self {
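        // One slot is kept empty to tell "full" apart from "empty" in the
        // circular layout (see `is_full`), hence the `+ 1`.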
let capacity = P::initial(capacity) + 1;
let mut allocator = Default::default();
let data = unsafe { allocate(&mut allocator, capacity) };
Buffer {
front: 0,
back: 0,
capacity,
data,
allocator,
_policy: PhantomData,
}
}
}
impl<T, P: CapacityPolicy, A: Allocator + Default> Default for Buffer<T, P, A> {
fn default() -> Self {
Self::new()
}
}
impl<T, P: CapacityPolicy, A: Allocator> Buffer<T, P, A> {
fn increase_index(&self, index: isize) -> isize {
if index + 1 == self.capacity {
0
} else {
index + 1
}
}
fn decrease_index(&self, index: isize) -> isize {
if index == 0 {
self.capacity - 1
} else {
index - 1
}
}
fn grow_to(&mut self, new_capacity: isize) {
self.data = unsafe { reallocate(&mut self.allocator, self.data, new_capacity) };
if self.back < self.front {
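            // The live region wraps past the end of the allocation; shift its
            // tail segment to the end of the grown buffer so indices stay valid.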
let grow = new_capacity - self.capacity;
unsafe {
copy(
self.data,
self.front,
self.front + grow,
self.capacity - self.front,
)
};
self.front += grow;
}
self.capacity = new_capacity;
}
fn shrink_to(&mut self, new_capacity: isize) {
if self.back < self.front {
let shrink = self.capacity - new_capacity;
unsafe {
copy(
self.data,
self.front,
self.front - shrink,
self.capacity - self.front,
)
};
self.front -= shrink;
} else if self.back > new_capacity {
let size = self.back - self.front;
unsafe { copy(self.data, self.front, 0, size) };
self.front = 0;
self.back = size;
}
self.data = unsafe { reallocate(&mut self.allocator, self.data, new_capacity) };
self.capacity = new_capacity;
}
fn is_full(&self) -> bool {
self.increase_index(self.back) == self.front
}
    fn grow(&mut self) {
let new_capacity = P::grow(self.capacity - 1) + 1;
self.grow_to(new_capacity);
}
fn shrink(&mut self) {
let new_capacity = P::shrink(Collection::size(self), self.capacity - 1) + 1;
if new_capacity < self.capacity {
self.shrink_to(new_capacity);
}
}
}
impl<T, P: CapacityPolicy, A: Allocator> Collection for Buffer<T, P, A> {
fn size(&self) -> isize {
if self.front <= self.back {
self.back - self.front
} else {
self.back + self.capacity - self.front
}
}
}
impl<T, P: CapacityPolicy, A: Allocator> ListBase for Buffer<T, P, A> {
type Elem = T;
#[cfg_attr(feature = "cargo-clippy", allow(collapsible_if))]
fn get(&self, index: isize) -> Option<&T> {
if self.front <= self.back {
if self.front + index >= self.back {
None
} else {
Some(unsafe { get(self.data, self.front + index) })
}
} else {
if self.front + index >= self.back + self.capacity {
None
} else if self.front + index >= self.capacity {
Some(unsafe { get(self.data, self.front + index - self.capacity) })
} else {
Some(unsafe { get(self.data, self.front + index) })
}
}
}
}
impl<T, P: CapacityPolicy, A: Allocator> ListMutBase for Buffer<T, P, A> {
#[cfg_attr(feature = "cargo-clippy", allow(collapsible_if))]
fn get_mut(&mut self, index: isize) -> Option<&mut T> {
if self.front <= self.back {
if self.front + index >= self.back {
None
} else {
Some(unsafe { get_mut(self.data, self.front + index) })
}
} else {
if self.front + index >= self.back + self.capacity {
None
} else if self.front + index >= self.capacity {
Some(unsafe { get_mut(self.data, self.front + index - self.capacity) })
} else {
Some(unsafe { get_mut(self.data, self.front + index) })
}
}
}
}
impl<T, P: CapacityPolicy, A: Allocator> Deque for Buffer<T, P, A> {
type Elem = T;
fn is_empty(&self) -> bool {
self.front == self.back
}
fn push_front(&mut self, elem: T) {
if self.is_full() {
self.grow();
}
self.front = self.decrease_index(self.front);
unsafe { write(self.data, self.front, elem) };
}
fn pop_front(&mut self) -> T {
if self.is_empty() {
panic!("empty");
}
let elem = unsafe { read(self.data, self.front) };
self.front = self.increase_index(self.front);
self.shrink();
elem
}
fn push_back(&mut self, elem: T) {
if self.is_full() {
self.grow();
}
unsafe { write(self.data, self.back, elem) };
self.back = self.increase_index(self.back);
}
fn pop_back(&mut self) -> T {
if self.is_empty() {
panic!("empty");
}
self.back = self.decrease_index(self.back);
let elem = unsafe { read(self.data, self.back) };
self.shrink();
elem
}
}
impl<T, P: CapacityPolicy, A: Allocator> Drop for Buffer<T, P, A> {
fn drop(&mut self) {
if self.back < self.front {
for i in 0..self.back {
unsafe { read(self.data, i) };
}
for i in self.front..self.capacity {
unsafe { read(self.data, i) };
}
} else {
for i in self.front..self.back {
unsafe { read(self.data, i) };
}
}
unsafe { deallocate(&mut self.allocator, self.data) };
}
}
#[macro_export]
macro_rules! buffer {
() => {
&mut $crate::buffer::Buffer::<_, $crate::capacity::DefaultCapacityPolicy>::new()
};
}
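// Minimal usage sketch (with the `Deque` trait above in scope):
//
//     let buf = buffer!();
//     buf.push_back(1);
//     buf.push_front(0);
//     assert_eq!(buf.pop_front(), 0);
//     assert_eq!(buf.pop_back(), 1);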
context.go | package genericclioptions
import (
"errors"
"fmt"
"github.com/redhat-developer/odo/pkg/devfile"
"github.com/redhat-developer/odo/pkg/devfile/location"
"github.com/redhat-developer/odo/pkg/devfile/validate"
"github.com/redhat-developer/odo/pkg/localConfigProvider"
"github.com/redhat-developer/odo/pkg/odo/cmdline"
"github.com/redhat-developer/odo/pkg/odo/util"
odoutil "github.com/redhat-developer/odo/pkg/util"
"github.com/spf13/cobra"
"github.com/redhat-developer/odo/pkg/envinfo"
"github.com/redhat-developer/odo/pkg/kclient"
)
const (
// defaultAppName is the default name of the application when an application name is not provided
defaultAppName = "app"
// gitDirName is the git dir name in a project
gitDirName = ".git"
)
// Context holds contextual information useful to commands such as correctly configured client, target project and application
// (based on specified flag values) and provides for a way to retrieve a given component given this context
type Context struct {
internalCxt
}
// internalCxt holds the actual context values and is not exported so that it cannot be instantiated outside of this package.
// This ensures that Context objects are always created properly via NewContext factory functions.
type internalCxt struct {
// project used for the command, either passed with the `--project` flag, or the current one by default
project string
// application used for the command, either passed with the `--app` flag, or the current one by default
application string
// component used for the command, either passed with the `--component` flag, or the current one by default
component string
// componentContext is the value passed with the `--context` flag
componentContext string
// outputFlag is the value passed with the `-o` flag
outputFlag string
// The path of the detected devfile
devfilePath string
// Kclient can be used to access Kubernetes resources
KClient kclient.ClientInterface
EnvSpecificInfo *envinfo.EnvSpecificInfo
LocalConfigProvider localConfigProvider.LocalConfigProvider
}
// CreateParameters defines the options which can be provided while creating the context
type CreateParameters struct {
cmdline cmdline.Cmdline
componentContext string
devfile bool
offline bool
appIfNeeded bool
}
func NewCreateParameters(cmdline cmdline.Cmdline) CreateParameters {
return CreateParameters{cmdline: cmdline}
}
func (o CreateParameters) NeedDevfile(ctx string) CreateParameters {
o.devfile = true
o.componentContext = ctx
return o
}
func (o CreateParameters) IsOffline() CreateParameters {
o.offline = true
return o
}
func (o CreateParameters) CreateAppIfNeeded() CreateParameters {
o.appIfNeeded = true
return o
}
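// A typical call site builds the parameters fluently before handing them to New
// (sketch; componentContext is whatever directory the command operates on):
//
//	ctx, err := New(NewCreateParameters(cmdline).NeedDevfile(componentContext).CreateAppIfNeeded())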
// New creates a context based on the given parameters
func New(parameters CreateParameters) (*Context, error) {
	ctx := internalCxt{}
	var err error
	ctx.EnvSpecificInfo, err = GetValidEnvInfo(parameters.cmdline)
	if err != nil {
		return nil, err
	}
	ctx.LocalConfigProvider = ctx.EnvSpecificInfo
	ctx.project = resolveProject(parameters.cmdline, ctx.EnvSpecificInfo)
	ctx.application = resolveApp(parameters.cmdline, ctx.EnvSpecificInfo, parameters.appIfNeeded)
	ctx.component = resolveComponent(parameters.cmdline, ctx.EnvSpecificInfo)
	ctx.componentContext = parameters.componentContext
	ctx.outputFlag = parameters.cmdline.FlagValueIfSet(util.OutputFlagName)
	if !parameters.offline {
		ctx.KClient, err = parameters.cmdline.GetKubeClient()
		if err != nil {
			return nil, err
		}
		if e := ctx.resolveProjectAndNamespace(parameters.cmdline, ctx.EnvSpecificInfo); e != nil {
			return nil, e
		}
		if parameters.cmdline.FlagValueIfSet(util.ComponentFlagName) != "" {
			if err = ctx.checkComponentExistsOrFail(); err != nil {
				return nil, err
			}
		}
	}
	ctx.devfilePath = location.DevfileLocation(parameters.componentContext)
	if parameters.devfile {
		isDevfile := odoutil.CheckPathExists(ctx.devfilePath)
		if isDevfile {
			// Parse devfile and validate
			devObj, err := devfile.ParseAndValidateFromFile(ctx.devfilePath)
			if err != nil {
				return nil, fmt.Errorf("failed to parse the devfile %s, with error: %s", ctx.devfilePath, err)
			}
			err = validate.ValidateDevfileData(devObj.Data)
			if err != nil {
				return nil, err
			}
			ctx.EnvSpecificInfo.SetDevfileObj(devObj)
		} else {
			return nil, errors.New("no devfile found")
		}
	}
	return &Context{
		internalCxt: ctx,
	}, nil
}
// NewContextCompletion disables checking for a local configuration since when we use autocompletion on the command line, we
// couldn't care less if there was a configuration. We only need to check the parameters.
func NewContextCompletion(command *cobra.Command) *Context {
cmdline := cmdline.NewCobra(command)
ctx, err := New(CreateParameters{cmdline: cmdline})
if err != nil {
util.LogErrorAndExit(err, "")
}
return ctx
}
// Component retrieves the optionally specified component or the current one if it is set. If no component is set, returns
// an error
func (o *Context) Component(optionalComponent ...string) (string, error) {
return o.ComponentAllowingEmpty(false, optionalComponent...)
}
// ComponentAllowingEmpty retrieves the optionally specified component or the current one if it is set, allowing empty
// components (instead of exiting with an error) if so specified
func (o *Context) ComponentAllowingEmpty(allowEmpty bool, optionalComponent ...string) (string, error) {
switch len(optionalComponent) {
case 0:
// if we're not specifying a component to resolve, get the current one (resolved in NewContext as cmp)
// so nothing to do here unless the calling context doesn't allow no component to be set in which case we return an error
if !allowEmpty && len(o.component) == 0 {
return "", errors.New("no component is set")
}
case 1:
cmp := optionalComponent[0]
o.component = cmp
default:
// safeguard: fail if more than one optional string is passed because it would be a programming error
return "", fmt.Errorf("ComponentAllowingEmpty function only accepts one optional argument, was given: %v", optionalComponent)
}
return o.component, nil
}
func (o *Context) GetProject() string {
return o.project
}
func (o *Context) GetApplication() string {
return o.application
}
func (o *Context) GetOutputFlag() string {
return o.outputFlag
}
func (o *Context) GetDevfilePath() string {
return o.devfilePath
}
arrow-icon.d.ts | import * as React from 'react';
import { SVGIconProps } from '../createIcon';
export declare const ArrowIconConfig: {
name: 'ArrowIcon',
height: 1024,
width: 1097,
svgPath: 'M921.6-73.143h-885.029c-21.943 0-36.571 14.629-36.571 36.571v804.571c0 21.943 14.629 36.571 36.571 36.571h292.571l-109.714-146.286h-73.143v-585.143h658.286v73.143l146.286 146.286v-329.143c0-21.943-14.629-36.571-29.257-36.571v0zM665.6 731.428c-219.429 0-394.971-131.657-373.029-351.086 14.629-138.971 87.771-160.914 146.286-204.8-58.514 124.343 36.571 321.829 263.314 336.457 7.314 0 21.943 0 29.257 0v-219.429l365.714 329.143-365.714 329.143v-219.429c0 0-65.829 0-65.829 0z',
yOffset: 64,
xOffset: 0,
transform: 'rotate(180 0 512) scale(-1 1)' | };
export declare const ArrowIcon: React.ComponentClass<SVGIconProps>;
export default ArrowIcon; |
|
run_script.py | import json
import subprocess |
def run_script():
data = json.load(open('store.json', 'r'))
script = data['script_to_run']
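    # store.json is assumed to hold a single mapping like
    # {"script_to_run": "path/to/script.py"}; an empty value means nothing runs.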
if script == "":
# Nothing will be run
return 1
subprocess.call(['/usr/bin/python', script])
return 0
if __name__ == "__main__":
run_script() | |
447.number-of-boomerangs.py | #
# @lc app=leetcode id=447 lang=python
#
# [447] Number of Boomerangs
#
# https://leetcode.com/problems/number-of-boomerangs/description/
#
# algorithms
# Easy (49.20%)
# Likes: 296
# Dislikes: 447
# Total Accepted: 54.7K
# Total Submissions: 109.6K
# Testcase Example: '[[0,0],[1,0],[2,0]]'
#
# Given n points in the plane that are all pairwise distinct, a "boomerang" is
# a tuple of points (i, j, k) such that the distance between i and j equals the
# distance between i and k (the order of the tuple matters).
#
# Find the number of boomerangs. You may assume that n will be at most 500 and
# coordinates of points are all in the range [-10000, 10000] (inclusive).
#
# Example:
#
#
# Input:
# [[0,0],[1,0],[2,0]]
#
# Output:
# 2
#
# Explanation:
# The two boomerangs are [[1,0],[0,0],[2,0]] and [[1,0],[2,0],[0,0]]
#
#
#
#
#
import math
class Solution(object):
def _numberOfBoomerangs(self, points):
"""
:type points: List[List[int]]
:rtype: int
"""
        # Brute-force version; exceeds the time limit on large inputs
result = []
distance = [[0] * len(points) for _ in range(len(points))]
for i in range(len(points)):
for j in range(i):
distance[i][j] = (points[i][0]-points[j][0])**2 + (points[i][1]-points[j][1])**2
# distance[i][j] = math.sqrt((points[i][0]-points[j][0])**2
# + (points[i][1]-points[j][1])**2)
distance[j][i] = distance[i][j]
for m in range(i):
if distance[i][j] == distance[i-1-m][j]:
result.append([points[i], points[j], points[i-1-m]])
result.append([points[i-1-m], points[j], points[i]])
for m in range(j):
if distance[i][j] == distance[i][j-1-m]:
result.append([points[j], points[i], points[j-1-m]])
result.append([points[j-1-m], points[i], points[j]])
return len(result)
def numberOfBoomerangs(self, points):
|
# if __name__ == '__main__':
# s = Solution()
# print s.numberOfBoomerangs([[0, 0], [1, 0], [2, 0]])
# print s.numberOfBoomerangs([[3327,-549],[9196,-8118],[7610,-9506],[5098,8392],[8582,7953],[1053,5802],[3847,2652],[7654,8355],[1614,-9409],[9986,5538],[4660,2944],[4528,-9512],[7483,-1455],[3422,-3966],[2037,-4456],[5107,-4635],[4996,655],[7247,2606],[1149,8697],[7350,6083],[3002,8403],[8238,6850],[1055,5892],[5205,9021],[2835,5191],[911,-2505],[4488,-4561],[7983,-1677],[336,-2243],[4358,-1274],[3302,9465],[4091,-5350],[120,7690],[3608,7622],[6388,-9042],[57,-610],[9361,8295],[6240,-3232],[540,7797],[2141,-6625],[9341,3053],[7223,3829],[4844,1558],[2152,-8467],[9316,6510],[259,-1030],[2327,-5650],[9972,8800],[2040,-6420],[2774,4780],[4538,-7169],[4171,-6101],[7479,-3237],[7019,-1981],[4561,-4488],[7746,254],[4917,4969],[4083,-238],[6528,-7413],[1295,-7804],[5450,-8446],[1166,-5871],[2256,-8862],[2929,-5704],[4718,2055],[5429,-4392],[4887,9600],[9507,-1282],[2715,2878],[6737,-6372],[8390,-9165],[3882,3308],[5805,4317],[9422,8685],[3257,-2931],[881,-1293],[8623,-1601],[2836,879],[5889,2118],[1527,607],[4173,-3044],[6215,5412],[2908,-7926],[4130,-8024],[1304,7219],[1956,-3954],[8055,5839],[5706,212],[6508,5128],[8897,9765],[2197,-3870],[8472,-2828],[4529,7661],[4403,-9582],[6131,-7717],[7377,-3344],[5591,9944],[2069,-5148],[8370,-7449],[6828,-3974],[6123,-1216],[2072,530],[975,-2221],[7094,-2516],[9259,-4009],[7249,7809],[8473,2074],[4981,-6998],[9735,5737],[9772,5866],[8020,-6499],[8874,-6389],[3445,-9057],[4815,8167],[9847,1643],[4193,2322],[6780,2617],[9204,4107],[396,6298],[1591,6008],[2289,-4807],[3817,762],[7267,5150],[116,-6646],[887,-3760],[5572,-4741],[9741,4446],[5223,-462],[1742,38],[7705,1589],[1682,-1750],[263,4814],[867,9467],[8921,7616],[5765,-3135],[3624,4406],[2058,-2559],[1520,-675],[2591,-2012],[2679,-169],[4228,-1749],[5090,-6031],[2697,-9687],[9859,791],[352,3916],[8732,-1614],[2166,8995],[3200,9385],[4814,-1527],[7001,579],[5338,-3023],[1337,-2604],[4418,-7143],[3073,3362],[845,-7896],[3193,-8575],[6707,4635],[1746,-595],[4949,1605],[6548,-8347],[1873,5281],[39,-5961],[4276,-409],[9777,-909],[8064,3130],[6022,-245],[108,7360],[7151,4526],[6569,-3423],[4240,-2585],[8681,-2567],[5192,5389],[2069,-3061],[1146,3370],[4896,7694],[5023,6770],[2975,-8586],[7161,-6396],[1005,6938],[2695,-4579],[69,-4931],[5176,177],[2429,-1320],[1055,8999],[5257,-4704],[2766,-6062],[9081,-2042],[5679,-2498],[1249,6825],[7224,-3854],[872,2247],[2916,-6153],[3661,-9923],[7451,-8982],[7016,6498],[6440,-6563],[1568,-8384],[9966,-9651],[296,1021],[9348,-8095],[2669,8466],[2196,-8249],[2777,7875],[5605,4026],[1053,-7170],[172,-8075],[1429,-6912],[5772,-8557],[9518,-424],[2461,2886],[2426,-1099],[6323,-6006],[6870,-3711],[696,3518],[3662,6396],[5424,-3668],[4863,7620],[4435,7640],[1847,-3608],[8018,-7100],[9222,-5457],[4825,7004],[3983,-3050],[8447,-6499],[2878,-9092],[6387,5304],[6162,-938],[5651,3032],[5351,6347],[2902,-4634],[2743,8326],[8050,-6042],[2298,-1163],[7950,-9502],[5229,-4031],[3398,-9196],[512,-5424],[7808,847],[7878,6255],[4349,7108],[7163,736],[8764,9677],[6151,-5585],[2709,-2146],[7114,5612],[3220,-3790],[290,-8730],[168,8941],[107,-5529],[9439,-8311],[440,9189],[2493,7304],[117,6653],[8151,-5653],[2908,8852],[1455,-3577],[5941,-3428],[6101,-7908],[7339,5162],[9946,-5546],[7126,9519],[7016,3769],[789,7184],[2710,-2751],[1655,-1499],[5290,-1553],[4042,-2217],[2103,-9488],[788,-3393],[1211,3696],[1811,9019],[6471,-2248],[5591,8924],[6196,2930],[4087,6143],[3736,7565],[5662,-9248],[1334,2803],[4289,-9604],[6404,2296],[8897,-8306],[7096,-708],[5829,9199],[6156,-3383],[2158,-2633],[6665,-
9678],[6386,3137],[8074,1977],[2061,4271],[4908,-7500],[6766,4996],[66,8780],[5749,1400],[7935,38],[1797,-5660],[2334,7046],[2386,9430],[2690,-1784],[4982,-1154],[1185,3492],[6214,-2149],[3814,8952],[7340,8241],[930,-4247],[8864,2190],[8254,5630],[7186,-5328],[762,9287],[6072,8697],[9325,-5779],[9389,1660],[7620,-8224],[7442,-9690],[9992,-7576],[5509,7529],[2269,8075],[5380,-3917],[7027,-7280],[4324,-5691],[8474,3188],[6499,3080],[5170,-9962],[7752,5932],[9325,176],[982,-1349],[4398,371],[6663,-1630],[2147,-9543],[5032,8491],[9234,541],[6021,1503],[8616,7753],[3938,-8004],[6826,8263],[6305,-8348],[7803,9157],[4732,-674],[9195,-1164],[5258,8520],[9012,2592],[3523,-238],[2964,6538],[8132,1463],[3348,-6835],[6307,2582],[58,-7672],[437,5027],[6433,4375],[7023,3259],[8990,-6672],[4911,3146],[2485,-4005],[2472,8032],[4831,-5918],[2905,196],[6675,6428],[9958,9639],[9319,4443],[7454,-7333],[3960,3761],[1601,-9630],[2441,2038],[5397,-1125],[6413,2420],[8486,1756],[2101,3398],[4902,938],[5745,-2626],[5323,-3071],[1456,8228],[7125,-1869],[1008,3435],[4122,6679],[4230,1577],[9346,8190],[1690,947],[4913,4132],[9337,310],[3007,-4249],[9083,-8507],[7507,-2464],[1243,-7591],[4826,-3011],[6135,-9851],[3918,7591],[8377,-2605],[5723,-4262],[830,-3803],[2417,-8587],[7774,8116],[5955,9465],[5415,868],[9949,-5247],[1179,2956],[6856,6614],[801,-9285],[4150,8397],[9476,8976],[1738,-4389],[9126,2008],[3202,3855],[9403,-4723],[9593,6585],[1475,-7989],[7998,-4399],[127,306],[1418,-4458],[1174,1367],[6647,-7647],[4323,3503],[8967,1477],[4218,9469],[6226,3694],[8446,-2036],[9305,3924],[9972,8860],[7779,5727],[4137,-6275],[8664,1964],[5736,-6985],[7566,-7785],[3321,8984],[4109,4495],[352,757],[3201,1027],[4260,-1480],[8856,4831],[7990,-4918],[8525,-7212],[3046,-5817],[6712,-630],[3043,-5509],[1449,-6468],[8216,-3534],[5497,304],[9481,3063],[8871,9154],[8399,2981],[1,8751],[90,-6798],[6131,-9298],[8075,-5013],[5533,6065],[70,-9589],[5205,9468],[946,1917],[5191,-6011],[2760,-7008],[3873,7329],[9458,9370],[7633,5291],[8785,2857],[797,3537],[2190,-9201],[2288,-7720],[353,4771],[9334,-1572],[9759,1220],[845,-3819],[7983,6050],[2001,-1071],[4319,-2808],[9270,7080],[6537,3143],[4409,2347],[8866,8394],[7639,4003],[7603,4788],[7540,-207],[5587,6181],[8425,5941],[952,-5888],[721,-2937],[5332,-8433],[3244,-6685],[3969,5246],[2244,8289],[8790,-8486],[1721,-4673],[1009,-3870],[7675,9875],[876,-8334],[231,-1520],[6454,7771],[4625,2042],[304,9403],[4335,-8743],[3515,-4944],[4672,8847],[2975,7917],[8514,6945],[3163,758],[1586,1953],[8624,-6693],[7281,9633],[5789,1308],[5861,-6983],[2974,-3908],[7849,-572],[215,-7525]])
| """
:type points: List[List[int]]
:rtype: int
"""
        count = 0
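        # Bucket unordered pairs (i, j) by squared distance. A previously seen
        # pair at the same distance that shares exactly one endpoint with
        # (i, j) yields a common "center" with two equal legs, i.e. 2 ordered
        # boomerangs (equivalently, m equidistant points around a center
        # contribute m*(m-1) tuples).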
data = {}
for i in range(len(points)):
for j in range(i):
distance = (points[i][0]-points[j][0])**2 + (points[i][1]-points[j][1])**2
exts = data.get(distance)
if not exts:
data[distance] = [[i,j]]
else:
for ext in exts:
if ext[0] == i or ext[0] == j or ext[1] == i or ext[1] == j:
                            count += 2
data[distance].append([i,j])
        return count |
tenant.pb.gw.go | // Code generated by protoc-gen-grpc-gateway. DO NOT EDIT.
// source: aserto/registry_tenant/v1/tenant.proto
/*
Package registry_tenant is a reverse proxy.
It translates gRPC into RESTful JSON APIs.
*/
package registry_tenant
import (
"context"
"io"
"net/http"
"github.com/grpc-ecosystem/grpc-gateway/v2/runtime"
"github.com/grpc-ecosystem/grpc-gateway/v2/utilities"
"google.golang.org/grpc"
"google.golang.org/grpc/codes"
"google.golang.org/grpc/grpclog"
"google.golang.org/grpc/metadata"
"google.golang.org/grpc/status"
"google.golang.org/protobuf/proto"
)
// Suppress "imported and not used" errors
var _ codes.Code
var _ io.Reader
var _ status.Status
var _ = runtime.String
var _ = utilities.NewDoubleArray
var _ = metadata.Join
var (
filter_Tenant_ListTenants_0 = &utilities.DoubleArray{Encoding: map[string]int{}, Base: []int(nil), Check: []int(nil)}
)
func request_Tenant_ListTenants_0(ctx context.Context, marshaler runtime.Marshaler, client TenantClient, req *http.Request, pathParams map[string]string) (proto.Message, runtime.ServerMetadata, error) {
var protoReq ListTenantsRequest
var metadata runtime.ServerMetadata
if err := req.ParseForm(); err != nil {
return nil, metadata, status.Errorf(codes.InvalidArgument, "%v", err)
}
if err := runtime.PopulateQueryParameters(&protoReq, req.Form, filter_Tenant_ListTenants_0); err != nil {
return nil, metadata, status.Errorf(codes.InvalidArgument, "%v", err)
}
msg, err := client.ListTenants(ctx, &protoReq, grpc.Header(&metadata.HeaderMD), grpc.Trailer(&metadata.TrailerMD))
return msg, metadata, err
}
func local_request_Tenant_ListTenants_0(ctx context.Context, marshaler runtime.Marshaler, server TenantServer, req *http.Request, pathParams map[string]string) (proto.Message, runtime.ServerMetadata, error) {
var protoReq ListTenantsRequest
var metadata runtime.ServerMetadata
if err := req.ParseForm(); err != nil {
return nil, metadata, status.Errorf(codes.InvalidArgument, "%v", err)
}
if err := runtime.PopulateQueryParameters(&protoReq, req.Form, filter_Tenant_ListTenants_0); err != nil {
return nil, metadata, status.Errorf(codes.InvalidArgument, "%v", err)
}
msg, err := server.ListTenants(ctx, &protoReq)
return msg, metadata, err
}
var (
filter_Tenant_ListPublicTenants_0 = &utilities.DoubleArray{Encoding: map[string]int{}, Base: []int(nil), Check: []int(nil)}
)
func request_Tenant_ListPublicTenants_0(ctx context.Context, marshaler runtime.Marshaler, client TenantClient, req *http.Request, pathParams map[string]string) (proto.Message, runtime.ServerMetadata, error) {
var protoReq ListPublicTenantsRequest
var metadata runtime.ServerMetadata
if err := req.ParseForm(); err != nil {
return nil, metadata, status.Errorf(codes.InvalidArgument, "%v", err)
}
if err := runtime.PopulateQueryParameters(&protoReq, req.Form, filter_Tenant_ListPublicTenants_0); err != nil {
return nil, metadata, status.Errorf(codes.InvalidArgument, "%v", err)
}
msg, err := client.ListPublicTenants(ctx, &protoReq, grpc.Header(&metadata.HeaderMD), grpc.Trailer(&metadata.TrailerMD))
return msg, metadata, err
}
func local_request_Tenant_ListPublicTenants_0(ctx context.Context, marshaler runtime.Marshaler, server TenantServer, req *http.Request, pathParams map[string]string) (proto.Message, runtime.ServerMetadata, error) {
var protoReq ListPublicTenantsRequest
var metadata runtime.ServerMetadata
if err := req.ParseForm(); err != nil {
return nil, metadata, status.Errorf(codes.InvalidArgument, "%v", err)
}
if err := runtime.PopulateQueryParameters(&protoReq, req.Form, filter_Tenant_ListPublicTenants_0); err != nil {
return nil, metadata, status.Errorf(codes.InvalidArgument, "%v", err)
}
msg, err := server.ListPublicTenants(ctx, &protoReq)
return msg, metadata, err
}
// RegisterTenantHandlerServer registers the http handlers for service Tenant to "mux".
// UnaryRPC :call TenantServer directly.
// StreamingRPC :currently unsupported pending https://github.com/grpc/grpc-go/issues/906.
// Note that using this registration option will cause many gRPC library features to stop working. Consider using RegisterTenantHandlerFromEndpoint instead.
func RegisterTenantHandlerServer(ctx context.Context, mux *runtime.ServeMux, server TenantServer) error {
mux.Handle("GET", pattern_Tenant_ListTenants_0, func(w http.ResponseWriter, req *http.Request, pathParams map[string]string) {
ctx, cancel := context.WithCancel(req.Context())
defer cancel()
var stream runtime.ServerTransportStream
ctx = grpc.NewContextWithServerTransportStream(ctx, &stream)
inboundMarshaler, outboundMarshaler := runtime.MarshalerForRequest(mux, req)
rctx, err := runtime.AnnotateIncomingContext(ctx, mux, req, "/aserto.registry_tenant.v1.Tenant/ListTenants", runtime.WithHTTPPathPattern("/api/v1/tenants"))
if err != nil {
runtime.HTTPError(ctx, mux, outboundMarshaler, w, req, err)
return
}
resp, md, err := local_request_Tenant_ListTenants_0(rctx, inboundMarshaler, server, req, pathParams)
md.HeaderMD, md.TrailerMD = metadata.Join(md.HeaderMD, stream.Header()), metadata.Join(md.TrailerMD, stream.Trailer())
ctx = runtime.NewServerMetadataContext(ctx, md)
if err != nil {
runtime.HTTPError(ctx, mux, outboundMarshaler, w, req, err)
return
}
forward_Tenant_ListTenants_0(ctx, mux, outboundMarshaler, w, req, resp, mux.GetForwardResponseOptions()...)
})
mux.Handle("GET", pattern_Tenant_ListPublicTenants_0, func(w http.ResponseWriter, req *http.Request, pathParams map[string]string) {
ctx, cancel := context.WithCancel(req.Context())
defer cancel()
var stream runtime.ServerTransportStream
ctx = grpc.NewContextWithServerTransportStream(ctx, &stream)
inboundMarshaler, outboundMarshaler := runtime.MarshalerForRequest(mux, req)
rctx, err := runtime.AnnotateIncomingContext(ctx, mux, req, "/aserto.registry_tenant.v1.Tenant/ListPublicTenants", runtime.WithHTTPPathPattern("/api/v1/tenants/public"))
if err != nil {
runtime.HTTPError(ctx, mux, outboundMarshaler, w, req, err)
return
}
resp, md, err := local_request_Tenant_ListPublicTenants_0(rctx, inboundMarshaler, server, req, pathParams)
md.HeaderMD, md.TrailerMD = metadata.Join(md.HeaderMD, stream.Header()), metadata.Join(md.TrailerMD, stream.Trailer())
ctx = runtime.NewServerMetadataContext(ctx, md)
if err != nil {
runtime.HTTPError(ctx, mux, outboundMarshaler, w, req, err)
return
}
forward_Tenant_ListPublicTenants_0(ctx, mux, outboundMarshaler, w, req, resp, mux.GetForwardResponseOptions()...)
})
return nil
}
// RegisterTenantHandlerFromEndpoint is same as RegisterTenantHandler but
// automatically dials to "endpoint" and closes the connection when "ctx" gets done.
func RegisterTenantHandlerFromEndpoint(ctx context.Context, mux *runtime.ServeMux, endpoint string, opts []grpc.DialOption) (err error) {
conn, err := grpc.Dial(endpoint, opts...)
if err != nil {
return err
}
defer func() {
if err != nil {
if cerr := conn.Close(); cerr != nil {
grpclog.Infof("Failed to close conn to %s: %v", endpoint, cerr)
}
return
}
go func() {
<-ctx.Done()
if cerr := conn.Close(); cerr != nil {
grpclog.Infof("Failed to close conn to %s: %v", endpoint, cerr)
}
}()
}()
return RegisterTenantHandler(ctx, mux, conn)
}
// RegisterTenantHandler registers the http handlers for service Tenant to "mux".
// The handlers forward requests to the grpc endpoint over "conn".
func RegisterTenantHandler(ctx context.Context, mux *runtime.ServeMux, conn *grpc.ClientConn) error {
return RegisterTenantHandlerClient(ctx, mux, NewTenantClient(conn))
}
// RegisterTenantHandlerClient registers the http handlers for service Tenant
// to "mux". The handlers forward requests to the grpc endpoint over the given implementation of "TenantClient".
// Note: the gRPC framework executes interceptors within the gRPC handler. If the passed in "TenantClient"
// doesn't go through the normal gRPC flow (creating a gRPC client etc.) then it will be up to the passed in
// "TenantClient" to call the correct interceptors.
func RegisterTenantHandlerClient(ctx context.Context, mux *runtime.ServeMux, client TenantClient) error |
var (
pattern_Tenant_ListTenants_0 = runtime.MustPattern(runtime.NewPattern(1, []int{2, 0, 2, 1, 2, 2}, []string{"api", "v1", "tenants"}, ""))
pattern_Tenant_ListPublicTenants_0 = runtime.MustPattern(runtime.NewPattern(1, []int{2, 0, 2, 1, 2, 2, 2, 3}, []string{"api", "v1", "tenants", "public"}, ""))
)
var (
forward_Tenant_ListTenants_0 = runtime.ForwardResponseMessage
forward_Tenant_ListPublicTenants_0 = runtime.ForwardResponseMessage
)
| {
mux.Handle("GET", pattern_Tenant_ListTenants_0, func(w http.ResponseWriter, req *http.Request, pathParams map[string]string) {
ctx, cancel := context.WithCancel(req.Context())
defer cancel()
inboundMarshaler, outboundMarshaler := runtime.MarshalerForRequest(mux, req)
rctx, err := runtime.AnnotateContext(ctx, mux, req, "/aserto.registry_tenant.v1.Tenant/ListTenants", runtime.WithHTTPPathPattern("/api/v1/tenants"))
if err != nil {
runtime.HTTPError(ctx, mux, outboundMarshaler, w, req, err)
return
}
resp, md, err := request_Tenant_ListTenants_0(rctx, inboundMarshaler, client, req, pathParams)
ctx = runtime.NewServerMetadataContext(ctx, md)
if err != nil {
runtime.HTTPError(ctx, mux, outboundMarshaler, w, req, err)
return
}
forward_Tenant_ListTenants_0(ctx, mux, outboundMarshaler, w, req, resp, mux.GetForwardResponseOptions()...)
})
mux.Handle("GET", pattern_Tenant_ListPublicTenants_0, func(w http.ResponseWriter, req *http.Request, pathParams map[string]string) {
ctx, cancel := context.WithCancel(req.Context())
defer cancel()
inboundMarshaler, outboundMarshaler := runtime.MarshalerForRequest(mux, req)
rctx, err := runtime.AnnotateContext(ctx, mux, req, "/aserto.registry_tenant.v1.Tenant/ListPublicTenants", runtime.WithHTTPPathPattern("/api/v1/tenants/public"))
if err != nil {
runtime.HTTPError(ctx, mux, outboundMarshaler, w, req, err)
return
}
resp, md, err := request_Tenant_ListPublicTenants_0(rctx, inboundMarshaler, client, req, pathParams)
ctx = runtime.NewServerMetadataContext(ctx, md)
if err != nil {
runtime.HTTPError(ctx, mux, outboundMarshaler, w, req, err)
return
}
forward_Tenant_ListPublicTenants_0(ctx, mux, outboundMarshaler, w, req, resp, mux.GetForwardResponseOptions()...)
})
return nil
} |
file.go | import (
"fmt"
"os"
"github.com/rename-this/vhs/core"
)
// NewSource creates a new file source.
func NewSource(_ core.Context) (core.Source, error) {
return &source{
streams: make(chan core.InputReader),
}, nil
}
type source struct {
streams chan core.InputReader
}
func (s *source) Init(ctx core.Context) {
defer close(s.streams)
ctx.Logger = ctx.Logger.With().
Str(core.LoggerKeyComponent, "file_source").
Logger()
file, err := os.Open(ctx.FlowConfig.InputFile)
if err != nil {
ctx.Errors <- fmt.Errorf("failed to open %s: %w", ctx.FlowConfig.InputFile, err)
return
}
s.streams <- &fileReader{
file: file,
meta: core.NewMeta(ctx.FlowConfig.InputFile, nil),
}
}
func (s *source) Streams() <-chan core.InputReader {
return s.streams
}
type fileReader struct {
file *os.File
meta *core.Meta
}
func (f *fileReader) Read(p []byte) (int, error) {
return f.file.Read(p)
}
func (f *fileReader) Close() error {
return f.file.Close()
}
func (f *fileReader) Meta() *core.Meta {
return f.meta
} | package file
|
|
filter.go | // Copyright (C) 2017 Google Inc.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
package resolve
import (
"context"
"github.com/google/gapid/gapis/api"
"github.com/google/gapid/gapis/api/sync"
"github.com/google/gapid/gapis/service/path"
)
type filter func(api.CmdID, api.Cmd, *api.GlobalState) bool
func | (ctx context.Context, p *path.Capture, f *path.CommandFilter, sd *sync.Data) (filter, error) {
filters := []filter{
func(id api.CmdID, cmd api.Cmd, s *api.GlobalState) bool {
return !sd.Hidden.Contains(id)
},
}
if f := f.GetContext(); f.IsValid() {
c, err := Context(ctx, p.Context(f.ID()))
if err != nil {
return nil, err
}
filters = append(filters, func(id api.CmdID, cmd api.Cmd, s *api.GlobalState) bool {
if api := cmd.API(); api != nil {
if ctx := api.Context(s, cmd.Thread()); ctx != nil {
return ctx.ID() == c.ID
}
}
return false
})
}
if len(f.GetThreads()) > 0 {
filters = append(filters, func(id api.CmdID, cmd api.Cmd, s *api.GlobalState) bool {
thread := cmd.Thread()
for _, t := range f.Threads {
if t == thread {
return true
}
}
return false
})
}
return func(id api.CmdID, cmd api.Cmd, s *api.GlobalState) bool {
for _, f := range filters {
if !f(id, cmd, s) {
return false
}
}
return true
}, nil
}
| buildFilter |
show.py | import argparse
from packaging.version import Version
from pdm import termui
from pdm.cli.commands.base import BaseCommand
from pdm.exceptions import PdmUsageError
from pdm.models.candidates import Candidate
from pdm.models.project_info import ProjectInfo
from pdm.models.requirements import parse_requirement
from pdm.project import Project
from pdm.utils import normalize_name
def filter_stable(candidate: Candidate) -> bool:
assert candidate.version
return not Version(candidate.version).is_prerelease
class Command(BaseCommand):
"""Show the package information"""
metadata_keys = ["name", "version", "summary", "license", "platform", "keywords"]
def add_arguments(self, parser: argparse.ArgumentParser) -> None:
parser.add_argument(
"package",
type=normalize_name,
nargs=argparse.OPTIONAL, | help="Specify the package name, or show this package if not given",
)
for option in self.metadata_keys:
parser.add_argument(
f"--{option}", action="store_true", help=f"Show {option}"
)
def handle(self, project: Project, options: argparse.Namespace) -> None:
package = options.package
if package:
req = parse_requirement(package)
repository = project.get_repository()
            # reverse the result so that the latest version comes first.
matches = repository.find_candidates(req, True, True)
latest = next(iter(matches), None)
if not latest:
project.core.ui.echo(
termui.yellow(f"No match found for the package {package!r}"),
err=True,
)
return
latest_stable = next(filter(filter_stable, matches), None)
metadata = latest.prepare(project.environment).metadata
else:
if not project.meta.name:
raise PdmUsageError("This project is not a package")
metadata = project.meta
package = normalize_name(metadata.name)
latest_stable = None
assert metadata
project_info = ProjectInfo(metadata)
if any(getattr(options, key, None) for key in self.metadata_keys):
for key in self.metadata_keys:
if getattr(options, key, None):
project.core.ui.echo(project_info[key])
return
installed = project.environment.get_working_set().get(package)
if latest_stable:
project_info.latest_stable_version = str(latest_stable.version)
if installed:
project_info.installed_version = str(installed.version)
project.core.ui.display_columns(list(project_info.generate_rows())) | |
spectralsim.py | #-------------------------------------------------------------------------------
# Name: Spectralsim
# Purpose: Simulation of standard normal random fields
#
# Author: Dr.-Ing. S. Hoerning
#
# Created: 02.05.2018, Centre for Natural Gas, EAIT,
# The University of Queensland, Brisbane, QLD, Australia
#-------------------------------------------------------------------------------
import numpy as np
from . import covariancefunction as covfun
class spectral_random_field(object):
def | (self,
domainsize = (100,100),
covmod = '1.0 Exp(2.)',
periodic = False,
):
self.counter = 0
self.periodic = periodic
# create self.xyz for plotting 3d
if len(domainsize) == 3:
self.xyz = np.mgrid[[slice(0,n,1) for n in domainsize]].reshape(3,-1).T
        # adjust domainsize by cutoff for non-periodic output
self.cutoff = 0
if not self.periodic:
cutoff = covfun.find_maximum_range(covmod)
cutoffs = []
for dim in domainsize:
tsize = dim + cutoff
# find closest multiple of 8 that is larger than tsize
                m8 = int(np.ceil(tsize/8.)*8.)
cutoffs.append(m8 - dim)
self.cutoff = np.array(cutoffs)
self.domainsize = np.array(domainsize)+self.cutoff
self.covmod = covmod
self.ndim = len(self.domainsize)
self.npoints = np.prod(self.domainsize)
self.grid = np.mgrid[[slice(0,n,1) for n in self.domainsize]]
# ensure periodicity of domain
for i in range(self.ndim):
self.domainsize = self.domainsize[:,np.newaxis]
self.grid = np.min((self.grid,np.array(self.domainsize)-self.grid),axis=0)
# compute distances from origin (--> wavenumbers in fourier space)
self.h = ((self.grid**2).sum(axis=0))**0.5
# covariances (in fourier space!!!)
self.Q = covfun.Covariogram(self.h, self.covmod)
# FFT of covariances
self.FFTQ = np.abs(np.fft.fftn(self.Q))
# eigenvalues of decomposition
self.sqrtFFTQ = np.sqrt(self.FFTQ / self.npoints)
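        # sqrtFFTQ holds the square roots of the eigenvalues of the circulant
        # covariance embedding; scaling complex white noise by them in Fourier
        # space and inverse-transforming (see simnew) yields a stationary
        # Gaussian field with covariance Q.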
self.Y = self.simnew()
def simnew(self):
self.counter += 1
# compute random field via inverse fourier transform
real = np.random.standard_normal(size=self.sqrtFFTQ.shape)
imag = np.random.standard_normal(size=self.sqrtFFTQ.shape)
epsilon = real + 1j*imag
rand = epsilon * self.sqrtFFTQ
self.Y = np.real(np.fft.ifftn(rand))*self.npoints
if not self.periodic:
# readjust domainsize to correct size (--> no boundary effects...)
gridslice = [slice(0,(self.domainsize.squeeze()-self.cutoff)[i],1)
for i in range(self.ndim)]
self.Y = self.Y[tuple(gridslice)]
self.Y = self.Y.reshape(self.domainsize.squeeze()-self.cutoff)
return self.Y
# TEST CASE
if __name__ == "__main__":
from mpl_toolkits.mplot3d import Axes3D
import matplotlib.pyplot as plt
domain = (30, 30, 30)
covmod = '1.0 Exp(4.)'
spec = spectral_random_field(domainsize = domain, covmod = covmod)
field3d = spec.simnew()
xyz = np.mgrid[[slice(0 , n, 1) for n in domain]].reshape(3,-1).T
fig = plt.figure()
ax = fig.add_subplot(111, projection='3d')
ax.scatter(xyz[:,0], xyz[:,1], xyz[:,2], c=field3d.flatten())
plt.show()
| __init__ |
Pager.js | import React from 'react';
import { Link } from 'react-router-dom';
function Pager(props) {
const { category } = props;
return (
<div className="pager">
      <div><a href="prev" className="prev">← Prev</a></div>
<ul className='counter'> | <li><Link to={category + '?page=' + 1} >1</Link></li>
<li><Link to={category + '?page=' + 2} >2</Link></li>
<li><Link to={category + '?page=' + 3} >3</Link></li>
<li><Link to={category + '?page=' + 4} >4</Link></li>
<li><Link to={category + '?page=' + 5} >5</Link></li>
<li><Link to={category + '?page=' + 6} >6</Link></li>
<li><Link to={category + '?page=' + 7} >7</Link></li>
<li><Link to={category + '?page=' + 8} >8</Link></li>
<li><Link to={category + '?page=' + 9} >9</Link></li>
</ul>
<div><a href="next" className="next">Next →</a></div>
</div>
)
}
export default Pager; | |
SurveyDetailRoute.js | import { connect } from 'react-redux';
import prepare from 'app/utils/prepare';
import { fetch, shareSurvey, hideSurvey } from 'app/actions/SurveyActions';
import SurveyDetail from './components/SurveyDetail';
import { compose } from 'redux';
import { selectSurveyById } from 'app/reducers/surveys';
import { LoginPage } from 'app/components/LoginForm';
import replaceUnlessLoggedIn from 'app/utils/replaceUnlessLoggedIn';
import loadingIndicator from 'app/utils/loadingIndicator';
import { push } from 'react-router-redux';
const loadData = ({ params: { surveyId } }, dispatch) => |
const mapStateToProps = (state, props) => {
const surveyId = Number(props.params.surveyId);
const survey = selectSurveyById(state, { surveyId });
return {
survey,
surveyId,
actionGrant: survey.actionGrant
};
};
const mapDispatchToProps = {
push,
shareSurvey,
hideSurvey
};
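// Note: compose applies right-to-left, so loadingIndicator wraps SurveyDetail
// first and replaceUnlessLoggedIn(LoginPage) becomes the outermost guard.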
export default compose(
replaceUnlessLoggedIn(LoginPage),
prepare(loadData, ['params.surveyId']),
connect(
mapStateToProps,
mapDispatchToProps
),
loadingIndicator(['survey.questions', 'survey.event'])
)(SurveyDetail); | dispatch(fetch(surveyId)); |
ad_hoc.py | import logging
import os
import time
from parsl.channels import LocalChannel
from parsl.launchers import SimpleLauncher
from parsl.providers.provider_base import ExecutionProvider, JobStatus, JobState
from parsl.providers.error import ScriptPathError
from parsl.utils import RepresentationMixin
logger = logging.getLogger(__name__)
class AdHocProvider(ExecutionProvider, RepresentationMixin):
""" Ad-hoc execution provider
This provider is used to provision execution resources over one or more ad hoc nodes
that are each accessible over a Channel (say, ssh) but otherwise lack a cluster scheduler.
Parameters
----------
    channels : list of Channel objects
Each channel represents a connection to a remote node
worker_init : str
Command to be run before starting a worker, such as 'module load Anaconda; source activate env'.
Since this provider calls the same worker_init across all nodes in the ad-hoc cluster, it is
recommended that a single script is made available across nodes such as ~/setup.sh that can
be invoked.
cmd_timeout : int
Duration for which the provider will wait for a command to be invoked on a remote system.
Defaults to 30s
parallelism : float
Determines the ratio of workers to tasks as managed by the strategy component
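
    Example
    -------
    An illustrative sketch (channel construction depends on your environment)::

        from parsl.channels import SSHChannel

        provider = AdHocProvider(
            channels=[SSHChannel(hostname='node1'), SSHChannel(hostname='node2')],
            worker_init='source ~/setup.sh',
        )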
"""
def __init__(self,
channels=[],
worker_init='',
cmd_timeout=30,
parallelism=1,
move_files=None):
self.channels = channels
self._label = 'ad-hoc'
self.worker_init = worker_init
self.cmd_timeout = cmd_timeout
        self.parallelism = parallelism
self.move_files = move_files
self.launcher = SimpleLauncher()
self.init_blocks = self.min_blocks = self.max_blocks = len(channels)
# This will be overridden by the DFK to the rundirs.
self.script_dir = "."
# In ad-hoc mode, nodes_per_block should be 1
self.nodes_per_block = 1
# Dictionary that keeps track of jobs, keyed on job_id
self.resources = {}
self.least_loaded = self._least_loaded()
logger.debug("AdHoc provider initialized")
def _write_submit_script(self, script_string, script_filename):
'''
Load the template string with config values and write the generated submit script to
a submit script file.
Parameters
----------
script_string: (string)
The template string to be used for the writing submit script
script_filename: (string)
Name of the submit script
Returns
-------
None: on success
Raises
------
ScriptPathError
Unable to write submit script out
'''
try:
with open(script_filename, 'w') as f:
f.write(script_string)
except IOError as e:
logger.error("Failed writing to submit script: %s", script_filename)
            raise ScriptPathError(script_filename, e)
return None
def _least_loaded(self):
""" Find channels that are not in use
Returns
-------
channel : Channel object
None : When there are no more available channels
"""
while True:
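            # Each pass recomputes per-channel load; channels with no RUNNING
            # job are yielded one at a time, and None is yielded when every
            # channel is busy.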
channel_counts = {channel: 0 for channel in self.channels}
for job_id in self.resources:
channel = self.resources[job_id]['channel']
if self.resources[job_id]['status'].state == JobState.RUNNING:
channel_counts[channel] = channel_counts.get(channel, 0) + 1
else:
channel_counts[channel] = channel_counts.get(channel, 0)
logger.debug("Channel_counts : {}".format(channel_counts))
if 0 not in channel_counts.values():
yield None
for channel in channel_counts:
if channel_counts[channel] == 0:
yield channel
def submit(self, command, tasks_per_node, job_name="parsl.adhoc"):
''' Submits the command onto a channel from the list of channels
Submit returns an ID that corresponds to the task that was just submitted.
Parameters
----------
command: (String)
Commandline invocation to be made on the remote side.
tasks_per_node: (int)
command invocations to be launched per node
job_name: (String)
Name of the job. Default : parsl.adhoc
Returns
-------
None
At capacity, cannot provision more
job_id: (string)
Identifier for the job
'''
channel = next(self.least_loaded)
if channel is None:
logger.warning("All Channels in Ad-Hoc provider are in use")
return None
job_name = "{0}.{1}".format(job_name, time.time())
# Set script path
script_path = "{0}/{1}.sh".format(self.script_dir, job_name)
script_path = os.path.abspath(script_path)
wrap_command = self.worker_init + '\n' + self.launcher(command, tasks_per_node, self.nodes_per_block)
self._write_submit_script(wrap_command, script_path)
job_id = None
remote_pid = None
final_cmd = None
if (self.move_files is None and not isinstance(channel, LocalChannel)) or (self.move_files):
logger.debug("Pushing start script")
script_path = channel.push_file(script_path, channel.script_dir)
        # Bash would not return until the streams are closed, so we redirect them to an out file
final_cmd = 'bash {0} > {0}.out 2>&1 & \n echo "PID:$!" '.format(script_path)
retcode, stdout, stderr = channel.execute_wait(final_cmd, self.cmd_timeout)
for line in stdout.split('\n'):
if line.startswith("PID:"):
|
if job_id is None:
logger.warning("Channel failed to start remote command/retrieve PID")
self.resources[job_id] = {'job_id': job_id,
'status': JobStatus(JobState.RUNNING),
'cmd': final_cmd,
'channel': channel,
'remote_pid': remote_pid}
return job_id
def status(self, job_ids):
""" Get status of the list of jobs with job_ids
Parameters
----------
job_ids : list of strings
List of job id strings
Returns
-------
list of JobStatus objects
"""
for job_id in job_ids:
channel = self.resources[job_id]['channel']
status_command = "ps --pid {} | grep {}".format(self.resources[job_id]['job_id'],
self.resources[job_id]['cmd'].split()[0])
retcode, stdout, stderr = channel.execute_wait(status_command)
if retcode != 0 and self.resources[job_id]['status'].state == JobState.RUNNING:
self.resources[job_id]['status'] = JobStatus(JobState.FAILED)
return [self.resources[job_id]['status'] for job_id in job_ids]
def cancel(self, job_ids):
""" Cancel a list of jobs with job_ids
Parameters
----------
job_ids : list of strings
List of job id strings
Returns
-------
list of confirmation bools: [True, False...]
"""
logger.debug("Cancelling jobs: {}".format(job_ids))
rets = []
for job_id in job_ids:
channel = self.resources[job_id]['channel']
cmd = "kill -TERM -$(ps -o pgid= {} | grep -o '[0-9]*')".format(self.resources[job_id]['job_id'])
retcode, stdout, stderr = channel.execute_wait(cmd)
if retcode == 0:
rets.append(True)
else:
rets.append(False)
self.resources[job_id]['status'] = JobStatus(JobState.COMPLETED)
return rets
@property
def scaling_enabled(self):
return True
@property
def label(self):
return self._label
@property
def status_polling_interval(self):
return 10
| remote_pid = line.split("PID:")[1].strip()
job_id = remote_pid |
covid.py | """Custom COVID19 Compartmental model
"""
from ..model import CompartmentalModel
class COVID19(CompartmentalModel):
def | (self,
N,
beta,
incubation_rate = 1/3.7,
recovery_rate_asymptomatic = 1/4.7,
recovery_rate_mild = 1/4.7,
symptoms_to_hospital_rate = 1/5.5,
symptoms_to_icu_rate = 1/7,
proba_severe = 0.071,
proba_asymptomatic = 0.2,
proba_icu = 0.182,
recovery_rate_hospital = 0.046,
recovery_rate_icu = 0.035,
death_rate_hospital = 0.0046,
death_rate_icu = 0.0087,
isolation_ratio = 0.25,
offset = None,
):
"""COVID19 Compartmental Model
Parameters:
Default params are set according to INSERM research paper
"""
params = {
"N":N,
"beta":beta,
"incubation_rate":incubation_rate,
"recovery_rate_asymptomatic":recovery_rate_asymptomatic,
"recovery_rate_mild":recovery_rate_mild,
"recovery_rate_hospital":recovery_rate_hospital,
"recovery_rate_icu":recovery_rate_icu,
"symptoms_to_icu_rate":symptoms_to_icu_rate,
"symptoms_to_hospital_rate":symptoms_to_hospital_rate,
"death_rate_hospital":death_rate_hospital,
"death_rate_icu":death_rate_icu,
"proba_severe":proba_severe,
"proba_asymptomatic":proba_asymptomatic,
"proba_icu":proba_icu,
"isolation_ratio":isolation_ratio,
}
        # Define the compartment names
compartments = ["S","E","Ia","Im","Is","H","ICU","D","R"]
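        # S: susceptible, E: exposed, Ia/Im/Is: asymptomatic/mild/severe
        # infectious, H: hospitalized, ICU: intensive care, D: dead,
        # R: recovered (meanings inferred from the parameter names above)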
super().__init__(compartments,offset = offset,params = params)
# Parameters
self.N = N
self.beta = self._make_beta_parameter(beta)
# Prepare transitions
transitions = {
"S":{
"E":lambda y,t : y["S"] / N * self.beta(y,t) * (y["Ia"]+ isolation_ratio * (y["Im"] + y["Is"]))
},
"E":{
"Ia":lambda y,t : incubation_rate * (proba_asymptomatic) * y["E"],
"Im":lambda y,t : incubation_rate * (1 - proba_asymptomatic - proba_severe) * y["E"],
"Is":lambda y,t : incubation_rate * (proba_severe) * y["E"],
},
"Ia":{
"R":lambda y,t : recovery_rate_asymptomatic * y["Ia"],
            },
            "Im":{
                "R":lambda y,t : recovery_rate_mild * y["Im"],
            },
            "Is":{
                "ICU":lambda y,t : symptoms_to_icu_rate * (proba_icu) * y["Is"],
                "H":lambda y,t : symptoms_to_hospital_rate * (1-proba_icu) * y["Is"],
            },
"ICU":{
"R":lambda y,t : recovery_rate_icu * y["ICU"],
"D":lambda y,t : death_rate_icu * y["ICU"],
},
"H":{
"R":lambda y,t : recovery_rate_hospital * y["H"],
"D":lambda y,t : death_rate_hospital * y["H"],
},
}
# Add transition
self.add_transitions(transitions)
def R0(self, beta):
pa = self.params["proba_asymptomatic"]
ps = self.params["proba_severe"]
proba_icu = self.params["proba_icu"]
recovery_rate_asymptomatic = self.params["recovery_rate_asymptomatic"]
recovery_rate_mild = self.params["recovery_rate_mild"]
recovery_rate_severe = (1-proba_icu) * self.params["symptoms_to_hospital_rate"] + proba_icu * self.params["symptoms_to_icu_rate"]
isolation_ratio = self.params["isolation_ratio"]
return beta * (pa / recovery_rate_asymptomatic + (isolation_ratio * (1-pa-ps) / recovery_rate_mild) + (isolation_ratio * ps / recovery_rate_severe))
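    # Illustrative usage sketch (the solving/plotting API lives on the base
    # CompartmentalModel and is assumed here, not shown in this file):
    #
    #   model = COVID19(N=1_000_000, beta=0.5)
    #   print(model.R0(0.5))  # basic reproduction number for this beta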
| __init__ |
mod.rs | // Copyright (c) The Diem Core Contributors
// Copyright (c) The Move Contributors
// SPDX-License-Identifier: Apache-2.0
mod absint;
pub mod ast;
mod borrows;
pub(crate) mod cfg;
mod constant_fold;
mod eliminate_locals;
mod inline_blocks;
mod liveness;
mod locals;
mod remove_no_ops;
mod simplify_jumps;
pub(crate) mod translate;
use crate::{
expansion::ast::{AbilitySet, ModuleIdent},
hlir::ast::*,
parser::ast::{StructName, Var},
shared::{unique_map::UniqueMap, CompilationEnv},
};
use cfg::*;
use move_ir_types::location::*;
use std::collections::{BTreeMap, BTreeSet};
pub fn refine_inference_and_verify(
compilation_env: &mut CompilationEnv,
struct_declared_abilities: &UniqueMap<ModuleIdent, UniqueMap<StructName, AbilitySet>>,
signature: &FunctionSignature,
acquires: &BTreeMap<StructName, Loc>,
locals: &UniqueMap<Var, SingleType>,
cfg: &mut BlockCFG,
infinite_loop_starts: &BTreeSet<Label>,
) {
liveness::last_usage(compilation_env, locals, cfg, infinite_loop_starts);
let locals_states = locals::verify(
compilation_env,
struct_declared_abilities,
signature,
acquires,
locals,
cfg,
);
liveness::release_dead_refs(&locals_states, locals, cfg, infinite_loop_starts);
borrows::verify(compilation_env, signature, acquires, locals, cfg);
}
pub fn optimize(
signature: &FunctionSignature,
_locals: &UniqueMap<Var, SingleType>,
cfg: &mut BlockCFG,
) {
loop {
let mut changed = false;
changed |= eliminate_locals::optimize(signature, cfg);
changed |= constant_fold::optimize(cfg);
changed |= simplify_jumps::optimize(cfg);
changed |= inline_blocks::optimize(cfg);
if !changed {
break;
}
} | } |
|
object_file_header.rs | //! Generate a header file for the object file produced by the ObjectFile engine.
use super::{generate_c, CStatement, CType};
use wasmer_compiler::{Symbol, SymbolRegistry};
use wasmer_vm::ModuleInfo;
/// Helper functions to simplify the usage of the object file engine.
const HELPER_FUNCTIONS: &str = r#"
wasm_byte_vec_t generate_serialized_data() {
// We need to pass all the bytes as one big buffer so we have to do all this logic to memcpy
// the various pieces together from the generated header file.
//
// We should provide a `deseralize_vectored` function to avoid requiring this extra work.
char* byte_ptr = (char*)&WASMER_METADATA[0];
size_t num_function_pointers
= sizeof(function_pointers) / sizeof(void*);
size_t num_function_trampolines
= sizeof(function_trampolines) / sizeof(void*);
size_t num_dynamic_function_trampoline_pointers
= sizeof(dynamic_function_trampoline_pointers) / sizeof(void*);
size_t buffer_size = module_bytes_len
+ sizeof(size_t) + sizeof(function_pointers)
+ sizeof(size_t) + sizeof(function_trampolines)
+ sizeof(size_t) + sizeof(dynamic_function_trampoline_pointers);
char* memory_buffer = (char*) malloc(buffer_size);
size_t current_offset = 0;
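    // Layout written below (and read back by wasm_module_deserialize):
    // [module bytes][n_fn][function_pointers][n_tramp][function_trampolines]
    // [n_dyn][dynamic_function_trampoline_pointers]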
memcpy(memory_buffer + current_offset, byte_ptr, module_bytes_len);
current_offset += module_bytes_len;
memcpy(memory_buffer + current_offset, (void*)&num_function_pointers, sizeof(size_t));
current_offset += sizeof(size_t);
memcpy(memory_buffer + current_offset, (void*)&function_pointers[0], sizeof(function_pointers));
current_offset += sizeof(function_pointers);
memcpy(memory_buffer + current_offset, (void*)&num_function_trampolines, sizeof(size_t));
current_offset += sizeof(size_t);
memcpy(memory_buffer + current_offset, (void*)&function_trampolines[0], sizeof(function_trampolines));
current_offset += sizeof(function_trampolines);
memcpy(memory_buffer + current_offset, (void*)&num_dynamic_function_trampoline_pointers, sizeof(size_t));
current_offset += sizeof(size_t);
memcpy(memory_buffer + current_offset, (void*)&dynamic_function_trampoline_pointers[0], sizeof(dynamic_function_trampoline_pointers));
current_offset += sizeof(dynamic_function_trampoline_pointers);
wasm_byte_vec_t module_byte_vec = {
.size = buffer_size,
.data = memory_buffer,
};
return module_byte_vec;
}
wasm_module_t* wasmer_object_file_engine_new(wasm_store_t* store, const char* wasm_name) {
// wasm_name intentionally unused for now: will be used in the future.
wasm_byte_vec_t module_byte_vec = generate_serialized_data();
wasm_module_t* module = wasm_module_deserialize(store, &module_byte_vec);
free(module_byte_vec.data);
return module;
}
"#;
/// Generate the header file that goes with the generated object file.
pub fn generate_header_file(
module_info: &ModuleInfo,
symbol_registry: &dyn SymbolRegistry,
metadata_length: usize,
) -> String | {
let mut c_statements = vec![];
c_statements.push(CStatement::LiteralConstant {
value: "#include <stdlib.h>\n#include <string.h>\n\n".to_string(),
});
c_statements.push(CStatement::LiteralConstant {
value: "#ifdef __cplusplus\nextern \"C\" {\n#endif\n\n".to_string(),
});
c_statements.push(CStatement::Declaration {
name: "module_bytes_len".to_string(),
is_extern: false,
is_const: true,
ctype: CType::U32,
definition: Some(Box::new(CStatement::LiteralConstant {
value: metadata_length.to_string(),
})),
});
c_statements.push(CStatement::Declaration {
name: "WASMER_METADATA".to_string(),
is_extern: true,
is_const: true,
ctype: CType::Array {
inner: Box::new(CType::U8),
},
definition: None,
});
let function_declarations = module_info
.functions
.iter()
.filter_map(|(f_index, sig_index)| {
Some((module_info.local_func_index(f_index)?, sig_index))
})
.map(|(function_local_index, _sig_index)| {
let function_name =
symbol_registry.symbol_to_name(Symbol::LocalFunction(function_local_index));
// TODO: figure out the signature here too
CStatement::Declaration {
name: function_name,
is_extern: false,
is_const: false,
ctype: CType::Function {
arguments: vec![CType::Void],
return_value: None,
},
definition: None,
}
});
c_statements.push(CStatement::LiteralConstant {
value: r#"
// Compiled Wasm function pointers ordered by function index: the order they
// appeared in in the Wasm module.
"#
.to_string(),
});
c_statements.extend(function_declarations);
// function pointer array
{
let function_pointer_array_statements = module_info
.functions
.iter()
.filter_map(|(f_index, sig_index)| {
Some((module_info.local_func_index(f_index)?, sig_index))
})
.map(|(function_local_index, _sig_index)| {
let function_name =
symbol_registry.symbol_to_name(Symbol::LocalFunction(function_local_index));
// TODO: figure out the signature here too
CStatement::Cast {
target_type: CType::void_ptr(),
expression: Box::new(CStatement::LiteralConstant {
value: function_name,
}),
}
})
.collect::<Vec<_>>();
c_statements.push(CStatement::Declaration {
name: "function_pointers".to_string(),
is_extern: false,
is_const: true,
ctype: CType::Array {
inner: Box::new(CType::void_ptr()),
},
definition: Some(Box::new(CStatement::LiteralArray {
items: function_pointer_array_statements,
})),
});
}
let func_trampoline_declarations =
module_info
.signatures
.iter()
.map(|(sig_index, _func_type)| {
let function_name =
symbol_registry.symbol_to_name(Symbol::FunctionCallTrampoline(sig_index));
CStatement::Declaration {
name: function_name,
is_extern: false,
is_const: false,
ctype: CType::Function {
arguments: vec![CType::void_ptr(), CType::void_ptr(), CType::void_ptr()],
return_value: None,
},
definition: None,
}
});
c_statements.push(CStatement::LiteralConstant {
value: r#"
// Trampolines (functions by which we can call into Wasm) ordered by signature.
// There is 1 trampoline per function signature in the order they appear in
// the Wasm module.
"#
.to_string(),
});
c_statements.extend(func_trampoline_declarations);
// function trampolines
{
let function_trampoline_statements = module_info
.signatures
.iter()
.map(|(sig_index, _vm_shared_index)| {
let function_name =
symbol_registry.symbol_to_name(Symbol::FunctionCallTrampoline(sig_index));
CStatement::LiteralConstant {
value: function_name,
}
})
.collect::<Vec<_>>();
c_statements.push(CStatement::Declaration {
name: "function_trampolines".to_string(),
is_extern: false,
is_const: true,
ctype: CType::Array {
inner: Box::new(CType::void_ptr()),
},
definition: Some(Box::new(CStatement::LiteralArray {
items: function_trampoline_statements,
})),
});
}
let dyn_func_declarations = module_info
.functions
.keys()
.take(module_info.num_imported_functions)
.map(|func_index| {
let function_name =
symbol_registry.symbol_to_name(Symbol::DynamicFunctionTrampoline(func_index));
// TODO: figure out the signature here
CStatement::Declaration {
name: function_name,
is_extern: false,
is_const: false,
ctype: CType::Function {
arguments: vec![CType::void_ptr(), CType::void_ptr(), CType::void_ptr()],
return_value: None,
},
definition: None,
}
});
c_statements.push(CStatement::LiteralConstant {
value: r#"
// Dynamic trampolines are per-function and are used for each function where
// the type signature is not known statically. In this case, this corresponds to
// the imported functions.
"#
.to_string(),
});
c_statements.extend(dyn_func_declarations);
c_statements.push(CStatement::TypeDef {
source_type: CType::Function {
arguments: vec![CType::void_ptr(), CType::void_ptr(), CType::void_ptr()],
return_value: None,
},
new_name: "dyn_func_trampoline_t".to_string(),
});
// dynamic function trampoline pointer array
{
let dynamic_function_trampoline_statements = module_info
.functions
.keys()
.take(module_info.num_imported_functions)
.map(|func_index| {
let function_name =
symbol_registry.symbol_to_name(Symbol::DynamicFunctionTrampoline(func_index));
CStatement::LiteralConstant {
value: function_name,
}
})
.collect::<Vec<_>>();
c_statements.push(CStatement::Declaration {
name: "dynamic_function_trampoline_pointers".to_string(),
is_extern: false,
is_const: true,
ctype: CType::Array {
inner: Box::new(CType::TypeDef("dyn_func_trampoline_t".to_string())),
},
definition: Some(Box::new(CStatement::LiteralArray {
items: dynamic_function_trampoline_statements,
})),
});
}
c_statements.push(CStatement::LiteralConstant {
value: HELPER_FUNCTIONS.to_string(),
});
c_statements.push(CStatement::LiteralConstant {
value: "\n#ifdef __cplusplus\n}\n#endif\n\n".to_string(),
});
generate_c(&c_statements)
} |
|
test_patchserver.py | """
Tests for PyPoE.poe.patchserver
Overview
===============================================================================
+----------+------------------------------------------------------------------+
| Path | tests/PyPoE/poe/test_patchserver.py |
+----------+------------------------------------------------------------------+
| Version | 1.0.0a0 |
+----------+------------------------------------------------------------------+
| Revision | $Id: f728c391f0d3f70e7cfa1e9837dfcc22ca3a34d2 $ |
+----------+------------------------------------------------------------------+
| Author | Omega_K2 |
+----------+------------------------------------------------------------------+
Description
===============================================================================
Tests for patchserver.py
Agreement
===============================================================================
See PyPoE/LICENSE
TODO
===============================================================================
Testing on live data is difficult, since we can't verify it was downloaded
correctly as the contents of the files may change. Perhaps find a good
candidate for testing.
"""
# =============================================================================
# Imports
# =============================================================================
# Python
import os
import re
from urllib.error import HTTPError
from tempfile import TemporaryDirectory
# 3rd-party
import pytest
# self
from PyPoE.poe import patchserver
# =============================================================================
# Setup
# =============================================================================
_TEST_URL = 'Data/Wordlists.dat'
_re_version = re.compile(r'[\d]+\.[\d]+\.[\d]+\.[\d]+', re.UNICODE)
# =============================================================================
# Fixtures
# =============================================================================
@pytest.fixture(scope='module')
def patch():
return patchserver.Patch()
# =============================================================================
# Tests
# =============================================================================
class TestPatch(object):
| def test_dst_file(self, patch):
with TemporaryDirectory() as temp:
patch.download(
file_path=_TEST_URL,
dst_file=os.path.join(temp, 'test.txt'),
)
def test_dst_dir(self, patch):
with TemporaryDirectory() as temp:
patch.download(
file_path=_TEST_URL,
dst_dir=temp,
)
def test_missing_dst_error(self, patch):
with pytest.raises(ValueError):
patch.download(
file_path=_TEST_URL,
)
def test_file_not_found(self, patch):
with pytest.raises(HTTPError):
patch.download_raw(
file_path='THIS_SHOULD_NOT_EXIST.FILE',
)
def test_version(self, patch):
assert _re_version.match(patch.version) is not None, 'patch.version ' \
'result is expected to match the x.x.x.x format' |
|
saba.py | # encoding=utf8
import logging
import numpy as np
from niapy.algorithms.algorithm import Algorithm
logging.basicConfig()
logger = logging.getLogger('niapy.algorithms.modified')
logger.setLevel('INFO')
__all__ = ['AdaptiveBatAlgorithm', 'SelfAdaptiveBatAlgorithm']
class | (Algorithm):
r"""Implementation of Adaptive bat algorithm.
Algorithm:
Adaptive bat algorithm
Date:
April 2019
Authors:
Klemen Berkovič
License:
MIT
Attributes:
Name (List[str]): List of strings representing algorithm name.
epsilon (float): Scaling factor.
alpha (float): Constant for updating loudness.
pulse_rate (float): Pulse rate.
min_frequency (float): Minimum frequency.
max_frequency (float): Maximum frequency.
See Also:
* :class:`niapy.algorithms.Algorithm`
"""
Name = ['AdaptiveBatAlgorithm', 'ABA']
@staticmethod
def info():
r"""Get basic information about the algorithm.
Returns:
str: Basic information.
See Also:
* :func:`niapy.algorithms.Algorithm.info`
"""
return r"""TODO"""
def __init__(self, population_size=100, starting_loudness=0.5, epsilon=0.001, alpha=1.0, pulse_rate=0.5,
min_frequency=0.0, max_frequency=2.0, *args, **kwargs):
"""Initialize AdaptiveBatAlgorithm.
Args:
population_size (Optional[int]): Population size.
starting_loudness (Optional[float]): Starting loudness.
epsilon (Optional[float]): Scaling factor.
alpha (Optional[float]): Constant for updating loudness.
pulse_rate (Optional[float]): Pulse rate.
min_frequency (Optional[float]): Minimum frequency.
max_frequency (Optional[float]): Maximum frequency.
See Also:
* :func:`niapy.algorithms.Algorithm.__init__`
"""
super().__init__(population_size, *args, **kwargs)
self.starting_loudness = starting_loudness
self.epsilon = epsilon
self.alpha = alpha
self.pulse_rate = pulse_rate
self.min_frequency = min_frequency
self.max_frequency = max_frequency
def set_parameters(self, population_size=100, starting_loudness=0.5, epsilon=0.001, alpha=1.0, pulse_rate=0.5,
min_frequency=0.0, max_frequency=2.0, **kwargs):
r"""Set the parameters of the algorithm.
Args:
population_size (Optional[int]): Population size.
starting_loudness (Optional[float]): Starting loudness.
epsilon (Optional[float]): Scaling factor.
alpha (Optional[float]): Constant for updating loudness.
pulse_rate (Optional[float]): Pulse rate.
min_frequency (Optional[float]): Minimum frequency.
max_frequency (Optional[float]): Maximum frequency.
See Also:
* :func:`niapy.algorithms.Algorithm.set_parameters`
"""
super().set_parameters(population_size=population_size, **kwargs)
self.starting_loudness = starting_loudness
self.epsilon = epsilon
self.alpha = alpha
self.pulse_rate = pulse_rate
self.min_frequency = min_frequency
self.max_frequency = max_frequency
def get_parameters(self):
r"""Get algorithm parameters.
Returns:
Dict[str, Any]: Arguments values.
See Also:
* :func:`niapy.algorithms.algorithm.Algorithm.get_parameters`
"""
d = super().get_parameters()
d.update({
'starting_loudness': self.starting_loudness,
'epsilon': self.epsilon,
'alpha': self.alpha,
'pulse_rate': self.pulse_rate,
'min_frequency': self.min_frequency,
'max_frequency': self.max_frequency
})
return d
def init_population(self, task):
r"""Initialize the starting population.
Args:
task (Task): Optimization task
Returns:
Tuple[numpy.ndarray, numpy.ndarray[float], Dict[str, Any]]:
1. New population.
2. New population fitness/function values.
3. Additional arguments:
* loudness (float): Loudness.
* velocities (numpy.ndarray[float]): Velocity.
See Also:
* :func:`niapy.algorithms.Algorithm.init_population`
"""
population, fitness, d = super().init_population(task)
loudness = np.full(self.population_size, self.starting_loudness)
velocities = np.zeros((self.population_size, task.dimension))
d.update({'loudness': loudness, 'velocities': velocities})
return population, fitness, d
def local_search(self, best, loudness, task, **kwargs):
        r"""Improve the best solution according to Yang (2010).
Args:
best (numpy.ndarray): Global best individual.
loudness (float): Loudness.
task (Task): Optimization task.
Returns:
numpy.ndarray: New solution based on global best individual.
"""
return task.repair(best + self.epsilon * loudness * self.normal(0, 1, task.dimension), rng=self.rng)
def update_loudness(self, loudness):
r"""Update loudness when the prey is found.
Args:
loudness (float): Loudness.
Returns:
float: New loudness.
"""
new_loudness = loudness * self.alpha
return new_loudness if new_loudness > 1e-13 else self.starting_loudness
def run_iteration(self, task, population, population_fitness, best_x, best_fitness, **params):
r"""Core function of Bat Algorithm.
Args:
task (Task): Optimization task.
population (numpy.ndarray): Current population
population_fitness (numpy.ndarray[float]): Current population fitness/function values
best_x (numpy.ndarray): Current best individual
best_fitness (float): Current best individual function/fitness value
params (Dict[str, Any]): Additional algorithm arguments
Returns:
            Tuple[numpy.ndarray, numpy.ndarray, numpy.ndarray, float, Dict[str, Any]]:
                1. New population.
                2. New population fitness/function values.
                3. New global best solution.
                4. New global best solution fitness/objective value.
                5. Additional arguments:
                    * loudness (numpy.ndarray[float]): Loudness.
                    * velocities (numpy.ndarray[float]): Velocities.
"""
loudness = params.pop('loudness')
velocities = params.pop('velocities')
for i in range(self.population_size):
frequency = self.min_frequency + (self.max_frequency - self.min_frequency) * self.random()
velocities[i] += (population[i] - best_x) * frequency
if self.random() > self.pulse_rate:
                solution = self.local_search(best=best_x, loudness=loudness[i], task=task, i=i, population=population)
else:
solution = task.repair(population[i] + velocities[i], rng=self.rng)
new_fitness = task.eval(solution)
if (new_fitness <= population_fitness[i]) and (self.random() < loudness[i]):
population[i], population_fitness[i] = solution, new_fitness
if new_fitness <= best_fitness:
best_x, best_fitness, loudness[i] = solution.copy(), new_fitness, self.update_loudness(loudness[i])
return population, population_fitness, best_x, best_fitness, {'loudness': loudness, 'velocities': velocities}
class SelfAdaptiveBatAlgorithm(AdaptiveBatAlgorithm):
r"""Implementation of Hybrid bat algorithm.
Algorithm:
Self Adaptive Bat Algorithm
Date:
April 2019
Author:
Klemen Berkovič
License:
MIT
Reference paper:
Fister Jr., Iztok and Fister, Dusan and Yang, Xin-She. "A Hybrid Bat Algorithm". Elektrotehniški vestnik, 2013. 1-7.
Attributes:
Name (List[str]): List of strings representing algorithm name.
        min_loudness (Optional[float]): Lower limit of loudness.
        max_loudness (Optional[float]): Upper limit of loudness.
        min_pulse_rate (Optional[float]): Lower limit of pulse rate.
        max_pulse_rate (Optional[float]): Upper limit of pulse rate.
tao_1 (Optional[float]): Learning rate for loudness.
tao_2 (Optional[float]): Learning rate for pulse rate.
See Also:
* :class:`niapy.algorithms.basic.BatAlgorithm`
"""
Name = ['SelfAdaptiveBatAlgorithm', 'SABA']
@staticmethod
def info():
r"""Get basic information about the algorithm.
Returns:
str: Basic information.
See Also:
* :func:`niapy.algorithms.Algorithm.info`
"""
return r"""Fister Jr., Iztok and Fister, Dusan and Yang, Xin-She. "A Hybrid Bat Algorithm". Elektrotehniški vestnik, 2013. 1-7."""
def __init__(self, min_loudness=0.9, max_loudness=1.0, min_pulse_rate=0.001, max_pulse_rate=0.1, tao_1=0.1,
tao_2=0.1, *args, **kwargs):
"""Initialize SelfAdaptiveBatAlgorithm.
Args:
min_loudness (Optional[float]): Lower limit of loudness.
max_loudness (Optional[float]): Upper limit of loudness.
min_pulse_rate (Optional[float]): Lower limit of pulse rate.
max_pulse_rate (Optional[float]): Upper limit of pulse rate.
tao_1 (Optional[float]): Learning rate for loudness.
tao_2 (Optional[float]): Learning rate for pulse rate.
See Also:
* :func:`niapy.algorithms.modified.AdaptiveBatAlgorithm.__init__`
"""
super().__init__(*args, **kwargs)
self.min_loudness = min_loudness
self.max_loudness = max_loudness
self.min_pulse_rate = min_pulse_rate
self.max_pulse_rate = max_pulse_rate
self.tao_1 = tao_1
self.tao_2 = tao_2
def set_parameters(self, min_loudness=0.9, max_loudness=1.0, min_pulse_rate=0.001, max_pulse_rate=0.1, tao_1=0.1, tao_2=0.1, **kwargs):
r"""Set core parameters of HybridBatAlgorithm algorithm.
Args:
min_loudness (Optional[float]): Lower limit of loudness.
max_loudness (Optional[float]): Upper limit of loudness.
min_pulse_rate (Optional[float]): Lower limit of pulse rate.
max_pulse_rate (Optional[float]): Upper limit of pulse rate.
tao_1 (Optional[float]): Learning rate for loudness.
tao_2 (Optional[float]): Learning rate for pulse rate.
See Also:
* :func:`niapy.algorithms.modified.AdaptiveBatAlgorithm.set_parameters`
"""
super().set_parameters(**kwargs)
self.min_loudness = min_loudness
self.max_loudness = max_loudness
self.min_pulse_rate = min_pulse_rate
self.max_pulse_rate = max_pulse_rate
self.tao_1 = tao_1
self.tao_2 = tao_2
def get_parameters(self):
r"""Get parameters of the algorithm.
Returns:
Dict[str, Any]: Parameters of the algorithm.
See Also:
* :func:`niapy.algorithms.modified.AdaptiveBatAlgorithm.get_parameters`
"""
        d = super().get_parameters()
d.update({
'min_loudness': self.min_loudness,
'max_loudness': self.max_loudness,
'min_pulse_rate': self.min_pulse_rate,
'max_pulse_rate': self.max_pulse_rate,
'tao_1': self.tao_1,
'tao_2': self.tao_2
})
return d
def init_population(self, task):
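        r"""Initialize the starting population and per-individual pulse rates.

        See Also:
            * :func:`niapy.algorithms.modified.AdaptiveBatAlgorithm.init_population`
        """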
population, fitness, d = super().init_population(task)
pulse_rates = np.full(self.population_size, self.pulse_rate)
d.update({'pulse_rates': pulse_rates})
return population, fitness, d
def self_adaptation(self, loudness, pulse_rate):
r"""Adaptation step.
Args:
loudness (float): Current loudness.
pulse_rate (float): Current pulse rate.
Returns:
Tuple[float, float]:
1. New loudness.
                2. New pulse rate.
"""
        if self.random() < self.tao_1:
            loudness = self.min_loudness + self.random() * (self.max_loudness - self.min_loudness)
        if self.random() < self.tao_2:
            pulse_rate = self.min_pulse_rate + self.random() * (self.max_pulse_rate - self.min_pulse_rate)
        return loudness, pulse_rate
def run_iteration(self, task, population, population_fitness, best_x, best_fitness, **params):
r"""Core function of Bat Algorithm.
Args:
task (Task): Optimization task.
population (numpy.ndarray): Current population
population_fitness (numpy.ndarray[float]): Current population fitness/function values
best_x (numpy.ndarray): Current best individual
best_fitness (float): Current best individual function/fitness value
params (Dict[str, Any]): Additional algorithm arguments
Returns:
            Tuple[numpy.ndarray, numpy.ndarray, numpy.ndarray, float, Dict[str, Any]]:
                1. New population.
                2. New population fitness/function values.
                3. New global best solution.
                4. New global best solution fitness/objective value.
                5. Additional arguments:
                    * loudness (numpy.ndarray[float]): Loudness.
                    * pulse_rates (numpy.ndarray[float]): Pulse rate.
                    * velocities (numpy.ndarray[float]): Velocities.
"""
loudness = params.pop('loudness')
pulse_rates = params.pop('pulse_rates')
velocities = params.pop('velocities')
for i in range(self.population_size):
loudness[i], pulse_rates[i] = self.self_adaptation(loudness[i], pulse_rates[i])
frequency = self.min_frequency + (self.max_frequency - self.min_frequency) * self.random()
velocities[i] += (population[i] - best_x) * frequency
if self.random() > pulse_rates[i]:
solution = self.local_search(best=best_x, loudness=loudness[i], task=task, i=i, population=population)
else:
solution = task.repair(population[i] + velocities[i], rng=self.rng)
new_fitness = task.eval(solution)
if (new_fitness <= population_fitness[i]) and (self.random() < (self.min_loudness - loudness[i]) / self.starting_loudness):
population[i], population_fitness[i] = solution, new_fitness
if new_fitness <= best_fitness:
best_x, best_fitness = solution.copy(), new_fitness
return population, population_fitness, best_x, best_fitness, {'loudness': loudness, 'pulse_rates': pulse_rates, 'velocities': velocities}
| AdaptiveBatAlgorithm |
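# --- Illustrative usage sketch (added; not part of the original sample) ---
# A minimal way one might run the SelfAdaptiveBatAlgorithm defined above. It
# assumes the niapy 2.x API (niapy.task.Task, niapy.problems.Sphere); exact
# import paths may differ between versions.
#
#     from niapy.task import Task
#     from niapy.problems import Sphere
#
#     algorithm = SelfAdaptiveBatAlgorithm(population_size=40)
#     task = Task(problem=Sphere(dimension=10), max_iters=200)
#     best_x, best_fitness = algorithm.run(task)  # run() drives run_iteration()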
util.d.ts | export declare function ipfsHashToHexMultiHash(ipfsHash: string): MultiHash;
export declare function hexMultiHashToIpfsHash(multiHash: MultiHash): string; | import { MultiHash } from './types'; |
|
Base.py | from abc import ABCMeta, abstractmethod
from ..utils.activations import *
class NetworkBase(metaclass=ABCMeta):
def __init__(self, sizes, activation, last_layer, **kwargs):
self.sizes = sizes
self.num_layers = len(sizes)
if activation.lower() == "sigmoid":
self.activation = Sigmoid()
# self.activation_derivative = sigmoid_derivative
elif activation.lower() == "relu":
self.activation = ReLU()
# self.activation_derivative = relu_derivative
elif activation.lower() == "tanh":
self.activation = Tanh()
elif activation.lower() == "softplus":
self.activation = Softplus()
elif activation.lower() == "leaky_relu" or "leakyrelu":
if "alpha" in kwargs:
self.activation = LeakyReLU(kwargs.get("alpha"))
else:
self.activation = LeakyReLU()
elif activation.lower() == "elu":
if "alpha" in kwargs:
self.activation = ELU(kwargs.get("alpha"))
else:
self.activation = ELU()
elif activation.lower() == "selu": | if last_layer.lower() == "softmax":
self.last_layer = Softmax()
@abstractmethod
def predict(self):
raise NotImplementedError
@abstractmethod
def backprop(self):
raise NotImplementedError | self.activation = Selu()
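# Illustrative sketch (added; not part of the original sample): NetworkBase is
# abstract, so a concrete network must supply predict/backprop, e.g.
#
#     class MLP(NetworkBase):
#         def predict(self, x): ...
#         def backprop(self, x, y): ...
#
#     net = MLP([784, 128, 10], activation="leaky_relu", last_layer="softmax", alpha=0.01)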
|
evaluate_application_response.go | package evaluateapplication
import (
i878a80d2330e89d26896388a3f487eef27b0a0e6c010c493bf80be1452208f91 "github.com/microsoft/kiota-abstractions-go/serialization"
i084fa7ab3bba802bf5cc3b408e230cc64c167a57976e0d42c37e17154afd5b78 "github.com/microsoftgraph/msgraph-beta-sdk-go/models/security"
)
// EvaluateApplicationResponse provides operations to call the evaluateApplication method.
type EvaluateApplicationResponse struct {
// Stores additional data not described in the OpenAPI description found when deserializing. Can be used for serialization as well.
additionalData map[string]interface{}
// The value property
value []i084fa7ab3bba802bf5cc3b408e230cc64c167a57976e0d42c37e17154afd5b78.InformationProtectionActionable
}
// NewEvaluateApplicationResponse instantiates a new evaluateApplicationResponse and sets the default values.
func NewEvaluateApplicationResponse()(*EvaluateApplicationResponse) |
// CreateEvaluateApplicationResponseFromDiscriminatorValue creates a new instance of the appropriate class based on discriminator value
func CreateEvaluateApplicationResponseFromDiscriminatorValue(parseNode i878a80d2330e89d26896388a3f487eef27b0a0e6c010c493bf80be1452208f91.ParseNode)(i878a80d2330e89d26896388a3f487eef27b0a0e6c010c493bf80be1452208f91.Parsable, error) {
return NewEvaluateApplicationResponse(), nil
}
// GetAdditionalData gets the additionalData property value. Stores additional data not described in the OpenAPI description found when deserializing. Can be used for serialization as well.
func (m *EvaluateApplicationResponse) GetAdditionalData()(map[string]interface{}) {
if m == nil {
return nil
} else {
return m.additionalData
}
}
// GetFieldDeserializers the deserialization information for the current model
func (m *EvaluateApplicationResponse) GetFieldDeserializers()(map[string]func(i878a80d2330e89d26896388a3f487eef27b0a0e6c010c493bf80be1452208f91.ParseNode)(error)) {
res := make(map[string]func(i878a80d2330e89d26896388a3f487eef27b0a0e6c010c493bf80be1452208f91.ParseNode)(error))
res["value"] = func (n i878a80d2330e89d26896388a3f487eef27b0a0e6c010c493bf80be1452208f91.ParseNode) error {
val, err := n.GetCollectionOfObjectValues(i084fa7ab3bba802bf5cc3b408e230cc64c167a57976e0d42c37e17154afd5b78.CreateInformationProtectionActionFromDiscriminatorValue)
if err != nil {
return err
}
if val != nil {
res := make([]i084fa7ab3bba802bf5cc3b408e230cc64c167a57976e0d42c37e17154afd5b78.InformationProtectionActionable, len(val))
for i, v := range val {
res[i] = v.(i084fa7ab3bba802bf5cc3b408e230cc64c167a57976e0d42c37e17154afd5b78.InformationProtectionActionable)
}
m.SetValue(res)
}
return nil
}
return res
}
// GetValue gets the value property value. The value property
func (m *EvaluateApplicationResponse) GetValue()([]i084fa7ab3bba802bf5cc3b408e230cc64c167a57976e0d42c37e17154afd5b78.InformationProtectionActionable) {
if m == nil {
return nil
} else {
return m.value
}
}
// Serialize serializes information the current object
func (m *EvaluateApplicationResponse) Serialize(writer i878a80d2330e89d26896388a3f487eef27b0a0e6c010c493bf80be1452208f91.SerializationWriter)(error) {
if m.GetValue() != nil {
cast := make([]i878a80d2330e89d26896388a3f487eef27b0a0e6c010c493bf80be1452208f91.Parsable, len(m.GetValue()))
for i, v := range m.GetValue() {
cast[i] = v.(i878a80d2330e89d26896388a3f487eef27b0a0e6c010c493bf80be1452208f91.Parsable)
}
err := writer.WriteCollectionOfObjectValues("value", cast)
if err != nil {
return err
}
}
{
err := writer.WriteAdditionalData(m.GetAdditionalData())
if err != nil {
return err
}
}
return nil
}
// SetAdditionalData sets the additionalData property value. Stores additional data not described in the OpenAPI description found when deserializing. Can be used for serialization as well.
func (m *EvaluateApplicationResponse) SetAdditionalData(value map[string]interface{})() {
if m != nil {
m.additionalData = value
}
}
// SetValue sets the value property value. The value property
func (m *EvaluateApplicationResponse) SetValue(value []i084fa7ab3bba802bf5cc3b408e230cc64c167a57976e0d42c37e17154afd5b78.InformationProtectionActionable)() {
if m != nil {
m.value = value
}
}
| {
m := &EvaluateApplicationResponse{
}
m.SetAdditionalData(make(map[string]interface{}));
return m
} |
vesting_state.rs | // Copyright 2019-2022 ChainSafe Systems
// SPDX-License-Identifier: Apache-2.0, MIT
use std::{iter, mem};
use fvm_ipld_encoding::tuple::*;
use fvm_shared::bigint::{bigint_ser, Integer};
use fvm_shared::clock::{ChainEpoch, QuantSpec};
use fvm_shared::econ::TokenAmount;
use itertools::{EitherOrBoth, Itertools};
use num_traits::Zero;
use super::VestSpec;
// Represents miner funds that will vest at the given epoch.
#[derive(Serialize_tuple, Deserialize_tuple)]
pub struct VestingFund {
pub epoch: ChainEpoch,
#[serde(with = "bigint_ser")]
pub amount: TokenAmount,
}
/// Represents the vesting table state for the miner.
/// It is a slice of (VestingEpoch, VestingAmount).
/// The slice will always be sorted by the VestingEpoch.
#[derive(Serialize_tuple, Deserialize_tuple, Default)]
pub struct VestingFunds {
pub funds: Vec<VestingFund>,
}
impl VestingFunds {
pub fn new() -> Self {
Default::default()
}
pub fn unlock_vested_funds(&mut self, current_epoch: ChainEpoch) -> TokenAmount {
// TODO: the funds are sorted by epoch, so we could do a binary search here
let i = self
.funds
.iter()
.position(|fund| fund.epoch >= current_epoch)
.unwrap_or(self.funds.len());
self.funds.drain(..i).map(|fund| fund.amount).sum()
}
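    // Illustrative sketch (added; not in the original): since `funds` stays
    // sorted by epoch, the linear scan above could become a binary search, as
    // the TODO suggests; `partition_point` is stable since Rust 1.52:
    //
    //     let i = self.funds.partition_point(|fund| fund.epoch < current_epoch);
    //     self.funds.drain(..i).map(|fund| fund.amount).sum()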
pub fn add_locked_funds(
&mut self,
current_epoch: ChainEpoch,
vesting_sum: &TokenAmount,
proving_period_start: ChainEpoch,
spec: &VestSpec,
) |
pub fn unlock_unvested_funds(
&mut self,
current_epoch: ChainEpoch,
target: &TokenAmount,
) -> TokenAmount {
let mut amount_unlocked = TokenAmount::from(0);
let mut last = None;
let mut start = 0;
for (i, vf) in self.funds.iter_mut().enumerate() {
if &amount_unlocked >= target {
break;
}
if vf.epoch >= current_epoch {
let unlock_amount = std::cmp::min(target - &amount_unlocked, vf.amount.clone());
amount_unlocked += &unlock_amount;
let new_amount = &vf.amount - &unlock_amount;
if new_amount.is_zero() {
last = Some(i);
} else {
vf.amount = new_amount;
}
} else {
start = i + 1;
}
}
if let Some(end) = last {
self.funds.drain(start..=end);
}
amount_unlocked
}
}
| {
// Quantization is aligned with when regular cron will be invoked, in the last epoch of deadlines.
let vest_begin = current_epoch + spec.initial_delay; // Nothing unlocks here, this is just the start of the clock.
let vest_period = TokenAmount::from(spec.vest_period);
let mut vested_so_far = TokenAmount::zero();
let mut epoch = vest_begin;
// Create an iterator for the vesting schedule we're going to "join" with the current
// vesting schedule.
let new_funds = iter::from_fn(|| {
if vested_so_far >= *vesting_sum {
return None;
}
epoch += spec.step_duration;
let vest_epoch = QuantSpec { unit: spec.quantization, offset: proving_period_start }
.quantize_up(epoch);
let elapsed = vest_epoch - vest_begin;
let target_vest = if elapsed < spec.vest_period {
// Linear vesting
(vesting_sum * elapsed).div_floor(&vest_period)
} else {
vesting_sum.clone()
};
let vest_this_time = &target_vest - &vested_so_far;
vested_so_far = target_vest;
Some(VestingFund { epoch: vest_epoch, amount: vest_this_time })
});
// Take the old funds array and replace it with a new one.
let funds_len = self.funds.len();
let old_funds = mem::replace(&mut self.funds, Vec::with_capacity(funds_len));
// Fill back in the funds array, merging existing and new schedule.
self.funds.extend(
old_funds.into_iter().merge_join_by(new_funds, |a, b| a.epoch.cmp(&b.epoch)).map(
|item| match item {
EitherOrBoth::Left(a) => a,
EitherOrBoth::Right(b) => b,
EitherOrBoth::Both(a, b) => {
VestingFund { epoch: a.epoch, amount: a.amount + b.amount }
}
},
),
);
} |
experimental.go | /*
Copyright 2015 The Kubernetes Authors All rights reserved.
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*/
package unversioned
import (
"encoding/json"
"fmt"
"strings"
"k8s.io/kubernetes/pkg/api"
"k8s.io/kubernetes/pkg/api/latest"
"k8s.io/kubernetes/pkg/version"
)
// Interface holds the experimental methods for clients of Kubernetes
// to allow mock testing.
// Experimental features are not supported and may be changed or removed in
// incompatible ways at any time.
type ExperimentalInterface interface {
VersionInterface
HorizontalPodAutoscalersNamespacer
ScaleNamespacer
DaemonSetsNamespacer
DeploymentsNamespacer
JobsNamespacer
}
// ExperimentalClient is used to interact with experimental Kubernetes features.
// Experimental features are not supported and may be changed or removed in
// incompatible ways at any time.
type ExperimentalClient struct {
*RESTClient
}
// ServerVersion retrieves and parses the server's version.
func (c *ExperimentalClient) ServerVersion() (*version.Info, error) {
body, err := c.Get().AbsPath("/version").Do().Raw()
if err != nil {
return nil, err
}
var info version.Info
err = json.Unmarshal(body, &info)
if err != nil {
return nil, fmt.Errorf("got '%s': %v", string(body), err)
}
return &info, nil
}
// ServerAPIVersions retrieves and parses the list of experimental API versions the
// server supports.
func (c *ExperimentalClient) ServerAPIVersions() (*api.APIVersions, error) {
body, err := c.Get().UnversionedPath("").Do().Raw()
if err != nil {
return nil, err
}
var v api.APIVersions
err = json.Unmarshal(body, &v)
if err != nil {
return nil, fmt.Errorf("got '%s': %v", string(body), err)
}
return &v, nil
}
func (c *ExperimentalClient) HorizontalPodAutoscalers(namespace string) HorizontalPodAutoscalerInterface {
return newHorizontalPodAutoscalers(c, namespace)
}
func (c *ExperimentalClient) Scales(namespace string) ScaleInterface {
return newScales(c, namespace)
}
func (c *ExperimentalClient) DaemonSets(namespace string) DaemonSetInterface {
return newDaemonSets(c, namespace)
}
func (c *ExperimentalClient) Deployments(namespace string) DeploymentInterface {
return newDeployments(c, namespace)
}
func (c *ExperimentalClient) Jobs(namespace string) JobInterface {
return newJobs(c, namespace)
}
// NewExperimental creates a new ExperimentalClient for the given config. This client
// provides access to experimental Kubernetes features.
// Experimental features are not supported and may be changed or removed in
// incompatible ways at any time.
func NewExperimental(c *Config) (*ExperimentalClient, error) {
config := *c
if err := setExperimentalDefaults(&config); err != nil {
return nil, err
}
client, err := RESTClientFor(&config)
if err != nil {
return nil, err
}
return &ExperimentalClient{client}, nil
}
// NewExperimentalOrDie creates a new ExperimentalClient for the given config and
// panics if there is an error in the config.
// Experimental features are not supported and may be changed or removed in
// incompatible ways at any time.
func NewExperimentalOrDie(c *Config) *ExperimentalClient {
client, err := NewExperimental(c)
if err != nil {
panic(err)
}
return client
}
func setExperimentalDefaults(config *Config) error {
// if experimental group is not registered, return an error
g, err := latest.Group("experimental")
if err != nil {
return err
}
config.Prefix = "apis/"
if config.UserAgent == "" {
config.UserAgent = DefaultKubernetesUserAgent()
}
// TODO: Unconditionally set the config.Version, until we fix the config.
//if config.Version == "" {
config.Version = g.GroupVersion
//}
versionInterfaces, err := g.InterfacesFor(config.Version)
if err != nil {
return fmt.Errorf("Experimental API version '%s' is not recognized (valid values: %s)",
config.Version, strings.Join(latest.GroupOrDie("experimental").Versions, ", ")) | }
if config.Burst == 0 {
config.Burst = 10
}
return nil
} | }
config.Codec = versionInterfaces.Codec
if config.QPS == 0 {
config.QPS = 5 |
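// Illustrative usage sketch (added; not part of the original sample): building
// the experimental client from a Config; the host value is a placeholder.
//
//     cfg := &unversioned.Config{Host: "http://localhost:8080"}
//     expClient, err := unversioned.NewExperimental(cfg)
//     if err != nil {
//         panic(err)
//     }
//     scales := expClient.Scales("default")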
MyUOW.ts | import { inject, injectable } from 'inversify';
import { TypeOrmUnitOfWork, RepositoryMap } from '../../../src/adapters/UnitOfWork';
import { MyAggregateRepository } from '../repositories/MyAggregateRepository';
import { myRepos, myTypes } from '../config';
@injectable()
export class MyUOW extends TypeOrmUnitOfWork { | }
} | public constructor(@inject(myTypes.MyAggregateRepository) myAggregates: MyAggregateRepository) {
const repositories: RepositoryMap = {};
repositories[myRepos.myAggregates] = myAggregates;
super(repositories); |
default_context.rs | use crate::prelude::*;
use nu_engine::whole_stream_command;
use std::error::Error;
pub fn create_default_context(interactive: bool) -> Result<EvaluationContext, Box<dyn Error>> | {
let context = EvaluationContext::basic();
{
use crate::commands::*;
context.add_commands(vec![
// Fundamentals
whole_stream_command(NuPlugin),
whole_stream_command(Let),
whole_stream_command(LetEnv),
whole_stream_command(UnletEnv),
whole_stream_command(LoadEnv),
whole_stream_command(Def),
whole_stream_command(Source),
whole_stream_command(Alias),
whole_stream_command(Ignore),
// System/file operations
whole_stream_command(Exec),
whole_stream_command(Pwd),
whole_stream_command(Ls),
whole_stream_command(Du),
whole_stream_command(Cd),
whole_stream_command(Remove),
whole_stream_command(Open),
whole_stream_command(Pathvar),
whole_stream_command(PathvarAdd),
whole_stream_command(PathvarRemove),
whole_stream_command(PathvarReset),
whole_stream_command(PathvarAppend),
whole_stream_command(PathvarSave),
whole_stream_command(Config),
whole_stream_command(ConfigGet),
whole_stream_command(ConfigSet),
whole_stream_command(ConfigSetInto),
whole_stream_command(ConfigClear),
whole_stream_command(ConfigRemove),
whole_stream_command(ConfigPath),
whole_stream_command(Help),
whole_stream_command(History),
whole_stream_command(Save),
whole_stream_command(Touch),
whole_stream_command(Cpy),
whole_stream_command(Date),
whole_stream_command(DateListTimeZone),
whole_stream_command(DateNow),
whole_stream_command(DateToTable),
whole_stream_command(DateToTimeZone),
whole_stream_command(DateFormat),
whole_stream_command(DateHumanize),
whole_stream_command(Cal),
whole_stream_command(Mkdir),
whole_stream_command(Mv),
whole_stream_command(Kill),
whole_stream_command(Version),
whole_stream_command(Clear),
whole_stream_command(Describe),
whole_stream_command(Which),
whole_stream_command(Debug),
whole_stream_command(WithEnv),
whole_stream_command(Do),
whole_stream_command(Sleep),
// Statistics
whole_stream_command(Size),
whole_stream_command(Length),
whole_stream_command(Benchmark),
// Metadata
whole_stream_command(Tags),
// Shells
whole_stream_command(Next),
whole_stream_command(Previous),
whole_stream_command(Shells),
whole_stream_command(Enter),
whole_stream_command(Exit),
// Viz
whole_stream_command(Chart),
// Viewers
whole_stream_command(Autoview),
whole_stream_command(Table),
// Text manipulation
whole_stream_command(Hash),
whole_stream_command(HashBase64),
whole_stream_command(HashMd5::default()),
whole_stream_command(HashSha256::default()),
whole_stream_command(Split),
whole_stream_command(SplitColumn),
whole_stream_command(SplitRow),
whole_stream_command(SplitChars),
whole_stream_command(Lines),
whole_stream_command(Echo),
whole_stream_command(Parse),
whole_stream_command(Str),
whole_stream_command(StrToDecimal),
whole_stream_command(StrToInteger),
whole_stream_command(StrDowncase),
whole_stream_command(StrUpcase),
whole_stream_command(StrCapitalize),
whole_stream_command(StrFindReplace),
whole_stream_command(StrSubstring),
whole_stream_command(StrToDatetime),
whole_stream_command(StrContains),
whole_stream_command(StrIndexOf),
whole_stream_command(StrTrim),
whole_stream_command(StrStartsWith),
whole_stream_command(StrEndsWith),
whole_stream_command(StrCollect),
whole_stream_command(StrLength),
whole_stream_command(StrLPad),
whole_stream_command(StrReverse),
whole_stream_command(StrRPad),
whole_stream_command(StrCamelCase),
whole_stream_command(StrPascalCase),
whole_stream_command(StrKebabCase),
whole_stream_command(StrSnakeCase),
whole_stream_command(StrScreamingSnakeCase),
whole_stream_command(BuildString),
whole_stream_command(Ansi),
whole_stream_command(AnsiStrip),
whole_stream_command(AnsiGradient),
whole_stream_command(Char),
// Column manipulation
whole_stream_command(DropColumn),
whole_stream_command(MoveColumn),
whole_stream_command(Reject),
whole_stream_command(Select),
whole_stream_command(Get),
whole_stream_command(Update),
whole_stream_command(Insert),
whole_stream_command(Into),
whole_stream_command(IntoBinary),
whole_stream_command(IntoInt),
whole_stream_command(IntoFilepath),
whole_stream_command(IntoString),
whole_stream_command(SplitBy),
// Row manipulation
whole_stream_command(All),
whole_stream_command(Any),
whole_stream_command(Reverse),
whole_stream_command(Append),
whole_stream_command(Prepend),
whole_stream_command(SortBy),
whole_stream_command(GroupBy),
whole_stream_command(GroupByDate),
whole_stream_command(First),
whole_stream_command(Last),
whole_stream_command(Every),
whole_stream_command(Nth),
whole_stream_command(Drop),
whole_stream_command(Format),
whole_stream_command(FileSize),
whole_stream_command(Where),
whole_stream_command(If),
whole_stream_command(Compact),
whole_stream_command(Default),
whole_stream_command(Skip),
whole_stream_command(SkipUntil),
whole_stream_command(SkipWhile),
whole_stream_command(Keep),
whole_stream_command(KeepUntil),
whole_stream_command(KeepWhile),
whole_stream_command(Range),
whole_stream_command(Rename),
whole_stream_command(Uniq),
whole_stream_command(Each),
whole_stream_command(EachGroup),
whole_stream_command(EachWindow),
whole_stream_command(Empty),
whole_stream_command(ForIn),
// Table manipulation
whole_stream_command(Flatten),
whole_stream_command(Merge),
whole_stream_command(Shuffle),
whole_stream_command(Wrap),
whole_stream_command(Pivot),
whole_stream_command(Headers),
whole_stream_command(Reduce),
whole_stream_command(Roll),
whole_stream_command(RollColumn),
whole_stream_command(RollUp),
whole_stream_command(Rotate),
whole_stream_command(RotateCounterClockwise),
whole_stream_command(Collect),
// Data processing
whole_stream_command(Histogram),
whole_stream_command(Autoenv),
whole_stream_command(AutoenvTrust),
whole_stream_command(AutoenvUntrust),
whole_stream_command(Math),
whole_stream_command(MathAbs),
whole_stream_command(MathAverage),
whole_stream_command(MathEval),
whole_stream_command(MathMedian),
whole_stream_command(MathMinimum),
whole_stream_command(MathMode),
whole_stream_command(MathMaximum),
whole_stream_command(MathStddev),
whole_stream_command(MathSummation),
whole_stream_command(MathVariance),
whole_stream_command(MathProduct),
whole_stream_command(MathRound),
whole_stream_command(MathFloor),
whole_stream_command(MathCeil),
whole_stream_command(MathSqrt),
// File format output
whole_stream_command(To),
whole_stream_command(ToCsv),
whole_stream_command(ToHtml),
whole_stream_command(ToJson),
whole_stream_command(ToMarkdown),
whole_stream_command(ToToml),
whole_stream_command(ToTsv),
whole_stream_command(ToUrl),
whole_stream_command(ToYaml),
whole_stream_command(ToXml),
// File format input
whole_stream_command(From),
whole_stream_command(FromCsv),
whole_stream_command(FromEml),
whole_stream_command(FromTsv),
whole_stream_command(FromSsv),
whole_stream_command(FromIni),
whole_stream_command(FromJson),
whole_stream_command(FromOds),
whole_stream_command(FromToml),
whole_stream_command(FromUrl),
whole_stream_command(FromXlsx),
whole_stream_command(FromXml),
whole_stream_command(FromYaml),
whole_stream_command(FromYml),
whole_stream_command(FromIcs),
whole_stream_command(FromVcf),
// "Private" commands (not intended to be accessed directly)
whole_stream_command(RunExternalCommand { interactive }),
// Random value generation
whole_stream_command(Random),
whole_stream_command(RandomBool),
whole_stream_command(RandomDice),
#[cfg(feature = "uuid_crate")]
whole_stream_command(RandomUUID),
whole_stream_command(RandomInteger),
whole_stream_command(RandomDecimal),
whole_stream_command(RandomChars),
// Path
whole_stream_command(PathBasename),
whole_stream_command(PathCommand),
whole_stream_command(PathDirname),
whole_stream_command(PathExists),
whole_stream_command(PathExpand),
whole_stream_command(PathJoin),
whole_stream_command(PathParse),
whole_stream_command(PathRelativeTo),
whole_stream_command(PathSplit),
whole_stream_command(PathType),
// Url
whole_stream_command(UrlCommand),
whole_stream_command(UrlScheme),
whole_stream_command(UrlPath),
whole_stream_command(UrlHost),
whole_stream_command(UrlQuery),
whole_stream_command(Seq),
whole_stream_command(SeqDates),
whole_stream_command(TermSize),
]);
//Dataframe commands
#[cfg(feature = "dataframe")]
context.add_commands(vec![
whole_stream_command(DataFrame),
whole_stream_command(DataFrameOpen),
whole_stream_command(DataFrameList),
whole_stream_command(DataFrameGroupBy),
whole_stream_command(DataFrameAggregate),
whole_stream_command(DataFrameShow),
whole_stream_command(DataFrameSample),
whole_stream_command(DataFrameJoin),
whole_stream_command(DataFrameDrop),
whole_stream_command(DataFrameSelect),
whole_stream_command(DataFrameDTypes),
whole_stream_command(DataFrameDummies),
whole_stream_command(DataFrameFirst),
whole_stream_command(DataFrameLast),
whole_stream_command(DataFrameSlice),
whole_stream_command(DataFrameMelt),
whole_stream_command(DataFramePivot),
whole_stream_command(DataFrameWhere),
whole_stream_command(DataFrameToDF),
whole_stream_command(DataFrameToParquet),
whole_stream_command(DataFrameToCsv),
whole_stream_command(DataFrameSort),
whole_stream_command(DataFrameGet),
whole_stream_command(DataFrameDropDuplicates),
whole_stream_command(DataFrameDropNulls),
whole_stream_command(DataFrameColumn),
whole_stream_command(DataFrameWithColumn),
whole_stream_command(DataFrameFilter),
whole_stream_command(DataFrameSeriesRename),
whole_stream_command(DataFrameValueCounts),
whole_stream_command(DataFrameIsNull),
whole_stream_command(DataFrameIsNotNull),
whole_stream_command(DataFrameAllTrue),
whole_stream_command(DataFrameAllFalse),
whole_stream_command(DataFrameArgMax),
whole_stream_command(DataFrameArgMin),
whole_stream_command(DataFrameArgTrue),
whole_stream_command(DataFrameArgUnique),
whole_stream_command(DataFrameArgSort),
whole_stream_command(DataFrameUnique),
whole_stream_command(DataFrameNUnique),
whole_stream_command(DataFrameNNull),
whole_stream_command(DataFrameIsUnique),
whole_stream_command(DataFrameIsDuplicated),
whole_stream_command(DataFrameIsIn),
whole_stream_command(DataFrameShift),
whole_stream_command(DataFrameSet),
whole_stream_command(DataFrameNot),
whole_stream_command(DataFrameTake),
whole_stream_command(DataFrameSetWithIdx),
whole_stream_command(DataFrameShape),
whole_stream_command(DataFrameReplace),
whole_stream_command(DataFrameReplaceAll),
whole_stream_command(DataFrameStringLengths),
whole_stream_command(DataFrameContains),
whole_stream_command(DataFrameToLowercase),
whole_stream_command(DataFrameToUppercase),
whole_stream_command(DataFrameStringSlice),
whole_stream_command(DataFrameConcatenate),
whole_stream_command(DataFrameAppend),
whole_stream_command(DataFrameGetHour),
whole_stream_command(DataFrameGetMinute),
whole_stream_command(DataFrameGetSecond),
whole_stream_command(DataFrameGetDay),
whole_stream_command(DataFrameGetMonth),
whole_stream_command(DataFrameGetYear),
whole_stream_command(DataFrameGetWeek),
whole_stream_command(DataFrameGetWeekDay),
whole_stream_command(DataFrameGetOrdinal),
whole_stream_command(DataFrameGetNanoSecond),
whole_stream_command(DataFrameStrFTime),
whole_stream_command(DataFrameDescribe),
]);
#[cfg(feature = "clipboard-cli")]
{
context.add_commands(vec![
whole_stream_command(crate::commands::Clip),
whole_stream_command(crate::commands::Paste),
]);
}
}
Ok(context)
} |
|
ListVaultsCommand.ts | import { GlacierClientResolvedConfig, ServiceInputTypes, ServiceOutputTypes } from "../GlacierClient.ts";
import { ListVaultsInput, ListVaultsOutput } from "../models/models_0.ts";
import {
deserializeAws_restJson1ListVaultsCommand,
serializeAws_restJson1ListVaultsCommand,
} from "../protocols/Aws_restJson1.ts";
import { getSerdePlugin } from "../../middleware-serde/mod.ts";
import { HttpRequest as __HttpRequest, HttpResponse as __HttpResponse } from "../../protocol-http/mod.ts";
import { Command as $Command } from "../../smithy-client/mod.ts";
import {
FinalizeHandlerArguments,
Handler,
HandlerExecutionContext,
MiddlewareStack,
HttpHandlerOptions as __HttpHandlerOptions,
MetadataBearer as __MetadataBearer,
SerdeContext as __SerdeContext,
} from "../../types/mod.ts";
export type ListVaultsCommandInput = ListVaultsInput;
export type ListVaultsCommandOutput = ListVaultsOutput & __MetadataBearer;
/** | * list, the response <code>marker</code> field contains the vault Amazon Resource Name (ARN)
* at which to continue the list with a new List Vaults request; otherwise, the
* <code>marker</code> field is <code>null</code>. To return a list of vaults that begins
* at a specific vault, set the <code>marker</code> request parameter to the vault ARN you
* obtained from a previous List Vaults request. You can also limit the number of vaults
* returned in the response by specifying the <code>limit</code> parameter in the request. </p>
*
* <p>An AWS account has full permission to perform all operations (actions). However, AWS
* Identity and Access Management (IAM) users don't have any permissions by default. You must
* grant them explicit permission to perform specific actions. For more information, see
* <a href="https://docs.aws.amazon.com/amazonglacier/latest/dev/using-iam-with-amazon-glacier.html">Access Control Using
* AWS Identity and Access Management (IAM)</a>.</p>
* <p>For conceptual information and underlying REST API, see <a href="https://docs.aws.amazon.com/amazonglacier/latest/dev/retrieving-vault-info.html">Retrieving Vault Metadata in
* Amazon S3 Glacier</a> and <a href="https://docs.aws.amazon.com/amazonglacier/latest/dev/api-vaults-get.html">List Vaults </a> in the
* <i>Amazon Glacier Developer Guide</i>. </p>
*/
export class ListVaultsCommand extends $Command<
ListVaultsCommandInput,
ListVaultsCommandOutput,
GlacierClientResolvedConfig
> {
// Start section: command_properties
// End section: command_properties
constructor(readonly input: ListVaultsCommandInput) {
// Start section: command_constructor
super();
// End section: command_constructor
}
/**
* @internal
*/
resolveMiddleware(
clientStack: MiddlewareStack<ServiceInputTypes, ServiceOutputTypes>,
configuration: GlacierClientResolvedConfig,
options?: __HttpHandlerOptions
): Handler<ListVaultsCommandInput, ListVaultsCommandOutput> {
this.middlewareStack.use(getSerdePlugin(configuration, this.serialize, this.deserialize));
const stack = clientStack.concat(this.middlewareStack);
const { logger } = configuration;
const clientName = "GlacierClient";
const commandName = "ListVaultsCommand";
const handlerExecutionContext: HandlerExecutionContext = {
logger,
clientName,
commandName,
inputFilterSensitiveLog: ListVaultsInput.filterSensitiveLog,
outputFilterSensitiveLog: ListVaultsOutput.filterSensitiveLog,
};
const { requestHandler } = configuration;
return stack.resolve(
(request: FinalizeHandlerArguments<any>) =>
requestHandler.handle(request.request as __HttpRequest, options || {}),
handlerExecutionContext
);
}
private serialize(input: ListVaultsCommandInput, context: __SerdeContext): Promise<__HttpRequest> {
return serializeAws_restJson1ListVaultsCommand(input, context);
}
private deserialize(output: __HttpResponse, context: __SerdeContext): Promise<ListVaultsCommandOutput> {
return deserializeAws_restJson1ListVaultsCommand(output, context);
}
// Start section: command_body_extra
// End section: command_body_extra
} | * <p>This operation lists all vaults owned by the calling user's account. The list
* returned in the response is ASCII-sorted by vault name.</p>
*
* <p>By default, this operation returns up to 10 items. If there are more vaults to |
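// Illustrative usage sketch (added; not part of the original sample): sending
// the command through a GlacierClient. In Glacier, accountId "-" refers to the
// account that owns the credentials.
//
//     const client = new GlacierClient({ region: "us-east-1" });
//     const output = await client.send(new ListVaultsCommand({ accountId: "-" }));
//     console.log(output.VaultList);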
Calendar.service.ts | declare var Ext: any;
import { Injectable } from '@angular/core';
declare var KitchenSink: any;
@Injectable()
export class | {
constructor() {
this.init();
}
init = function () {
Ext.define('KitchenSink.data.calendar.Util', {
singleton: true,
filter: function (data, start, end) {
var R = Ext.calendar.date.Range,
range = new R(start, end);
return Ext.Array.filter(data, function (event) {
return range.overlaps(R.fly(event.startDate, event.endDate));
});
},
find: function (base, d, incr) {
var D = Ext.Date;
base = D.clone(base);
while (base.getDay() !== d) {
base = D.add(base, D.DAY, incr);
}
return base;
},
findNext: function (base, d) {
return this.find(base, d, 1);
},
findPrevious: function (base, d) {
return this.find(base, d, -1);
},
generateIds: function (data, start) {
Ext.Array.forEach(data, function (item) {
item.id = ++start;
});
return data;
},
generateOutput: function (data, calendarId, ctx) {
var filtered = this.filter(data, ctx.params.startDate, ctx.params.endDate);
return this.prepare(filtered, calendarId);
},
prepare: function (data, calendarId) {
var D = Ext.Date;
return Ext.Array.map(data, function (event) {
event = Ext.apply({}, event);
event.calendarId = calendarId;
event.startDate = D.format(D.localToUtc(event.startDate), 'C');
event.endDate = D.format(D.localToUtc(event.endDate), 'C');
return event;
});
},
setDate: function (base, d, h, m) {
var ret = Ext.Date.clone(base);
if (d !== undefined) {
ret.setDate(d);
}
if (h !== undefined) {
ret.setHours(h);
}
if (m !== undefined) {
ret.setMinutes(m);
}
return ret;
},
setHours: function (base, h, m) {
return this.setDate(base, undefined, h, m);
}
});
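        // Illustrative sketch (added; not part of the original sample): a typical
        // combination of the Util helpers above, building a 9:30-11:00 event on
        // the next Monday (getDay() === 1):
        //
        //     var U = KitchenSink.data.calendar.Util,
        //         monday = U.findNext(new Date(), 1);
        //     var event = {
        //         title: 'Demo',
        //         startDate: U.setHours(monday, 9, 30),
        //         endDate: U.setHours(monday, 11, 0)
        //     };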
Ext.define('KitchenSink.data.calendar.Full', {}, function () {
function halfhour() {
return Math.random() < 0.5 ? 30 : 0;
}
function getRandom(arr) {
var n = N.randomInt(0, arr.length - 1);
return arr[n];
}
var places = ['London', 'Paris', 'Munich', 'Amsterdam', 'Rome'],
people = ['Louis', 'Mitch', 'Ava', 'Shelly', 'Vicki', 'Stefanie', 'Jason', 'Elena', 'Randy', 'Fred', 'Debbie'],
teams1 = ['Release', 'QA', 'Development', 'PM', 'R&D'],
teams2 = ['Marketing', 'Sales'],
clients1 = ['Client A', 'Client B', 'Client C', 'Client D'],
clients2 = ['Client E', 'Client F', 'Client G', 'Client H'],
workActions = ['Meet', 'Call', 'Review'],
leisure = ['Hike', 'Gallery', 'Gaming', 'Theatre', 'Bowling', 'Concert'];
var U = KitchenSink.data.calendar.Util,
D = Ext.Date,
N = Ext.Number,
now = D.clearTime(new Date(), true),
start = D.subtract(D.subtract(now, D.YEAR, 1), D.DAY, 15),
end = D.add(D.add(now, D.YEAR, 1), D.DAY, 15),
data = {
work: (function () {
var current = D.clone(start),
data = [],
incr, r, n;
while (current < end) {
incr = 1;
if (!D.isWeekend(current)) {
r = Math.random();
if (r > 0.25) {
// Morning event
if (Math.random() < 0.5) {
n = N.randomInt(8, 12);
data.push({
title: getRandom(workActions) + ' with ' + getRandom(teams1),
startDate: U.setDate(current, undefined, n, halfhour()),
endDate: U.setDate(current, undefined, N.randomInt(n + 1, 13), halfhour())
});
}
// Afternoon event
if (Math.random() > 0.5) {
n = N.randomInt(14, 18);
data.push({
title: getRandom(workActions) + ' with ' + getRandom(teams1),
startDate: U.setDate(current, undefined, n, halfhour()),
endDate: U.setDate(current, undefined, N.randomInt(n + 1, 18), halfhour())
});
}
} else if (r > 0.2) {
incr = D.FRIDAY - current.getDay() + 1;
data.push({
title: 'In ' + getRandom(places) + ' office',
startDate: current,
endDate: D.add(current, D.DAY, incr),
allDay: true
});
}
}
current = D.add(current, D.DAY, incr);
}
return U.generateIds(data, 3000);
})(),
personal: (function () {
var current = D.clone(start),
data = [],
incr, r, n;
while (current < end) {
incr = 1;
if (D.isWeekend(current)) {
r = Math.random();
if (current.getDay() === D.SATURDAY && r < 0.1) {
data.push({
title: 'Weekend away in ' + getRandom(places),
startDate: current,
endDate: D.add(current, D.DAY, 2),
allDay: true
});
incr = 2;
} else if (r < 0.3) {
data.push({
title: getRandom(leisure) + ' with ' + getRandom(people),
startDate: current,
endDate: D.add(current, D.DAY, 1),
allDay: true
});
} else if (r < 0.7) {
n = N.randomInt(9, 18);
data.push({
title: getRandom(leisure) + ' with ' + getRandom(people),
startDate: U.setDate(current, undefined, n, halfhour()),
endDate: U.setDate(current, undefined, N.randomInt(n + 1, 21), halfhour())
});
}
} else {
if (Math.random() > 0.7) {
data.push({
title: 'Dinner with ' + getRandom(people),
startDate: U.setDate(current, undefined, 19, 30),
endDate: U.setDate(current, undefined, 22)
});
}
}
current = D.add(current, D.DAY, incr);
}
return U.generateIds(data, 6000);
})(),
projectZeus: (function () {
var current = D.clone(start),
data = [],
deliverables = 0,
incr, r, n;
while (current < end) {
incr = 1;
if (!D.isWeekend(current)) {
if (current.getDay() === D.TUESDAY || current.getDay() === D.THURSDAY) {
data.push({
title: 'Scrum',
startDate: U.setDate(current, undefined, 9),
endDate: U.setDate(current, undefined, 9, 30)
});
}
r = Math.random();
if (r > 0.6) {
n = N.randomInt(11, 15);
data.push({
title: getRandom(workActions) + ' with ' + getRandom(teams2),
startDate: U.setDate(current, undefined, n, halfhour()),
endDate: U.setDate(current, undefined, N.randomInt(n + 1, 17), halfhour())
});
if (r > 0.9) {
++deliverables;
data.push({
title: 'Deliverable ' + deliverables + ' Due',
allDay: true,
startDate: current,
endDate: D.add(current, D.DAY, 1)
});
}
}
}
current = D.add(current, D.DAY, incr);
}
return U.generateIds(data, 6000);
})()
};
Ext.ux.ajax.SimManager.register({
'/KitchenSink/CalendarFull': {
type: 'json',
data: [{
id: 1,
title: 'Work Calendar',
eventStore: {
proxy: {
type: 'ajax',
url: '/KitchenSink/CalendarFullEvents/1'
}
}
}, {
id: 2,
title: 'Personal',
eventStore: {
proxy: {
type: 'ajax',
url: '/KitchenSink/CalendarFullEvents/2'
}
}
}, {
id: 3,
title: 'Project Zeus',
eventStore: {
proxy: {
type: 'ajax',
url: '/KitchenSink/CalendarFullEvents/3'
}
}
}]
},
'/KitchenSink/CalendarFullEvents/1': {
type: 'json',
data: function (ctx) {
return U.generateOutput(data.work, 1, ctx);
}
},
'/KitchenSink/CalendarFullEvents/2': {
type: 'json',
data: function (ctx) {
return U.generateOutput(data.personal, 2, ctx);
}
},
'/KitchenSink/CalendarFullEvents/3': {
type: 'json',
data: function (ctx) {
return U.generateOutput(data.projectZeus, 3, ctx);
}
}
});
Ext.define('KitchenSink.data.calendar.Day', {
}, function () {
var U = KitchenSink.data.calendar.Util,
D = Ext.Date,
today = D.clearTime(new Date(), true),
tomorrow = D.add(today, D.DAY, 1),
data = {
work: (function () {
var ret = [];
ret.push({
title: 'All Day',
allDay: true,
startDate: U.setHours(today, 0, 0),
endDate: U.setHours(tomorrow, 0, 0)
});
ret.push({
title: 'Daily Stand Up',
startDate: U.setHours(today, 8, 30),
endDate: U.setHours(today, 9, 0)
}, {
title: 'Client A Meeting',
startDate: U.setHours(today, 10, 0),
endDate: U.setHours(today, 11, 30)
}, {
title: 'Team Lunch',
startDate: U.setHours(today, 12, 30),
endDate: U.setHours(today, 13, 30)
}, {
title: 'Dev Meeting',
startDate: U.setHours(today, 14, 0),
endDate: U.setHours(today, 15, 0)
}, {
title: 'PM Meeting',
startDate: U.setHours(today, 14, 30),
endDate: U.setHours(today, 15, 30)
}, {
title: 'QA Meeting',
startDate: U.setHours(today, 15, 0),
endDate: U.setHours(today, 16, 30)
});
ret.push({
title: 'Hackathon',
startDate: U.setHours(tomorrow, 9, 30),
endDate: U.setHours(tomorrow, 16, 30)
}, {
title: 'QA Meeting',
startDate: U.setHours(tomorrow, 11, 0),
endDate: U.setHours(tomorrow, 11, 30)
}, {
title: 'Client B Meeting',
startDate: U.setHours(tomorrow, 11, 30),
endDate: U.setHours(tomorrow, 13, 0)
}, {
title: 'Review Design Concepts',
startDate: U.setHours(tomorrow, 15, 30),
endDate: U.setHours(tomorrow, 17, 0)
});
return U.generateIds(ret, 100);
})(),
personal: (function () {
var ret = [];
ret.push({
title: 'Call Accountant',
startDate: U.setHours(today, 8, 0),
endDate: U.setHours(today, 8, 30)
}, {
title: 'Gym',
startDate: U.setHours(today, 17, 30),
endDate: U.setHours(today, 18, 30)
}, {
title: 'Dinner with Susan',
startDate: U.setHours(today, 19, 0),
endDate: U.setHours(today, 21, 30)
});
ret.push({
title: 'Gym',
startDate: U.setHours(tomorrow, 7, 30),
endDate: U.setHours(tomorrow, 8, 30)
}, {
title: 'Weekly pickup game',
startDate: U.setHours(tomorrow, 18, 0),
endDate: U.setHours(tomorrow, 19, 30)
});
return U.generateIds(ret, 200);
})()
};
Ext.ux.ajax.SimManager.register({
'/KitchenSink/CalendarDays': {
type: 'json',
data: [{
id: 1,
title: 'Work',
eventStore: {
proxy: {
type: 'ajax',
url: '/KitchenSink/CalendarDaysEvents/1'
}
}
}, {
id: 2,
title: 'Personal',
eventStore: {
proxy: {
type: 'ajax',
url: '/KitchenSink/CalendarDaysEvents/2'
}
}
}]
},
'/KitchenSink/CalendarDaysEvents/1': {
type: 'json',
data: function (ctx) {
return U.generateOutput(data.work, 1, ctx);
}
},
'/KitchenSink/CalendarDaysEvents/2': {
type: 'json',
data: function (ctx) {
return U.generateOutput(data.personal, 2, ctx);
}
}
});
});
Ext.define('KitchenSink.data.calendar.Validation', {
}, function () {
var U = KitchenSink.data.calendar.Util,
D = Ext.Date,
data = {
work: (function () {
var today = D.clearTime(new Date(), true),
tomorrow = D.add(today, D.DAY, 1),
ret = [];
ret.push({
title: 'Not draggable',
startDate: U.setHours(today, 9),
endDate: U.setHours(today, 10)
}, {
title: 'Not draggable/resizable',
startDate: U.setHours(today, 13),
endDate: U.setHours(today, 14)
}, {
title: 'Not resizable',
startDate: U.setHours(tomorrow, 9),
endDate: U.setHours(tomorrow, 10)
}, {
title: 'Unrestricted',
startDate: U.setHours(tomorrow, 13),
endDate: U.setHours(tomorrow, 14)
});
return U.generateIds(ret, 0);
})()
};
Ext.ux.ajax.SimManager.register({
'/KitchenSink/CalendarValidation': {
type: 'json',
data: [{
id: 1,
title: 'Work',
eventStore: {
proxy: {
type: 'ajax',
url: '/KitchenSink/CalendarValidation/1'
}
}
}]
},
'/KitchenSink/CalendarValidation/1': {
type: 'json',
data: function (ctx) {
return U.generateOutput(data.work, 1, ctx);
}
}
});
});
Ext.define('KitchenSink.data.calendar.Timezone', {}, function () {
var U = KitchenSink.data.calendar.Util,
D = Ext.Date,
data = {
work: (function () {
var today = D.clearTime(new Date(), true),
tomorrow = D.add(today, D.DAY, 1),
ret = [];
ret.push({
title: 'Write unit tests',
allDay: true,
startDate: U.setHours(today, 0, 0),
endDate: U.setHours(tomorrow, 0, 0)
}, {
title: 'Paris Client Meeting',
startDate: U.setHours(today, 8, 30),
endDate: U.setHours(today, 9, 30)
}, {
title: 'Sydney Team Meeting',
startDate: U.setHours(today, 21, 0),
endDate: U.setHours(today, 22, 0)
});
return U.generateIds(ret, 0);
})()
};
Ext.ux.ajax.SimManager.register({
'/KitchenSink/CalendarTimezone': {
type: 'json',
data: [{
id: 1,
title: 'Work',
eventStore: {
proxy: {
type: 'ajax',
url: '/KitchenSink/CalendarTimezone/1'
}
}
}]
},
'/KitchenSink/CalendarTimezone/1': {
type: 'json',
data: function (ctx) {
return U.generateOutput(data.work, 1, ctx);
}
}
});
});
        Ext.define('KitchenSink.data.calendar.Week', {}, function () {
var U = KitchenSink.data.calendar.Util,
D = Ext.Date,
today = D.clearTime(new Date(), true),
start = U.findPrevious(today, 0),
data = {
work: (function () {
var sd = start.getDate(),
data = [];
data.push({
title: 'Release Meeting',
startDate: U.setDate(start, sd + 1, 9, 30),
endDate: U.setDate(start, sd + 1, 11, 0)
}, {
title: 'Ben Farewell Lunch',
startDate: U.setDate(start, sd + 2, 12, 0),
endDate: U.setDate(start, sd + 2, 14, 0)
}, {
title: 'Client A Meeting',
startDate: U.setDate(start, sd + 3, 14, 0),
endDate: U.setDate(start, sd + 3, 16, 0)
}, {
title: 'Client B Meeting',
startDate: U.setDate(start, sd + 4, 10, 0),
endDate: U.setDate(start, sd + 4, 11, 30)
}, {
allDay: true,
title: 'Help Web Team',
startDate: U.setDate(start, sd + 4, 0, 0),
endDate: U.setDate(start, sd + 6, 0, 0)
})
return U.generateIds(data, 6000);
})(),
personal: (function () {
var ret = [],
d;
ret.push({
title: 'Mom Bday Party',
startDate: U.setHours(start, 11, 0),
endDate: U.setHours(start, 16, 0)
});
d = D.add(start, D.DAY, 3);
ret.push({
title: 'Book Group',
startDate: U.setHours(d, 18, 30),
endDate: U.setHours(d, 19, 30)
});
return U.generateIds(ret, 200);
})()
};
Ext.ux.ajax.SimManager.register({
'/KitchenSink/CalendarWeek': {
type: 'json',
data: [{
id: 1,
title: 'Work',
eventStore: {
proxy: {
type: 'ajax',
url: '/KitchenSink/CalendarWeekEvents/1'
}
}
}, {
id: 2,
title: 'Personal',
eventStore: {
proxy: {
type: 'ajax',
url: '/KitchenSink/CalendarWeekEvents/2'
}
}
}]
},
'/KitchenSink/CalendarWeekEvents/1': {
type: 'json',
data: function (ctx) {
return U.generateOutput(data.work, 1, ctx);
}
},
'/KitchenSink/CalendarWeekEvents/2': {
type: 'json',
data: function (ctx) {
return U.generateOutput(data.personal, 2, ctx);
}
}
});
});
});
}
}
| CalendarService |
mine.rs | use amethyst::{
core::Parent,
prelude::*,
ui::{Anchor, UiImage, UiTransform},
window::ScreenDimensions,
};
pub struct | ;
impl SimpleState for Mine {
fn on_start(&mut self, data: StateData<'_, GameData<'_, '_>>) {
let world = data.world;
let btn_textures = crate::load_btn_textures(world);
let cell_textures = crate::load_cell_textures(world);
let number_textures = crate::load_number_textures(world);
let dpi = {
let dimensions = world.read_resource::<ScreenDimensions>();
dimensions.hidpi_factor()
} as f32;
let cell_width = crate::CELL_WIDTH as f32 * dpi;
let cell_height = crate::CELL_HEIGHT as f32 * dpi;
let header_height = crate::HEADER_HEIGHT as f32 * dpi;
let btn_width = crate::BTN_WIDTH as f32 * dpi;
let btn_height = crate::BTN_HEIGHT as f32 * dpi;
let number_width = crate::NUMBER_WIDTH as f32 * dpi;
let number_height = crate::NUMBER_HEIGHT as f32 * dpi;
let header_group = world
.create_entity()
.with(UiTransform::new(
"header-group".to_string(),
Anchor::BottomLeft,
Anchor::BottomLeft,
0.0,
cell_height * crate::CELL_ROW as f32,
0.0,
cell_width * crate::CELL_COL as f32,
header_height,
))
.build();
world
.create_entity()
.with(UiTransform::new(
"face-btn".to_string(),
Anchor::Middle,
Anchor::Middle,
0.0,
0.0,
0.0,
btn_width,
btn_height,
))
.with(UiImage::Texture(btn_textures.ok.clone()))
.with(Parent {
entity: header_group,
})
.with(crate::ResetBtn { click_down: false })
.build();
for i in 0..3 {
world
.create_entity()
.with(UiTransform::new(
format!("mine-count-{}", i),
Anchor::MiddleLeft,
Anchor::MiddleLeft,
i as f32 * number_width,
0.0,
0.0,
number_width,
number_height,
))
.with(UiImage::Texture(number_textures.textures[0usize].clone()))
.with(Parent {
entity: header_group,
})
.with(crate::RestMineNum { index: 2 - i })
.build();
}
for i in 0..3 {
world
.create_entity()
.with(UiTransform::new(
format!("time-count-{}", i),
Anchor::MiddleRight,
Anchor::MiddleRight,
i as f32 * -number_width,
0.0,
0.0,
number_width,
number_height,
))
.with(UiImage::Texture(number_textures.textures[0usize].clone()))
.with(Parent {
entity: header_group,
})
.with(crate::TimerNum { index: i })
.build();
}
let cell_group = world
.create_entity()
.with(UiTransform::new(
"cell-group".to_string(),
Anchor::BottomLeft,
Anchor::BottomLeft,
0.0,
0.0,
0.0,
cell_width * crate::CELL_COL as f32,
cell_height * crate::CELL_ROW as f32,
))
.build();
let mut cells = vec![];
for i in 0..crate::CELL_ROW {
let mut row = vec![];
for j in 0..crate::CELL_COL {
row.push(
world
.create_entity()
.with(UiTransform::new(
format!("cell-{}-{}", i, j),
Anchor::BottomLeft,
Anchor::BottomLeft,
j as f32 * cell_width,
i as f32 * cell_height,
0.0,
cell_width,
cell_height,
))
.with(UiImage::Texture(cell_textures.normal.clone()))
.with(Parent { entity: cell_group })
.build(),
);
}
cells.push(row);
}
{
let mut cell_components = world.write_storage::<crate::Cell>();
for i in 0..crate::CELL_ROW {
for j in 0..crate::CELL_COL {
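                    // Collect the handles of the up-to-8 neighbouring cells,
                    // clamping the scan window at the board edges (descriptive
                    // comment added for clarity).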
let mut around = vec![];
let start_i = if i > 0 { i - 1 } else { 0 };
let end_i = if i < crate::CELL_ROW - 1 {
i + 1
} else {
crate::CELL_ROW - 1
};
let start_j = if j > 0 { j - 1 } else { 0 };
let end_j = if j < crate::CELL_COL - 1 {
j + 1
} else {
crate::CELL_COL - 1
};
for around_i in start_i..=end_i {
for around_j in start_j..=end_j {
if around_i != i || around_j != j {
around.push(cells[around_i][around_j])
}
}
}
cell_components
.insert(
cells[i][j],
crate::Cell {
has_mine: false,
state: crate::CellState::HIDE,
around,
click_down: false,
around_mine_count: 0,
},
)
.unwrap();
}
}
}
world.insert(btn_textures);
world.insert(cell_textures);
world.insert(number_textures);
world.insert(crate::GameState::READY);
world.insert(crate::RestMine {
count: crate::MINE_COUNT as i32,
});
world.insert(crate::GameTimer { timer: 0.0 });
}
}
| Mine |
about_files.go | package go_koans
import (
"io/ioutil"
"strings"
)
func aboutFiles() | {
filename := "about_files.go"
contents, _ := ioutil.ReadFile(filename)
lines := strings.Split(string(contents), "\n")
assert(lines[0] == "package go_koans") // handling files is too trivial
assert(lines[5] == ")") // handling files is too trivial
} |
|
global_utils.py | import sys
import os
import re
import collections
import itertools
import bcolz
import pickle
import numpy as np
import pandas as pd
import gc
import random
import smart_open
import h5py
import csv
import tensorflow as tf
import gensim
import datetime as dt
from tqdm import tqdm_notebook as tqdm
# import multiprocessing as mp
# from itertools import repeat, product
# from functools import partial
# to be able to pickle class methods for multi processing
# https://stackoverflow.com/questions/27318290/why-can-i-pass-an-instance-method-to-multiprocessing-process-but-not-a-multipro
def _instance_method_alias(obj, arg):
"""
Alias for instance method that allows the method to be called in a
multiprocessing pool
"""
return obj.convertSent2WordIds(arg)
def get_embeddings_from_ft(fasttext_vec_file, dim, vocab_words):
"""
convert fast text .vec file to numpy array
created embedding will be in order of words in vocab_words
"""
# gathering words from fasttext vec file--------------------
ft_lines = None
with open(fasttext_vec_file, "r") as f:
ft_lines = f.readlines()
ft_shape = tuple([int(i.strip()) for i in ft_lines[0].split()])
ft_vocab_size = ft_shape[0]
ft_wvs_dict = {}
for i, line in enumerate(ft_lines[1:]):
str_list = line.split()
word = str(str_list[0].strip())
        vec = np.array([float(f) for f in str_list[1:]])
        assert dim == len(vec), "fasttext vector dimension mismatch: " + str(dim) + " != " + str(len(vec))
ft_wvs_dict[word] = vec
assert ft_vocab_size == len(ft_wvs_dict), "fast text vectors file read issue "+str(ft_vocab_size)+" != "+str(len(ft_wvs_dict))
# creating embedding matrix from the file --------------------
wvs_embedding = np.random.randn(len(vocab_words), dim)
for i,word in enumerate(vocab_words):
if word in ft_wvs_dict:
wvs_embedding[i] = ft_wvs_dict[word]
return wvs_embedding
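# Illustrative usage sketch (added; not part of the original file). The file
# name, dimension, and vocabulary below are placeholders:
#
#     vocab_words = ["<UNK>", "gene", "mutation"]
#     embedding = get_embeddings_from_ft("wiki.en.vec", 300, vocab_words)
#     print(embedding.shape)  # (3, 300); rows follow vocab_words order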
#=============================================================
# DOCUMENT PREPROCESSING
#=============================================================
CHAR_ALPHABETS = "abcdefghijklmnopqrstuvwxyz0123456789-,;.!?:'\"/\\|_@#$%^&*~`+-=<>()[]{}\n "
char_start_tag_idx = len(CHAR_ALPHABETS) + 0
char_end_tag_idx = len(CHAR_ALPHABETS) + 1
char_unknown_tag_idx = len(CHAR_ALPHABETS) + 2
# when sentences are converted to characters
# these are appended to signal end of sentences
char_sent_start_tag_idx = len(CHAR_ALPHABETS) + 3
char_sent_end_tag_idx = len(CHAR_ALPHABETS) + 4
CHAR_ALPHABETS_LEN = len(CHAR_ALPHABETS) + 4
class | (object):
"""
This class takes in preprocessed data frame and
generated datasets as necessary
"""
def __init__(self, data_frame, vocab_idx):
self.data_frame = data_frame
self.vocab_idx = vocab_idx
self.vocab_size = len(vocab_idx)
# constants ================================================================================
self.sentence_start_tag_idx = self.vocab_idx["<SOSent>"]
self.sentence_end_tag_idx = self.vocab_idx["<EOSent>"]
self.word_unknown_tag_idx = self.vocab_idx["<UNK>"]
self.default_unit_dict = {
"gene_unit" : "words",
"variation_unit" : "words",
"doc_unit" : "words",
"doc_form" : "text",
"doc_cntx_dir" : "forward",
"divide_document": "single_unit"
}
def convertSent2WordIds(self, sentence, add_start_end_tag=False):
"""
sentence is a list of words.
It is converted to a list of ids based on vocab_idx
"""
sent2id = []
if add_start_end_tag:
sent2id = [self.sentence_start_tag_idx]
try:
sent2id = sent2id + [self.vocab_idx[word] if self.vocab_idx[word]<self.vocab_size else self.word_unknown_tag_idx for word in sentence]
except KeyError as e:
print(e)
print (sentence)
raise ValueError('Fix this issue dude')
if add_start_end_tag:
sent2id = sent2id + [self.sentence_end_tag_idx]
return sent2id
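# Example (hypothetical vocab): with vocab_idx = {"hi": 0, ..., "<SOSent>": 10,
# "<EOSent>": 11}, convertSent2WordIds(["hi"], add_start_end_tag=True)
# returns [10, 0, 11].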
def convertDoc2Sent2WordIds(self, document, add_start_end_tag=False):
"""
document is a list of sentences.
sentence is a list of words.
The given document is converted to a list of lists of ids based on vocab_idx
"""
return [self.convertSent2WordIds(sentence, add_start_end_tag) for sentence in document]
def convertWord2Char2Ids(self, word, add_start_end_tag=False):
"""
word is a char sequence (a list of characters).
Returns the list of character ids for the word
"""
char2id = []
if add_start_end_tag:
char2id = [char_start_tag_idx]
char2id = char2id + [CHAR_ALPHABETS.find(char) for char in word]
if add_start_end_tag:
char2id = char2id + [char_end_tag_idx]
return char2id
def convertSent2Word2Char2Ids(self, sentence, add_start_end_tag=False, unit="chars"):
"""
sentence is a list of words.
word is a list of characters.
Returns a list of lists of ids
"""
sent2words2char2id = []
if unit == "chars":
"""
all the words are grouped as list of chars with pre-post added tags
"""
if add_start_end_tag:
sent2words2char2id = [[char_sent_start_tag_idx]]
sent2words2char2id = sent2words2char2id + [self.convertWord2Char2Ids(word, add_start_end_tag) if self.vocab_idx[word] < self.vocab_size else [char_unknown_tag_idx] for word in sentence]
if add_start_end_tag:
sent2words2char2id = sent2words2char2id + [[char_sent_end_tag_idx]]
elif unit == "raw_chars":
"""
just a stream of characters
"""
if add_start_end_tag:
sent2words2char2id = [char_sent_start_tag_idx]
for word in sentence:
if self.vocab_idx[word] < self.vocab_size:
sent2words2char2id += [charid for charid in self.convertWord2Char2Ids(word, add_start_end_tag)]
else:
sent2words2char2id += [char_unknown_tag_idx]
if add_start_end_tag:
sent2words2char2id = sent2words2char2id + [char_sent_end_tag_idx]
else:
assert False, "give valid doc_unit argument"
return sent2words2char2id
def convertDoc2Sent2Word2Char2Ids(self, document, doc_form="sentences", add_start_end_tag=False, unit="chars"):
"""
document is a list of sentences.
sentence is a list of words.
The given document is converted to lists of ids based on vocab_idx
returns list of list if doc_form == "text"
returns list of list of list if doc_form == "sentences"
"""
doc2word2char2ids = []
if doc_form == "sentences":
doc2word2char2ids = [self.convertSent2Word2Char2Ids(sentence, add_start_end_tag, unit) for sentence in document]
elif doc_form == "text":
# flatten the document; note: iterate over its sentences, otherwise `sentence` is undefined here
doc2word2char2ids = [list_or_charid for sentence in document for list_or_charid in self.convertSent2Word2Char2Ids(sentence, add_start_end_tag, unit)]
else:
assert False, "give valid doc_form argument"
return doc2word2char2ids
def generate_data(self, unit_dict=None, has_class=False, add_start_end_tag=False):
"""
dataframe is expected to have Sentences, Variation, Gene, and Class (if has_class) columns
the Sentences attribute holds the Text converted to a list of sentences, each of which is a list of words
Variation is just one sentence, i.e. a list of words
Gene is just one sentence, i.e. a list of words
returns information based on the request
unit_dict contains these six keys and their allowed values:
gene_unit can be ["words", "chars", "raw_chars"]
variation_unit can be ["words", "chars", "raw_chars"]
doc_unit can be ["words", "word_list", chars", "raw_chars"]
doc_form can be ["sentences", "text"]
doc_cntx_dir can be ["forward", "backward"]
divide_document can be ["single_unit", "multiple_units"]
"""
if not unit_dict:
unit_dict = self.default_unit_dict
try:
unit_dict["doc_cntx_dir"]
except KeyError as e:
unit_dict["doc_cntx_dir"] = "forward"
ids_document = []
ids_labels = []
ids_genes = []
ids_variations = []
# since sometimes the data will be shuffled in the frame
# during train test split
for index in self.data_frame.index:
document = self.data_frame.Sentences[index]
if unit_dict["divide_document"] == "single_unit": #~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~`
# doc units --------------------------------------------------------------
if unit_dict["doc_unit"] == "words" or unit_dict["doc_unit"] == "word_list":
if unit_dict["doc_form"] == "sentences":
ids_document.append(self.convertDoc2Sent2WordIds(document, add_start_end_tag))
else: # unit_dict["doc_form"] == "text"
# using multiprocess to process each sentence in document and concatenate them to a single sentence
# get_wordid_list = lambda d, setag : [wid for s in d for wid in self.convertSent2WordIds(s, setag)]
# text_word_list = []
# with mp.Pool(processes = 5) as pool:
# # text_word_list = pool.starmap(get_wordid_list, product(document, [add_start_end_tag]*len(document)))
# # text_word_list = pool.starmap(get_wordid_list, zip(document, repeat(add_start_end_tag)))
# text_word_list = pool.map(partial(get_wordid_list, setag=add_start_end_tag), document)
# without multiprocessing
if unit_dict["doc_unit"] == "words":
text_word_list = [word_id for sentence in document for word_id in self.convertSent2WordIds(sentence, add_start_end_tag)]
if unit_dict["doc_cntx_dir"] == "backward":
text_word_list = text_word_list[::-1]
else: # unit_dict["doc_unit"] == "word_list": sentence form a list
text_word_list = [self.convertSent2WordIds(sentence, add_start_end_tag) for sentence in document]
if unit_dict["doc_cntx_dir"] == "backward":
text_word_list = [self.convertSent2WordIds(sentence, add_start_end_tag)[::-1] for sentence in document]
ids_document.append(text_word_list)
elif unit_dict["doc_unit"] == "chars" or unit_dict["doc_unit"] == "raw_chars":
if unit_dict["doc_form"] == "sentences":
# convertDoc2Sent2Word2Char2Ids already iterates over the sentences,
# so append the converted document once (not once per sentence)
ids_document.append(self.convertDoc2Sent2Word2Char2Ids(document,
doc_form=unit_dict["doc_form"], unit=unit_dict["doc_unit"], add_start_end_tag=add_start_end_tag))
else: # unit_dict["doc_form"] == "text"
text_char_list = [word_as_char_list_id for sentence in document for word_as_char_list_id in self.convertSent2Word2Char2Ids(sentence, add_start_end_tag, unit=unit_dict["doc_unit"])]
ids_document.append(text_char_list)
else:
assert False, "give valid doc_unit key-value"
# others --------------------------------------------------------------
if has_class:
ids_labels.append(self.data_frame.Class[index])
if unit_dict["gene_unit"] == "words":
ids_genes.append(self.convertSent2WordIds(self.data_frame.Gene[index], add_start_end_tag))
else:
ids_genes.append(self.convertSent2Word2Char2Ids(self.data_frame.Gene[index],
add_start_end_tag, unit=unit_dict["gene_unit"]))
if unit_dict["variation_unit"] == "words":
ids_variations.append(self.convertSent2WordIds(self.data_frame.Variation[index], add_start_end_tag))
else:
ids_variations.append(self.convertSent2Word2Char2Ids(self.data_frame.Variation[index],
add_start_end_tag, unit=unit_dict["variation_unit"]))
else: # unit_dict["divide_document"] == "multiple_unit" #~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~`
for sentence in document:
# doc units --------------------------------------------------------------
if unit_dict["doc_unit"] == "words":
# doesn't matter if
# unit_dict["doc_form"] == "sentences"
# unit_dict["doc_form"] == "text"
try:
sentence_list = self.convertSent2WordIds(sentence, add_start_end_tag)
if unit_dict["doc_cntx_dir"] == "backward":
sentence_list = sentence_list[::-1]
ids_document.append(sentence_list)
except ValueError as e:
print(e)
print (index)
raise ValueError('Fix this issue dude !')
elif unit_dict["doc_unit"] == "chars" or unit_dict["doc_unit"] == "raw_chars":
# doesn't matter if
# unit_dict["doc_form"] == "sentences"
# unit_dict["doc_form"] == "text"
ids_document.append(self.convertSent2Word2Char2Ids(sentence, add_start_end_tag,
unit=unit_dict["doc_unit"]))
# others --------------------------------------------------------------
if has_class:
ids_labels.append(self.data_frame.Class[index])
if unit_dict["gene_unit"] == "words":
ids_genes.append(self.convertSent2WordIds(self.data_frame.Gene[index], add_start_end_tag))
else:
ids_genes.append(self.convertSent2Word2Char2Ids(self.data_frame.Gene[index],
add_start_end_tag, unit=unit_dict["gene_unit"]))
if unit_dict["variation_unit"] == "words":
ids_variations.append(self.convertSent2WordIds(self.data_frame.Variation[index], add_start_end_tag))
else:
ids_variations.append(self.convertSent2Word2Char2Ids(self.data_frame.Variation[index],
add_start_end_tag, unit=unit_dict["variation_unit"]))
return ids_document, ids_genes, ids_variations, ids_labels
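# Example call (a sketch; `gen` is a GenerateDataset instance and the dict
# mirrors default_unit_dict but with character-level documents):
#   unit_dict = {"gene_unit": "words", "variation_unit": "words",
#                "doc_unit": "chars", "doc_form": "sentences",
#                "divide_document": "single_unit"}
#   docs, genes, variations, labels = gen.generate_data(unit_dict, has_class=True)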
def placeholder_function(self, unit_dict=None, limit_dict=None,
has_class=False, add_start_end_tag=False):
"""
dataframe is expected to have Sentences, Variation, Gene, and Class (if has_class) columns
the Sentences attribute holds the Text converted to a list of sentences, each of which is a list of words
Variation is just one sentence, i.e. a list of words
Gene is just one sentence, i.e. a list of words
returns information based on request
unit_dict contains these five keys and their allowed values:
gene_unit can be ["words", "chars"]
variation_unit can be ["words", "chars"]
doc_unit can be ["words", "chars"]
doc_form can be ["sentences", "text"]
divide_document can be ["single_unit", "multiple_units"]
limit_dict contains max sequence len to form valid matrices
Text attribute options
max_text_seq_len => maximum number of words in a text
max_text_document_len => maximum number of sentences in a document
max_text_sentence_len => maximum number of words in a sentence
max_text_word_len => maximum number of chars in a word
Gene Attribute options
max_gene_sentence_len => maximum number of words in a sentence
max_gene_word_len => maximum number of chars in a word
Variation Attribute options
max_variation_sentence_len => maximum number of words in a sentence
max_variation_word_len => maximum number of chars in a word
"""
ids_document, ids_genes, ids_variations, ids_labels = self.generate_data(unit_dict, has_class, add_start_end_tag)
# testing ======================================================================================
def test_class():
document = [
['beautiful', 'is', 'better', 'than', 'ugly.'],
['explicit', 'is', 'better', 'than', 'implicit.'],
['simple', 'is', 'better', 'than', 'complex.'],
['complex', 'is', 'better', 'than', 'complicated.'],
['flat', 'is', 'better', 'than', 'nested.'],
# ['sparse', 'is', 'better', 'than', 'dense.'],
# ['readability', 'counts.'],
# ['special', 'cases', "aren't", 'special', 'enough', 'to', 'break', 'the', 'rules.'],
# ['although', 'practicality', 'beats', 'purity.'],
# ['errors', 'should', 'never', 'pass', 'silently.'],
# ['unless', 'explicitly', 'silenced.'],
# ['in', 'the', 'face', 'of', 'ambiguity,', 'refuse', 'the', 'temptation', 'to', 'guess.'],
# ['there', 'should', 'be', 'one--', 'and', 'preferably', 'only', 'one', '--obvious', 'way', 'to', 'do', 'it.'],
# ['although', 'that', 'way', 'may', 'not', 'be', 'obvious', 'at', 'first', 'unless', "you're", 'Dutch.'],
# ['now', 'is', 'better', 'than', 'never.'], ['Although', 'never', 'is', 'often', 'better', 'than', '*right*', 'now.'],
# ['if', 'the', 'implementation', 'is', 'hard', 'to', 'explain,', "it's", 'a', 'bad', 'idea.'],
# ['if', 'the', 'implementation', 'is', 'easy', 'to', 'explain,', 'it', 'may', 'be', 'a', 'good', 'idea.'],
# ['namespaces', 'are', 'one', 'honking', 'great', 'idea', '--', "let's", 'do', 'more', 'of', 'those!'],
]
data_dict = {
"ID" : 0,
"Gene" : [["beautiful"]],
"Variation" : [["complex", "simple"]],
"Class" : 0,
"Sentences" : [document[:]]
}
custom_unit_dict = {
"gene_unit" : "raw_chars",
"variation_unit" : "raw_chars",
# text transformed to sentences attribute
"doc_unit" : "raw_chars",
"doc_form" : "sentences",
# "doc_cntx_dir" : "forward",
"divide_document" : "single_unit"
}
df = pd.DataFrame(data=data_dict)
corpus = sorted(list(set([word for sentence in document for word in sentence])))
corpus_wordidx = {word:i for i,word in enumerate(corpus)}
corpus_wordidx["<SOSent>"] = len(corpus)
corpus_wordidx["<EOSent>"] = len(corpus) + 1
corpus_wordidx["<UNK>"] = len(corpus) + 2
gen_data = GenerateDataset(df, corpus_wordidx)
x_T, x_G, x_V, x_C = gen_data.generate_data(custom_unit_dict, has_class=True, add_start_end_tag=True)
print("data", df.Sentences[0], "\n")
print(corpus_wordidx)
index = 0
print("text",np.array(x_T).shape, x_T[index])
print("gene",np.array(x_G).shape, x_G[index])
print("variation",np.array(x_V).shape, x_V[index])
print("classes",np.array(x_C).shape, x_C[index])
if __name__ == "__main__":
test_class()
| GenerateDataset |
engine_spark.rs | // Copyright Kamu Data, Inc. and contributors. All rights reserved.
//
// Use of this software is governed by the Business Source License
// included in the LICENSE file.
//
// As of the Change Date specified in that file, in accordance with
// the Business Source License, use of this software will be governed
// by the Apache License, Version 2.0.
use crate::domain::*;
use crate::infra::*;
use container_runtime::*;
use opendatafabric::engine::ExecuteQueryError;
use opendatafabric::serde::yaml::YamlEngineProtocol;
use opendatafabric::serde::EngineProtocolDeserializer;
use opendatafabric::{ExecuteQueryResponse, ExecuteQueryResponseSuccess};
use rand::Rng;
use std::fs::File;
use std::path::{Path, PathBuf};
use std::sync::Arc;
use tracing::info;
pub struct SparkEngine {
container_runtime: ContainerRuntime,
image: String,
workspace_layout: Arc<WorkspaceLayout>,
}
struct RunInfo {
in_out_dir: PathBuf,
stdout_path: PathBuf,
stderr_path: PathBuf,
}
impl RunInfo {
fn new(workspace_layout: &WorkspaceLayout, operation: &str) -> Self {
let run_id: String = rand::thread_rng()
.sample_iter(&rand::distributions::Alphanumeric)
.take(10)
.map(char::from)
.collect();
let in_out_dir = workspace_layout
.run_info_dir
.join(format!("{}-{}", operation, &run_id));
std::fs::create_dir_all(&in_out_dir).expect("Failed to create in-out directory");
Self {
in_out_dir: in_out_dir,
stdout_path: workspace_layout
.run_info_dir
.join(format!("spark-{}.out.txt", run_id)),
stderr_path: workspace_layout
.run_info_dir
.join(format!("spark-{}.err.txt", run_id)),
}
}
pub fn log_files(&self) -> Vec<PathBuf> {
vec![self.stdout_path.clone(), self.stderr_path.clone()]
}
}
impl SparkEngine {
pub fn new(
container_runtime: ContainerRuntime,
image: &str,
workspace_layout: Arc<WorkspaceLayout>,
) -> Self {
Self {
container_runtime: container_runtime,
image: image.to_owned(),
workspace_layout,
}
}
fn | (&self) -> PathBuf {
PathBuf::from("/opt/engine/volume")
}
fn in_out_dir_in_container(&self) -> PathBuf {
PathBuf::from("/opt/engine/in-out")
}
fn to_container_path(&self, path: &Path) -> PathBuf {
assert!(path.is_absolute());
assert!(self.workspace_layout.local_volume_dir.is_absolute());
let rel = path
.strip_prefix(&self.workspace_layout.local_volume_dir)
.unwrap();
let joined = self.volume_dir_in_container().join(rel);
let unix_path = joined.to_str().unwrap().replace("\\", "/");
PathBuf::from(unix_path)
}
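// Illustrative mapping (paths are hypothetical): with a local volume dir of
// `/ws/.kamu`, the host path `/ws/.kamu/datasets/foo` becomes
// `/opt/engine/volume/datasets/foo` inside the container.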
fn ingest_impl(
&self,
run_info: RunInfo,
request: IngestRequest,
) -> Result<ExecuteQueryResponseSuccess, EngineError> {
let request_path = run_info.in_out_dir.join("request.yaml");
let response_path = run_info.in_out_dir.join("response.yaml");
{
info!(request = ?request, path = ?request_path, "Writing request");
let file = File::create(&request_path)?;
serde_yaml::to_writer(file, &request)
.map_err(|e| EngineError::internal(e, Vec::new()))?;
}
let volume_map = vec![
(run_info.in_out_dir.clone(), self.in_out_dir_in_container()),
(
self.workspace_layout.local_volume_dir.clone(),
self.volume_dir_in_container(),
),
];
let stdout_file = std::fs::File::create(&run_info.stdout_path)?;
let stderr_file = std::fs::File::create(&run_info.stderr_path)?;
// TODO: chown hides exit status
cfg_if::cfg_if! {
if #[cfg(unix)] {
let chown = if self.container_runtime.config.runtime == ContainerRuntimeType::Docker {
format!(
"; chown -R {}:{} {}",
users::get_current_uid(),
users::get_current_gid(),
self.volume_dir_in_container().display()
)
} else {
"".to_owned()
};
} else {
let chown = "".to_owned();
}
};
let mut cmd = self.container_runtime.run_shell_cmd(
RunArgs {
image: self.image.clone(),
volume_map: volume_map,
user: Some("root".to_owned()),
..RunArgs::default()
},
&[
indoc::indoc!(
"/opt/bitnami/spark/bin/spark-submit \
--master=local[4] \
--driver-memory=2g \
--class=dev.kamu.engine.spark.ingest.IngestApp \
/opt/engine/bin/engine.spark.jar"
)
.to_owned(),
chown,
],
);
info!(command = ?cmd, "Running Spark job");
let status = cmd
.stdout(std::process::Stdio::from(stdout_file))
.stderr(std::process::Stdio::from(stderr_file))
.status()
.map_err(|e| EngineError::internal(e, run_info.log_files()))?;
if response_path.exists() {
let data = std::fs::read_to_string(&response_path)?;
let response = YamlEngineProtocol
.read_execute_query_response(data.as_bytes())
.map_err(|e| EngineError::internal(e, run_info.log_files()))?;
info!(response = ?response, "Read response");
match response {
ExecuteQueryResponse::Progress => unreachable!(),
ExecuteQueryResponse::Success(s) => Ok(s),
ExecuteQueryResponse::InvalidQuery(e) => {
Err(EngineError::invalid_query(e.message, run_info.log_files()))
}
ExecuteQueryResponse::InternalError(e) => Err(EngineError::internal(
ExecuteQueryError::from(e),
run_info.log_files(),
)),
}
} else if !status.success() {
Err(EngineError::process_error(
status.code(),
run_info.log_files(),
))
} else {
Err(EngineError::contract_error(
"Engine did not write a response file",
run_info.log_files(),
))
}
}
}
impl IngestEngine for SparkEngine {
fn ingest(&self, request: IngestRequest) -> Result<ExecuteQueryResponseSuccess, EngineError> {
let run_info = RunInfo::new(&self.workspace_layout, "ingest");
// Remove data_dir if it exists but is empty, as it will confuse Spark
let _ = std::fs::remove_dir(&request.data_dir);
let request_adj = IngestRequest {
ingest_path: self.to_container_path(&request.ingest_path),
prev_checkpoint_dir: request
.prev_checkpoint_dir
.map(|p| self.to_container_path(&p)),
new_checkpoint_dir: self.to_container_path(&request.new_checkpoint_dir),
data_dir: self.to_container_path(&request.data_dir),
out_data_path: self.to_container_path(&request.out_data_path),
..request
};
self.ingest_impl(run_info, request_adj)
}
}
| volume_dir_in_container |
wdt.rs | //! HAL interface to the WDT peripheral.
//!
//! This HAL implements a basic watchdog timer with 1..=8 handles.
//! Once the watchdog has been started, it cannot be stopped.
use cfg_if::cfg_if;
cfg_if! {
if #[cfg(feature = "9160")] {
use crate::pac::WDT_NS as WDT;
} else {
use crate::pac::WDT;
}
}
use handles::*;
/// A type state representing a watchdog that has not been started.
pub struct Inactive;
/// A type state representing a watchdog that has been started and cannot be stopped.
pub struct Active;
/// An interface to the Watchdog.
pub struct Watchdog<T: sealed::WdMode> {
wdt: WDT,
_state: T,
}
/// A structure containing the active watchdog and all requested Watchdog handles.
pub struct Parts<T> {
pub watchdog: Watchdog<Active>,
pub handles: T,
}
/// An interface to feed the Watchdog.
pub struct WatchdogHandle<T: sealed::HandleId>(T);
impl<T> WatchdogHandle<T>
where
T: sealed::HandleId,
{
/// Pet the watchdog.
///
/// This function pets the given watchdog handle.
///
/// NOTE: All active handles must be pet within the time interval to
/// prevent a reset from occurring.
#[inline]
pub fn pet(&mut self) {
let hdl = unsafe { &*WDT::ptr() };
hdl.rr[self.0.index()].write(|w| w.rr().reload());
}
/// Has this handle been pet within the current window?
pub fn is_pet(&self) -> bool {
let hdl = unsafe { &*WDT::ptr() };
let rd = hdl.reqstatus.read().bits();
let idx = self.0.index();
debug_assert!(idx < 8, "Bad Index!");
((rd >> idx) & 0x1) == 0
}
/// Convert the handle into a generic handle.
///
/// This is useful if you need to place handles into an array.
pub fn degrade(self) -> WatchdogHandle<HdlN> {
WatchdogHandle(HdlN {
idx: self.0.index() as u8,
})
}
}
impl Watchdog<Inactive> {
/// Try to create a new watchdog instance from the peripheral.
///
/// This function will return an error if the watchdog has already
/// been activated, which may happen on a (non-watchdog) soft reset.
/// In this case, it may be possible to still obtain the handles with
/// the `Watchdog::try_recover()` method.
///
/// If the watchdog has already started, configuration is no longer possible.
#[inline]
pub fn try_new(wdt: WDT) -> Result<Watchdog<Inactive>, WDT> {
let watchdog = Watchdog {
wdt,
_state: Inactive,
};
if watchdog.is_active() {
Err(watchdog.wdt)
} else {
Ok(watchdog)
}
}
/// Release the peripheral.
///
/// Note: The peripheral cannot be released after activation.
#[inline]
pub fn release(self) -> WDT {
self.wdt
}
/// Activate the watchdog with the given number of handles.
///
/// The watchdog cannot be deactivated after starting.
///
/// NOTE: All activated handles must be pet within the configured time interval to
/// prevent a reset from occurring.
pub fn activate<H: sealed::Handles>(self) -> Parts<H::Handles> {
self.wdt.rren.write(|w| unsafe { w.bits(H::ENABLE) });
self.wdt.tasks_start.write(|w| unsafe { w.bits(1) });
Parts {
watchdog: Watchdog {
wdt: self.wdt,
_state: Active,
},
handles: H::create_handle(),
}
}
/// Enable the watchdog interrupt.
///
/// NOTE: Although the interrupt will occur, there is no way to prevent
/// the reset from occurring. From the time the event was fired, the
/// system will reset two LFCLK ticks later (61 microseconds) if the
/// interrupt has been enabled.
#[inline(always)]
pub fn enable_interrupt(&mut self) {
self.wdt.intenset.write(|w| w.timeout().set_bit());
}
/// Disable the watchdog interrupt.
///
/// NOTE: This has no effect on the reset caused by the Watchdog.
#[inline(always)]
pub fn disable_interrupt(&mut self) {
self.wdt.intenclr.write(|w| w.timeout().set_bit());
}
/// Set the number of 32.768kHz ticks in each watchdog period.
///
/// This value defaults to 0xFFFF_FFFF (1.5 days) on reset.
///
/// Note: there is a minimum of 15 ticks (458 microseconds). If a lower
/// number is provided, 15 ticks will be used as the configured value.
#[inline(always)]
pub fn set_lfosc_ticks(&mut self, ticks: u32) {
self.wdt
.crv
.write(|w| unsafe { w.bits(ticks.max(0x0000_000F)) });
}
/// Should the watchdog continue to count during sleep modes?
///
/// This value defaults to ENABLED on reset.
#[inline]
pub fn run_during_sleep(&self, setting: bool) {
self.wdt.config.modify(|_r, w| w.sleep().bit(setting));
}
/// Should the watchdog continue to count when the CPU is halted for debug?
///
/// This value defaults to DISABLED on reset.
#[inline]
pub fn run_during_debug_halt(&self, setting: bool) {
self.wdt.config.modify(|_r, w| w.halt().bit(setting));
}
}
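// A minimal usage sketch (illustrative; `periph.WDT` stands in for the PAC
// peripheral handle, which is not provided by this module):
//
//     let mut wdt = Watchdog::try_new(periph.WDT).unwrap();
//     wdt.set_lfosc_ticks(32768); // roughly a one-second window
//     let Parts { watchdog: _, mut handles } = wdt.activate::<count::Two>();
//     handles.0.pet();
//     handles.1.pet();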
impl Watchdog<Active> {
/// Is the watchdog still awaiting pets from any handle?
///
/// This reports whether sufficient pets have been received from all
/// handles to prevent a reset this time period.
#[inline(always)]
pub fn awaiting_pets(&self) -> bool {
let enabled = self.wdt.rren.read().bits();
let status = self.wdt.reqstatus.read().bits();
(status & enabled) == 0
}
/// Try to recover a handle to an already running watchdog. If the
/// number of requested handles matches the activated number of handles,
/// an activated handle will be returned. Otherwise the peripheral will
/// be returned.
///
/// NOTE: Since the watchdog is already counting, you want to pet these dogs
/// as soon as possible!
pub fn try_recover<H: sealed::Handles>(wdt: WDT) -> Result<Parts<H::Handles>, WDT> {
// Do we have the same number of handles at least?
if wdt.rren.read().bits() == H::ENABLE {
Ok(Parts {
watchdog: Watchdog {
wdt,
_state: Active,
},
handles: H::create_handle(),
})
} else {
Err(wdt)
}
}
}
impl<T> Watchdog<T>
where
T: sealed::WdMode,
{
/// Is the watchdog active?
#[inline(always)]
pub fn is_active(&self) -> bool {
cfg_if! {
if #[cfg(feature = "9160")] {
self.wdt.runstatus.read().runstatuswdt().bit_is_set()
} else {
self.wdt.runstatus.read().runstatus().bit_is_set()
}
}
}
}
mod sealed {
pub trait HandleId {
fn index(&self) -> usize;
}
pub trait Handles {
type Handles;
const ENABLE: u32;
fn create_handle() -> Self::Handles;
}
pub trait WdMode {}
}
impl sealed::WdMode for Inactive {}
impl sealed::WdMode for Active {}
impl sealed::HandleId for Hdl0 {
fn index(&self) -> usize {
0
}
}
impl sealed::HandleId for Hdl1 {
fn index(&self) -> usize {
1
}
}
impl sealed::HandleId for Hdl2 {
fn index(&self) -> usize {
2
}
}
impl sealed::HandleId for Hdl3 {
fn index(&self) -> usize {
3
}
}
impl sealed::HandleId for Hdl4 {
fn index(&self) -> usize |
}
impl sealed::HandleId for Hdl5 {
fn index(&self) -> usize {
5
}
}
impl sealed::HandleId for Hdl6 {
fn index(&self) -> usize {
6
}
}
impl sealed::HandleId for Hdl7 {
fn index(&self) -> usize {
7
}
}
impl sealed::HandleId for HdlN {
fn index(&self) -> usize {
self.idx.into()
}
}
pub mod handles {
//! Type states representing individual watchdog handles.
/// A type state representing Watchdog Handle 0.
pub struct Hdl0;
/// A type state representing Watchdog Handle 1.
pub struct Hdl1;
/// A type state representing Watchdog Handle 2.
pub struct Hdl2;
/// A type state representing Watchdog Handle 3.
pub struct Hdl3;
/// A type state representing Watchdog Handle 4.
pub struct Hdl4;
/// A type state representing Watchdog Handle 5.
pub struct Hdl5;
/// A type state representing Watchdog Handle 6.
pub struct Hdl6;
/// A type state representing Watchdog Handle 7.
pub struct Hdl7;
/// A structure that represents a runtime stored Watchdog Handle.
pub struct HdlN {
pub(super) idx: u8,
}
}
pub mod count {
//! Type states representing the number of requested handles.
use super::{sealed::Handles, Hdl0, Hdl1, Hdl2, Hdl3, Hdl4, Hdl5, Hdl6, Hdl7, WatchdogHandle};
/// A type state representing the request for One handles.
pub struct One;
/// A type state representing the request for Two handles.
pub struct Two;
/// A type state representing the request for Three handles.
pub struct Three;
/// A type state representing the request for Four handles.
pub struct Four;
/// A type state representing the request for Five handles.
pub struct Five;
/// A type state representing the request for Six handles.
pub struct Six;
/// A type state representing the request for Seven handles.
pub struct Seven;
/// A type state representing the request for Eight handles.
pub struct Eight;
impl Handles for One {
type Handles = (WatchdogHandle<Hdl0>,);
const ENABLE: u32 = 0b0000_0001;
fn create_handle() -> Self::Handles {
(WatchdogHandle(Hdl0),)
}
}
impl Handles for Two {
type Handles = (WatchdogHandle<Hdl0>, WatchdogHandle<Hdl1>);
const ENABLE: u32 = 0b0000_0011;
fn create_handle() -> Self::Handles {
(WatchdogHandle(Hdl0), WatchdogHandle(Hdl1))
}
}
impl Handles for Three {
type Handles = (
WatchdogHandle<Hdl0>,
WatchdogHandle<Hdl1>,
WatchdogHandle<Hdl2>,
);
const ENABLE: u32 = 0b0000_0111;
fn create_handle() -> Self::Handles {
(
WatchdogHandle(Hdl0),
WatchdogHandle(Hdl1),
WatchdogHandle(Hdl2),
)
}
}
impl Handles for Four {
type Handles = (
WatchdogHandle<Hdl0>,
WatchdogHandle<Hdl1>,
WatchdogHandle<Hdl2>,
WatchdogHandle<Hdl3>,
);
const ENABLE: u32 = 0b0000_1111;
fn create_handle() -> Self::Handles {
(
WatchdogHandle(Hdl0),
WatchdogHandle(Hdl1),
WatchdogHandle(Hdl2),
WatchdogHandle(Hdl3),
)
}
}
impl Handles for Five {
type Handles = (
WatchdogHandle<Hdl0>,
WatchdogHandle<Hdl1>,
WatchdogHandle<Hdl2>,
WatchdogHandle<Hdl3>,
WatchdogHandle<Hdl4>,
);
const ENABLE: u32 = 0b0001_1111;
fn create_handle() -> Self::Handles {
(
WatchdogHandle(Hdl0),
WatchdogHandle(Hdl1),
WatchdogHandle(Hdl2),
WatchdogHandle(Hdl3),
WatchdogHandle(Hdl4),
)
}
}
impl Handles for Six {
type Handles = (
WatchdogHandle<Hdl0>,
WatchdogHandle<Hdl1>,
WatchdogHandle<Hdl2>,
WatchdogHandle<Hdl3>,
WatchdogHandle<Hdl4>,
WatchdogHandle<Hdl5>,
);
const ENABLE: u32 = 0b0011_1111;
fn create_handle() -> Self::Handles {
(
WatchdogHandle(Hdl0),
WatchdogHandle(Hdl1),
WatchdogHandle(Hdl2),
WatchdogHandle(Hdl3),
WatchdogHandle(Hdl4),
WatchdogHandle(Hdl5),
)
}
}
impl Handles for Seven {
type Handles = (
WatchdogHandle<Hdl0>,
WatchdogHandle<Hdl1>,
WatchdogHandle<Hdl2>,
WatchdogHandle<Hdl3>,
WatchdogHandle<Hdl4>,
WatchdogHandle<Hdl5>,
WatchdogHandle<Hdl6>,
);
const ENABLE: u32 = 0b0111_1111;
fn create_handle() -> Self::Handles {
(
WatchdogHandle(Hdl0),
WatchdogHandle(Hdl1),
WatchdogHandle(Hdl2),
WatchdogHandle(Hdl3),
WatchdogHandle(Hdl4),
WatchdogHandle(Hdl5),
WatchdogHandle(Hdl6),
)
}
}
impl Handles for Eight {
type Handles = (
WatchdogHandle<Hdl0>,
WatchdogHandle<Hdl1>,
WatchdogHandle<Hdl2>,
WatchdogHandle<Hdl3>,
WatchdogHandle<Hdl4>,
WatchdogHandle<Hdl5>,
WatchdogHandle<Hdl6>,
WatchdogHandle<Hdl7>,
);
const ENABLE: u32 = 0b1111_1111;
fn create_handle() -> Self::Handles {
(
WatchdogHandle(Hdl0),
WatchdogHandle(Hdl1),
WatchdogHandle(Hdl2),
WatchdogHandle(Hdl3),
WatchdogHandle(Hdl4),
WatchdogHandle(Hdl5),
WatchdogHandle(Hdl6),
WatchdogHandle(Hdl7),
)
}
}
}
| {
4
} |
container-node-icon.js | (function (global, factory) {
if (typeof define === "function" && define.amd) {
define(['exports', '../createIcon'], factory);
} else if (typeof exports !== "undefined") {
factory(exports, require('../createIcon'));
} else {
var mod = {
exports: {}
};
factory(mod.exports, global.createIcon);
global.containerNodeIcon = mod.exports;
}
})(this, function (exports, _createIcon) {
'use strict';
Object.defineProperty(exports, "__esModule", {
value: true
});
var _createIcon2 = _interopRequireDefault(_createIcon);
function | (obj) {
return obj && obj.__esModule ? obj : {
default: obj
};
}
var ContainerNodeIcon = (0, _createIcon2.default)({
name: 'ContainerNodeIcon',
height: 1024,
width: 1024,
svgPath: 'M0.914 346.743c-2.743 10.743 0.229 24.229 8.686 40 0 0 141.714 301.714 152.229 325.257s23.771 18.514 23.771 18.514h655.771c0 0 11.886 2.743 22.4-19.657s150.629-324.114 150.629-324.114c8.457-15.771 11.2-29.257 8.686-40-2.743-10.971-9.829-16.229-21.257-16.229h-979.657c-11.429 0-18.514 5.486-21.257 16.229zM1.829 109.028v146.743c0 9.829 3.657 18.514 10.971 25.829s15.771 10.971 25.6 10.971h949.029c9.829 0 18.514-3.657 25.6-10.971 7.314-7.314 10.971-15.771 10.971-25.829v-146.743c0-10.057-3.657-18.514-10.971-25.829s-15.771-10.971-25.6-10.971h-949.257c-10.057 0-18.514 3.657-25.6 10.971s-10.743 15.771-10.743 25.829zM864.914 145.6h85.714v73.371h-85.714v-73.371zM86.857 145.6h85.714v73.371h-85.714v-73.371zM225.6 145.6h85.714v73.371h-85.714v-73.371zM363.886 145.6h85.714v73.371h-85.714v-73.371zM502.629 145.6h85.714v73.371h-85.714v-73.371z',
yOffset: '64',
xOffset: '',
transform: 'rotate(180 0 512) scale(-1 1)'
}); /* This file is generated by createIcons.js; any changes will be lost. */
exports.default = ContainerNodeIcon;
}); | _interopRequireDefault |
hkdf.py | # This file is dual licensed under the terms of the Apache License, Version
# 2.0, and the BSD License. See the LICENSE file in the root of this repository
# for complete details.
from __future__ import absolute_import, division, print_function
import six
from cryptography import utils
from cryptography.exceptions import (
AlreadyFinalized, InvalidKey, UnsupportedAlgorithm, _Reasons
)
from cryptography.hazmat.backends.interfaces import HMACBackend
from cryptography.hazmat.primitives import constant_time, hmac
from cryptography.hazmat.primitives.kdf import KeyDerivationFunction
@utils.register_interface(KeyDerivationFunction)
class HKDF(object):
def __init__(self, algorithm, length, salt, info, backend):
if not isinstance(backend, HMACBackend):
raise UnsupportedAlgorithm(
"Backend object does not implement HMACBackend.",
_Reasons.BACKEND_MISSING_INTERFACE
)
self._algorithm = algorithm
if not (salt is None or isinstance(salt, bytes)):
raise TypeError("salt must be bytes.")
if salt is None:
salt = b"\x00" * self._algorithm.digest_size  # digest_size is in bytes; RFC 5869 defaults salt to HashLen zero octets
self._salt = salt
self._backend = backend
self._hkdf_expand = HKDFExpand(self._algorithm, length, info, backend)
def _extract(self, key_material):
h = hmac.HMAC(self._salt, self._algorithm, backend=self._backend)
h.update(key_material)
return h.finalize()
def derive(self, key_material):
if not isinstance(key_material, bytes):
raise TypeError("key_material must be bytes.")
return self._hkdf_expand.derive(self._extract(key_material))
def verify(self, key_material, expected_key):
if not constant_time.bytes_eq(self.derive(key_material), expected_key):
raise InvalidKey
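# A minimal usage sketch (illustrative only; `hashes` and `default_backend`
# are not imported by this module):
#   hkdf = HKDF(hashes.SHA256(), length=32, salt=None, info=b"app-context",
#               backend=default_backend())
#   key = hkdf.derive(b"input key material")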
@utils.register_interface(KeyDerivationFunction)
class HKDFExpand(object):
def __init__(self, algorithm, length, info, backend):
if not isinstance(backend, HMACBackend):
raise UnsupportedAlgorithm(
"Backend object does not implement HMACBackend.",
_Reasons.BACKEND_MISSING_INTERFACE
)
self._algorithm = algorithm
self._backend = backend
max_length = 255 * algorithm.digest_size  # RFC 5869 limit: 255 * HashLen, with digest_size in bytes
if length > max_length:
raise ValueError(
"Can not derive keys larger than {0} octets.".format(
max_length
))
self._length = length
if not (info is None or isinstance(info, bytes)):
raise TypeError("info must be bytes.")
if info is None:
info = b""
self._info = info
self._used = False
def _expand(self, key_material):
output = [b""]
counter = 1
while self._algorithm.digest_size * (len(output) - 1) < self._length:  # output[0] is the empty seed block
h = hmac.HMAC(key_material, self._algorithm, backend=self._backend)
h.update(output[-1])
h.update(self._info)
h.update(six.int2byte(counter))
output.append(h.finalize())
counter += 1
return b"".join(output)[:self._length]
def derive(self, key_material):
|
def verify(self, key_material, expected_key):
if not constant_time.bytes_eq(self.derive(key_material), expected_key):
raise InvalidKey
| if not isinstance(key_material, bytes):
raise TypeError("key_material must be bytes.")
if self._used:
raise AlreadyFinalized
self._used = True
return self._expand(key_material) |
pretty.rs | //! This module implements the functionality described in
//! ["Strictly Pretty" (2000) by Christian Lindig][0], with a few
//! extensions.
//!
//! [0]: http://citeseerx.ist.psu.edu/viewdoc/summary?doi=10.1.1.34.2200
use im::vector::Vector;
pub trait Documentable {
fn to_doc(self) -> Document;
}
impl Documentable for &str {
fn to_doc(self) -> Document {
Document::Text(self.to_string())
}
}
impl Documentable for String {
fn to_doc(self) -> Document {
Document::Text(self)
}
}
impl Documentable for isize {
fn to_doc(self) -> Document {
Document::Text(format!("{}", self))
}
}
impl Documentable for i64 {
fn to_doc(self) -> Document {
Document::Text(format!("{}", self))
}
}
impl Documentable for usize {
fn to_doc(self) -> Document {
Document::Text(format!("{}", self))
}
}
impl Documentable for f64 {
fn to_doc(self) -> Document {
Document::Text(format!("{:?}", self))
}
}
impl Documentable for u64 {
fn to_doc(self) -> Document {
Document::Text(format!("{:?}", self))
}
}
impl Documentable for Document {
fn to_doc(self) -> Document {
self
}
}
impl Documentable for Vec<Document> {
fn to_doc(self) -> Document {
concat(self.into_iter())
}
}
impl<D: Documentable> Documentable for Option<D> {
fn to_doc(self) -> Document {
match self {
Some(d) => d.to_doc(),
None => Document::Nil,
}
}
}
pub fn concat(mut docs: impl Iterator<Item = Document>) -> Document {
let init = docs.next().unwrap_or_else(|| nil());
docs.fold(init, |acc, doc| {
Document::Cons(Box::new(acc), Box::new(doc))
})
}
#[derive(Debug, Clone, PartialEq, Eq)]
pub enum Document {
/// Returns a document entity used to represent nothingness
Nil,
/// A mandatory linebreak
Line(usize),
/// Forces contained groups to break
ForceBreak,
/// May break contained document based on best fit, thus flex break
FlexBreak(Box<Document>),
/// Renders `broken` if group is broken, `unbroken` otherwise
Break {
broken: String,
unbroken: String,
},
/// Join 2 documents together
Cons(Box<Document>, Box<Document>),
/// Nests the given document by the given indent
Nest(isize, Box<Document>),
/// Nests the given document to the current cursor position
NestCurrent(Box<Document>),
/// Lays the contained document out on a single line if it fits, breaking it otherwise
Group(Box<Document>),
/// May nest the given document based on best fit, thus flex group
FlexGroup(isize, Box<Document>),
/// A string to render
Text(String),
}
#[derive(Debug, Clone)]
enum Mode {
Broken,
Unbroken,
}
fn fits(mut limit: isize, mut docs: Vector<(isize, Mode, Document)>) -> bool {
loop {
if limit < 0 {
return false;
};
let (indent, mode, document) = match docs.pop_front() {
Some(x) => x,
None => return true,
};
match document {
Document::Nil => (),
Document::Line(_) => return true,
Document::ForceBreak => return false,
Document::FlexGroup(i, doc) => docs.push_front((i + indent, mode, *doc)),
Document::Nest(i, doc) => docs.push_front((i + indent, mode, *doc)),
Document::NestCurrent(doc) => docs.push_front((indent, mode, *doc)),
Document::Group(doc) => docs.push_front((indent, Mode::Unbroken, *doc)),
Document::Text(s) => limit -= s.len() as isize,
Document::Break { unbroken, .. } => match mode {
Mode::Broken => return true,
Mode::Unbroken => limit -= unbroken.len() as isize,
},
Document::FlexBreak(doc) => docs.push_front((indent, mode, *doc)),
Document::Cons(left, right) => {
docs.push_front((indent, mode.clone(), *right));
docs.push_front((indent, mode, *left));
}
}
}
}
pub fn format(limit: isize, doc: Document) -> String {
let mut buffer = String::new();
fmt(
&mut buffer,
limit,
0,
vector![(0, Mode::Unbroken, Document::Group(Box::new(doc)))],
);
buffer
}
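// A small illustrative use of the public API (everything named here is from
// this module):
//
//     let doc = "let x =".to_doc().append(delim("")).append("1").group();
//     // format(80, doc) fits on one line: "let x = 1";
//     // a tight limit breaks at the delimiter instead.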
fn fmt(b: &mut String, limit: isize, mut width: isize, mut docs: Vector<(isize, Mode, Document)>) {
while let Some((indent, mode, document)) = docs.pop_front() {
match document {
Document::Nil | Document::ForceBreak => (),
Document::Line(i) => {
for _ in 0..i {
b.push_str("\n");
}
b.push_str(" ".repeat(indent as usize).as_str());
width = indent;
}
Document::Break { broken, unbroken } => {
width = match mode {
Mode::Unbroken => {
b.push_str(unbroken.as_str());
width + unbroken.len() as isize
}
Mode::Broken => {
b.push_str(broken.as_str());
b.push_str("\n");
b.push_str(" ".repeat(indent as usize).as_str());
indent as isize
}
};
}
Document::Text(s) => {
width += s.len() as isize;
b.push_str(s.as_str());
}
Document::Cons(left, right) => {
docs.push_front((indent, mode.clone(), *right));
docs.push_front((indent, mode, *left));
}
Document::Nest(i, doc) => {
docs.push_front((indent + i, mode, *doc));
}
Document::NestCurrent(doc) => {
docs.push_front((width, mode, *doc));
}
Document::Group(doc) => {
docs.push_front((indent, Mode::Unbroken, (*doc).clone()));
if !fits(limit - width, docs.clone()) {
docs[0] = (indent, Mode::Broken, (*doc).clone());
}
}
Document::FlexBreak(doc) => {
docs.push_front((indent, Mode::Unbroken, (*doc).clone()));
if !fits(limit - width, docs.clone()) {
docs[0] = (indent, Mode::Broken, (*doc).clone());
}
}
Document::FlexGroup(i, doc) => {
docs.push_front((indent, Mode::Unbroken, (*doc).clone()));
if !fits(limit - width, docs.clone()) {
docs.insert(1, (indent, Mode::Broken, line()));
docs[0] = (indent + i, Mode::Broken, line().append((*doc).clone()));
}
}
}
}
}
#[test]
fn fits_test() {
use self::Document::*;
use self::Mode::*;
// Negative limits never fit
assert!(!fits(-1, vector![]));
// If no more documents it always fits
assert!(fits(0, vector![]));
// ForceBreak never fits
assert!(!fits(100, vector![(0, Unbroken, ForceBreak)]));
assert!(!fits(100, vector![(0, Broken, ForceBreak)]));
// Break in Broken fits always
assert!(fits(
1,
vector![(
0,
Broken,
Break {
broken: "12".to_string(),
unbroken: "".to_string()
}
)]
));
// Break in Unbroken mode fits if `unbroken` fits
assert!(fits(
3,
vector![(
0,
Unbroken,
Break {
broken: "".to_string(),
unbroken: "123".to_string()
}
)]
));
assert!(!fits(
2,
vector![(
0,
Unbroken,
Break {
broken: "".to_string(),
unbroken: "123".to_string()
}
)]
));
// Line always fits
assert!(fits(0, vector![(0, Broken, Line(100))]));
assert!(fits(0, vector![(0, Unbroken, Line(100))]));
// String fits if smaller than limit
assert!(fits(5, vector![(0, Broken, Text("Hello".to_string()))]));
assert!(fits(5, vector![(0, Unbroken, Text("Hello".to_string()))]));
assert!(!fits(4, vector![(0, Broken, Text("Hello".to_string()))]));
assert!(!fits(4, vector![(0, Unbroken, Text("Hello".to_string()))]));
// Cons fits if combined smaller than limit
assert!(fits(
2,
vector![(
0,
Broken,
Cons(
Box::new(Text("1".to_string())),
Box::new(Text("2".to_string()))
)
)]
));
assert!(fits(
2,
vector![(
0,
Unbroken,
Cons(
Box::new(Text("1".to_string())),
Box::new(Text("2".to_string()))
)
)]
));
assert!(!fits(
1,
vector![(
0,
Broken,
Cons(
Box::new(Text("1".to_string())),
Box::new(Text("2".to_string()))
)
)]
));
assert!(!fits(
1,
vector![(
0,
Unbroken,
Cons(
Box::new(Text("1".to_string())),
Box::new(Text("2".to_string()))
)
)]
));
// Nest fits if combined smaller than limit
assert!(fits(
2,
vector![(0, Broken, Nest(1, Box::new(Text("12".to_string())),))]
));
assert!(fits(
2,
vector![(0, Unbroken, Nest(1, Box::new(Text("12".to_string())),))]
));
assert!(!fits(
1,
vector![(0, Broken, Nest(1, Box::new(Text("12".to_string())),))]
));
assert!(!fits(
1,
vector![(0, Unbroken, Nest(1, Box::new(Text("12".to_string()))))]
));
// Nest fits if combined smaller than limit
assert!(fits(
2,
vector![(0, Broken, NestCurrent(Box::new(Text("12".to_string())),))]
));
assert!(fits(
2,
vector![(0, Unbroken, NestCurrent(Box::new(Text("12".to_string())),))]
));
assert!(!fits(
1,
vector![(0, Broken, NestCurrent(Box::new(Text("12".to_string())),))]
));
assert!(!fits(
1,
vector![(0, Unbroken, NestCurrent(Box::new(Text("12".to_string()))))]
));
}
#[test]
fn format_test() {
use self::Document::*;
let doc = Text("Hi".to_string());
assert_eq!("Hi".to_string(), format(10, doc));
let doc = Cons(
Box::new(Text("Hi".to_string())),
Box::new(Text(", world!".to_string())),
);
assert_eq!("Hi, world!".to_string(), format(10, doc));
let doc = Nil;
assert_eq!("".to_string(), format(10, doc));
let doc = Break {
broken: "broken".to_string(),
unbroken: "unbroken".to_string(),
};
assert_eq!("unbroken".to_string(), format(10, doc));
let doc = Break {
broken: "broken".to_string(),
unbroken: "unbroken".to_string(),
};
assert_eq!("broken\n".to_string(), format(5, doc));
let doc = Nest(
2,
Box::new(Cons(
Box::new(Text("1".to_string())),
Box::new(Cons(Box::new(Line(1)), Box::new(Text("2".to_string())))),
)),
);
assert_eq!("1\n 2".to_string(), format(1, doc));
let doc = Cons(
Box::new(Text("111".to_string())),
Box::new(NestCurrent(Box::new(Cons(
Box::new(Line(1)),
Box::new(Text("2".to_string())),
)))),
);
assert_eq!("111\n 2".to_string(), format(1, doc));
let doc = Cons(
Box::new(ForceBreak),
Box::new(Break {
broken: "broken".to_string(),
unbroken: "unbroken".to_string(),
}),
);
assert_eq!("broken\n".to_string(), format(100, doc));
}
pub fn nil() -> Document {
Document::Nil
}
pub fn line() -> Document {
Document::Line(1)
}
pub fn lines(i: usize) -> Document {
Document::Line(i)
}
pub fn force_break() -> Document {
Document::ForceBreak
}
pub fn break_(broken: &str, unbroken: &str) -> Document {
Document::Break {
broken: broken.to_string(),
unbroken: unbroken.to_string(),
}
}
pub fn delim(d: &str) -> Document {
Document::Break {
broken: d.to_string(),
unbroken: format!("{} ", d),
}
}
impl Document {
pub fn group(self) -> Document {
Document::Group(Box::new(self))
}
pub fn flex_group(self, indent: isize) -> Document {
Document::FlexGroup(indent, Box::new(self))
}
pub fn flex_break(self) -> Document {
Document::FlexBreak(Box::new(self))
}
pub fn | (self, indent: isize) -> Document {
Document::Nest(indent, Box::new(self))
}
pub fn nest_current(self) -> Document {
Document::NestCurrent(Box::new(self))
}
pub fn append(self, x: impl Documentable) -> Document {
Document::Cons(Box::new(self), Box::new(x.to_doc()))
}
pub fn format(self, limit: isize) -> String {
format(limit, self)
}
pub fn surround(self, open: impl Documentable, closed: impl Documentable) -> Document {
open.to_doc().append(self).append(closed)
}
}
| nest |
soln.rs | // This program is licensed under the "MIT License".
// Please see the file LICENSE in this distribution
// for license terms.
//! Advent of Code Day 10.
//! Bart Massey 2019
use std::collections::{HashMap, HashSet};
/// Type of coordinates and coordinate differences.
// XXX We could probably be more careful because the
// coordinates themselves are unsigned, but then Rust just
// forces a giant pile of casty garbage everywhere, so no.
type Coord = (i64, i64);
/// Represent the map as a set of asteroids.
type Map = HashSet<Coord>;
/// Read a map from a text description.
fn read_map<S>(lines: &[S]) -> Map
where
S: AsRef<str>,
{
let mut map = HashSet::new();
for (y, line) in lines.iter().enumerate() {
for (x, c) in line.as_ref().chars().enumerate() {
match c {
'#' => {
map.insert((x as i64, y as i64));
}
'.' => (),
c => panic!("unexpected char {} in map", c as u32),
}
}
}
map
}
/// This is the heart of the computation for this week.
/// Compute a "reduced slope" as a *run, rise* pair but
/// divided by their GCD. This is what the problem
/// description seems to mean by occlusion: same reduced
/// slope.
fn int_slope((dx, dy): Coord) -> Coord {
assert!(dx != 0 || dy != 0);
let q = aoc::gcd(dx.abs(), dy.abs());
(dx / q, dy / q)
}
// Do some computations taken from the first example to
// check the occlusion hypothesis.
#[test]
fn test_int_slope() {
assert_eq!(int_slope((-2, 4)), int_slope((-1, 2)));
assert!(int_slope((1, 2)) != int_slope((1, 4)));
assert!(int_slope((0, 2)) != int_slope((1, 4)));
}
/// Subtract second tuple from first.
// XXX Why isn't there tuple arithmetic by default in Rust?
// There just isn't, that's why.
fn coord_sub((x1, y1): Coord, (x2, y2): Coord) -> Coord {
(x1 - x2, y1 - y2)
}
/// Return a map from a slope from the given origin to the
/// collection of asteroid coordinates on the map that share
/// that slope. Each collection is a `Vec` in arbitrary
/// order.
fn find_slopes(map: &Map, origin: Coord) -> HashMap<Coord, Vec<Coord>> {
// Make an iterator over slope, coordinate pairs from
// the map.
let slopes = map
.iter()
.filter(|&&a| a != origin)
.map(|&a| (int_slope(coord_sub(a, origin)), a));
// Build up the result map using the iterator.
let mut sc = HashMap::new();
for (s, c) in slopes {
let coords = sc.entry(s).or_insert_with(Vec::new);
coords.push(c);
}
sc
}
/// Count the asteroids visible from a given origin
/// asteroid.
fn count_visible(map: &Map, origin: Coord) -> usize {
find_slopes(map, origin).len()
}
/// Find the optimal observatory origin and its count.
fn max_visibility(map: &Map) -> (usize, Coord) {
map.iter()
.map(|&a| (count_visible(map, a), a))
.max_by_key(|&a| a.0)
.expect("only one asteroid")
}
// Some of the tests from the problem.
#[test]
fn test_visible() {
#[rustfmt::skip] | "....#",
"...##",
];
let map = read_map(map);
#[rustfmt::skip]
let tests = &[
(8, (3, 4)),
(6, (0, 2)),
(7, (2, 2)),
(5, (4, 2)),
];
for &(c, a) in tests {
assert_eq!(c, count_visible(&map, a));
}
assert_eq!((8, (3, 4)), max_visibility(&map));
}
/// Return the coordinate of the nth asteroid vaporized by
/// the process described in Part 2 of the problem.
fn nth_vaporized(map: &Map, origin: Coord, mut n: usize) -> Coord {
assert!(n <= map.len());
// Get the slope sets.
let mut aslopes = find_slopes(map, origin);
// Sort each collection `Vec` in decreasing order by
// distance. This allows popping the nearest off the
// end. (A priority queue would be slightly more
// efficent than a `Vec` for Part 2, but less so for
// Part 1. So meh.)
for v in aslopes.values_mut() {
v.sort_by_key(|&c| {
let (x, y) = coord_sub(c, origin);
-(x * x + y * y)
});
}
// Make a vector of the slope sets sorted by increasing
// angle. To say that the `-atan2()` with backward
// arguments is a bit odd would be an understatement.
// Note the necessary use of `OrderedFloat` to avoid
// Rust's lack of total order on floats due to
// mishandling `NaN`.
let mut slopes: Vec<Coord> = aslopes.keys().cloned().collect();
slopes.sort_by_key(|&(x, y)| {
ordered_float::OrderedFloat::from(-f64::atan2(
x as f64, y as f64,
))
});
// Walk around the circle now that it is set up. No
// effort is made to remove empty sets from play. Code
// does try to detect infinite loop (for example if `n`
// is larger than the number of asteroids, although
// this is checked for above).
let mut i = 0;
let mut progress = false;
loop {
let slope = slopes[i];
let roids = aslopes.get_mut(&slope).unwrap();
if let Some(shot) = roids.pop() {
progress = true;
n -= 1;
if n == 0 {
return shot;
}
}
i = (i + 1) % slopes.len();
if i == 0 {
if !progress {
panic!("ran out of asteroids with n = {}", n);
}
progress = false;
}
}
}
// More tests from the problem.
#[test]
fn test_nth_vaporized() {
#[rustfmt::skip]
let map = &[
".#....#####...#..",
"##...##.#####..##",
"##...#...#.#####.",
"..#.....#...###..",
"..#.#.....#....##",
];
let map = read_map(map);
let origin = (8, 3);
#[rustfmt::skip]
let tests = &[
((8, 1), 1),
((9, 0), 2),
((15, 1), 9),
((2, 4), 19),
((5, 1), 27),
((14, 3), 36),
];
for &(c, n) in tests {
assert_eq!(c, nth_vaporized(&map, origin, n));
}
#[rustfmt::skip]
let map = &[
".#..##.###...#######",
"##.############..##.",
".#.######.########.#",
".###.#######.####.#.",
"#####.##.#.##.###.##",
"..#####..#.#########",
"####################",
"#.####....###.#.#.##",
"##.#################",
"#####.##.###..####..",
"..######..##.#######",
"####.##.####...##..#",
".#####..#.######.###",
"##...#.##########...",
"#.##########.#######",
".####.#.###.###.#.##",
"....##.##.###..#####",
".#.#.###########.###",
"#.#.#.#####.####.###",
"###.##.####.##.#..##",
];
let map = read_map(map);
let (nvis, origin) = max_visibility(&map);
assert_eq!((nvis, origin), (210, (11, 13)));
#[rustfmt::skip]
let tests = &[
(1, (11,12)),
(2, (12,1)),
(3, (12,2)),
(10, (12,8)),
(20, (16,0)),
(50, (16,9)),
(100, (10,16)),
(199, (9,6)),
(200, (8,2)),
(201, (10,9)),
(299, (11,1)),
];
for &(n, c) in tests {
assert_eq!(c, nth_vaporized(&map, origin, n));
}
}
pub fn main() {
let lines: Vec<String> = aoc::input_lines().collect();
let map = read_map(&lines);
let (nvis, origin) = max_visibility(&map);
let part = aoc::get_part();
match part {
aoc::Part1 => println!("{}", nvis),
aoc::Part2 => {
let (x, y) = nth_vaporized(&map, origin, 200);
println!("{}", x * 100 + y);
}
}
} | let map = &[
".#..#",
".....",
"#####", |
SignCommand.ts | import { KMSClientResolvedConfig, ServiceInputTypes, ServiceOutputTypes } from "../KMSClient";
import { SignRequest, SignResponse } from "../models/models_0";
import { deserializeAws_json1_1SignCommand, serializeAws_json1_1SignCommand } from "../protocols/Aws_json1_1";
import { getSerdePlugin } from "@aws-sdk/middleware-serde";
import { HttpRequest as __HttpRequest, HttpResponse as __HttpResponse } from "@aws-sdk/protocol-http";
import { Command as $Command } from "@aws-sdk/smithy-client";
import {
FinalizeHandlerArguments,
Handler,
HandlerExecutionContext,
MiddlewareStack,
HttpHandlerOptions as __HttpHandlerOptions,
MetadataBearer as __MetadataBearer,
SerdeContext as __SerdeContext,
} from "@aws-sdk/types";
export type SignCommandInput = SignRequest;
export type SignCommandOutput = SignResponse & __MetadataBearer;
/**
* <p>Creates a <a href="https://en.wikipedia.org/wiki/Digital_signature">digital
* signature</a> for a message or message digest by using the private key in an asymmetric
* CMK. To verify the signature, use the <a>Verify</a> operation, or use the public
* key in the same asymmetric CMK outside of AWS KMS. For information about symmetric and asymmetric CMKs, see <a href="https://docs.aws.amazon.com/kms/latest/developerguide/symmetric-asymmetric.html">Using Symmetric and Asymmetric CMKs</a> in the <i>AWS Key Management Service Developer Guide</i>.</p>
* <p>Digital signatures are generated and verified by using asymmetric key pair, such as an RSA
* or ECC pair that is represented by an asymmetric customer master key (CMK). The key owner (or
* an authorized user) uses their private key to sign a message. Anyone with the public key can
* verify that the message was signed with that particular private key and that the message
* hasn't changed since it was signed. </p>
* <p>To use the <code>Sign</code> operation, provide the following information:</p>
* <ul>
* <li>
* <p>Use the <code>KeyId</code> parameter to identify an asymmetric CMK with a
* <code>KeyUsage</code> value of <code>SIGN_VERIFY</code>. To get the
* <code>KeyUsage</code> value of a CMK, use the <a>DescribeKey</a> operation.
* The caller must have <code>kms:Sign</code> permission on the CMK.</p>
* </li>
* <li>
* <p>Use the <code>Message</code> parameter to specify the message or message digest to
* sign. You can submit messages of up to 4096 bytes. To sign a larger message, generate a
* hash digest of the message, and then provide the hash digest in the <code>Message</code>
* parameter. To indicate whether the message is a full message or a digest, use the
* <code>MessageType</code> parameter.</p>
* </li>
* <li>
* <p>Choose a signing algorithm that is compatible with the CMK. </p>
* </li>
* </ul>
* <important>
* <p>When signing a message, be sure to record the CMK and the signing algorithm. This
* information is required to verify the signature.</p>
* </important>
* <p>To verify the signature that this operation generates, use the <a>Verify</a>
* operation. Or use the <a>GetPublicKey</a> operation to download the public key and
* then use the public key to verify the signature outside of AWS KMS. </p>
* <p>The CMK that you use for this operation must be in a compatible key state. For
* details, see <a href="https://docs.aws.amazon.com/kms/latest/developerguide/key-state.html">How Key State Affects Use
* of a Customer Master Key</a> in the <i>AWS Key Management Service Developer Guide</i>.</p>
* <p>
* <b>Cross-account use</b>: Yes. To perform this operation with a CMK in a different AWS account, specify
* the key ARN or alias ARN in the value of the <code>KeyId</code> parameter.</p>
*
* <p>
* <b>Required permissions</b>: <a href="https://docs.aws.amazon.com/kms/latest/developerguide/kms-api-permissions-reference.html">kms:Sign</a> (key policy)</p>
* <p>
* <b>Related operations</b>: <a>Verify</a>
* </p>
*/
export class | extends $Command<SignCommandInput, SignCommandOutput, KMSClientResolvedConfig> {
// Start section: command_properties
// End section: command_properties
constructor(readonly input: SignCommandInput) {
// Start section: command_constructor
super();
// End section: command_constructor
}
/**
* @internal
*/
resolveMiddleware(
clientStack: MiddlewareStack<ServiceInputTypes, ServiceOutputTypes>,
configuration: KMSClientResolvedConfig,
options?: __HttpHandlerOptions
): Handler<SignCommandInput, SignCommandOutput> {
this.middlewareStack.use(getSerdePlugin(configuration, this.serialize, this.deserialize));
const stack = clientStack.concat(this.middlewareStack);
const { logger } = configuration;
const clientName = "KMSClient";
const commandName = "SignCommand";
const handlerExecutionContext: HandlerExecutionContext = {
logger,
clientName,
commandName,
inputFilterSensitiveLog: SignRequest.filterSensitiveLog,
outputFilterSensitiveLog: SignResponse.filterSensitiveLog,
};
const { requestHandler } = configuration;
return stack.resolve(
(request: FinalizeHandlerArguments<any>) =>
requestHandler.handle(request.request as __HttpRequest, options || {}),
handlerExecutionContext
);
}
private serialize(input: SignCommandInput, context: __SerdeContext): Promise<__HttpRequest> {
return serializeAws_json1_1SignCommand(input, context);
}
private deserialize(output: __HttpResponse, context: __SerdeContext): Promise<SignCommandOutput> {
return deserializeAws_json1_1SignCommand(output, context);
}
// Start section: command_body_extra
// End section: command_body_extra
}
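// A minimal usage sketch (illustrative; the client construction and key id
// are hypothetical):
//   const out = await client.send(new SignCommand({
//     KeyId: "alias/my-signing-key",
//     Message: new TextEncoder().encode("message"),
//     MessageType: "RAW",
//     SigningAlgorithm: "RSASSA_PKCS1_V1_5_SHA_256",
//   }));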
| SignCommand |
hyperflex_ucsm_config_policy_ref.py | # coding: utf-8
"""
Intersight REST API
This is Intersight REST API
OpenAPI spec version: 1.0.9-262
Generated by: https://github.com/swagger-api/swagger-codegen.git
"""
from pprint import pformat
from six import iteritems
import re
class HyperflexUcsmConfigPolicyRef(object):
"""
NOTE: This class is auto generated by the swagger code generator program.
Do not edit the class manually.
"""
"""
Attributes:
swagger_types (dict): The key is attribute name
and the value is attribute type.
attribute_map (dict): The key is attribute name
and the value is json key in definition.
"""
swagger_types = {
'moid': 'str',
'object_type': 'str'
}
attribute_map = {
'moid': 'Moid',
'object_type': 'ObjectType'
}
def __init__(self, moid=None, object_type=None):
"""
HyperflexUcsmConfigPolicyRef - a model defined in Swagger
"""
self._moid = None
self._object_type = None
if moid is not None:
self.moid = moid
if object_type is not None:
self.object_type = object_type
@property
def moid(self):
"""
Gets the moid of this HyperflexUcsmConfigPolicyRef.
:return: The moid of this HyperflexUcsmConfigPolicyRef.
:rtype: str
"""
return self._moid
@moid.setter
def moid(self, moid):
"""
Sets the moid of this HyperflexUcsmConfigPolicyRef.
:param moid: The moid of this HyperflexUcsmConfigPolicyRef.
:type: str
"""
self._moid = moid
@property
def object_type(self):
"""
Gets the object_type of this HyperflexUcsmConfigPolicyRef.
:return: The object_type of this HyperflexUcsmConfigPolicyRef.
:rtype: str
"""
return self._object_type
@object_type.setter
def object_type(self, object_type):
"""
Sets the object_type of this HyperflexUcsmConfigPolicyRef.
:param object_type: The object_type of this HyperflexUcsmConfigPolicyRef. |
self._object_type = object_type
def to_dict(self):
"""
Returns the model properties as a dict
"""
result = {}
for attr, _ in iteritems(self.swagger_types):
value = getattr(self, attr)
if isinstance(value, list):
result[attr] = list(map(
lambda x: x.to_dict() if hasattr(x, "to_dict") else x,
value
))
elif hasattr(value, "to_dict"):
result[attr] = value.to_dict()
elif isinstance(value, dict):
result[attr] = dict(map(
lambda item: (item[0], item[1].to_dict())
if hasattr(item[1], "to_dict") else item,
value.items()
))
else:
result[attr] = value
return result
def to_str(self):
"""
Returns the string representation of the model
"""
return pformat(self.to_dict())
def __repr__(self):
"""
For `print` and `pprint`
"""
return self.to_str()
def __eq__(self, other):
"""
Returns true if both objects are equal
"""
if not isinstance(other, HyperflexUcsmConfigPolicyRef):
return False
return self.__dict__ == other.__dict__
def __ne__(self, other):
"""
Returns true if both objects are not equal
"""
return not self == other | :type: str
""" |
cookie_tests.rs | extern crate httpmock;
use isahc::{prelude::*, Request};
use httpmock::Method::GET;
use httpmock::{Mock, MockServer};
#[test]
fn cookie_matching_test() {
// Arrange
let _ = env_logger::try_init();
let server = MockServer::start();
| .expect_cookie_exists("SESSIONID")
.expect_cookie("SESSIONID", "298zf09hf012fh2")
.return_status(200)
.create_on(&server);
    // Act: Send the request with several cookies in the Cookie header
let response = Request::get(&format!("http://{}", server.address()))
.header(
"Cookie",
"OTHERCOOKIE1=01234; SESSIONID=298zf09hf012fh2; OTHERCOOKIE2=56789; HttpOnly",
)
.body(())
.unwrap()
.send()
.unwrap();
// Assert
mock.assert();
assert_eq!(response.status(), 200);
} | let mock = Mock::new()
.expect_method(GET)
.expect_path("/") |
generators.go | /**
* Copyright (c) 2018 Dell Inc., or its subsidiaries. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*/
package zk
import (
"fmt"
"reflect"
"strconv"
"strings"
"github.com/pravega/zookeeper-operator/pkg/apis/zookeeper/v1beta1"
appsv1 "k8s.io/api/apps/v1"
v1 "k8s.io/api/core/v1"
policyv1beta1 "k8s.io/api/policy/v1beta1"
metav1 "k8s.io/apimachinery/pkg/apis/meta/v1"
"k8s.io/apimachinery/pkg/util/intstr"
)
const (
externalDNSAnnotationKey = "external-dns.alpha.kubernetes.io/hostname"
dot = "."
)
func headlessDomain(z *v1beta1.ZookeeperCluster) string {
return fmt.Sprintf("%s.%s.svc.%s", headlessSvcName(z), z.GetNamespace(), z.GetKubernetesClusterDomain())
}
func headlessSvcName(z *v1beta1.ZookeeperCluster) string {
return fmt.Sprintf("%s-headless", z.GetName())
}
var zkDataVolume = "data"
// MakeStatefulSet returns a zookeeper stateful set from the zk spec
func MakeStatefulSet(z *v1beta1.ZookeeperCluster) *appsv1.StatefulSet {
extraVolumes := []v1.Volume{}
persistence := z.Spec.Persistence
pvcs := []v1.PersistentVolumeClaim{}
if strings.EqualFold(z.Spec.StorageType, "ephemeral") {
extraVolumes = append(extraVolumes, v1.Volume{
Name: zkDataVolume,
VolumeSource: v1.VolumeSource{
EmptyDir: &z.Spec.Ephemeral.EmptyDirVolumeSource,
},
})
} else {
pvcs = append(pvcs, v1.PersistentVolumeClaim{
ObjectMeta: metav1.ObjectMeta{
Name: zkDataVolume,
Labels: mergeLabels(
z.Spec.Labels,
map[string]string{"app": z.GetName()},
),
},
Spec: persistence.PersistentVolumeClaimSpec,
})
}
return &appsv1.StatefulSet{
TypeMeta: metav1.TypeMeta{
Kind: "StatefulSet",
APIVersion: "apps/v1",
},
ObjectMeta: metav1.ObjectMeta{
Name: z.GetName(),
Namespace: z.Namespace,
Labels: z.Spec.Labels,
},
Spec: appsv1.StatefulSetSpec{
ServiceName: headlessSvcName(z),
Replicas: &z.Spec.Replicas,
Selector: &metav1.LabelSelector{
MatchLabels: map[string]string{
"app": z.GetName(),
},
},
UpdateStrategy: appsv1.StatefulSetUpdateStrategy{
Type: appsv1.RollingUpdateStatefulSetStrategyType,
},
PodManagementPolicy: appsv1.OrderedReadyPodManagement,
Template: v1.PodTemplateSpec{
ObjectMeta: metav1.ObjectMeta{
GenerateName: z.GetName(),
Labels: mergeLabels(
z.Spec.Labels,
map[string]string{
"app": z.GetName(),
"kind": "ZookeeperMember",
},
),
Annotations: z.Spec.Pod.Annotations,
},
Spec: makeZkPodSpec(z, extraVolumes),
},
VolumeClaimTemplates: pvcs,
},
}
}
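// Illustrative sketch of how MakeStatefulSet is typically consumed from a
// reconcile loop; `instance`, `r.client` and `r.scheme` are placeholders for
// the operator's controller-runtime wiring, not part of this file:
//
//	sts := MakeStatefulSet(instance)
//	if err := controllerutil.SetControllerReference(instance, sts, r.scheme); err != nil {
//		return reconcile.Result{}, err
//	}
//	err = r.client.Create(context.TODO(), sts)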
func makeZkPodSpec(z *v1beta1.ZookeeperCluster, volumes []v1.Volume) v1.PodSpec {
zkContainer := v1.Container{
Name: "zookeeper",
Image: z.Spec.Image.ToString(),
Ports: z.Spec.Ports,
Env: []v1.EnvVar{
{
Name: "ENVOY_SIDECAR_STATUS",
ValueFrom: &v1.EnvVarSource{
FieldRef: &v1.ObjectFieldSelector{
FieldPath: `metadata.annotations['sidecar.istio.io/status']`,
},
},
},
},
ImagePullPolicy: z.Spec.Image.PullPolicy,
ReadinessProbe: &v1.Probe{
InitialDelaySeconds: 10,
TimeoutSeconds: 10,
Handler: v1.Handler{
Exec: &v1.ExecAction{Command: []string{"zookeeperReady.sh"}},
},
},
LivenessProbe: &v1.Probe{
InitialDelaySeconds: 10,
TimeoutSeconds: 10,
Handler: v1.Handler{
Exec: &v1.ExecAction{Command: []string{"zookeeperLive.sh"}},
},
},
VolumeMounts: []v1.VolumeMount{
{Name: "data", MountPath: "/data"},
{Name: "conf", MountPath: "/conf"},
},
Lifecycle: &v1.Lifecycle{
PreStop: &v1.Handler{
Exec: &v1.ExecAction{
Command: []string{"zookeeperTeardown.sh"},
},
},
},
Command: []string{"/usr/local/bin/zookeeperStart.sh"},
}
if z.Spec.Pod.Resources.Limits != nil || z.Spec.Pod.Resources.Requests != nil {
zkContainer.Resources = z.Spec.Pod.Resources
}
volumes = append(volumes, v1.Volume{
Name: "conf",
VolumeSource: v1.VolumeSource{
ConfigMap: &v1.ConfigMapVolumeSource{
LocalObjectReference: v1.LocalObjectReference{
Name: z.ConfigMapName(),
},
},
},
})
zkContainer.Env = append(zkContainer.Env, z.Spec.Pod.Env...)
podSpec := v1.PodSpec{
Containers: append(z.Spec.Containers, zkContainer),
Affinity: z.Spec.Pod.Affinity,
Volumes: append(z.Spec.Volumes, volumes...),
}
	if !reflect.DeepEqual(v1.PodSecurityContext{}, z.Spec.Pod.SecurityContext) {
podSpec.SecurityContext = z.Spec.Pod.SecurityContext
}
podSpec.NodeSelector = z.Spec.Pod.NodeSelector
podSpec.Tolerations = z.Spec.Pod.Tolerations
podSpec.TerminationGracePeriodSeconds = &z.Spec.Pod.TerminationGracePeriodSeconds
podSpec.ServiceAccountName = z.Spec.Pod.ServiceAccountName
return podSpec
}
// MakeClientService returns a client service resource for the zookeeper cluster
func MakeClientService(z *v1beta1.ZookeeperCluster) *v1.Service {
ports := z.ZookeeperPorts()
svcPorts := []v1.ServicePort{
{Name: "tcp-client", Port: ports.Client},
}
return makeService(z.GetClientServiceName(), svcPorts, true, z)
}
// MakeConfigMap returns a zookeeper config map
func MakeConfigMap(z *v1beta1.ZookeeperCluster) *v1.ConfigMap {
return &v1.ConfigMap{
TypeMeta: metav1.TypeMeta{
Kind: "ConfigMap",
APIVersion: "v1",
},
ObjectMeta: metav1.ObjectMeta{
Name: z.ConfigMapName(),
Namespace: z.Namespace,
Labels: z.Spec.Labels,
},
Data: map[string]string{
"zoo.cfg": makeZkConfigString(z.Spec),
"log4j.properties": makeZkLog4JConfigString(),
"log4j-quiet.properties": makeZkLog4JQuietConfigString(),
"env.sh": makeZkEnvConfigString(z),
},
}
}
// MakeHeadlessService returns an internal headless-service for the zk
// stateful-set
func MakeHeadlessService(z *v1beta1.ZookeeperCluster) *v1.Service {
ports := z.ZookeeperPorts()
svcPorts := []v1.ServicePort{
{Name: "tcp-client", Port: ports.Client},
{Name: "tcp-quorum", Port: ports.Quorum},
{Name: "tcp-leader-election", Port: ports.Leader},
{Name: "tcp-metrics", Port: ports.Metrics},
}
return makeService(headlessSvcName(z), svcPorts, false, z)
}
func makeZkConfigString(s v1beta1.ZookeeperClusterSpec) string {
return "4lw.commands.whitelist=cons, envi, conf, crst, srvr, stat, mntr, ruok\n" +
"dataDir=/data\n" +
"standaloneEnabled=false\n" +
"reconfigEnabled=true\n" +
"skipACL=yes\n" +
"metricsProvider.className=org.apache.zookeeper.metrics.prometheus.PrometheusMetricsProvider\n" +
"metricsProvider.httpPort=7000\n" +
"metricsProvider.exportJvmInfo=true\n" +
"initLimit=" + strconv.Itoa(s.Conf.InitLimit) + "\n" +
"syncLimit=" + strconv.Itoa(s.Conf.SyncLimit) + "\n" +
"tickTime=" + strconv.Itoa(s.Conf.TickTime) + "\n" +
"quorumListenOnAllIPs=" + strconv.FormatBool(s.Conf.QuorumListenOnAllIPs) + "\n" +
"dynamicConfigFile=/data/zoo.cfg.dynamic\n"
}
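// For illustration only, with hypothetical conf values InitLimit=10,
// SyncLimit=2, TickTime=2000 and QuorumListenOnAllIPs=false, the string
// built above renders as:
//
//	4lw.commands.whitelist=cons, envi, conf, crst, srvr, stat, mntr, ruok
//	dataDir=/data
//	standaloneEnabled=false
//	reconfigEnabled=true
//	skipACL=yes
//	metricsProvider.className=org.apache.zookeeper.metrics.prometheus.PrometheusMetricsProvider
//	metricsProvider.httpPort=7000
//	metricsProvider.exportJvmInfo=true
//	initLimit=10
//	syncLimit=2
//	tickTime=2000
//	quorumListenOnAllIPs=false
//	dynamicConfigFile=/data/zoo.cfg.dynamic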
func makeZkLog4JQuietConfigString() string {
return "log4j.rootLogger=ERROR, CONSOLE\n" +
"log4j.appender.CONSOLE=org.apache.log4j.ConsoleAppender\n" +
"log4j.appender.CONSOLE.Threshold=ERROR\n" +
"log4j.appender.CONSOLE.layout=org.apache.log4j.PatternLayout\n" +
"log4j.appender.CONSOLE.layout.ConversionPattern=%d{ISO8601} [myid:%X{myid}] - %-5p [%t:%C{1}@%L] - %m%n\n"
}
func makeZkLog4JConfigString() string {
return "zookeeper.root.logger=CONSOLE\n" +
"zookeeper.console.threshold=INFO\n" +
"log4j.rootLogger=${zookeeper.root.logger}\n" +
"log4j.appender.CONSOLE=org.apache.log4j.ConsoleAppender\n" +
"log4j.appender.CONSOLE.Threshold=${zookeeper.console.threshold}\n" +
"log4j.appender.CONSOLE.layout=org.apache.log4j.PatternLayout\n" +
"log4j.appender.CONSOLE.layout.ConversionPattern=%d{ISO8601} [myid:%X{myid}] - %-5p [%t:%C{1}@%L] - %m%n\n"
}
func makeZkEnvConfigString(z *v1beta1.ZookeeperCluster) string {
ports := z.ZookeeperPorts()
return "#!/usr/bin/env bash\n\n" +
"DOMAIN=" + headlessDomain(z) + "\n" +
"QUORUM_PORT=" + strconv.Itoa(int(ports.Quorum)) + "\n" +
"LEADER_PORT=" + strconv.Itoa(int(ports.Leader)) + "\n" +
"CLIENT_HOST=" + z.GetClientServiceName() + "\n" +
"CLIENT_PORT=" + strconv.Itoa(int(ports.Client)) + "\n" +
"CLUSTER_NAME=" + z.GetName() + "\n" +
"CLUSTER_SIZE=" + fmt.Sprint(z.Spec.Replicas) + "\n"
}
func makeService(name string, ports []v1.ServicePort, clusterIP bool, z *v1beta1.ZookeeperCluster) *v1.Service {
var dnsName string
var annotationMap map[string]string
if !clusterIP && z.Spec.DomainName != "" | else {
annotationMap = map[string]string{}
}
service := v1.Service{
TypeMeta: metav1.TypeMeta{
Kind: "Service",
APIVersion: "v1",
},
ObjectMeta: metav1.ObjectMeta{
Name: name,
Namespace: z.Namespace,
Labels: mergeLabels(
z.Spec.Labels,
map[string]string{"app": z.GetName(), "headless": strconv.FormatBool(!clusterIP)},
),
Annotations: annotationMap,
},
Spec: v1.ServiceSpec{
Ports: ports,
Selector: map[string]string{"app": z.GetName()},
},
}
if !clusterIP {
service.Spec.ClusterIP = v1.ClusterIPNone
}
return &service
}
// MakePodDisruptionBudget returns a PodDisruptionBudget for the zookeeper cluster
func MakePodDisruptionBudget(z *v1beta1.ZookeeperCluster) *policyv1beta1.PodDisruptionBudget {
pdbCount := intstr.FromInt(1)
return &policyv1beta1.PodDisruptionBudget{
TypeMeta: metav1.TypeMeta{
Kind: "PodDisruptionBudget",
APIVersion: "policy/v1beta1",
},
ObjectMeta: metav1.ObjectMeta{
Name: z.GetName(),
Namespace: z.Namespace,
Labels: z.Spec.Labels,
},
Spec: policyv1beta1.PodDisruptionBudgetSpec{
MaxUnavailable: &pdbCount,
Selector: &metav1.LabelSelector{
MatchLabels: map[string]string{
"app": z.GetName(),
},
},
},
}
}
// MakeServiceAccount returns the service account for the zookeeper cluster
func MakeServiceAccount(z *v1beta1.ZookeeperCluster) *v1.ServiceAccount {
return &v1.ServiceAccount{
ObjectMeta: metav1.ObjectMeta{
Name: z.Spec.Pod.ServiceAccountName,
Namespace: z.Namespace,
},
}
}
// mergeLabels merges the given label maps; keys in later maps override earlier ones.
func mergeLabels(l ...map[string]string) map[string]string {
res := make(map[string]string)
for _, v := range l {
for lKey, lValue := range v {
res[lKey] = lValue
}
}
return res
}
| {
domainName := strings.TrimSpace(z.Spec.DomainName)
if strings.HasSuffix(domainName, dot) {
dnsName = name + dot + domainName
} else {
dnsName = name + dot + domainName + dot
}
annotationMap = map[string]string{externalDNSAnnotationKey: dnsName}
} |
calibrate.rs | //! The calibration stage. The fuzzer measures the average exec time and the bitmap size.
use crate::{
bolts::current_time,
corpus::{Corpus, PowerScheduleTestcaseMetaData},
executors::{Executor, HasObservers},
fuzzer::Evaluator,
inputs::Input,
observers::{MapObserver, ObserversTuple},
stages::Stage,
state::{HasCorpus, HasMetadata},
Error,
};
use alloc::{
string::{String, ToString},
vec::Vec,
};
use core::{marker::PhantomData, time::Duration};
use num_traits::PrimInt;
use serde::{Deserialize, Serialize};
#[derive(Clone, Debug)]
pub struct CalibrationStage<C, E, EM, I, O, OT, S, T, Z>
where
T: PrimInt + Default + Copy + 'static + serde::Serialize + serde::de::DeserializeOwned,
C: Corpus<I>,
E: Executor<EM, I, S, Z> + HasObservers<I, OT, S>,
I: Input,
O: MapObserver<T>,
OT: ObserversTuple<I, S>,
S: HasCorpus<C, I> + HasMetadata,
Z: Evaluator<E, EM, I, S>,
{
map_observer_name: String,
stage_max: usize,
#[allow(clippy::type_complexity)]
phantom: PhantomData<(C, E, EM, I, O, OT, S, T, Z)>,
}
const CAL_STAGE_MAX: usize = 8;
impl<C, E, EM, I, O, OT, S, T, Z> Stage<E, EM, S, Z>
for CalibrationStage<C, E, EM, I, O, OT, S, T, Z>
where
T: PrimInt + Default + Copy + 'static + serde::Serialize + serde::de::DeserializeOwned,
C: Corpus<I>,
E: Executor<EM, I, S, Z> + HasObservers<I, OT, S>,
I: Input,
O: MapObserver<T>,
OT: ObserversTuple<I, S>,
S: HasCorpus<C, I> + HasMetadata,
Z: Evaluator<E, EM, I, S>,
{
#[inline]
#[allow(clippy::let_and_return)]
fn perform(
&mut self,
fuzzer: &mut Z,
executor: &mut E,
state: &mut S,
manager: &mut EM,
corpus_idx: usize,
) -> Result<(), Error> {
let iter = self.stage_max;
let handicap = state
.metadata()
.get::<PowerScheduleMetadata>()
.ok_or_else(|| Error::KeyNotFound("PowerScheduleMetadata not found".to_string()))?
.queue_cycles;
let start = current_time();
for _i in 0..iter {
let input = state
.corpus()
.get(corpus_idx)?
.borrow_mut()
.load_input()?
.clone();
let _ = executor.run_target(fuzzer, state, manager, &input)?;
}
let end = current_time();
let map = executor
.observers()
.match_name::<O>(&self.map_observer_name)
.ok_or_else(|| Error::KeyNotFound("MapObserver not found".to_string()))?;
let bitmap_size = map.count_bytes();
let psmeta = state
.metadata_mut()
.get_mut::<PowerScheduleMetadata>()
.ok_or_else(|| Error::KeyNotFound("PowerScheduleMetadata not found".to_string()))?;
psmeta.set_exec_time(psmeta.exec_time() + (end - start));
psmeta.set_cycles(psmeta.cycles() + (iter as u64));
psmeta.set_bitmap_size(psmeta.bitmap_size() + bitmap_size);
psmeta.set_bitmap_entries(psmeta.bitmap_entries() + 1);
// println!("psmeta: {:#?}", psmeta);
let mut testcase = state.corpus().get(corpus_idx)?.borrow_mut();
testcase.set_exec_time((end - start) / (iter as u32));
// println!("time: {:#?}", testcase.exec_time());
let data = testcase
.metadata_mut()
.get_mut::<PowerScheduleTestcaseMetaData>()
.ok_or_else(|| Error::KeyNotFound("PowerScheduleTestData not found".to_string()))?;
data.set_bitmap_size(bitmap_size);
data.set_handicap(handicap);
data.set_fuzz_level(data.fuzz_level() + 1);
// println!("data: {:#?}", data);
Ok(())
}
}
pub const N_FUZZ_SIZE: usize = 1 << 21;
/// The metadata for runs in the calibration stage.
#[derive(Serialize, Deserialize, Clone, Debug)]
pub struct PowerScheduleMetadata {
/// Measured exec time during calibration
exec_time: Duration,
/// Calibration cycles
cycles: u64,
/// Size of the observer map
bitmap_size: u64,
/// Number of filled map entries
bitmap_entries: u64,
/// Queue cycles
queue_cycles: u64,
/// The vector to contain the frequency of each execution path.
n_fuzz: Vec<u32>,
}
impl PowerScheduleMetadata {
#[must_use]
pub fn new() -> Self {
Self {
exec_time: Duration::from_millis(0),
cycles: 0,
bitmap_size: 0,
bitmap_entries: 0,
queue_cycles: 0,
n_fuzz: vec![0; N_FUZZ_SIZE],
}
}
#[must_use]
pub fn exec_time(&self) -> Duration {
self.exec_time
}
pub fn set_exec_time(&mut self, time: Duration) {
self.exec_time = time;
}
#[must_use]
pub fn cycles(&self) -> u64 {
self.cycles
}
pub fn set_cycles(&mut self, val: u64) {
self.cycles = val;
}
#[must_use]
pub fn bitmap_size(&self) -> u64 {
self.bitmap_size
}
pub fn set_bitmap_size(&mut self, val: u64) {
self.bitmap_size = val;
}
#[must_use]
pub fn bitmap_entries(&self) -> u64 {
self.bitmap_entries
}
pub fn set_bitmap_entries(&mut self, val: u64) {
self.bitmap_entries = val;
}
#[must_use]
pub fn queue_cycles(&self) -> u64 |
pub fn set_queue_cycles(&mut self, val: u64) {
self.queue_cycles = val;
}
#[must_use]
pub fn n_fuzz(&self) -> &[u32] {
&self.n_fuzz
}
#[must_use]
pub fn n_fuzz_mut(&mut self) -> &mut [u32] {
&mut self.n_fuzz
}
}
crate::impl_serdeany!(PowerScheduleMetadata);
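// Illustrative wiring sketch: the calibration stage is normally placed ahead
// of the mutational stage so a power schedule can read the exec time and
// bitmap size recorded here. `state`, `edges_observer` and `mutator` are
// placeholders from a typical fuzzer setup, not defined in this file:
//
//     let calibration = CalibrationStage::new(&mut state, &edges_observer);
//     let mut stages = tuple_list!(calibration, StdMutationalStage::new(mutator));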
impl<C, E, I, EM, O, OT, S, T, Z> CalibrationStage<C, E, EM, I, O, OT, S, T, Z>
where
T: PrimInt + Default + Copy + 'static + serde::Serialize + serde::de::DeserializeOwned,
C: Corpus<I>,
E: Executor<EM, I, S, Z> + HasObservers<I, OT, S>,
I: Input,
O: MapObserver<T>,
OT: ObserversTuple<I, S>,
S: HasCorpus<C, I> + HasMetadata,
Z: Evaluator<E, EM, I, S>,
{
pub fn new(state: &mut S, map_observer_name: &O) -> Self {
state.add_metadata::<PowerScheduleMetadata>(PowerScheduleMetadata::new());
Self {
map_observer_name: map_observer_name.name().to_string(),
stage_max: CAL_STAGE_MAX,
phantom: PhantomData,
}
}
}
impl Default for PowerScheduleMetadata {
fn default() -> Self {
Self::new()
}
}
| {
self.queue_cycles
} |
sys_res_controller.rs | use actix_web::{web, Responder};
use chrono::NaiveDateTime;
use rbatis::core::value::DateTimeNow;
use crate::domain::domain::SysRes;
use crate::domain::dto::{EmptyDTO, IdDTO, ResAddDTO, ResEditDTO, ResPageDTO};
use crate::domain::vo::RespVO;
use crate::service::CONTEXT;
use rbatis::plugin::snowflake::new_snowflake_id;
/// Resource pagination (JSON request)
pub async fn page(page: web::Json<ResPageDTO>) -> impl Responder {
let data = CONTEXT.sys_res_service.page(&page.0).await;
RespVO::from_result(&data).resp_json()
}
/// All resources (JSON request)
pub async fn all(page: web::Json<EmptyDTO>) -> impl Responder {
let data = CONTEXT.sys_res_service.finds_all().await;
RespVO::from_result(&data).resp_json()
}
/// Top-level permissions
pub async fn layer_top(page: web::Json<EmptyDTO>) -> impl Responder {
let data = CONTEXT.sys_res_service.finds_layer_top().await;
RespVO::from_result(&data).resp_json()
}
/// Add a resource
pub async fn add(mut arg: web::Json<ResAddDTO>) -> impl Responder {
if arg.name.is_none() {
return RespVO::<u64>::from_error_info("", "资源名字不能为空!").resp_json();
}
if arg.permission.is_none() {
return RespVO::<u64>::from_error_info("", "资源permission不能为空!").resp_json();
}
if arg.path.is_none() {
arg.path = Some("".to_string());
}
let res = SysRes {
id: Some(new_snowflake_id().to_string()),
parent_id: arg.parent_id.clone(),
name: arg.name.clone(),
permission: arg.permission.clone(),
path: arg.path.clone(),
del: Some(0),
create_date: Some(NaiveDateTime::now()),
};
let data = CONTEXT.sys_res_service.add(&res).await;
CONTEXT.sys_res_service.update_all().await;
RespVO::from_result(&data).resp_json()
} |
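// Illustrative request body for the `add` handler above, inferred from the
// fields read off `ResAddDTO` (defined in the dto module); the route path is
// a placeholder since routing is configured elsewhere:
//
//     POST /admin/sys_res_add
//     {"parent_id": "0", "name": "user", "permission": "user:view", "path": "/user"}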
/// Edit a resource
pub async fn update(arg: web::Json<ResEditDTO>) -> impl Responder {
let data = CONTEXT.sys_res_service.edit(&arg.0).await;
CONTEXT.sys_res_service.update_all().await;
RespVO::from_result(&data).resp_json()
}
/// Delete a resource
pub async fn remove(arg: web::Json<IdDTO>) -> impl Responder {
let data = CONTEXT
.sys_res_service
.remove(&arg.0.id.unwrap_or_default())
.await;
CONTEXT.sys_res_service.update_all().await;
RespVO::from_result(&data).resp_json()
} | |
test_unicode.py | import simplejson as S
def | ():
encoder = S.JSONEncoder(encoding='utf-8')
u = u'\N{GREEK SMALL LETTER ALPHA}\N{GREEK CAPITAL LETTER OMEGA}'
s = u.encode('utf-8')
ju = encoder.encode(u)
js = encoder.encode(s)
assert ju == js
def test_encoding2():
u = u'\N{GREEK SMALL LETTER ALPHA}\N{GREEK CAPITAL LETTER OMEGA}'
s = u.encode('utf-8')
ju = S.dumps(u, encoding='utf-8')
js = S.dumps(s, encoding='utf-8')
assert ju == js
def test_big_unicode_encode():
u = u'\U0001d120'
assert S.dumps(u) == '"\\ud834\\udd20"'
assert S.dumps(u, ensure_ascii=False) == '"\\ud834\\udd20"'
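# Worked check of the surrogate pair above: for U+1D120,
#   v    = 0x1D120 - 0x10000    = 0xD120
#   high = 0xD800 + (v >> 10)   = 0xD800 + 0x34  = 0xD834
#   low  = 0xDC00 + (v & 0x3FF) = 0xDC00 + 0x120 = 0xDD20
# which is exactly the "\ud834\udd20" escape the assertions expect.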
def test_big_unicode_decode():
u = u'z\U0001d120x'
assert S.loads('"' + u + '"') == u
assert S.loads('"z\\ud834\\udd20x"') == u
def test_unicode_decode():
for i in range(0, 0xd7ff):
u = unichr(i)
json = '"\\u%04x"' % (i,)
res = S.loads(json)
assert res == u, 'S.loads(%r) != %r got %r' % (json, u, res)
if __name__ == '__main__':
test_unicode_decode()
| test_encoding1 |
client.dev.js | const path = require('path')
const webpack = require('webpack')
const WriteFilePlugin = require('write-file-webpack-plugin')
const AutoDllPlugin = require('autodll-webpack-plugin')
const ExtractCssChunks = require('extract-css-chunks-webpack-plugin')
module.exports = {
name: 'client',
target: 'web',
// devtool: 'source-map',
devtool: 'eval',
entry: [
'babel-polyfill',
'webpack-hot-middleware/client?path=/__webpack_hmr&timeout=20000&reload=false&quiet=false&noInfo=false',
'react-hot-loader/patch',
path.resolve(__dirname, '../src/index.js')
],
output: {
filename: '[name].js',
chunkFilename: '[name].js',
path: path.resolve(__dirname, '../buildClient'),
publicPath: '/static/'
},
module: {
rules: [
{
test: /\.js$/, | use: 'babel-loader'
},
{
test: /\.css$/,
use: ExtractCssChunks.extract({
use: {
loader: 'css-loader',
options: {
modules: true,
localIdentName: '[name]__[local]--[hash:base64:5]'
}
}
})
}
]
},
resolve: {
extensions: ['.js', '.css'],
alias: {
'redux-first-router': path.resolve(__dirname, '../../../src'),
'redux-first-router-link': path.resolve(__dirname, '../../../src/Link')
}
},
plugins: [
new WriteFilePlugin(), // used so you can see what chunks are produced in dev
new ExtractCssChunks(),
new webpack.optimize.CommonsChunkPlugin({
names: ['bootstrap'], // needed to put webpack bootstrap code before chunks
filename: '[name].js',
minChunks: Infinity
}),
new webpack.HotModuleReplacementPlugin(),
new webpack.NoEmitOnErrorsPlugin(),
new webpack.DefinePlugin({
'process.env': {
NODE_ENV: JSON.stringify('development')
}
}),
new AutoDllPlugin({
context: path.join(__dirname, '..'),
filename: '[name].js',
entry: {
vendor: [
'react',
'react-dom',
'react-redux',
'redux',
'history/createBrowserHistory',
'transition-group',
'redux-first-router',
'redux-first-router-link',
'babel-polyfill',
'redux-devtools-extension/logOnlyInProduction'
]
}
})
]
} | exclude: /node_modules/, |
command_handler.py | import re
import discord
from commands import set_presence, avatar, erp
from commands.admin import list_user_admin, add_user_admin, rm_user_admin
from commands.birthday import (set_channel_bd, show_channel_bd, set_user_bd, set_notif_time, list_user_bd,
manual_bd_check, show_message_bd, set_message_bd)
from commands.cant_be_disabled import disable, enable, help
from commands.music import play, leave, repeat, now_playing, resume, pause, volume, next, previous, queue, search
class CommandHandler(object):
do_not_disable = ["enable", "disable", "help"]
dict_cmd_name = "cmd_name"
dict_enabled = "enabled"
def __init__(self, client):
self.parent_client = client
self.commands = self.get_commands()
self.set_every_command_state()
def set_every_command_state(self):
if self.parent_client.settings.user_command_states is None:
return
for cmd in self.commands:
if self.command_state_exists(cmd.cmd_name):
cmd.enabled = self.get_command_enabled(cmd.cmd_name)
def get_command_enabled(self, cmd_name):
if self.parent_client.settings.user_command_states is None:
return None
for cmd_state in self.parent_client.settings.user_command_states:
if cmd_state[self.dict_cmd_name] == cmd_name:
if cmd_state[self.dict_enabled] == "True":
return True
else:
return False
return None
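    # Illustrative shape of the persisted state this method reads, assuming
    # the settings store keeps a list of dicts keyed by dict_cmd_name /
    # dict_enabled with string values (hence the "True" comparison above):
    #     user_command_states = [{"cmd_name": "play", "enabled": "False"}]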
def command_state_exists(self, cmd_name):
|
def get_cmd(self, command_name):
"""Returns a Command with a command name
:param command_name:
:return: Command
"""
for command in self.commands:
if command.cmd_name == command_name:
return command
return None
def get_commands(self):
return [set_presence.SetPresence(self), disable.Disable(self), enable.Enable(self),
manual_bd_check.ManualBDCheck(self), set_notif_time.SetNotifTime(self),
add_user_admin.AddUserAdmin(self), rm_user_admin.RmUserAdmin(self),
set_message_bd.SetMessageBD(self), show_message_bd.ShowMessageBD(self),
set_channel_bd.SetChannelBD(self), show_channel_bd.ShowChannelBD(self),
list_user_admin.ListUserAdmin(self), list_user_bd.ListUserBD(self), set_user_bd.SetUserBD(self),
avatar.Avatar(self), play.Play(self), leave.Leave(self), resume.Resume(self), pause.Pause(self),
now_playing.NowPlaying(self), repeat.Repeat(self), volume.Volume(self), previous.Previous(self),
next.Next(self), queue.Queue(self), search.Search(self), erp.Erp(self), help.Help(self)]
async def check_message(self, message: discord.Message):
for cmd in self.commands:
argument = re.compile("^" + self.parent_client.prefix + "[a-z]*").search(message.content.lower())
if argument is not None:
if argument.group() == self.parent_client.prefix + cmd.cmd_name:
await cmd.command(message)
def get_cmd_inlines(self):
return [cmd.get_help_inline() for cmd in self.commands]
def enable_command(self, command_name):
try:
cmd = self.get_cmd(command_name)
            if cmd.cmd_name in self.do_not_disable:
return "Attempted to enable an unchangeable command."
cmd.enabled = True
if self.command_state_exists(cmd.cmd_name):
self.parent_client.settings.delete_command_state(
{self.dict_cmd_name: cmd.cmd_name, self.dict_enabled: "False"})
return "Enabled '{}'!".format(command_name)
except AttributeError:
return "Failed to enable command, '{}' doesn't exist.".format(command_name)
def disable_command(self, command_name):
try:
cmd = self.get_cmd(command_name)
            if cmd.cmd_name in self.do_not_disable:
return "Attempted to disable an unchangeable command."
cmd.enabled = False
if not self.command_state_exists(cmd.cmd_name):
self.parent_client.settings.save_user_defaults(
command_state={self.dict_cmd_name: cmd.cmd_name, self.dict_enabled: "False"})
return "Disabled '{}'!".format(command_name)
except AttributeError:
return "Failed to disable command, '{}' doesn't exist.".format(command_name)
| if self.parent_client.settings.user_command_states is None:
return False
for cmd_state in self.parent_client.settings.user_command_states:
if cmd_state[self.dict_cmd_name] == cmd_name:
return True
return False |
exec.rs | // Copyright 2018 Grove Enterprises LLC
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
//! Query execution
use std::cell::RefCell;
use std::clone::Clone;
use std::collections::HashMap;
use std::collections::HashSet;
use std::convert::*;
use std::fs::File;
use std::io::BufWriter;
use std::iter::Iterator;
use std::rc::Rc;
use std::str;
use std::string::String;
use arrow::array::ListArray;
use arrow::builder::*;
use arrow::datatypes::*;
use arrow::list_builder::*;
use super::dataframe::*;
use super::datasources::common::*;
use super::datasources::csv::*;
use super::datasources::empty::*;
use super::datasources::ndjson::*;
use super::datasources::parquet::*;
use super::errors::*;
use super::logical::*;
use super::relations::aggregate::*;
use super::relations::filter::*;
use super::relations::limit::*;
use super::relations::projection::*;
use super::sqlast::ASTNode::*;
use super::sqlast::FileType;
use super::sqlparser::*;
use super::sqlplanner::*;
use super::types::*;
//use super::cluster::*;
#[derive(Debug, Clone)]
pub enum DFConfig {
Local,
Remote { etcd: String },
}
macro_rules! compare_arrays_inner {
($V1:ident, $V2:ident, $F:expr) => {
match ($V1.data(), $V2.data()) {
(&ArrayData::Float32(ref a), &ArrayData::Float32(ref b)) =>
Ok(a.iter().zip(b.iter()).map($F).collect::<Vec<bool>>()),
(&ArrayData::Float64(ref a), &ArrayData::Float64(ref b)) =>
Ok(a.iter().zip(b.iter()).map($F).collect::<Vec<bool>>()),
(&ArrayData::Int8(ref a), &ArrayData::Int8(ref b)) =>
Ok(a.iter().zip(b.iter()).map($F).collect::<Vec<bool>>()),
(&ArrayData::Int16(ref a), &ArrayData::Int16(ref b)) =>
Ok(a.iter().zip(b.iter()).map($F).collect::<Vec<bool>>()),
(&ArrayData::Int32(ref a), &ArrayData::Int32(ref b)) =>
Ok(a.iter().zip(b.iter()).map($F).collect::<Vec<bool>>()),
(&ArrayData::Int64(ref a), &ArrayData::Int64(ref b)) =>
Ok(a.iter().zip(b.iter()).map($F).collect::<Vec<bool>>()),
//(&ArrayData::Utf8(ref a), &ScalarValue::Utf8(ref b)) => a.iter().map(|n| n > b).collect(),
_ => Err(ExecutionError::General("Unsupported types in compare_arrays_inner".to_string()))
}
}
}
macro_rules! compare_arrays {
($V1:ident, $V2:ident, $F:expr) => {
Ok(Value::Column(Rc::new(Array::from(compare_arrays_inner!(
$V1, $V2, $F
)?))))
};
}
macro_rules! compare_array_with_scalar_inner {
($V1:ident, $V2:ident, $F:expr) => {
match ($V1.data(), $V2.as_ref()) {
(&ArrayData::UInt8(ref a), &ScalarValue::UInt8(b)) => {
Ok(a.iter().map(|aa| (aa, b)).map($F).collect::<Vec<bool>>())
}
(&ArrayData::UInt16(ref a), &ScalarValue::UInt16(b)) => {
Ok(a.iter().map(|aa| (aa, b)).map($F).collect::<Vec<bool>>())
}
(&ArrayData::UInt32(ref a), &ScalarValue::UInt32(b)) => {
Ok(a.iter().map(|aa| (aa, b)).map($F).collect::<Vec<bool>>())
}
(&ArrayData::UInt64(ref a), &ScalarValue::UInt64(b)) => {
Ok(a.iter().map(|aa| (aa, b)).map($F).collect::<Vec<bool>>())
}
(&ArrayData::Int8(ref a), &ScalarValue::Int8(b)) => {
Ok(a.iter().map(|aa| (aa, b)).map($F).collect::<Vec<bool>>())
}
(&ArrayData::Int16(ref a), &ScalarValue::Int16(b)) => {
Ok(a.iter().map(|aa| (aa, b)).map($F).collect::<Vec<bool>>())
}
(&ArrayData::Int32(ref a), &ScalarValue::Int32(b)) => {
Ok(a.iter().map(|aa| (aa, b)).map($F).collect::<Vec<bool>>())
}
(&ArrayData::Int64(ref a), &ScalarValue::Int64(b)) => {
Ok(a.iter().map(|aa| (aa, b)).map($F).collect::<Vec<bool>>())
}
(&ArrayData::Float32(ref a), &ScalarValue::Float32(b)) => {
Ok(a.iter().map(|aa| (aa, b)).map($F).collect::<Vec<bool>>())
}
(&ArrayData::Float64(ref a), &ScalarValue::Float64(b)) => {
Ok(a.iter().map(|aa| (aa, b)).map($F).collect::<Vec<bool>>())
}
_ => Err(ExecutionError::General(
"Unsupported types in compare_array_with_scalar_inner".to_string(),
)),
}
};
}
macro_rules! compare_array_with_scalar {
($V1:ident, $V2:ident, $F:expr) => {
Ok(Value::Column(Rc::new(Array::from(
compare_array_with_scalar_inner!($V1, $V2, $F)?,
))))
};
}
macro_rules! inner_column_operations {
($A:ident, $B:ident, $F:expr, $RT:ident) => {
Ok(Value::Column(Rc::new(Array::from(
$A.iter().zip($B.iter()).map($F).collect::<Vec<$RT>>(),
))))
};
}
macro_rules! scalar_operations {
($A:ident, $B:ident, $F:expr, $RT:ident) => {
Ok(Value::Column(Rc::new(Array::from(
$A.iter().map(|aa| (aa, $B)).map($F).collect::<Vec<$RT>>(),
))))
};
}
macro_rules! scalar_column_operations {
($X1:ident, $X2:ident, $F:expr) => {
match ($X1.as_ref(), $X2.data()) {
(ScalarValue::UInt8(a), ArrayData::UInt8(b)) => scalar_operations!(b, a, $F, u8),
(ScalarValue::UInt16(a), ArrayData::UInt16(b)) => scalar_operations!(b, a, $F, u16),
(ScalarValue::UInt32(a), ArrayData::UInt32(b)) => scalar_operations!(b, a, $F, u32),
(ScalarValue::UInt64(a), ArrayData::UInt64(b)) => scalar_operations!(b, a, $F, u64),
(ScalarValue::Int8(a), ArrayData::Int8(b)) => scalar_operations!(b, a, $F, i8),
(ScalarValue::Int16(a), ArrayData::Int16(b)) => scalar_operations!(b, a, $F, i16),
(ScalarValue::Int32(a), ArrayData::Int32(b)) => scalar_operations!(b, a, $F, i32),
(ScalarValue::Int64(a), ArrayData::Int64(b)) => scalar_operations!(b, a, $F, i64),
(ScalarValue::Float32(a), ArrayData::Float32(b)) => {
scalar_operations!(b, a, $F, f32)
}
(ScalarValue::Float64(a), ArrayData::Float64(b)) => {
scalar_operations!(b, a, $F, f64)
}
ref t => panic!(
"Cannot combine results for Scalar Type: {} and Column: {}",
t.0, t.1
),
        }
};
}
macro_rules! scalar_scalar_operations {
($X1:ident, $X2:ident, $F:expr) => {
match ($X1.as_ref(), $X2.as_ref()) {
(ScalarValue::UInt8(a), ScalarValue::UInt8(b)) => {
Ok(Value::Scalar(Rc::new(ScalarValue::UInt8($F(a, b)))))
}
(ScalarValue::UInt16(a), ScalarValue::UInt16(b)) => {
Ok(Value::Scalar(Rc::new(ScalarValue::UInt16($F(a, b)))))
}
(ScalarValue::UInt32(a), ScalarValue::UInt32(b)) => {
Ok(Value::Scalar(Rc::new(ScalarValue::UInt32($F(a, b)))))
}
(ScalarValue::UInt64(a), ScalarValue::UInt64(b)) => {
Ok(Value::Scalar(Rc::new(ScalarValue::UInt64($F(a, b)))))
}
(ScalarValue::Int8(a), ScalarValue::Int8(b)) => {
Ok(Value::Scalar(Rc::new(ScalarValue::Int8($F(a, b)))))
}
(ScalarValue::Int16(a), ScalarValue::Int16(b)) => {
Ok(Value::Scalar(Rc::new(ScalarValue::Int16($F(a, b)))))
}
(ScalarValue::Int32(a), ScalarValue::Int32(b)) => {
Ok(Value::Scalar(Rc::new(ScalarValue::Int32($F(a, b)))))
}
(ScalarValue::Int64(a), ScalarValue::Int64(b)) => {
Ok(Value::Scalar(Rc::new(ScalarValue::Int64($F(a, b)))))
}
(ScalarValue::Float32(a), ScalarValue::Float32(b)) => {
Ok(Value::Scalar(Rc::new(ScalarValue::Float32($F(a, b)))))
}
(ScalarValue::Float64(a), ScalarValue::Float64(b)) => {
Ok(Value::Scalar(Rc::new(ScalarValue::Float64($F(a, b)))))
}
ref t => panic!(
"Cannot combine results for Scalar Type: {} and Column: {}",
t.0, t.1
),
        }
};
}
macro_rules! column_operations {
($X:ident, $Y:ident, $F:expr) => {
match ($X.data(), $Y.data()) {
(ArrayData::UInt8(ref a), ArrayData::UInt8(ref b)) => {
inner_column_operations!(a, b, $F, u8)
}
(ArrayData::UInt16(ref a), ArrayData::UInt16(ref b)) => {
inner_column_operations!(a, b, $F, u16)
}
(ArrayData::UInt32(ref a), ArrayData::UInt32(ref b)) => {
inner_column_operations!(a, b, $F, u32)
}
(ArrayData::UInt64(ref a), ArrayData::UInt64(ref b)) => {
inner_column_operations!(a, b, $F, u64)
}
(ArrayData::Int8(ref a), ArrayData::Int8(ref b)) => {
inner_column_operations!(a, b, $F, i8)
}
(ArrayData::Int16(ref a), ArrayData::Int16(ref b)) => {
inner_column_operations!(a, b, $F, i16)
}
(ArrayData::Int32(ref a), ArrayData::Int32(ref b)) => {
inner_column_operations!(a, b, $F, i32)
}
(ArrayData::Int64(ref a), ArrayData::Int64(ref b)) => {
inner_column_operations!(a, b, $F, i64)
}
(ArrayData::Float32(ref a), ArrayData::Float32(ref b)) => {
inner_column_operations!(a, b, $F, f32)
}
(ArrayData::Float64(ref a), ArrayData::Float64(ref b)) => {
inner_column_operations!(a, b, $F, f64)
}
ref t => panic!("Incompatible types for Column: {} and Column: {}", t.0, t.1),
}
};
}
impl Value {
pub fn is_null(&self) -> Result<Value> {
match self {
Value::Column(ref array) => {
let mut b: Builder<bool> = Builder::new();
match array.validity_bitmap() {
Some(bitmap) => {
//TODO should be able to just copy the bitmap and return it as a bitpacked array
for i in 0..array.len() {
if bitmap.is_set(i) {
b.push(false);
} else {
b.push(true);
}
}
}
None => {
for _ in 0..array.len() {
b.push(false);
}
}
}
let bools = b.finish();
assert_eq!(bools.len(), array.len());
Ok(Value::Column(Rc::new(Array::new(
array.len(),
ArrayData::from(bools),
))))
}
Value::Scalar(_) => unimplemented!(),
}
}
pub fn is_not_null(&self) -> Result<Value> {
match self {
Value::Column(ref array) => {
let mut b: Builder<bool> = Builder::new();
match array.validity_bitmap() {
Some(bitmap) => {
//TODO should be able to just copy the bitmap and return it as a bitpacked array
for i in 0..array.len() {
if bitmap.is_set(i) {
b.push(true);
} else {
b.push(false);
}
}
}
None => {
for _ in 0..array.len() {
b.push(true);
}
}
}
let bools = b.finish();
assert_eq!(bools.len(), array.len());
Ok(Value::Column(Rc::new(Array::new(
array.len(),
ArrayData::from(bools),
))))
}
Value::Scalar(_) => unimplemented!(),
}
}
pub fn eq(&self, other: &Value) -> Result<Value> {
match (self, other) {
(&Value::Column(ref v1), &Value::Column(ref v2)) => {
compare_arrays!(v1, v2, |(aa, bb)| aa == bb)
}
(&Value::Column(ref v1), &Value::Scalar(ref v2)) => match (v1.data(), v2.as_ref()) {
(&ArrayData::Utf8(ref list), &ScalarValue::Utf8(ref b)) => {
let mut v: Vec<bool> = Vec::with_capacity(list.len() as usize);
for i in 0..list.len() as usize {
v.push(list.get(i) == b.as_bytes());
}
Ok(Value::Column(Rc::new(Array::from(v))))
}
_ => compare_array_with_scalar!(v1, v2, |(aa, bb)| aa != bb),
},
(&Value::Scalar(ref v1), &Value::Column(ref v2)) => {
compare_array_with_scalar!(v2, v1, |(aa, bb)| aa == bb)
}
(&Value::Scalar(ref _v1), &Value::Scalar(ref _v2)) => unimplemented!(),
}
}
pub fn not_eq(&self, other: &Value) -> Result<Value> {
match (self, other) {
(&Value::Column(ref v1), &Value::Column(ref v2)) => {
compare_arrays!(v1, v2, |(aa, bb)| aa != bb)
}
(&Value::Column(ref v1), &Value::Scalar(ref v2)) => match (v1.data(), v2.as_ref()) {
(&ArrayData::Utf8(ref list), &ScalarValue::Utf8(ref b)) => {
let mut v: Vec<bool> = Vec::with_capacity(list.len() as usize);
for i in 0..list.len() as usize {
v.push(list.get(i) != b.as_bytes());
}
Ok(Value::Column(Rc::new(Array::from(v))))
}
_ => compare_array_with_scalar!(v1, v2, |(aa, bb)| aa != bb),
},
(&Value::Scalar(ref v1), &Value::Column(ref v2)) => {
compare_array_with_scalar!(v2, v1, |(aa, bb)| aa != bb)
}
(&Value::Scalar(ref _v1), &Value::Scalar(ref _v2)) => unimplemented!(),
}
}
pub fn lt(&self, other: &Value) -> Result<Value> {
match (self, other) {
(&Value::Column(ref v1), &Value::Column(ref v2)) => {
compare_arrays!(v1, v2, |(aa, bb)| aa < bb)
}
(&Value::Column(ref v1), &Value::Scalar(ref v2)) => {
compare_array_with_scalar!(v1, v2, |(aa, bb)| aa < bb)
}
(&Value::Scalar(ref v1), &Value::Column(ref v2)) => {
compare_array_with_scalar!(v2, v1, |(aa, bb)| aa < bb)
}
(&Value::Scalar(ref _v1), &Value::Scalar(ref _v2)) => unimplemented!(),
}
}
pub fn lt_eq(&self, other: &Value) -> Result<Value> {
match (self, other) {
(&Value::Column(ref v1), &Value::Column(ref v2)) => {
compare_arrays!(v1, v2, |(aa, bb)| aa <= bb)
}
(&Value::Column(ref v1), &Value::Scalar(ref v2)) => {
compare_array_with_scalar!(v1, v2, |(aa, bb)| aa <= bb)
}
(&Value::Scalar(ref v1), &Value::Column(ref v2)) => {
compare_array_with_scalar!(v2, v1, |(aa, bb)| aa <= bb)
}
(&Value::Scalar(ref _v1), &Value::Scalar(ref _v2)) => unimplemented!(),
}
}
pub fn gt(&self, other: &Value) -> Result<Value> {
match (self, other) {
(&Value::Column(ref v1), &Value::Column(ref v2)) => {
                compare_arrays!(v1, v2, |(aa, bb)| aa > bb)
}
(&Value::Column(ref v1), &Value::Scalar(ref v2)) => {
                compare_array_with_scalar!(v1, v2, |(aa, bb)| aa > bb)
}
(&Value::Scalar(ref v1), &Value::Column(ref v2)) => {
                compare_array_with_scalar!(v2, v1, |(aa, bb)| aa > bb)
}
(&Value::Scalar(ref _v1), &Value::Scalar(ref _v2)) => unimplemented!(),
}
}
pub fn gt_eq(&self, other: &Value) -> Result<Value> {
match (self, other) {
(&Value::Column(ref v1), &Value::Column(ref v2)) => {
                compare_arrays!(v1, v2, |(aa, bb)| aa >= bb)
}
(&Value::Column(ref v1), &Value::Scalar(ref v2)) => {
                compare_array_with_scalar!(v1, v2, |(aa, bb)| aa >= bb)
}
(&Value::Scalar(ref v1), &Value::Column(ref v2)) => {
                compare_array_with_scalar!(v2, v1, |(aa, bb)| aa >= bb)
}
(&Value::Scalar(ref _v1), &Value::Scalar(ref _v2)) => unimplemented!(),
}
}
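    // Illustrative use of the comparison helpers above; construction of the
    // values is sketched here, not taken from this file:
    //
    //     let a = Value::Column(Rc::new(Array::from(vec![1_i32, 5, 9])));
    //     let b = Value::Scalar(Rc::new(ScalarValue::Int32(5)));
    //     let mask = a.gt(&b)?; // boolean column: [false, false, true]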
pub fn add(&self, other: &Value) -> Result<Value> {
match (self, other) {
(&Value::Column(ref v1), &Value::Column(ref v2)) => {
column_operations!(v1, v2, |(x, y)| x + y)
}
(&Value::Scalar(ref v1), &Value::Column(ref v2)) => {
scalar_column_operations!(v1, v2, |(x, y)| x + y)
}
(&Value::Column(ref v1), &Value::Scalar(ref v2)) => {
scalar_column_operations!(v2, v1, |(x, y)| x + y)
}
(&Value::Scalar(ref x1), &Value::Scalar(ref x2)) => {
scalar_scalar_operations!(x1, x2, |x, y| x + y)
}
}
}
pub fn subtract(&self, other: &Value) -> Result<Value> {
match (self, other) {
(&Value::Column(ref v1), &Value::Column(ref v2)) => {
column_operations!(v1, v2, |(x, y)| x - y)
}
(&Value::Scalar(ref v1), &Value::Column(ref v2)) => {
scalar_column_operations!(v1, v2, |(x, y)| x - y)
}
(&Value::Column(ref v1), &Value::Scalar(ref v2)) => {
scalar_column_operations!(v2, v1, |(x, y)| x - y)
}
(&Value::Scalar(ref x1), &Value::Scalar(ref x2)) => {
scalar_scalar_operations!(x1, x2, |x, y| x - y)
}
}
}
pub fn divide(&self, other: &Value) -> Result<Value> {
match (self, other) {
(&Value::Column(ref v1), &Value::Column(ref v2)) => {
column_operations!(v1, v2, |(x, y)| x / y)
}
(&Value::Scalar(ref v1), &Value::Column(ref v2)) => {
scalar_column_operations!(v1, v2, |(x, y)| x / y)
}
(&Value::Column(ref v1), &Value::Scalar(ref v2)) => {
scalar_column_operations!(v2, v1, |(x, y)| x / y)
}
(&Value::Scalar(ref x1), &Value::Scalar(ref x2)) => {
scalar_scalar_operations!(x1, x2, |x, y| x / y)
}
}
}
pub fn multiply(&self, other: &Value) -> Result<Value> {
match (self, other) {
(&Value::Column(ref v1), &Value::Column(ref v2)) => {
column_operations!(v1, v2, |(x, y)| x * y)
}
(&Value::Scalar(ref v1), &Value::Column(ref v2)) => {
scalar_column_operations!(v1, v2, |(x, y)| x * y)
}
(&Value::Column(ref v1), &Value::Scalar(ref v2)) => {
scalar_column_operations!(v2, v1, |(x, y)| x * y)
}
(&Value::Scalar(ref x1), &Value::Scalar(ref x2)) => {
scalar_scalar_operations!(x1, x2, |x, y| x * y)
}
}
}
pub fn modulo(&self, other: &Value) -> Result<Value> {
match (self, other) {
(&Value::Column(ref v1), &Value::Column(ref v2)) => {
column_operations!(v1, v2, |(x, y)| x % y)
}
(&Value::Scalar(ref v1), &Value::Column(ref v2)) => {
scalar_column_operations!(v1, v2, |(x, y)| x % y)
}
(&Value::Column(ref v1), &Value::Scalar(ref v2)) => {
scalar_column_operations!(v2, v1, |(x, y)| x % y)
}
(&Value::Scalar(ref x1), &Value::Scalar(ref x2)) => {
scalar_scalar_operations!(x1, x2, |x, y| x % y)
}
}
}
pub fn and(&self, other: &Value) -> Result<Value> {
match (self, other) {
(&Value::Column(ref v1), &Value::Column(ref v2)) => match (v1.data(), v2.data()) {
(ArrayData::Boolean(ref l), ArrayData::Boolean(ref r)) => {
let bools = l
.iter()
.zip(r.iter())
.map(|(ll, rr)| ll && rr)
.collect::<Vec<bool>>();
let bools = Array::from(bools);
Ok(Value::Column(Rc::new(bools)))
}
_ => panic!("AND expected two boolean inputs"),
},
(&Value::Column(ref v1), &Value::Scalar(ref v2)) => match (v1.data(), v2.as_ref()) {
(ArrayData::Boolean(ref l), ScalarValue::Boolean(r)) => {
let bools = Array::from(l.iter().map(|ll| ll && *r).collect::<Vec<bool>>());
Ok(Value::Column(Rc::new(bools)))
}
_ => panic!("AND expected two boolean inputs"),
},
_ => unimplemented!(),
}
}
pub fn or(&self, other: &Value) -> Result<Value> {
match (self, other) {
(&Value::Column(ref v1), &Value::Column(ref v2)) => match (v1.data(), v2.data()) {
(ArrayData::Boolean(ref l), ArrayData::Boolean(ref r)) => {
let bools = l
.iter()
.zip(r.iter())
.map(|(ll, rr)| ll || rr)
.collect::<Vec<bool>>();
let bools = Array::from(bools);
Ok(Value::Column(Rc::new(bools)))
}
_ => panic!("OR expected two boolean inputs"),
},
(&Value::Column(ref v1), &Value::Scalar(ref v2)) => match (v1.data(), v2.as_ref()) {
(ArrayData::Boolean(ref l), ScalarValue::Boolean(r)) => {
let bools = Array::from(l.iter().map(|ll| ll || *r).collect::<Vec<bool>>());
Ok(Value::Column(Rc::new(bools)))
}
_ => panic!("OR expected two boolean inputs"),
},
_ => unimplemented!(),
}
}
}
/// Compiled Expression (basically just a closure to evaluate the expression at runtime)
pub type CompiledExpr = Rc<Fn(&RecordBatch) -> Result<Value>>;
pub type CompiledCastFunction = Rc<Fn(&Value) -> Result<Value>>;
pub enum AggregateType {
Min,
Max,
Sum,
Count,
Avg,
//CountDistinct()
}
/// Runtime expression
pub enum RuntimeExpr {
Compiled {
f: CompiledExpr,
t: DataType,
},
AggregateFunction {
f: AggregateType,
args: Vec<CompiledExpr>,
t: DataType,
},
}
impl RuntimeExpr {
pub fn get_func(&self) -> CompiledExpr {
match self {
&RuntimeExpr::Compiled { ref f, .. } => f.clone(),
_ => panic!(),
}
}
pub fn get_type(&self) -> DataType {
match self {
&RuntimeExpr::Compiled { ref t, .. } => t.clone(),
&RuntimeExpr::AggregateFunction { ref t, .. } => t.clone(),
}
}
}
/// Compiles a scalar expression into a closure
pub fn compile_expr(
ctx: &ExecutionContext,
expr: &Expr,
input_schema: &Schema,
) -> Result<RuntimeExpr> {
match *expr {
Expr::AggregateFunction {
ref name,
ref args,
ref return_type,
} => {
assert_eq!(1, args.len());
let compiled_args: Result<Vec<RuntimeExpr>> = args
.iter()
.map(|e| compile_scalar_expr(ctx, e, input_schema))
.collect();
let func = match name.to_lowercase().as_ref() {
"min" => AggregateType::Min,
"max" => AggregateType::Max,
"count" => AggregateType::Count,
"sum" => AggregateType::Sum,
_ => unimplemented!("Unsupported aggregate function '{}'", name),
};
//TODO: this is hacky
// let return_type = match func {
// AggregateType::Count => DataType::UInt64,
// AggregateType::Min | AggregateType::Max => match args[0] {
// Expr::Column(i) => input_schema.columns()[i].data_type().clone(),
// _ => {
// //TODO: fix this hack
// DataType::Float64
// //panic!("Aggregate expressions currently only support simple arguments")
// }
// }
// _ => panic!()
// };
Ok(RuntimeExpr::AggregateFunction {
f: func,
args: compiled_args?
.iter()
.map(|e| e.get_func().clone())
.collect(),
t: return_type.clone(),
})
}
_ => Ok(compile_scalar_expr(ctx, expr, input_schema)?),
}
}
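// Illustrative sketch of compiling and evaluating an expression against a
// batch; schema/batch construction is elided and the Box/Rc wrappers on the
// BinaryExpr fields follow whatever the Expr definition in the logical
// module uses:
//
//     let expr = Expr::BinaryExpr {
//         left: Rc::new(Expr::Column(0)),
//         op: Operator::Gt,
//         right: Rc::new(Expr::Literal(ScalarValue::Int64(5))),
//     };
//     let compiled = compile_scalar_expr(&ctx, &expr, &schema)?;
//     let mask = compiled.get_func()(&batch)?; // Value::Column of booleans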
macro_rules! cast_primitive {
{$TO:ty, $LIST:expr} => {{
let mut b: Builder<$TO> = Builder::with_capacity($LIST.len() as usize);
for i in 0..$LIST.len() as usize {
b.push(*$LIST.get(i) as $TO)
}
Ok(Value::Column(Rc::new(Array::from(b.finish()))))
}}
}
macro_rules! cast_array_from_to {
{$FROM:ty, $TO:ident, $LIST:expr} => {{
match &$TO {
DataType::UInt8 => cast_primitive!(u8, $LIST),
DataType::UInt16 => cast_primitive!(u16, $LIST),
DataType::UInt32 => cast_primitive!(u32, $LIST),
DataType::UInt64 => cast_primitive!(u64, $LIST),
DataType::Int8 => cast_primitive!(i8, $LIST),
DataType::Int16 => cast_primitive!(i16, $LIST),
DataType::Int32 => cast_primitive!(i32, $LIST),
DataType::Int64 => cast_primitive!(i64, $LIST),
DataType::Float32 => cast_primitive!(f32, $LIST),
DataType::Float64 => cast_primitive!(f64, $LIST),
DataType::Utf8 => {
let mut b: ListBuilder<u8> = ListBuilder::with_capacity($LIST.len() as usize);
for i in 0..$LIST.len() as usize {
let s = format!("{:?}", *$LIST.get(i));
b.push(s.as_bytes());
}
Ok(Value::Column(Rc::new(Array::new($LIST.len() as usize,
ArrayData::Utf8(ListArray::from(b.finish()))))))
},
_ => unimplemented!("CAST from {:?} to {:?}", stringify!($FROM), stringify!($TO))
}
}}
}
macro_rules! cast_utf8_to {
{$TY:ty, $LIST:expr} => {{
let mut b: Builder<$TY> = Builder::with_capacity($LIST.len() as usize);
for i in 0..$LIST.len() as usize {
let x = str::from_utf8($LIST.get(i)).unwrap();
match x.parse::<$TY>() {
Ok(v) => b.push(v),
Err(_) => return Err(ExecutionError::General(format!(
"Cannot cast Utf8 value '{}' to {}", x, stringify!($TY))))
}
}
Ok(Value::Column(Rc::new(Array::from(b.finish()))))
}}
}
fn compile_cast_column(data_type: DataType) -> Result<CompiledCastFunction> {
Ok(Rc::new(move |v: &Value| match v {
Value::Column(ref array) => match array.data() {
&ArrayData::Boolean(_) => unimplemented!("CAST from Boolean"),
&ArrayData::UInt8(ref list) => cast_array_from_to!(u8, data_type, list),
&ArrayData::UInt16(ref list) => cast_array_from_to!(u16, data_type, list),
&ArrayData::UInt32(ref list) => cast_array_from_to!(u32, data_type, list),
&ArrayData::UInt64(ref list) => cast_array_from_to!(u64, data_type, list),
&ArrayData::Int8(ref list) => cast_array_from_to!(i8, data_type, list),
&ArrayData::Int16(ref list) => cast_array_from_to!(i16, data_type, list),
&ArrayData::Int32(ref list) => cast_array_from_to!(i32, data_type, list),
&ArrayData::Int64(ref list) => cast_array_from_to!(i64, data_type, list),
&ArrayData::Float32(ref list) => cast_array_from_to!(f32, data_type, list),
&ArrayData::Float64(ref list) => cast_array_from_to!(f64, data_type, list),
&ArrayData::Struct(_) => unimplemented!("CAST from Struct"),
&ArrayData::Utf8(ref list) => match &data_type {
DataType::Boolean => cast_utf8_to!(bool, list),
DataType::Int8 => cast_utf8_to!(i8, list),
DataType::Int16 => cast_utf8_to!(i16, list),
DataType::Int32 => cast_utf8_to!(i32, list),
DataType::Int64 => cast_utf8_to!(i64, list),
DataType::UInt8 => cast_utf8_to!(u8, list),
DataType::UInt16 => cast_utf8_to!(u16, list),
DataType::UInt32 => cast_utf8_to!(u32, list),
DataType::UInt64 => cast_utf8_to!(u64, list),
DataType::Float32 => cast_utf8_to!(f32, list),
DataType::Float64 => cast_utf8_to!(f64, list),
DataType::Utf8 => Ok(v.clone()),
_ => unimplemented!("CAST from Utf8 to {:?}", data_type),
},
},
_ => unimplemented!("CAST from ScalarValue"),
}))
}
macro_rules! cast_scalar_from_to {
{$SCALAR:expr, $TO:ident} => {{
match &$TO {
DataType::UInt8 => {
let cast_value = *$SCALAR as u8;
Ok(Rc::new(move |_: &Value|
Ok(Value::Scalar(Rc::new(ScalarValue::UInt8(cast_value)))) ))
}
DataType::UInt16 => {
let cast_value = *$SCALAR as u16;
Ok(Rc::new(move |_: &Value|
Ok(Value::Scalar(Rc::new(ScalarValue::UInt16(cast_value)))) ))
}
DataType::UInt32 => {
let cast_value = *$SCALAR as u32;
Ok(Rc::new(move |_: &Value|
Ok(Value::Scalar(Rc::new(ScalarValue::UInt32(cast_value)))) ))
}
DataType::UInt64 => {
let cast_value = *$SCALAR as u64;
Ok(Rc::new(move |_: &Value|
Ok(Value::Scalar(Rc::new(ScalarValue::UInt64(cast_value)))) ))
}
DataType::Int8 => {
let cast_value = *$SCALAR as i8;
Ok(Rc::new(move |_: &Value|
Ok(Value::Scalar(Rc::new(ScalarValue::Int8(cast_value)))) ))
}
DataType::Int16 => {
let cast_value = *$SCALAR as i16;
Ok(Rc::new(move |_: &Value|
Ok(Value::Scalar(Rc::new(ScalarValue::Int16(cast_value)))) ))
}
DataType::Int32 => {
let cast_value = *$SCALAR as i32;
Ok(Rc::new(move |_: &Value|
Ok(Value::Scalar(Rc::new(ScalarValue::Int32(cast_value)))) ))
}
DataType::Int64 => {
let cast_value = *$SCALAR as i64;
Ok(Rc::new(move |_: &Value|
Ok(Value::Scalar(Rc::new(ScalarValue::Int64(cast_value)))) ))
}
DataType::Float32 => {
let cast_value = *$SCALAR as f32;
Ok(Rc::new(move |_: &Value|
Ok(Value::Scalar(Rc::new(ScalarValue::Float32(cast_value)))) ))
}
DataType::Float64 => {
let cast_value = *$SCALAR as f64;
Ok(Rc::new(move |_: &Value|
Ok(Value::Scalar(Rc::new(ScalarValue::Float64(cast_value)))) ))
}
_ => unimplemented!("CAST from {:?} to {:?}", stringify!($SCALAR), stringify!($TO))
}
}}
}
fn compile_cast_scalar(scalar: &ScalarValue, data_type: &DataType) -> Result<CompiledCastFunction> {
match scalar {
ScalarValue::Boolean(_) => unimplemented!("CAST from scalar Boolean"),
ScalarValue::UInt8(v) => cast_scalar_from_to!(v, data_type),
ScalarValue::UInt16(v) => cast_scalar_from_to!(v, data_type),
ScalarValue::UInt32(v) => cast_scalar_from_to!(v, data_type),
ScalarValue::UInt64(v) => cast_scalar_from_to!(v, data_type),
ScalarValue::Int8(v) => cast_scalar_from_to!(v, data_type),
ScalarValue::Int16(v) => cast_scalar_from_to!(v, data_type),
ScalarValue::Int32(v) => cast_scalar_from_to!(v, data_type),
ScalarValue::Int64(v) => cast_scalar_from_to!(v, data_type),
ScalarValue::Float32(v) => cast_scalar_from_to!(v, data_type),
ScalarValue::Float64(v) => cast_scalar_from_to!(v, data_type),
ScalarValue::Utf8(_) => unimplemented!("CAST from scalar Utf8"),
ScalarValue::Struct(_) => unimplemented!("CAST from scalar Struct"),
ScalarValue::Null => unimplemented!("CAST from scalar NULL"),
}
}
/// Compiles a scalar expression into a closure
pub fn compile_scalar_expr(
ctx: &ExecutionContext,
expr: &Expr,
input_schema: &Schema,
) -> Result<RuntimeExpr> {
match expr {
&Expr::Literal(ref lit) => {
let literal_value = lit.clone();
Ok(RuntimeExpr::Compiled {
f: Rc::new(move |_| {
// literal values are a bit special - we don't repeat them in a vector
// because it would be redundant, so we have a single value in a vector instead
Ok(Value::Scalar(Rc::new(literal_value.clone())))
}),
t: DataType::Float64, //TODO
})
}
&Expr::Column(index) => Ok(RuntimeExpr::Compiled {
f: Rc::new(move |batch: &RecordBatch| Ok((*batch.column(index)).clone())),
t: input_schema.column(index).data_type().clone(),
}),
&Expr::Cast {
ref expr,
ref data_type,
} => match expr.as_ref() {
&Expr::Column(index) => {
let compiled_cast_expr = compile_cast_column(data_type.clone())?;
Ok(RuntimeExpr::Compiled {
f: Rc::new(move |batch: &RecordBatch| {
(compiled_cast_expr)(batch.column(index))
}),
t: data_type.clone(),
})
}
&Expr::Literal(ref lit) => {
let compiled_cast_expr = compile_cast_scalar(lit, data_type)?;
Ok(RuntimeExpr::Compiled {
f: Rc::new(move |_: &RecordBatch| {
(compiled_cast_expr)(&Value::Scalar(Rc::new(ScalarValue::Null))) // pointless arg
}),
t: data_type.clone(),
})
}
other => Err(ExecutionError::General(format!(
"CAST not implemented for expression {:?}",
other
))),
},
&Expr::IsNotNull(ref expr) => {
let compiled_expr = compile_scalar_expr(ctx, expr, input_schema)?;
Ok(RuntimeExpr::Compiled {
f: Rc::new(move |batch: &RecordBatch| {
let left_values = compiled_expr.get_func()(batch)?;
left_values.is_not_null()
}),
t: DataType::Boolean,
})
}
&Expr::IsNull(ref expr) => {
let compiled_expr = compile_scalar_expr(ctx, expr, input_schema)?;
Ok(RuntimeExpr::Compiled {
f: Rc::new(move |batch: &RecordBatch| {
let left_values = compiled_expr.get_func()(batch)?;
left_values.is_null()
}),
t: DataType::Boolean,
})
}
&Expr::BinaryExpr {
ref left,
ref op,
ref right,
} => {
let left_expr = compile_scalar_expr(ctx, left, input_schema)?;
let right_expr = compile_scalar_expr(ctx, right, input_schema)?;
let op_type = left_expr.get_type().clone();
match op {
&Operator::Eq => Ok(RuntimeExpr::Compiled {
f: Rc::new(move |batch: &RecordBatch| {
let left_values = left_expr.get_func()(batch)?;
let right_values = right_expr.get_func()(batch)?;
left_values.eq(&right_values)
}),
t: DataType::Boolean,
}),
&Operator::NotEq => Ok(RuntimeExpr::Compiled {
f: Rc::new(move |batch: &RecordBatch| {
let left_values = left_expr.get_func()(batch)?;
let right_values = right_expr.get_func()(batch)?;
left_values.not_eq(&right_values)
}),
t: DataType::Boolean,
}),
&Operator::Lt => Ok(RuntimeExpr::Compiled {
f: Rc::new(move |batch: &RecordBatch| {
let left_values = left_expr.get_func()(batch)?;
let right_values = right_expr.get_func()(batch)?;
left_values.lt(&right_values)
}),
t: DataType::Boolean,
}),
&Operator::LtEq => Ok(RuntimeExpr::Compiled {
f: Rc::new(move |batch: &RecordBatch| {
let left_values = left_expr.get_func()(batch)?;
let right_values = right_expr.get_func()(batch)?;
left_values.lt_eq(&right_values)
}),
t: DataType::Boolean,
}),
&Operator::Gt => Ok(RuntimeExpr::Compiled {
f: Rc::new(move |batch: &RecordBatch| {
let left_values = left_expr.get_func()(batch)?;
let right_values = right_expr.get_func()(batch)?;
left_values.gt(&right_values)
}),
t: DataType::Boolean,
}),
&Operator::GtEq => Ok(RuntimeExpr::Compiled {
f: Rc::new(move |batch: &RecordBatch| {
let left_values = left_expr.get_func()(batch)?;
let right_values = right_expr.get_func()(batch)?;
left_values.gt_eq(&right_values)
}),
t: DataType::Boolean,
}),
&Operator::And => Ok(RuntimeExpr::Compiled {
f: Rc::new(move |batch: &RecordBatch| {
let left_values = left_expr.get_func()(batch)?;
let right_values = right_expr.get_func()(batch)?;
left_values.and(&right_values)
}),
t: DataType::Boolean,
}),
&Operator::Or => Ok(RuntimeExpr::Compiled {
f: Rc::new(move |batch: &RecordBatch| {
let left_values = left_expr.get_func()(batch)?;
let right_values = right_expr.get_func()(batch)?;
left_values.or(&right_values)
}),
t: DataType::Boolean,
}),
&Operator::Plus => Ok(RuntimeExpr::Compiled {
f: Rc::new(move |batch: &RecordBatch| {
let left_values = left_expr.get_func()(batch)?;
let right_values = right_expr.get_func()(batch)?;
left_values.add(&right_values)
}),
t: op_type,
}),
&Operator::Minus => Ok(RuntimeExpr::Compiled {
f: Rc::new(move |batch: &RecordBatch| {
let left_values = left_expr.get_func()(batch)?;
let right_values = right_expr.get_func()(batch)?;
left_values.subtract(&right_values)
}),
t: op_type,
}),
&Operator::Multiply => Ok(RuntimeExpr::Compiled {
f: Rc::new(move |batch: &RecordBatch| {
let left_values = left_expr.get_func()(batch)?;
let right_values = right_expr.get_func()(batch)?;
left_values.multiply(&right_values)
}),
t: op_type,
}),
&Operator::Divide => Ok(RuntimeExpr::Compiled {
f: Rc::new(move |batch: &RecordBatch| {
let left_values = left_expr.get_func()(batch)?;
let right_values = right_expr.get_func()(batch)?;
left_values.divide(&right_values)
}),
t: op_type,
}),
&Operator::Modulus => Ok(RuntimeExpr::Compiled {
f: Rc::new(move |batch: &RecordBatch| {
let left_values = left_expr.get_func()(batch)?;
let right_values = right_expr.get_func()(batch)?;
left_values.modulo(&right_values)
}),
t: op_type,
}),
}
}
&Expr::Sort { ref expr, .. } => {
//NOTE sort order is ignored here and is handled during sort execution
compile_scalar_expr(ctx, expr, input_schema)
}
&Expr::ScalarFunction {
ref name,
ref args,
ref return_type,
} => {
////println!("Executing function {}", name);
let func = ctx.load_scalar_function(name.as_ref())?;
let expected_args = func.args();
if expected_args.len() != args.len() {
return Err(ExecutionError::General(format!(
"Function {} requires {} parameters but {} were provided",
name,
expected_args.len(),
args.len()
)));
}
// evaluate the arguments to the function
let compiled_args: Result<Vec<RuntimeExpr>> = args
.iter()
.map(|e| compile_scalar_expr(ctx, e, input_schema))
.collect();
let compiled_args_ok = compiled_args?;
// type checking for function arguments
for i in 0..expected_args.len() {
let actual_type = compiled_args_ok[i].get_type();
if expected_args[i].data_type() != &actual_type {
return Err(ExecutionError::General(format!(
"Scalar function {} requires {:?} for argument {} but got {:?}",
name,
expected_args[i].data_type(),
i,
actual_type
)));
}
}
Ok(RuntimeExpr::Compiled {
f: Rc::new(move |batch| {
let arg_values: Result<Vec<Value>> = compiled_args_ok
.iter()
.map(|expr| expr.get_func()(batch))
.collect();
func.execute(&arg_values?)
}),
t: return_type.clone(),
})
}
// aggregate functions don't fit this pattern .. will need to rework this ..
&Expr::AggregateFunction { .. } => panic!("Aggregate expressions cannot be compiled yet"),
// &Expr::AggregateFunction { ref name, ref args } => {
//
// // evaluate the arguments to the function
// let compiled_args: Result<Vec<CompiledExpr>> =
// args.iter().map(|e| compile_expr(ctx, e)).collect();
//
// let compiled_args_ok = compiled_args?;
//
// Ok(Rc::new(move |batch| {
// let arg_values: Result<Vec<Value>> =
// compiled_args_ok.iter().map(|expr| expr(batch)).collect();
//
// Ok(Rc::new(arg_values?))
// }))
// }
}
}
///// Compiled Expression (basically just a closure to evaluate the expression at runtime)
//pub type CompiledAggregateExpr = Box<Fn(&RecordBatch, ScalarValue) -> Result<ScalarValue>>;
//
///// Compiles an aggregate expression into a closure
//pub fn compile_aggregate_expr(ctx: &ExecutionContext, expr: &Expr) -> Result<CompiledExpr> {
// match
//
//}
/// trait for all relations (a relation is essentially just an iterator over rows with
/// a known schema)
pub trait SimpleRelation {
/// scan all records in this relation
fn scan<'a>(&'a mut self) -> Box<Iterator<Item = Result<Rc<RecordBatch>>> + 'a>;
/// get the schema for this relation
fn schema<'a>(&'a self) -> &'a Schema;
}
struct DataSourceRelation {
schema: Schema,
ds: Rc<RefCell<DataSource>>,
}
impl SimpleRelation for DataSourceRelation {
fn scan<'a>(&'a mut self) -> Box<Iterator<Item = Result<Rc<RecordBatch>>> + 'a> {
Box::new(DataSourceIterator::new(self.ds.clone()))
}
fn schema<'a>(&'a self) -> &'a Schema {
&self.schema
}
}
/// Execution plans are sent to worker nodes for execution
#[derive(Debug, Clone)]
pub enum PhysicalPlan {
/// Run a query and return the results to the client
Interactive {
plan: Rc<LogicalPlan>,
},
/// Execute a logical plan and write the output to a file
Write {
plan: Rc<LogicalPlan>,
filename: String,
kind: String,
},
Show {
plan: Rc<LogicalPlan>,
count: usize,
},
}
#[derive(Debug, Clone)]
pub enum ExecutionResult {
Unit,
Count(usize),
Str(String),
}
struct ExecutionContextSchemaProvider {
tables: Rc<RefCell<HashMap<String, Rc<DataFrame>>>>,
function_meta: Rc<RefCell<HashMap<String, Rc<FunctionMeta>>>>,
}
impl SchemaProvider for ExecutionContextSchemaProvider {
fn get_table_meta(&self, name: &str) -> Option<Rc<Schema>> {
match self.tables.borrow().get(&name.to_string().to_lowercase()) {
Some(table) => Some(table.schema().clone()),
None => None,
}
}
fn get_function_meta(&self, name: &str) -> Option<Rc<FunctionMeta>> {
match self
.function_meta
.borrow()
.get(&name.to_string().to_lowercase())
{
Some(meta) => Some(meta.clone()),
None => None,
}
}
}
#[derive(Clone)]
pub struct ExecutionContext {
tables: Rc<RefCell<HashMap<String, Rc<DataFrame>>>>,
function_meta: Rc<RefCell<HashMap<String, Rc<FunctionMeta>>>>,
functions: Rc<RefCell<HashMap<String, Rc<ScalarFunction>>>>,
config: Rc<DFConfig>,
}
impl ExecutionContext {
fn create_schema_provider(&self) -> Rc<SchemaProvider> {
Rc::new(ExecutionContextSchemaProvider {
tables: self.tables.clone(),
function_meta: self.function_meta.clone(),
})
}
pub fn local() -> Self {
ExecutionContext {
tables: Rc::new(RefCell::new(HashMap::new())),
function_meta: Rc::new(RefCell::new(HashMap::new())),
functions: Rc::new(RefCell::new(HashMap::new())),
config: Rc::new(DFConfig::Local),
}
}
pub fn register_scalar_function(&mut self, func: Rc<ScalarFunction>) {
let fm = FunctionMeta::new(
func.name(),
func.args(),
func.return_type(),
FunctionType::Scalar,
);
self.function_meta
.borrow_mut()
.insert(func.name().to_lowercase(), Rc::new(fm));
self.functions
.borrow_mut()
.insert(func.name().to_lowercase(), func.clone());
}
pub fn create_logical_plan(&self, sql: &str) -> Result<Rc<LogicalPlan>> {
// parse SQL into AST
let ast = Parser::parse_sql(String::from(sql))?;
// create a query planner
let query_planner = SqlToRel::new(self.create_schema_provider());
// plan the query (create a logical relational plan)
Ok(query_planner.sql_to_rel(&ast)?)
}
pub fn register(&mut self, table_name: &str, df: Rc<DataFrame>) {
//println!("Registering table {}", table_name);
self.tables
.borrow_mut()
.insert(table_name.to_string(), df.clone());
}
pub fn sql(&mut self, sql: &str) -> Result<Rc<DataFrame>> {
//println!("sql() {}", sql);
// parse SQL into AST
let ast = Parser::parse_sql(String::from(sql))?;
//println!("AST: {:?}", ast);
match ast {
SQLCreateTable {
name,
columns,
file_type,
header_row,
location,
} => {
let fields: Vec<Field> = columns
.iter()
.map(|c| Field::new(&c.name, convert_data_type(&c.data_type), c.allow_null))
.collect();
let schema = Schema::new(fields);
let df = match file_type {
FileType::CSV => self.load_csv(&location, &schema, header_row, None)?,
FileType::NdJson => self.load_ndjson(&location, &schema, None)?,
FileType::Parquet => self.load_parquet(&location, None)?,
};
self.register(&name, df);
//TODO: not sure what to return here
Ok(Rc::new(DF::new(
self.clone(),
Rc::new(LogicalPlan::EmptyRelation {
schema: Rc::new(Schema::empty()),
}),
)))
}
_ => {
// create a query planner
let query_planner = SqlToRel::new(self.create_schema_provider());
// plan the query (create a logical relational plan)
let plan = query_planner.sql_to_rel(&ast)?;
//println!("Logical plan: {:?}", plan);
let new_plan = push_down_projection(&plan, &HashSet::new());
//println!("Optimized logical plan: {:?}", new_plan);
// return the DataFrame
Ok(Rc::new(DF::new(self.clone(), new_plan)))
}
}
}
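// A minimal usage sketch combining load_csv(), register() and sql() (the file
// path, schema, and table name below are illustrative, not part of this crate's
// test data):
//
//   let mut ctx = ExecutionContext::local();
//   let schema = Schema::new(vec![Field::new("id", DataType::Int32, false)]);
//   let df = ctx.load_csv("example.csv", &schema, true, None)?;
//   ctx.register("example", df);
//   let results = ctx.sql("SELECT id FROM example")?;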
/// Open a CSV file
///TODO: this is building a relational plan not an execution plan so shouldn't really be here
pub fn load_csv(
&self,
filename: &str,
schema: &Schema,
has_header: bool,
projection: Option<Vec<usize>>,
) -> Result<Rc<DataFrame>> {
let plan = LogicalPlan::CsvFile {
filename: filename.to_string(),
schema: Rc::new(schema.clone()),
has_header,
projection,
};
Ok(Rc::new(DF::new(self.clone(), Rc::new(plan))))
}
/// Open a CSV file
///TODO: this is building a relational plan not an execution plan so shouldn't really be here
pub fn load_ndjson(
&self,
filename: &str,
schema: &Schema,
projection: Option<Vec<usize>>,
) -> Result<Rc<DataFrame>> {
let plan = LogicalPlan::NdJsonFile {
filename: filename.to_string(),
schema: Rc::new(schema.clone()),
projection,
};
Ok(Rc::new(DF::new(self.clone(), Rc::new(plan))))
}
pub fn load_parquet(
&self,
filename: &str,
projection: Option<Vec<usize>>,
) -> Result<Rc<DataFrame>> {
//TODO: can only get schema by assuming file is local and opening it - need catalog!!
let file = File::open(filename)?;
let p = ParquetFile::open(file, None)?;
let plan = LogicalPlan::ParquetFile {
filename: filename.to_string(),
schema: p.schema().clone(),
projection,
};
Ok(Rc::new(DF::new(self.clone(), Rc::new(plan))))
}
pub fn create_execution_plan(&self, plan: &LogicalPlan) -> Result<Box<SimpleRelation>> {
//println!("Logical plan: {:?}", plan);
match *plan {
LogicalPlan::EmptyRelation { .. } => Ok(Box::new(DataSourceRelation {
schema: Schema::new(vec![]),
ds: Rc::new(RefCell::new(EmptyRelation::new())),
})),
LogicalPlan::Sort { .. } => unimplemented!(),
LogicalPlan::TableScan {
ref table_name,
ref projection,
..
} => {
//println!("TableScan: {}", table_name);
match self.tables.borrow().get(table_name) {
Some(df) => match projection {
Some(p) => {
let mut h: HashSet<usize> = HashSet::new();
p.iter().for_each(|i| {
h.insert(*i);
});
self.create_execution_plan(&push_down_projection(df.plan(), &h))
}
None => self.create_execution_plan(df.plan()),
},
_ => Err(ExecutionError::General(format!(
"No table registered as '{}'",
table_name
))),
}
}
LogicalPlan::CsvFile {
ref filename,
ref schema,
ref has_header,
ref projection,
} => {
let file = File::open(filename)?;
let ds = Rc::new(RefCell::new(CsvFile::open(
file,
schema.clone(),
*has_header,
projection.clone(),
)?)) as Rc<RefCell<DataSource>>;
Ok(Box::new(DataSourceRelation {
schema: schema.as_ref().clone(),
ds,
}))
}
LogicalPlan::NdJsonFile {
ref filename,
ref schema,
ref projection,
} => {
let file = File::open(filename)?;
let ds = Rc::new(RefCell::new(NdJsonFile::open(
file,
schema.clone(),
projection.clone(),
)?)) as Rc<RefCell<DataSource>>;
Ok(Box::new(DataSourceRelation {
schema: schema.as_ref().clone(),
ds,
}))
}
LogicalPlan::ParquetFile {
ref filename,
ref schema,
ref projection,
} => {
let file = File::open(filename)?;
let ds = Rc::new(RefCell::new(ParquetFile::open(file, projection.clone())?))
as Rc<RefCell<DataSource>>;
Ok(Box::new(DataSourceRelation {
schema: schema.as_ref().clone(),
ds,
}))
}
LogicalPlan::Selection {
ref expr,
ref input,
} => {
let input_rel = self.create_execution_plan(input)?;
let runtime_expr = compile_scalar_expr(&self, expr, input_rel.schema())?;
let rel = FilterRelation::new(input_rel, runtime_expr.get_func().clone());
Ok(Box::new(rel))
}
LogicalPlan::Projection {
ref expr,
ref input,
..
} => {
let input_rel = self.create_execution_plan(&input)?;
let project_columns: Vec<Field> = exprlist_to_fields(&expr, input_rel.schema());
let project_schema = Rc::new(Schema::new(project_columns));
let compiled_expr: Result<Vec<RuntimeExpr>> = expr
.iter()
.map(|e| compile_scalar_expr(&self, e, input_rel.schema()))
.collect();
let rel = ProjectRelation::new(input_rel, compiled_expr?, project_schema);
Ok(Box::new(rel))
}
LogicalPlan::Aggregate {
ref input,
ref group_expr,
ref aggr_expr,
..
} => {
let input_rel = self.create_execution_plan(&input)?;
let compiled_group_expr_result: Result<Vec<RuntimeExpr>> = group_expr
.iter()
.map(|e| compile_scalar_expr(&self, e, input_rel.schema()))
.collect();
let compiled_group_expr = compiled_group_expr_result?;
let compiled_aggr_expr_result: Result<Vec<RuntimeExpr>> = aggr_expr
.iter()
.map(|e| compile_expr(&self, e, input.schema()))
.collect();
let compiled_aggr_expr = compiled_aggr_expr_result?;
let rel = AggregateRelation::new(
Rc::new(Schema::empty()), //(expr_to_field(&compiled_group_expr, &input_schema))),
input_rel,
compiled_group_expr,
compiled_aggr_expr,
);
Ok(Box::new(rel))
}
//LogicalPlan::Sort { .. /*ref expr, ref input, ref schema*/ } => {
// let input_rel = self.create_execution_plan(data_dir, input)?;
//
// let compiled_expr : Result<Vec<CompiledExpr>> = expr.iter()
// .map(|e| compile_expr(&self,e))
// .collect();
//
// let sort_asc : Vec<bool> = expr.iter()
// .map(|e| match e {
// &Expr::Sort { asc, .. } => asc,
// _ => panic!()
// })
// .collect();
//
// let rel = SortRelation {
// input: input_rel,
// sort_expr: compiled_expr?,
// sort_asc: sort_asc,
// schema: schema.clone()
// };
// Ok(Box::new(rel))
// },
//}
LogicalPlan::Limit {
limit,
ref input,
ref schema,
..
} => {
let input_rel = self.create_execution_plan(input)?;
let rel = LimitRelation::new(schema.clone(), input_rel, limit);
Ok(Box::new(rel))
}
}
}
/// load a scalar function implementation
fn load_scalar_function(&self, function_name: &str) -> Result<Rc<ScalarFunction>> {
match self.functions.borrow().get(&function_name.to_lowercase()) {
Some(f) => Ok(f.clone()),
_ => Err(ExecutionError::General(format!(
"Unknown scalar function {}",
function_name
))),
}
}
/// load an aggregate function implementation
// fn load_aggregate_function(
// &self,
// function_name: &str,
// ) -> Result<Rc<AggregateFunction>> {
// match self.aggregate_functions.borrow().get(&function_name.to_lowercase()) {
// Some(f) => Ok(f.clone()),
// _ => Err(ExecutionError::General(format!(
// "Unknown aggregate function {}",
// function_name
// ))),
// }
// }
pub fn udf(&self, name: &str, args: Vec<Expr>, return_type: DataType) -> Expr {
Expr::ScalarFunction {
name: name.to_string(),
args: args.clone(),
return_type: return_type.clone(),
}
}
pub fn show(&self, df: &DataFrame, count: usize) -> Result<usize> {
//println!("show()");
let physical_plan = PhysicalPlan::Show {
plan: df.plan().clone(),
count,
};
match self.execute(&physical_plan)? {
ExecutionResult::Count(count) => Ok(count),
_ => Err(ExecutionError::General(
"Unexpected result in show".to_string(),
)),
}
}
pub fn write_csv(&self, df: Rc<DataFrame>, filename: &str) -> Result<usize> {
let physical_plan = PhysicalPlan::Write {
plan: df.plan().clone(),
filename: filename.to_string(),
kind: "csv".to_string(),
};
match self.execute(&physical_plan)? {
ExecutionResult::Count(count) => Ok(count),
_ => Err(ExecutionError::General(
"Unexpected result in write_csv".to_string(),
)),
}
}
pub fn write_string(&self, df: Rc<DataFrame>) -> Result<String> {
let physical_plan = PhysicalPlan::Write {
plan: df.plan().clone(),
filename: String::new(),
kind: "string".to_string(),
};
match self.execute(&physical_plan)? {
ExecutionResult::Str(s) => Ok(s),
_ => Err(ExecutionError::General(
"Unexpected result in write_string".to_string(),
)),
}
}
pub fn execute(&self, physical_plan: &PhysicalPlan) -> Result<ExecutionResult> {
//println!("execute()");
match &self.config.as_ref() {
&DFConfig::Local => {
//TODO error handling
match self.execute_local(physical_plan) {
Ok(r) => Ok(r),
Err(e) => Err(ExecutionError::General(format!(
"execution failed: {:?}", | }
&DFConfig::Remote { ref etcd } => self.execute_remote(physical_plan, etcd.clone()),
}
}
fn execute_local(&self, physical_plan: &PhysicalPlan) -> Result<ExecutionResult> {
//println!("execute_local()");
match physical_plan {
&PhysicalPlan::Interactive { ref plan } => {
let mut execution_plan = self.create_execution_plan(plan)?;
// implement execution here for now but should be a common method for processing a plan
let it = execution_plan.scan();
it.for_each(|t| {
match t {
Ok(ref batch) => {
////println!("Processing batch of {} rows", batch.row_count());
for i in 0..batch.num_rows() {
let row = batch.row_slice(i);
let csv = row
.into_iter()
.map(|v| v.to_string())
.collect::<Vec<String>>()
.join(",");
println!("{}", csv);
}
}
Err(e) => panic!("Error processing row: {:?}", e), //TODO: error handling
}
});
Ok(ExecutionResult::Count(0))
}
&PhysicalPlan::Write {
ref plan,
ref filename,
ref kind,
} => {
// create output file
// //println!("Writing csv to {}", filename);
match kind.as_ref() {
"csv" => {
let file = File::create(filename)?;
let mut w = CsvWriter {
w: BufWriter::with_capacity(8 * 1024 * 1024, file),
};
let mut execution_plan = self.create_execution_plan(plan)?;
// execution is implemented inline here for now, but it should move to a common method for processing a plan
let it = execution_plan.scan();
let mut count: usize = 0;
it.for_each(|t| {
match t {
Ok(ref batch) => {
////println!("Processing batch of {} rows", batch.row_count());
for i in 0..batch.num_rows() {
for j in 0..batch.num_columns() {
if j > 0 {
w.write_bytes(b",");
}
match *batch.column(j) {
Value::Scalar(ref v) => w.write_scalar(v),
Value::Column(ref v) => match v.data() {
ArrayData::Boolean(ref v) => {
w.write_bool(v.get(i))
}
ArrayData::Float32(ref v) => {
w.write_f32(v.get(i))
}
ArrayData::Float64(ref v) => {
w.write_f64(v.get(i))
}
ArrayData::Int8(ref v) => w.write_i8(v.get(i)),
ArrayData::Int16(ref v) => {
w.write_i16(v.get(i))
}
ArrayData::Int32(ref v) => {
w.write_i32(v.get(i))
}
ArrayData::Int64(ref v) => {
w.write_i64(v.get(i))
}
ArrayData::UInt8(ref v) => w.write_u8(v.get(i)),
ArrayData::UInt16(ref v) => {
w.write_u16(v.get(i))
}
ArrayData::UInt32(ref v) => {
w.write_u32(v.get(i))
}
ArrayData::UInt64(ref v) => {
w.write_u64(v.get(i))
}
ArrayData::Utf8(ref data) => {
w.write_bytes(data.get(i))
}
ArrayData::Struct(ref v) => {
let fields = v
.iter()
.map(|arr| get_value(&arr, i))
.collect();
w.write_bytes(
format!("{}", ScalarValue::Struct(fields))
.as_bytes(),
);
}
},
}
}
w.write_bytes(b"\n");
count += 1;
}
}
Err(e) => panic!("Error processing row: {:?}", e), //TODO: error handling
}
});
Ok(ExecutionResult::Count(count))
}
"string" => {
let mut execution_plan = self.create_execution_plan(plan)?;
let it = execution_plan.scan();
let mut result = String::new();
it.for_each(|t| match t {
Ok(ref batch) => {
for i in 0..batch.num_rows() {
let results = batch
.row_slice(i)
.into_iter()
.map(|v| v.to_string())
.collect::<Vec<String>>()
.join(",");
result.push_str(&results);
result.push_str("\n")
}
}
Err(e) => panic!("Error processing row: {:?}", e),
});
Ok(ExecutionResult::Str(result))
}
ref _x => panic!("Unknown physical plan output type."),
}
}
&PhysicalPlan::Show {
ref plan,
ref count,
} => {
let mut execution_plan = self.create_execution_plan(plan)?;
// execution is implemented inline here for now, but it should move to a common method for processing a plan
let it = execution_plan.scan().take(*count);
it.for_each(|t| {
match t {
Ok(ref batch) => {
////println!("Processing batch of {} rows", batch.row_count());
for i in 0..*count {
if i < batch.num_rows() {
let row = batch.row_slice(i);
let csv = row
.into_iter()
.map(|v| v.to_string())
.collect::<Vec<String>>()
.join(",");
println!("{}", csv);
}
}
}
Err(e) => panic!("Error processing row: {:?}", e), //TODO: error handling
}
});
Ok(ExecutionResult::Count(*count))
}
}
}
fn execute_remote(
&self,
_physical_plan: &PhysicalPlan,
_etcd: String,
) -> Result<ExecutionResult> {
Err(ExecutionError::General(
"Remote execution needs re-implementing since moving to Arrow".to_string(),
))
}
// let workers = get_worker_list(&etcd);
//
// match workers {
// Ok(ref list) if list.len() > 0 => {
// let worker_uri = format!("http://{}", list[0]);
// match worker_uri.parse() {
// Ok(uri) => {
//
// let mut core = Core::new().unwrap();
// let client = Client::new(&core.handle());
//
// // serialize plan to JSON
// match serde_json::to_string(&physical_plan) {
// Ok(json) => {
// let mut req = Request::new(Method::Post, uri);
// req.headers_mut().set(ContentType::json());
// req.headers_mut().set(ContentLength(json.len() as u64));
// req.set_body(json);
//
// let post = client.request(req).and_then(|res| {
// ////println!("POST: {}", res.status());
// res.body().concat2()
// });
//
// match core.run(post) {
// Ok(result) => {
// //TODO: parse result
// let result = str::from_utf8(&result).unwrap();
// //println!("{}", result);
// Ok(ExecutionResult::Unit)
// }
// Err(e) => Err(ExecutionError::General(format!("error: {}", e)))
// }
// }
// Err(e) => Err(ExecutionError::General(format!("error: {}", e)))
// }
//
//
// }
// Err(e) => Err(ExecutionError::General(format!("error: {}", e)))
// }
// }
// Ok(_) => Err(ExecutionError::General(format!("No workers found in cluster"))),
// Err(e) => Err(ExecutionError::General(format!("Failed to find a worker node: {}", e)))
// }
// }
}
#[cfg(test)]
mod tests {
use super::super::functions::geospatial::st_astext::*;
use super::super::functions::geospatial::st_point::*;
use super::super::functions::math::*;
use super::*;
use std::fs::File;
use std::io::prelude::*;
#[test]
fn test_dataframe_show() {
let mut ctx = create_context();
let df = ctx.sql(&"SELECT city, lat, lng FROM uk_cities").unwrap();
df.show(10);
}
#[test]
fn test_dataframe_select() {
let mut ctx = create_context();
let df = ctx.sql(&"SELECT city, lat, lng FROM uk_cities").unwrap();
let df2 = df.select(vec![Expr::Column(1)]).unwrap();
assert_eq!(1, df2.schema().columns().len());
}
#[test]
fn test_dataframe_filter() {
let mut ctx = create_context();
let df = ctx.sql(&"SELECT city, lat, lng FROM uk_cities").unwrap();
let df2 =
df.filter(Expr::BinaryExpr {
left: Rc::new(Expr::Column(1)),
op: Operator::Lt,
right: Rc::new(Expr::Literal(ScalarValue::Float64(52.1))),
}).unwrap();
df2.show(10);
//TODO assertions
}
#[test]
fn test_dataframe_col() {
let mut ctx = create_context();
let df = ctx.sql(&"SELECT city, lat, lng FROM uk_cities").unwrap();
assert_eq!(Expr::Column(2), df.col("lng").unwrap());
}
#[test]
fn test_dataframe_col_not_found() {
let mut ctx = create_context();
let df = ctx.sql(&"SELECT city, lat, lng FROM uk_cities").unwrap();
assert!(df.col("banana").is_err());
}
#[test]
fn test_dataframe_plan() {
let mut ctx = create_context();
let df = ctx.sql(&"SELECT city, lat, lng FROM uk_cities").unwrap();
let plan = df.plan();
assert_eq!(
"Projection: #0, #1, #2\
\n TableScan: uk_cities projection=None",
format!("{:?}", plan)
);
}
#[test]
fn test_create_external_table() {
let mut ctx = ExecutionContext::local();
let sql = "CREATE EXTERNAL TABLE new_uk_cities (\
city VARCHAR(100), \
lat DOUBLE, \
lng DOUBLE) \
STORED AS CSV \
WITHOUT HEADER ROW \
LOCATION 'test/data/uk_cities.csv'";
ctx.sql(sql).unwrap();
let df = ctx.sql("SELECT city, lat, lng FROM new_uk_cities").unwrap();
//TODO: assertions
df.show(10);
}
#[test]
fn test_create_logical_plan() {
let mut ctx = create_context();
ctx.register_scalar_function(Rc::new(SqrtFunction {}));
let plan = ctx
.create_logical_plan(&"SELECT id, sqrt(id) FROM people")
.unwrap();
let expected_plan = "Projection: #0, sqrt(CAST(#0 AS Float64))\
\n TableScan: people projection=None";
assert_eq!(expected_plan, format!("{:?}", plan));
}
#[test]
fn test_sqrt() {
let mut ctx = create_context();
ctx.register_scalar_function(Rc::new(SqrtFunction {}));
let df = ctx.sql(&"SELECT id, sqrt(id) FROM people").unwrap();
ctx.write_csv(df, "./target/test_sqrt.csv").unwrap();
let expected_result = read_file("test/data/expected/test_sqrt.csv");
assert_eq!(expected_result, read_file("./target/test_sqrt.csv"));
}
#[test]
fn test_sql_udf_udt() {
let mut ctx = create_context();
ctx.register_scalar_function(Rc::new(STPointFunc {}));
let df = ctx
.sql(&"SELECT ST_Point(lat, lng) FROM uk_cities")
.unwrap();
ctx.write_csv(df, "./target/test_sql_udf_udt.csv").unwrap();
let expected_result = read_file("test/data/expected/test_sql_udf_udt.csv");
assert_eq!(expected_result, read_file("./target/test_sql_udf_udt.csv"));
}
#[test]
fn test_limit() {
let mut ctx = create_context();
ctx.register_scalar_function(Rc::new(SqrtFunction {}));
let df = ctx.sql(&"SELECT id, sqrt(id) FROM people LIMIT 5").unwrap();
ctx.write_csv(df, "./target/test_limit.csv").unwrap();
let expected_result = read_file("test/data/expected/test_limit.csv");
assert_eq!(expected_result, read_file("./target/test_limit.csv"));
}
#[test]
fn test_df_udf_udt() {
let mut ctx = create_context();
ctx.register_scalar_function(Rc::new(STPointFunc {}));
let schema = Schema::new(vec![
Field::new("city", DataType::Utf8, false),
Field::new("lat", DataType::Float64, false),
Field::new("lng", DataType::Float64, false),
]);
let df = ctx
.load_csv("test/data/uk_cities.csv", &schema, false, None)
.unwrap();
// invoke custom code as a scalar UDF
let func_expr = ctx.udf(
"ST_Point",
vec![df.col("lat").unwrap(), df.col("lng").unwrap()],
DataType::Struct(vec![
Field::new("lat", DataType::Float64, false),
Field::new("lng", DataType::Float64, false),
]),
);
let df2 = df.select(vec![func_expr]).unwrap();
ctx.write_csv(df2, "./target/test_df_udf_udt.csv").unwrap();
let expected_result = read_file("test/data/expected/test_df_udf_udt.csv");
assert_eq!(expected_result, read_file("./target/test_df_udf_udt.csv"));
}
#[test]
fn test_filter() {
let mut ctx = create_context();
ctx.register_scalar_function(Rc::new(STPointFunc {}));
let schema = Schema::new(vec![
Field::new("city", DataType::Utf8, false),
Field::new("lat", DataType::Float64, false),
Field::new("lng", DataType::Float64, false),
]);
let df = ctx
.load_csv("test/data/uk_cities.csv", &schema, false, None)
.unwrap();
// filter by lat
let df2 =
df.filter(Expr::BinaryExpr {
left: Rc::new(Expr::Column(1)), // lat
op: Operator::Gt,
right: Rc::new(Expr::Literal(ScalarValue::Float64(52.0))),
}).unwrap();
ctx.write_csv(df2, "./target/test_filter.csv").unwrap();
let expected_result = read_file("test/data/expected/test_filter.csv");
assert_eq!(expected_result, read_file("./target/test_filter.csv"));
}
/*
#[test]
fn test_sort() {
let mut ctx = create_context();
ctx.define_function(&STPointFunc {});
let schema = Schema::new(vec![
Field::new("city", DataType::String, false),
Field::new("lat", DataType::Double, false),
Field::new("lng", DataType::Double, false)]);
let df = ctx.load("test/data/uk_cities.csv", &schema).unwrap();
// sort by lat, lng ascending
let df2 = df.sort(vec![
Expr::Sort { expr: Box::new(Expr::Column(1)), asc: true },
Expr::Sort { expr: Box::new(Expr::Column(2)), asc: true }
]).unwrap();
ctx.write(df2,"./target/uk_cities_sorted_by_lat_lng.csv").unwrap();
//TODO: check that generated file has expected contents
}
*/
#[test]
fn test_chaining_functions() {
let mut ctx = create_context();
ctx.register_scalar_function(Rc::new(STPointFunc {}));
ctx.register_scalar_function(Rc::new(STAsText {}));
let df = ctx
.sql(&"SELECT ST_AsText(ST_Point(lat, lng)) FROM uk_cities")
.unwrap();
ctx.write_csv(df, "./target/test_chaining_functions.csv")
.unwrap();
let expected_result = read_file("test/data/expected/test_chaining_functions.csv");
assert_eq!(
expected_result,
read_file("./target/test_chaining_functions.csv")
);
}
#[test]
fn test_simple_predicate() {
// create execution context
let mut ctx = ExecutionContext::local();
ctx.register_scalar_function(Rc::new(STPointFunc {}));
ctx.register_scalar_function(Rc::new(STAsText {}));
// define an external table (csv file)
// ctx.sql(
// "CREATE EXTERNAL TABLE uk_cities (\
// city VARCHAR(100), \
// lat DOUBLE, \
// lng DOUBLE)",
// ).unwrap();
let schema = Schema::new(vec![
Field::new("city", DataType::Utf8, false),
Field::new("lat", DataType::Float64, false),
Field::new("lng", DataType::Float64, false),
]);
let df = ctx
.load_csv("./test/data/uk_cities.csv", &schema, false, None)
.unwrap();
ctx.register("uk_cities", df);
// define the SQL statement
let sql = "SELECT ST_AsText(ST_Point(lat, lng)) FROM uk_cities WHERE lat < 53.0";
// create a data frame
let df1 = ctx.sql(&sql).unwrap();
// write the results to a file
ctx.write_csv(df1, "./target/test_simple_predicate.csv")
.unwrap();
let expected_result = read_file("test/data/expected/test_simple_predicate.csv");
assert_eq!(
expected_result,
read_file("./target/test_simple_predicate.csv")
);
}
#[test]
fn test_sql_min_max() {
// create execution context
let mut ctx = ExecutionContext::local();
let schema = Schema::new(vec![
Field::new("city", DataType::Utf8, false),
Field::new("lat", DataType::Float64, false),
Field::new("lng", DataType::Float64, false),
]);
let df = ctx
.load_csv("./test/data/uk_cities.csv", &schema, false, None)
.unwrap();
ctx.register("uk_cities", df);
// define the SQL statement
let sql = "SELECT MIN(lat), MAX(lat), MIN(lng), MAX(lng) FROM uk_cities";
// create a data frame
let df1 = ctx.sql(&sql).unwrap();
// write the results to a file
ctx.write_csv(df1, "./target/test_sql_min_max.csv").unwrap();
let expected_result = read_file("test/data/expected/test_sql_min_max.csv");
assert_eq!(expected_result, read_file("./target/test_sql_min_max.csv"));
}
#[test]
fn test_is_null_csv() {
// create execution context
let mut ctx = ExecutionContext::local();
let schema = Schema::new(vec![
Field::new("c_int", DataType::UInt32, false),
Field::new("c_float", DataType::Float64, false),
Field::new("c_string", DataType::Utf8, false),
]);
let df = ctx
.load_csv("./test/data/null_test.csv", &schema, true, None)
.unwrap();
ctx.register("null_test", df);
// define the SQL statement
let sql = "SELECT c_int FROM null_test WHERE c_float IS NULL"; // OR c_string IS NULL
// create a data frame
let df1 = ctx.sql(&sql).unwrap();
// write the results to a file
ctx.write_csv(df1, "./target/is_null_csv.csv").unwrap();
let expected_result = read_file("test/data/expected/is_null_csv.csv");
assert_eq!(expected_result, read_file("./target/is_null_csv.csv"));
}
#[test]
fn test_is_not_null_csv() {
// create execution context
let mut ctx = ExecutionContext::local();
let schema = Schema::new(vec![
Field::new("c_int", DataType::UInt32, false),
Field::new("c_float", DataType::Float64, false),
Field::new("c_string", DataType::Utf8, false),
]);
let df = ctx
.load_csv("./test/data/null_test.csv", &schema, true, None)
.unwrap();
ctx.register("null_test", df);
// define the SQL statement
let sql = "SELECT c_int FROM null_test WHERE c_float IS NOT NULL";
// create a data frame
let df1 = ctx.sql(&sql).unwrap();
// write the results to a file
ctx.write_csv(df1, "./target/is_not_null_csv.csv").unwrap();
let expected_result = read_file("test/data/expected/is_not_null_csv.csv");
assert_eq!(expected_result, read_file("./target/is_not_null_csv.csv"));
}
#[test]
fn test_cast() {
// create execution context
let mut ctx = ExecutionContext::local();
let schema = Schema::new(vec![
Field::new("c_int", DataType::UInt32, false),
Field::new("c_float", DataType::Float64, false),
Field::new("c_string", DataType::Utf8, false),
]);
let df = ctx
.load_csv("./test/data/all_types.csv", &schema, true, None)
.unwrap();
ctx.register("all_types", df);
// define the SQL statement
let sql = "SELECT \
CAST(c_int AS INT), CAST(c_int AS FLOAT), CAST(c_int AS STRING), \
CAST(c_float AS INT), CAST(c_float AS FLOAT), CAST(c_float AS STRING), \
CAST(c_string AS FLOAT), CAST(c_string AS STRING) \
FROM all_types";
// create a data frame
let df1 = ctx.sql(&sql).unwrap();
// write the results to a file
ctx.write_csv(df1, "./target/test_cast.csv").unwrap();
let expected_result = read_file("test/data/expected/test_cast.csv");
assert_eq!(expected_result, read_file("./target/test_cast.csv"));
}
#[test]
fn test_select_no_relation() {
let mut ctx = ExecutionContext::local();
let df = ctx.sql("SELECT 1+1").unwrap();
let s = ctx.write_string(df).unwrap();
assert_eq!("2\n", &s);
}
fn read_file(filename: &str) -> String {
let mut file = File::open(filename).unwrap();
let mut contents = String::new();
file.read_to_string(&mut contents).unwrap();
contents
}
fn create_context() -> ExecutionContext {
// create execution context
let mut ctx = ExecutionContext::local();
let people =
ctx.load_csv(
"./test/data/people.csv",
&Schema::new(vec![
Field::new("id", DataType::Int32, false),
Field::new("name", DataType::Utf8, false),
]),
true,
None,
).unwrap();
ctx.register("people", people);
let uk_cities =
ctx.load_csv(
"./test/data/uk_cities.csv",
&Schema::new(vec![
Field::new("city", DataType::Utf8, false),
Field::new("lat", DataType::Float64, false),
Field::new("lng", DataType::Float64, false),
]),
false,
None,
).unwrap();
ctx.register("uk_cities", uk_cities);
ctx
}
} | e
))),
} |
pack.ts | import * as coda from '@codahq/packs-sdk'
function | (message: string): string {
return message.replace(/\n/g, '\\n')
}
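// For example (a sketch of the intended behavior): a message containing a real
// newline, such as '{"text": "a\nb"}', becomes '{"text": "a\\nb"}' so that the
// JSON.parse call below succeeds instead of throwing on the unescaped control character.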
export const pack = coda.newPack()
pack.addNetworkDomain('googleapis.com')
pack.addFormula({
name: 'SendToWebhook',
description: "Send a message to a Google Chat webhook",
isAction: true,
resultType: coda.ValueType.String,
parameters: [
coda.makeParameter({
type: coda.ParameterType.String,
name: 'webhookUrl',
description: 'URL for the webhook (should begin with `https://chat.googleapis.com`)',
}),
coda.makeParameter({
type: coda.ParameterType.String,
name: 'message',
description: 'Message to send (plain text or a JSON-formatted payload)',
}),
coda.makeParameter({
type: coda.ParameterType.String,
name: 'threadKey',
description: 'Thread key in order to send multiple messages to the same thread',
optional: true,
}),
],
execute: async ([webhookUrl, message, threadKey], context) => {
const isJSON = message.startsWith('{') && message.endsWith('}')
const payload = isJSON ? JSON.parse(replaceNewline(message)) : { text: message }
if (threadKey) {
webhookUrl = coda.withQueryParams(webhookUrl, {
threadKey,
})
}
const response = await context.fetcher.fetch({
method: 'POST',
url: webhookUrl,
cacheTtlSecs: 0,
body: JSON.stringify(payload),
})
return response.body
},
})
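// Hypothetical invocations from a Coda doc (the webhook URL is a placeholder,
// not a real endpoint):
//   SendToWebhook("https://chat.googleapis.com/v1/spaces/XXX/messages?key=...", "Hello!")
// Sending a JSON payload to a named thread:
//   SendToWebhook(url, '{"text": "build *passed*"}', "ci-thread")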
| replaceNewline |
spotify_main_app.py | # Main dashboard screen
# importing libraries
import dash
from dash import dcc
from dash import html
from dash.dependencies import Input, Output, State
# dash app
app = dash.Dash(__name__) | app.layout = html.Div(
children=[
html.H1(
children='Spotify Analysis'
),
html.Div(
children='''
Dashboard for song listening patterns.
'''
),
]
)
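# A sketch of how a chart and callback could be wired in later (the ids, column
# names, and build_trend_figure helper are assumptions, not part of this stub):
#
# app.layout.children.append(dcc.Graph(id='listening-trend', figure=trend_fig))
#
# @app.callback(Output('listening-trend', 'figure'),
#               Input('artist-dropdown', 'value'))
# def update_trend(artist):
#     return build_trend_figure(artist)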
if __name__ == '__main__':
app.run_server(debug=True) |
# Dash layout |
diff_chain.rs | // Copyright 2020-2021 IOTA Stiftung
// SPDX-License-Identifier: Apache-2.0
use core::fmt::Display;
use core::fmt::Error as FmtError;
use core::fmt::Formatter;
use core::fmt::Result as FmtResult;
use core::slice::Iter;
use serde;
use serde::Deserialize;
use serde::Serialize;
use identity_core::convert::ToJson;
use crate::chain::milestone::sort_by_milestone;
use crate::chain::IntegrationChain;
use crate::did::IotaDID;
use crate::document::DiffMessage;
use crate::document::IotaDocument;
use crate::error::Error;
use crate::error::Result;
use crate::tangle::Client;
use crate::tangle::Message;
use crate::tangle::MessageExt;
use crate::tangle::MessageId;
use crate::tangle::MessageIdExt;
use crate::tangle::MessageIndex;
use crate::tangle::TangleRef;
#[derive(Clone, Debug, Deserialize, Serialize)]
#[serde(transparent)]
pub struct DiffChain {
inner: Vec<DiffMessage>,
}
impl DiffChain {
/// Constructs a new [`DiffChain`] for the given [`IntegrationChain`] from a slice of [`Messages`][Message].
pub async fn try_from_messages(
integration_chain: &IntegrationChain,
messages: &[Message],
client: &Client,
) -> Result<Self> {
let did: &IotaDID = integration_chain.current().id();
let index: MessageIndex<DiffMessage> = messages
.iter()
.flat_map(|message| message.try_extract_diff(did))
.collect();
log::debug!("[Diff] Valid Messages = {}/{}", index.len(), messages.len());
Self::try_from_index(integration_chain, index, client).await
}
/// Constructs a new [`DiffChain`] for the given [`IntegrationChain`] from the given [`MessageIndex`].
pub async fn try_from_index(
integration_chain: &IntegrationChain,
index: MessageIndex<DiffMessage>,
client: &Client,
) -> Result<Self> {
log::trace!("[Diff] Message Index = {:#?}", index);
Self::try_from_index_with_document(integration_chain.current(), index, client).await
}
/// Constructs a new [`DiffChain`] from the given [`MessageIndex`], using an integration document
/// to validate.
pub(in crate::chain) async fn try_from_index_with_document(
integration_document: &IotaDocument,
mut index: MessageIndex<DiffMessage>,
client: &Client,
) -> Result<Self> {
if index.is_empty() {
return Ok(Self::new());
}
let mut this: Self = Self::new();
while let Some(diffs) = index.remove(
this
.current_message_id()
.unwrap_or_else(|| integration_document.message_id()),
) {
// Extract valid diffs.
let expected_prev_message_id: &MessageId = this
.current_message_id()
.unwrap_or_else(|| integration_document.message_id());
let valid_diffs: Vec<DiffMessage> = diffs
.into_iter()
.filter(|diff| Self::check_valid_addition(diff, integration_document, expected_prev_message_id).is_ok())
.collect();
// Sort and push the diff referenced by the oldest milestone.
if let Some(next) = sort_by_milestone(valid_diffs, client).await?.into_iter().next() {
this.push_unchecked(next); // checked by check_valid_addition above
}
// If no diff is appended, the chain ends.
}
Ok(this)
}
/// Creates a new [`DiffChain`].
pub fn new() -> Self {
Self { inner: Vec::new() }
}
/// Returns the total number of diffs.
pub fn len(&self) -> usize {
self.inner.len()
}
/// Returns `true` if the [`DiffChain`] is empty.
pub fn is_empty(&self) -> bool {
self.inner.is_empty()
}
/// Empties the [`DiffChain`], removing all diffs.
pub fn clear(&mut self) {
self.inner.clear();
}
/// Returns an iterator yielding references to [`DiffMessages`][DiffMessage].
pub fn iter(&self) -> Iter<'_, DiffMessage> {
self.inner.iter()
}
/// Returns the [`MessageId`] of the latest diff in the chain, if any.
pub fn current_message_id(&self) -> Option<&MessageId> {
self.inner.last().map(|diff| diff.message_id())
}
/// Adds a new diff to the [`DiffChain`].
///
/// # Errors
///
/// Fails if the diff signature is invalid or the Tangle message
/// references within the diff are invalid.
pub fn try_push(&mut self, diff: DiffMessage, integration_chain: &IntegrationChain) -> Result<()> {
let document: &IotaDocument = integration_chain.current();
let expected_prev_message_id: &MessageId = self.current_message_id().unwrap_or_else(|| document.message_id());
Self::check_valid_addition(&diff, document, expected_prev_message_id)?;
self.push_unchecked(diff);
Ok(())
}
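// Usage sketch (assumes an already-resolved `IntegrationChain` and a signed
// `DiffMessage` whose previous message id points at the current chain tip):
//
//   let mut diff_chain = DiffChain::new();
//   diff_chain.try_push(diff_message, &integration_chain)?;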
/// Adds a new diff to the [`DiffChain`] without performing validation checks on the signature or Tangle references
/// of the [`DiffMessage`].
fn push_unchecked(&mut self, diff: DiffMessage) {
self.inner.push(diff);
}
/// Checks if the [`DiffMessage`] can be added to the [`DiffChain`].
///
/// # Errors
///
/// Fails if the [`DiffMessage`] is not a valid addition.
pub fn check_valid_addition(
diff: &DiffMessage,
document: &IotaDocument,
expected_prev_message_id: &MessageId,
) -> Result<()> {
if document.id() != &diff.did {
return Err(Error::ChainError { error: "Invalid DID" });
}
if diff.message_id().is_null() {
return Err(Error::ChainError {
error: "Invalid Message Id",
});
}
if diff.previous_message_id().is_null() {
return Err(Error::ChainError {
error: "Invalid Previous Message Id",
});
}
if diff.previous_message_id() != expected_prev_message_id {
return Err(Error::ChainError {
error: "Invalid Previous Message Id",
});
}
if document.verify_diff(diff).is_err() {
return Err(Error::ChainError {
error: "Invalid Signature",
});
}
Ok(())
}
}
impl Default for DiffChain {
fn default() -> Self {
Self::new()
}
}
impl Display for DiffChain {
fn fmt(&self, f: &mut Formatter<'_>) -> FmtResult {
if f.alternate() {
f.write_str(&self.to_json_pretty().map_err(|_| FmtError)?)
} else {
f.write_str(&self.to_json().map_err(|_| FmtError)?)
}
}
}
impl From<DiffChain> for Vec<DiffMessage> {
fn from(diff_chain: DiffChain) -> Self {
diff_chain.inner
}
}
#[cfg(test)]
mod test {
use identity_core::common::Timestamp;
use identity_core::crypto::KeyPair;
use identity_did::did::CoreDIDUrl;
use identity_did::did::DID;
use identity_did::verification::MethodBuilder;
use identity_did::verification::MethodData;
use identity_did::verification::MethodRef;
use identity_did::verification::MethodType;
use crate::chain::DocumentChain;
use crate::chain::IntegrationChain;
use crate::document::DiffMessage;
use crate::document::IotaDocument;
use crate::tangle::MessageId;
use crate::tangle::TangleRef; | #[test]
fn test_diff_chain() {
let mut chain: DocumentChain;
let mut keys: Vec<KeyPair> = Vec::new();
// =========================================================================
// Create Initial Document
// =========================================================================
{
let keypair: KeyPair = KeyPair::new_ed25519().unwrap();
let mut document: IotaDocument = IotaDocument::new(&keypair).unwrap();
document
.sign_self(keypair.private(), &document.default_signing_method().unwrap().id())
.unwrap();
document.set_message_id(MessageId::new([8; 32]));
chain = DocumentChain::new(IntegrationChain::new(document).unwrap());
keys.push(keypair);
}
assert_eq!(
chain.current().proof().unwrap().verification_method(),
format!("#{}", IotaDocument::DEFAULT_METHOD_FRAGMENT)
);
// =========================================================================
// Push Integration Chain Update
// =========================================================================
{
let mut new: IotaDocument = chain.current().clone();
let keypair: KeyPair = KeyPair::new_ed25519().unwrap();
// Replace the capability invocation signing key (one step key rotation).
let signing_method: MethodRef = MethodBuilder::default()
.id(CoreDIDUrl::from(chain.id().to_url().join("#key-2").unwrap()))
.controller(chain.id().clone().into())
.key_type(MethodType::Ed25519VerificationKey2018)
.key_data(MethodData::new_multibase(keypair.public()))
.build()
.map(Into::into)
.unwrap();
unsafe {
new.as_document_mut().capability_invocation_mut().clear();
new.as_document_mut().capability_invocation_mut().append(signing_method);
}
new.set_updated(Timestamp::now_utc());
new.set_previous_message_id(*chain.integration_message_id());
// Sign the update using the old document.
assert!(chain
.current()
.sign_data(
&mut new,
keys[0].private(),
chain.current().default_signing_method().unwrap().id()
)
.is_ok());
assert_eq!(
chain.current().proof().unwrap().verification_method(),
format!("#{}", IotaDocument::DEFAULT_METHOD_FRAGMENT)
);
keys.push(keypair);
assert!(chain.try_push_integration(new).is_ok());
}
// =========================================================================
// Push Diff Chain Update
// =========================================================================
{
let new: IotaDocument = {
let mut this: IotaDocument = chain.current().clone();
this.properties_mut().insert("foo".into(), 123.into());
this.properties_mut().insert("bar".into(), 456.into());
this.set_updated(Timestamp::now_utc());
this
};
// Sign using the new key added in the previous integration chain update.
let message_id = *chain.diff_message_id();
let mut diff: DiffMessage = chain
.current()
.diff(&new, message_id, keys[1].private(), "#key-2")
.unwrap();
diff.set_message_id(message_id);
assert!(chain.try_push_diff(diff).is_ok());
}
}
} | |
test_owe.py | # Test cases for Opportunistic Wireless Encryption (OWE)
# Copyright (c) 2017, Jouni Malinen <[email protected]>
#
# This software may be distributed under the terms of the BSD license.
# See README for more details.
import binascii
import logging
logger = logging.getLogger()
import time
import os
import struct
import hostapd
from wpasupplicant import WpaSupplicant
import hwsim_utils
from tshark import run_tshark
from utils import HwsimSkip, fail_test, alloc_fail, wait_fail_trigger
from test_ap_acs import wait_acs
def test_owe(dev, apdev):
"""Opportunistic Wireless Encryption"""
if "OWE" not in dev[0].get_capability("key_mgmt"):
raise HwsimSkip("OWE not supported")
params = {"ssid": "owe",
"wpa": "2",
"ieee80211w": "2",
"wpa_key_mgmt": "OWE",
"rsn_pairwise": "CCMP"}
hapd = hostapd.add_ap(apdev[0], params)
bssid = hapd.own_addr()
conf = hapd.request("GET_CONFIG")
if "key_mgmt=OWE" not in conf.splitlines():
logger.info("GET_CONFIG:\n" + conf)
raise Exception("GET_CONFIG did not report correct key_mgmt")
dev[0].scan_for_bss(bssid, freq="2412")
bss = dev[0].get_bss(bssid)
if "[WPA2-OWE-CCMP]" not in bss['flags']:
raise Exception("OWE AKM not recognized: " + bss['flags'])
id = dev[0].connect("owe", key_mgmt="OWE", ieee80211w="2", scan_freq="2412")
hapd.wait_sta()
pmk_h = hapd.request("GET_PMK " + dev[0].own_addr())
pmk_w = dev[0].get_pmk(id)
if pmk_h != pmk_w:
raise Exception("Fetched PMK does not match: hostapd %s, wpa_supplicant %s" % (pmk_h, pmk_w))
hwsim_utils.test_connectivity(dev[0], hapd)
val = dev[0].get_status_field("key_mgmt")
if val != "OWE":
raise Exception("Unexpected key_mgmt: " + val)
def test_owe_groups(dev, apdev):
"""Opportunistic Wireless Encryption - DH groups"""
if "OWE" not in dev[0].get_capability("key_mgmt"):
raise HwsimSkip("OWE not supported")
params = {"ssid": "owe",
"wpa": "2",
"wpa_key_mgmt": "OWE",
"rsn_pairwise": "CCMP"}
hapd = hostapd.add_ap(apdev[0], params)
bssid = hapd.own_addr()
dev[0].scan_for_bss(bssid, freq="2412")
for group in [19, 20, 21]:
dev[0].connect("owe", key_mgmt="OWE", owe_group=str(group))
hapd.wait_sta()
hwsim_utils.test_connectivity(dev[0], hapd)
dev[0].request("REMOVE_NETWORK all")
dev[0].wait_disconnected()
dev[0].dump_monitor()
hapd.dump_monitor()
def test_owe_pmksa_caching(dev, apdev):
"""Opportunistic Wireless Encryption and PMKSA caching"""
try:
run_owe_pmksa_caching(dev, apdev)
finally:
dev[0].set("reassoc_same_bss_optim", "0")
def test_owe_pmksa_caching_connect_cmd(dev, apdev):
"""Opportunistic Wireless Encryption and PMKSA caching using cfg80211 connect command"""
wpas = WpaSupplicant(global_iface='/tmp/wpas-wlan5')
wpas.interface_add("wlan5", drv_params="force_connect_cmd=1")
try:
run_owe_pmksa_caching([wpas], apdev)
finally:
wpas.set("reassoc_same_bss_optim", "0")
def run_owe_pmksa_caching(dev, apdev):
if "OWE" not in dev[0].get_capability("key_mgmt"):
raise HwsimSkip("OWE not supported")
params = {"ssid": "owe",
"wpa": "2",
"wpa_key_mgmt": "OWE",
"rsn_pairwise": "CCMP"}
hapd = hostapd.add_ap(apdev[0], params)
bssid = hapd.own_addr()
dev[0].set("reassoc_same_bss_optim", "1")
dev[0].scan_for_bss(bssid, freq="2412")
id = dev[0].connect("owe", key_mgmt="OWE")
hapd.wait_sta()
hwsim_utils.test_connectivity(dev[0], hapd)
pmksa = dev[0].get_pmksa(bssid)
dev[0].request("DISCONNECT")
dev[0].wait_disconnected()
dev[0].dump_monitor()
dev[0].select_network(id, 2412)
dev[0].wait_connected()
hapd.wait_sta()
hwsim_utils.test_connectivity(dev[0], hapd)
pmksa2 = dev[0].get_pmksa(bssid)
dev[0].request("DISCONNECT")
dev[0].wait_disconnected()
dev[0].dump_monitor()
if "OK" not in hapd.request("PMKSA_FLUSH"):
raise Exception("PMKSA_FLUSH failed")
dev[0].select_network(id, 2412)
dev[0].wait_connected()
hapd.wait_sta()
hwsim_utils.test_connectivity(dev[0], hapd)
pmksa3 = dev[0].get_pmksa(bssid)
if pmksa is None or pmksa2 is None or pmksa3 is None:
raise Exception("PMKSA entry missing")
if pmksa['pmkid'] != pmksa2['pmkid']:
raise Exception("Unexpected PMKID change when using PMKSA caching")
if pmksa['pmkid'] == pmksa3['pmkid']:
raise Exception("PMKID did not change after PMKSA cache flush")
dev[0].request("REASSOCIATE")
dev[0].wait_connected()
pmksa4 = dev[0].get_pmksa(bssid)
if pmksa3['pmkid'] != pmksa4['pmkid']:
raise Exception("Unexpected PMKID change when using PMKSA caching [2]")
def test_owe_and_psk(dev, apdev):
"""Opportunistic Wireless Encryption and WPA2-PSK enabled"""
if "OWE" not in dev[0].get_capability("key_mgmt"):
raise HwsimSkip("OWE not supported")
params = {"ssid": "owe+psk",
"wpa": "2",
"wpa_key_mgmt": "OWE WPA-PSK",
"rsn_pairwise": "CCMP",
"wpa_passphrase": "12345678"}
hapd = hostapd.add_ap(apdev[0], params)
bssid = hapd.own_addr()
dev[0].scan_for_bss(bssid, freq="2412")
dev[0].connect("owe+psk", psk="12345678")
hapd.wait_sta()
hwsim_utils.test_connectivity(dev[0], hapd)
dev[1].scan_for_bss(bssid, freq="2412")
dev[1].connect("owe+psk", key_mgmt="OWE")
hapd.wait_sta()
hwsim_utils.test_connectivity(dev[1], hapd)
def test_owe_transition_mode(dev, apdev):
"""Opportunistic Wireless Encryption transition mode"""
run_owe_transition_mode(dev, apdev)
def test_owe_transition_mode_connect_cmd(dev, apdev):
"""Opportunistic Wireless Encryption transition mode using cfg80211 connect command"""
wpas = WpaSupplicant(global_iface='/tmp/wpas-wlan5')
wpas.interface_add("wlan5", drv_params="force_connect_cmd=1")
run_owe_transition_mode([wpas], apdev)
def test_owe_transition_mode_mismatch1(dev, apdev):
"""Opportunistic Wireless Encryption transition mode (mismatch 1)"""
run_owe_transition_mode(dev, apdev, adv_bssid0="02:11:22:33:44:55")
def test_owe_transition_mode_mismatch2(dev, apdev):
"""Opportunistic Wireless Encryption transition mode (mismatch 2)"""
run_owe_transition_mode(dev, apdev, adv_bssid1="02:11:22:33:44:66")
def test_owe_transition_mode_mismatch3(dev, apdev):
"""Opportunistic Wireless Encryption transition mode (mismatch 3)"""
run_owe_transition_mode(dev, apdev, adv_bssid0="02:11:22:33:44:55",
adv_bssid1="02:11:22:33:44:66")
def run_owe_transition_mode(dev, apdev, adv_bssid0=None, adv_bssid1=None):
if "OWE" not in dev[0].get_capability("key_mgmt"):
raise HwsimSkip("OWE not supported")
dev[0].flush_scan_cache()
adv_bssid = adv_bssid0 if adv_bssid0 else apdev[1]['bssid']
params = {"ssid": "owe-random",
"wpa": "2",
"wpa_key_mgmt": "OWE",
"rsn_pairwise": "CCMP",
"ieee80211w": "2",
"owe_transition_bssid": adv_bssid,
"owe_transition_ssid": '"owe-test"',
"ignore_broadcast_ssid": "1"}
hapd = hostapd.add_ap(apdev[0], params)
bssid = hapd.own_addr()
adv_bssid = adv_bssid1 if adv_bssid1 else apdev[0]['bssid']
params = {"ssid": "owe-test",
"owe_transition_bssid": adv_bssid,
"owe_transition_ssid": '"owe-random"'}
hapd2 = hostapd.add_ap(apdev[1], params)
bssid2 = hapd2.own_addr()
dev[0].scan_for_bss(bssid, freq="2412")
dev[0].scan_for_bss(bssid2, freq="2412")
bss = dev[0].get_bss(bssid)
if "[WPA2-OWE-CCMP]" not in bss['flags']:
raise Exception("OWE AKM not recognized: " + bss['flags'])
if "[OWE-TRANS]" not in bss['flags']:
raise Exception("OWE transition not recognized: " + bss['flags'])
bss = dev[0].get_bss(bssid2)
if "[OWE-TRANS-OPEN]" not in bss['flags']:
raise Exception("OWE transition (open) not recognized: " + bss['flags'])
id = dev[0].connect("owe-test", key_mgmt="OWE", ieee80211w="2",
scan_freq="2412")
hapd.wait_sta()
hwsim_utils.test_connectivity(dev[0], hapd)
val = dev[0].get_status_field("key_mgmt")
if val != "OWE":
raise Exception("Unexpected key_mgmt: " + val)
logger.info("Move to OWE only mode (disable transition mode)")
dev[0].request("DISCONNECT")
dev[0].wait_disconnected()
dev[0].dump_monitor()
hapd2.disable()
hapd.disable()
dev[0].flush_scan_cache()
hapd.set("owe_transition_bssid", "00:00:00:00:00:00")
hapd.set("ignore_broadcast_ssid", '0')
hapd.set("ssid", 'owe-test')
hapd.enable()
dev[0].scan_for_bss(bssid, freq="2412")
dev[0].select_network(id, 2412)
dev[0].wait_connected()
hapd.wait_sta()
hwsim_utils.test_connectivity(dev[0], hapd)
def test_owe_transition_mode_ifname(dev, apdev):
"""Opportunistic Wireless Encryption transition mode (ifname)"""
if "OWE" not in dev[0].get_capability("key_mgmt"):
raise HwsimSkip("OWE not supported")
dev[0].flush_scan_cache()
params = {"ssid": "owe-random",
"wpa": "2",
"wpa_key_mgmt": "OWE",
"rsn_pairwise": "CCMP",
"ieee80211w": "2",
"owe_transition_ifname": apdev[1]['ifname'],
"ignore_broadcast_ssid": "1"}
hapd = hostapd.add_ap(apdev[0], params)
bssid = hapd.own_addr()
params = {"ssid": "owe-test",
"owe_transition_ifname": apdev[0]['ifname']}
hapd2 = hostapd.add_ap(apdev[1], params)
bssid2 = hapd2.own_addr()
dev[0].scan_for_bss(bssid, freq="2412")
dev[0].scan_for_bss(bssid2, freq="2412")
id = dev[0].connect("owe-test", key_mgmt="OWE", ieee80211w="2",
scan_freq="2412")
val = dev[0].get_status_field("key_mgmt")
if val != "OWE":
raise Exception("Unexpected key_mgmt: " + val)
def test_owe_transition_mode_ifname_acs(dev, apdev):
"""Opportunistic Wireless Encryption transition mode (ifname, ACS)"""
run_owe_transition_mode_ifname_acs(dev, apdev, wait_first=False)
def test_owe_transition_mode_ifname_acs2(dev, apdev):
"""Opportunistic Wireless Encryption transition mode (ifname, ACS)"""
run_owe_transition_mode_ifname_acs(dev, apdev, wait_first=True)
def run_owe_transition_mode_ifname_acs(dev, apdev, wait_first):
if "OWE" not in dev[0].get_capability("key_mgmt"):
raise HwsimSkip("OWE not supported")
dev[0].flush_scan_cache()
params = {"ssid": "owe-random",
"channel": "0",
"wpa": "2",
"wpa_key_mgmt": "OWE",
"rsn_pairwise": "CCMP",
"ieee80211w": "2",
"owe_transition_ifname": apdev[1]['ifname'],
"ignore_broadcast_ssid": "1"}
hapd = hostapd.add_ap(apdev[0], params, wait_enabled=False)
bssid = hapd.own_addr()
if wait_first:
wait_acs(hapd)
params = {"ssid": "owe-test",
"channel": "0",
"owe_transition_ifname": apdev[0]['ifname']}
hapd2 = hostapd.add_ap(apdev[1], params, wait_enabled=False)
bssid2 = hapd2.own_addr()
wait_acs(hapd2)
if not wait_first:
state = hapd.get_status_field("state")
if state == "ACS-STARTED":
time.sleep(5)
state = hapd.get_status_field("state")
if state != "ENABLED":
raise Exception("AP1 startup did not succeed")
freq = hapd.get_status_field("freq")
freq2 = hapd2.get_status_field("freq")
dev[0].scan_for_bss(bssid, freq=freq)
dev[0].scan_for_bss(bssid2, freq=freq2)
id = dev[0].connect("owe-test", key_mgmt="OWE", ieee80211w="2",
scan_freq="%s %s" % (freq, freq2))
val = dev[0].get_status_field("key_mgmt")
if val != "OWE":
raise Exception("Unexpected key_mgmt: " + val)
def test_owe_transition_mode_open_only_ap(dev, apdev):
"""Opportunistic Wireless Encryption transition mode connect to open-only AP"""
if "OWE" not in dev[0].get_capability("key_mgmt"):
raise HwsimSkip("OWE not supported")
dev[0].flush_scan_cache()
params = {"ssid": "owe-test-open"}
hapd = hostapd.add_ap(apdev[0], params)
bssid = hapd.own_addr()
dev[0].scan_for_bss(bssid, freq="2412")
bss = dev[0].get_bss(bssid)
id = dev[0].connect("owe-test-open", key_mgmt="OWE", ieee80211w="2",
scan_freq="2412")
hwsim_utils.test_connectivity(dev[0], hapd)
val = dev[0].get_status_field("key_mgmt")
if val != "NONE":
raise Exception("Unexpected key_mgmt: " + val)
def test_owe_only_sta(dev, apdev):
"""Opportunistic Wireless Encryption transition mode disabled on STA"""
if "OWE" not in dev[0].get_capability("key_mgmt"):
raise HwsimSkip("OWE not supported")
dev[0].flush_scan_cache()
params = {"ssid": "owe-test-open"}
hapd = hostapd.add_ap(apdev[0], params)
bssid = hapd.own_addr()
dev[0].scan_for_bss(bssid, freq="2412")
id = dev[0].connect("owe-test-open", key_mgmt="OWE", ieee80211w="2",
scan_freq="2412", owe_only="1", wait_connect=False)
ev = dev[0].wait_event(["CTRL-EVENT-CONNECTED",
"CTRL-EVENT-NETWORK-NOT-FOUND"], timeout=10)
if not ev:
raise Exception("Unknown result for the connection attempt")
if "CTRL-EVENT-CONNECTED" in ev:
raise Exception("Unexpected connection to open network")
dev[0].request("DISCONNECT")
dev[0].dump_monitor()
params = {"ssid": "owe-test-open",
"wpa": "2",
"ieee80211w": "2",
"wpa_key_mgmt": "OWE",
"rsn_pairwise": "CCMP"}
hapd2 = hostapd.add_ap(apdev[1], params)
dev[0].request("RECONNECT")
dev[0].wait_connected()
def test_owe_transition_mode_open_multiple_scans(dev, apdev):
"""Opportunistic Wireless Encryption transition mode and need for multiple scans"""
if "OWE" not in dev[0].get_capability("key_mgmt"):
raise HwsimSkip("OWE not supported")
dev[0].flush_scan_cache()
params = {"ssid": "owe-test",
"owe_transition_bssid": apdev[0]['bssid'],
"owe_transition_ssid": '"owe-random"'}
hapd2 = hostapd.add_ap(apdev[1], params)
bssid2 = hapd2.own_addr()
dev[0].scan_for_bss(bssid2, freq="2412")
dev[0].dump_monitor()
id = dev[0].connect("owe-test", key_mgmt="OWE", ieee80211w="2",
scan_freq="2412", wait_connect=False)
ev = dev[0].wait_event(["CTRL-EVENT-SCAN-RESULTS"], timeout=1)
params = {"ssid": "owe-random",
"wpa": "2",
"wpa_key_mgmt": "OWE",
"rsn_pairwise": "CCMP",
"ieee80211w": "2",
"owe_transition_bssid": apdev[1]['bssid'],
"owe_transition_ssid": '"owe-test"',
"ignore_broadcast_ssid": "1"}
hapd = hostapd.add_ap(apdev[0], params)
bssid = hapd.own_addr()
dev[0].wait_connected()
val = dev[0].get_status_field("key_mgmt")
if val != "OWE":
raise Exception("Unexpected key_mgmt: " + val)
def test_owe_transition_mode_multi_bss(dev, apdev):
"""Opportunistic Wireless Encryption transition mode (multi BSS)"""
try:
run_owe_transition_mode_multi_bss(dev, apdev)
finally:
dev[0].request("SCAN_INTERVAL 5")
def run_owe_transition_mode_multi_bss(dev, apdev):
if "OWE" not in dev[0].get_capability("key_mgmt"):
raise HwsimSkip("OWE not supported")
ifname1 = apdev[0]['ifname']
ifname2 = apdev[0]['ifname'] + '-2'
hapd1 = hostapd.add_bss(apdev[0], ifname1, 'owe-bss-1.conf')
hapd2 = hostapd.add_bss(apdev[0], ifname2, 'owe-bss-2.conf')
hapd2.bssidx = 1
bssid = hapd1.own_addr()
bssid2 = hapd2.own_addr()
# Beaconing with the OWE Transition Mode element can start only once both
# BSSs are enabled, so the very first Beacon frame may go out without this
# element. Wait a bit to avoid getting incomplete scan results.
time.sleep(0.1)
dev[0].request("SCAN_INTERVAL 1")
dev[0].scan_for_bss(bssid2, freq="2412")
dev[0].scan_for_bss(bssid, freq="2412")
dev[0].connect("transition-mode-open", key_mgmt="OWE")
val = dev[0].get_status_field("bssid")
if val != bssid2:
raise Exception("Unexpected bssid: " + val)
val = dev[0].get_status_field("key_mgmt")
if val != "OWE":
raise Exception("Unexpected key_mgmt: " + val)
hwsim_utils.test_connectivity(dev[0], hapd2)
def test_owe_transition_mode_rsne_mismatch(dev, apdev):
"""Opportunistic Wireless Encryption transition mode and RSNE mismatch"""
if "OWE" not in dev[0].get_capability("key_mgmt"):
raise HwsimSkip("OWE not supported")
dev[0].flush_scan_cache()
params = {"ssid": "owe-random",
"wpa": "2",
"wpa_key_mgmt": "OWE",
"rsn_pairwise": "CCMP",
"ieee80211w": "2",
"rsne_override_eapol": "30140100000fac040100000fac040100000fac020c00",
"owe_transition_bssid": apdev[1]['bssid'],
"owe_transition_ssid": '"owe-test"',
"ignore_broadcast_ssid": "1"}
hapd = hostapd.add_ap(apdev[0], params)
bssid = hapd.own_addr()
params = {"ssid": "owe-test",
"owe_transition_bssid": apdev[0]['bssid'],
"owe_transition_ssid": '"owe-random"'}
hapd2 = hostapd.add_ap(apdev[1], params)
bssid2 = hapd2.own_addr()
dev[0].scan_for_bss(bssid, freq="2412")
dev[0].scan_for_bss(bssid2, freq="2412")
id = dev[0].connect("owe-test", key_mgmt="OWE", ieee80211w="2",
scan_freq="2412", wait_connect=False)
ev = dev[0].wait_event(["PMKSA-CACHE-ADDED"], timeout=5)
if ev is None:
raise Exception("OWE PMKSA not created")
ev = dev[0].wait_event(["WPA: IE in 3/4 msg does not match with IE in Beacon/ProbeResp"],
timeout=5)
if ev is None:
raise Exception("RSNE mismatch not reported")
ev = dev[0].wait_event(["CTRL-EVENT-CONNECTED",
"CTRL-EVENT-DISCONNECTED"], timeout=5)
dev[0].request("REMOVE_NETWORK all")
if ev is None:
raise Exception("No disconnection seen")
if "CTRL-EVENT-DISCONNECTED" not in ev:
raise Exception("Unexpected connection")
if "reason=17 locally_generated=1" not in ev:
raise Exception("Unexpected disconnection reason: " + ev)
def test_owe_unsupported_group(dev, apdev):
"""Opportunistic Wireless Encryption and unsupported group"""
try:
run_owe_unsupported_group(dev, apdev)
finally:
dev[0].request("VENDOR_ELEM_REMOVE 13 *")
def test_owe_unsupported_group_connect_cmd(dev, apdev):
"""Opportunistic Wireless Encryption and unsupported group using cfg80211 connect command"""
try:
wpas = None
wpas = WpaSupplicant(global_iface='/tmp/wpas-wlan5')
wpas.interface_add("wlan5", drv_params="force_connect_cmd=1")
run_owe_unsupported_group([wpas], apdev)
finally:
if wpas:
wpas.request("VENDOR_ELEM_REMOVE 13 *")
def run_owe_unsupported_group(dev, apdev):
if "OWE" not in dev[0].get_capability("key_mgmt"):
raise HwsimSkip("OWE not supported")
    # Override the OWE Diffie-Hellman Parameter element with a payload that
    # uses the invalid group 0 (with actual group 19 data) to make the AP
    # reject this with the specific status code 77.
dev[0].request("VENDOR_ELEM_ADD 13 ff23200000783590fb7440e03d5b3b33911f86affdcc6b4411b707846ac4ff08ddc8831ccd")
params = {"ssid": "owe",
"wpa": "2",
"wpa_key_mgmt": "OWE",
"rsn_pairwise": "CCMP"}
hapd = hostapd.add_ap(apdev[0], params)
bssid = hapd.own_addr()
dev[0].scan_for_bss(bssid, freq="2412")
dev[0].connect("owe", key_mgmt="OWE", wait_connect=False)
ev = dev[0].wait_event(["CTRL-EVENT-ASSOC-REJECT"], timeout=10)
dev[0].request("DISCONNECT")
if ev is None:
raise Exception("Association not rejected")
if "status_code=77" not in ev:
raise Exception("Unexpected rejection reason: " + ev)
def test_owe_limited_group_set(dev, apdev):
"""Opportunistic Wireless Encryption and limited group set"""
if "OWE" not in dev[0].get_capability("key_mgmt"):
raise HwsimSkip("OWE not supported")
params = {"ssid": "owe",
"wpa": "2",
"wpa_key_mgmt": "OWE",
"rsn_pairwise": "CCMP",
"owe_groups": "20 21"}
hapd = hostapd.add_ap(apdev[0], params)
bssid = hapd.own_addr()
dev[0].scan_for_bss(bssid, freq="2412")
dev[0].connect("owe", key_mgmt="OWE", owe_group="19", wait_connect=False)
ev = dev[0].wait_event(["CTRL-EVENT-ASSOC-REJECT"], timeout=10)
dev[0].request("DISCONNECT")
if ev is None:
raise Exception("Association not rejected")
if "status_code=77" not in ev:
raise Exception("Unexpected rejection reason: " + ev)
dev[0].dump_monitor()
for group in [20, 21]:
dev[0].connect("owe", key_mgmt="OWE", owe_group=str(group))
dev[0].request("REMOVE_NETWORK all")
dev[0].wait_disconnected()
dev[0].dump_monitor()
def test_owe_limited_group_set_pmf(dev, apdev, params):
"""Opportunistic Wireless Encryption and limited group set (PMF)"""
if "OWE" not in dev[0].get_capability("key_mgmt"):
raise HwsimSkip("OWE not supported")
pcapng = os.path.join(params['logdir'], "hwsim0.pcapng")
params = {"ssid": "owe",
"wpa": "2",
"ieee80211w": "2",
"wpa_key_mgmt": "OWE",
"rsn_pairwise": "CCMP",
"owe_groups": "21"}
hapd = hostapd.add_ap(apdev[0], params)
bssid = hapd.own_addr()
dev[0].scan_for_bss(bssid, freq="2412")
dev[0].connect("owe", key_mgmt="OWE", owe_group="19", ieee80211w="2",
scan_freq="2412", wait_connect=False)
ev = dev[0].wait_event(["CTRL-EVENT-ASSOC-REJECT"], timeout=10)
dev[0].request("DISCONNECT")
if ev is None:
raise Exception("Association not rejected")
if "status_code=77" not in ev:
raise Exception("Unexpected rejection reason: " + ev)
dev[0].dump_monitor()
dev[0].connect("owe", key_mgmt="OWE", owe_group="20", ieee80211w="2",
scan_freq="2412", wait_connect=False)
ev = dev[0].wait_event(["CTRL-EVENT-ASSOC-REJECT"], timeout=10)
dev[0].request("DISCONNECT")
if ev is None:
raise Exception("Association not rejected (2)")
if "status_code=77" not in ev:
raise Exception("Unexpected rejection reason (2): " + ev)
dev[0].dump_monitor()
dev[0].connect("owe", key_mgmt="OWE", owe_group="21", ieee80211w="2",
scan_freq="2412")
dev[0].request("REMOVE_NETWORK all")
dev[0].wait_disconnected()
dev[0].dump_monitor()
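    # Check over the air: two Association Response frames rejected with
    # status code 77 (unsupported finite cyclic group) and one success.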
out = run_tshark(pcapng,
"wlan.fc.type_subtype == 1",
display=['wlan_mgt.fixed.status_code'])
status = out.splitlines()
logger.info("Association Response frame status codes: " + str(status))
if len(status) != 3:
raise Exception("Unexpected number of Association Response frames")
if (int(status[0], base=0) != 77 or int(status[1], base=0) != 77 or
int(status[2], base=0) != 0):
raise Exception("Unexpected Association Response frame status code")
def test_owe_group_negotiation(dev, apdev):
"""Opportunistic Wireless Encryption and group negotiation"""
run_owe_group_negotiation(dev[0], apdev)
def test_owe_group_negotiation_connect_cmd(dev, apdev):
"""Opportunistic Wireless Encryption and group negotiation (connect command)"""
wpas = WpaSupplicant(global_iface='/tmp/wpas-wlan5')
wpas.interface_add("wlan5", drv_params="force_connect_cmd=1")
run_owe_group_negotiation(wpas, apdev)
def run_owe_group_negotiation(dev, apdev):
if "OWE" not in dev.get_capability("key_mgmt"):
raise HwsimSkip("OWE not supported")
params = {"ssid": "owe",
"wpa": "2",
"wpa_key_mgmt": "OWE",
"rsn_pairwise": "CCMP",
"owe_groups": "21"}
hapd = hostapd.add_ap(apdev[0], params)
bssid = hapd.own_addr()
dev.scan_for_bss(bssid, freq="2412")
dev.connect("owe", key_mgmt="OWE")
def test_owe_assoc_reject(dev, apdev):
"""Opportunistic Wireless Encryption association rejection handling"""
if "OWE" not in dev[0].get_capability("key_mgmt"):
raise HwsimSkip("OWE not supported")
params = {"ssid": "owe",
"require_ht": "1",
"wpa": "2",
"ieee80211w": "2",
"wpa_key_mgmt": "OWE",
"rsn_pairwise": "CCMP",
"owe_groups": "19"}
hapd = hostapd.add_ap(apdev[0], params)
bssid = hapd.own_addr()
# First, reject two associations with HT-required (i.e., not OWE related)
dev[0].scan_for_bss(bssid, freq="2412")
dev[0].connect("owe", key_mgmt="OWE", ieee80211w="2",
disable_ht="1", scan_freq="2412", wait_connect=False)
for i in range(0, 2):
ev = dev[0].wait_event(["CTRL-EVENT-ASSOC-REJECT"], timeout=10)
if ev is None:
raise Exception("Association rejection not reported")
# Then, verify that STA tries OWE with the default group (19) on the next
# attempt instead of having moved to testing another group.
hapd.set("require_ht", "0")
for i in range(0, 2):
ev = dev[0].wait_event(["CTRL-EVENT-ASSOC-REJECT",
"CTRL-EVENT-CONNECTED"], timeout=10)
if ev is None:
raise Exception("Association result not reported")
if "CTRL-EVENT-CONNECTED" in ev:
break
if "status_code=77" in ev:
raise Exception("Unexpected unsupport group rejection")
if "CTRL-EVENT-CONNECTED" not in ev:
raise Exception("Did not connect successfully")
def test_owe_local_errors(dev, apdev):
"""Opportunistic Wireless Encryption - local errors on supplicant"""
if "OWE" not in dev[0].get_capability("key_mgmt"):
raise HwsimSkip("OWE not supported")
params = {"ssid": "owe",
"wpa": "2",
"ieee80211w": "2",
"wpa_key_mgmt": "OWE",
"rsn_pairwise": "CCMP"}
hapd = hostapd.add_ap(apdev[0], params)
bssid = hapd.own_addr()
dev[0].scan_for_bss(bssid, freq="2412")
tests = [(1, "crypto_ecdh_init;owe_build_assoc_req"),
(1, "crypto_ecdh_get_pubkey;owe_build_assoc_req"),
(1, "wpabuf_alloc;owe_build_assoc_req")]
for count, func in tests:
with alloc_fail(dev[0], count, func):
dev[0].connect("owe", key_mgmt="OWE", owe_group="20",
ieee80211w="2",
scan_freq="2412", wait_connect=False)
wait_fail_trigger(dev[0], "GET_ALLOC_FAIL")
dev[0].request("REMOVE_NETWORK all")
dev[0].dump_monitor()
tests = [(1, "crypto_ecdh_set_peerkey;owe_process_assoc_resp"),
(1, "crypto_ecdh_get_pubkey;owe_process_assoc_resp"),
(1, "wpabuf_alloc;=owe_process_assoc_resp")]
for count, func in tests:
with alloc_fail(dev[0], count, func):
dev[0].connect("owe", key_mgmt="OWE", owe_group="20",
ieee80211w="2",
scan_freq="2412", wait_connect=False)
dev[0].wait_disconnected()
dev[0].request("REMOVE_NETWORK all")
dev[0].dump_monitor()
tests = [(1, "hmac_sha256;owe_process_assoc_resp", 19),
(1, "hmac_sha256_kdf;owe_process_assoc_resp", 19),
(1, "hmac_sha384;owe_process_assoc_resp", 20),
(1, "hmac_sha384_kdf;owe_process_assoc_resp", 20),
(1, "hmac_sha512;owe_process_assoc_resp", 21),
(1, "hmac_sha512_kdf;owe_process_assoc_resp", 21)]
for count, func, group in tests:
with fail_test(dev[0], count, func):
dev[0].connect("owe", key_mgmt="OWE", owe_group=str(group),
ieee80211w="2",
scan_freq="2412", wait_connect=False)
dev[0].wait_disconnected()
dev[0].request("REMOVE_NETWORK all")
dev[0].dump_monitor()
dev[0].connect("owe", key_mgmt="OWE", owe_group="18",
ieee80211w="2",
scan_freq="2412", wait_connect=False)
ev = dev[0].wait_event(["SME: Trying to authenticate"], timeout=5)
if ev is None:
raise Exception("No authentication attempt")
time.sleep(0.5)
dev[0].request("REMOVE_NETWORK all")
dev[0].dump_monitor()
def hapd_auth(hapd):
for i in range(0, 10):
req = hapd.mgmt_rx()
if req is None:
raise Exception("MGMT RX wait timed out")
if req['subtype'] == 11:
break
req = None
if not req:
raise Exception("Authentication frame not received")
resp = {}
resp['fc'] = req['fc']
resp['da'] = req['sa']
resp['sa'] = req['da']
resp['bssid'] = req['bssid']
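    # Authentication frame body: algorithm 0 (open system), transaction
    # sequence 2, status code 0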
resp['payload'] = struct.pack('<HHH', 0, 2, 0)
hapd.mgmt_tx(resp)
def hapd_assoc(hapd, extra):
for i in range(0, 10):
req = hapd.mgmt_rx()
if req is None:
raise Exception("MGMT RX wait timed out")
if req['subtype'] == 0:
break
req = None
if not req:
raise Exception("Association Request frame not received")
resp = {}
resp['fc'] = 0x0010
resp['da'] = req['sa']
resp['sa'] = req['da']
resp['bssid'] = req['bssid']
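    # Association Response body: Capability Information, status code 0, AID,
    # followed by a Supported Rates element; the caller appends the
    # (possibly invalid) OWE part in 'extra'.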
payload = struct.pack('<HHH', 0x0411, 0, 0xc001)
payload += binascii.unhexlify("010882848b960c121824")
resp['payload'] = payload + extra
hapd.mgmt_tx(resp)
def test_owe_invalid_assoc_resp(dev, apdev):
"""Opportunistic Wireless Encryption - invalid Association Response frame"""
if "OWE" not in dev[0].get_capability("key_mgmt"):
raise HwsimSkip("OWE not supported")
params = {"ssid": "owe",
"wpa": "2",
"ieee80211w": "2",
"wpa_key_mgmt": "OWE",
"rsn_pairwise": "CCMP"}
hapd = hostapd.add_ap(apdev[0], params)
bssid = hapd.own_addr()
dev[0].scan_for_bss(bssid, freq="2412")
hapd.set("ext_mgmt_frame_handling", "1")
# OWE: No Diffie-Hellman Parameter element found in Association Response frame
tests = [b'']
# No room for group --> no DH Params
tests += [binascii.unhexlify('ff0120')]
# OWE: Unexpected Diffie-Hellman group in response: 18
tests += [binascii.unhexlify('ff03201200')]
# OWE: Invalid peer DH public key
tests += [binascii.unhexlify('ff23201300' + 31*'00' + '01')]
# OWE: Invalid peer DH public key
tests += [binascii.unhexlify('ff24201300' + 33*'ee')]
for extra in tests:
dev[0].connect("owe", key_mgmt="OWE", owe_group="19", ieee80211w="2",
scan_freq="2412", wait_connect=False)
hapd_auth(hapd)
hapd_assoc(hapd, extra)
dev[0].wait_disconnected()
dev[0].request("REMOVE_NETWORK all")
dev[0].dump_monitor()
# OWE: Empty public key (this ends up getting padded to a valid point)
dev[0].connect("owe", key_mgmt="OWE", owe_group="19", ieee80211w="2",
scan_freq="2412", wait_connect=False)
hapd_auth(hapd)
hapd_assoc(hapd, binascii.unhexlify('ff03201300'))
ev = dev[0].wait_event(["CTRL-EVENT-DISCONNECTED", "PMKSA-CACHE-ADDED"],
timeout=5)
if ev is None:
raise Exception("No result reported for empty public key")
dev[0].request("REMOVE_NETWORK all")
dev[0].dump_monitor()
def start_owe(dev, apdev, workaround=0):
if "OWE" not in dev[0].get_capability("key_mgmt"):
raise HwsimSkip("OWE not supported")
params = {"ssid": "owe",
"wpa": "2",
"ieee80211w": "2",
"wpa_key_mgmt": "OWE",
"owe_ptk_workaround": str(workaround),
"rsn_pairwise": "CCMP"}
hapd = hostapd.add_ap(apdev[0], params)
dev[0].scan_for_bss(hapd.own_addr(), freq="2412")
return hapd
def owe_check_ok(dev, hapd, owe_group, owe_ptk_workaround):
dev.connect("owe", key_mgmt="OWE", ieee80211w="2",
owe_group=owe_group, owe_ptk_workaround=owe_ptk_workaround,
scan_freq="2412")
hapd.wait_sta()
dev.request("REMOVE_NETWORK all")
dev.wait_disconnected()
dev.dump_monitor()
def test_owe_ptk_workaround_ap(dev, apdev):
"""Opportunistic Wireless Encryption - AP using PTK workaround"""
hapd = start_owe(dev, apdev, workaround=1)
for group, workaround in [(19, 0), (20, 0), (21, 0),
(19, 1), (20, 1), (21, 1)]:
owe_check_ok(dev[0], hapd, str(group), str(workaround))
def | (dev, apdev):
"""Opportunistic Wireless Encryption - PTK derivation hash alg"""
hapd = start_owe(dev, apdev)
for group, workaround in [(19, 0), (20, 0), (21, 0), (19, 1)]:
owe_check_ok(dev[0], hapd, str(group), str(workaround))
for group in [20, 21]:
dev[0].connect("owe", key_mgmt="OWE", ieee80211w="2",
owe_group=str(group), owe_ptk_workaround="1",
scan_freq="2412", wait_connect=False)
ev = dev[0].wait_event(["PMKSA-CACHE-ADDED"], timeout=10)
if ev is None:
raise Exception("Could not complete OWE association")
ev = dev[0].wait_event(["CTRL-EVENT-CONNECTED",
"CTRL-EVENT-DISCONNECTED"], timeout=5)
if ev is None:
raise Exception("Unknown connection result")
if "CTRL-EVENT-CONNECTED" in ev:
raise Exception("Unexpected connection")
dev[0].request("REMOVE_NETWORK all")
ev = dev[0].wait_event(["PMKSA-CACHE-REMOVED"], timeout=5)
if ev is None:
raise Exception("No PMKSA cache removal event seen")
dev[0].dump_monitor()
def test_owe_transition_mode_disable(dev, apdev):
"""Opportunistic Wireless Encryption transition mode disable"""
if "OWE" not in dev[0].get_capability("key_mgmt"):
raise HwsimSkip("OWE not supported")
dev[0].flush_scan_cache()
params = {"ssid": "owe-random",
"wpa": "2",
"wpa_key_mgmt": "OWE",
"rsn_pairwise": "CCMP",
"ieee80211w": "2",
"transition_disable": '0x08',
"owe_transition_bssid": apdev[1]['bssid'],
"owe_transition_ssid": '"owe-test"',
"ignore_broadcast_ssid": "1"}
hapd = hostapd.add_ap(apdev[0], params)
bssid = hapd.own_addr()
params = {"ssid": "owe-test",
"owe_transition_bssid": apdev[0]['bssid'],
"owe_transition_ssid": '"owe-random"'}
hapd2 = hostapd.add_ap(apdev[1], params)
bssid2 = hapd2.own_addr()
dev[0].scan_for_bss(bssid, freq="2412")
dev[0].scan_for_bss(bssid2, freq="2412")
id = dev[0].connect("owe-test", key_mgmt="OWE", ieee80211w="2",
scan_freq="2412")
ev = dev[0].wait_event(["TRANSITION-DISABLE"], timeout=1)
if ev is None:
raise Exception("Transition disable not indicated")
if ev.split(' ')[1] != "08":
raise Exception("Unexpected transition disable bitmap: " + ev)
val = dev[0].get_network(id, "owe_only")
if val != "1":
raise Exception("Unexpected owe_only value: " + val)
dev[0].request("DISCONNECT")
dev[0].wait_disconnected()
dev[0].request("RECONNECT")
dev[0].wait_connected()
def test_owe_sa_query(dev, apdev):
"""Opportunistic Wireless Encryption - SA Query"""
if "OWE" not in dev[0].get_capability("key_mgmt"):
raise HwsimSkip("OWE not supported")
params = {"ssid": "owe",
"wpa": "2",
"ieee80211w": "2",
"wpa_key_mgmt": "OWE",
"rsn_pairwise": "CCMP"}
hapd = hostapd.add_ap(apdev[0], params)
bssid = hapd.own_addr()
dev[0].scan_for_bss(bssid, freq="2412")
dev[0].connect("owe", key_mgmt="OWE", owe_group="19", ieee80211w="2",
scan_freq="2412")
hapd.wait_sta()
hapd.set("ext_mgmt_frame_handling", "1")
dev[0].request("DISCONNECT")
dev[0].wait_disconnected(timeout=10)
hapd.set("ext_mgmt_frame_handling", "0")
dev[0].request("PMKSA_FLUSH")
dev[0].request("REASSOCIATE")
dev[0].wait_connected(timeout=10, error="Timeout on re-connection")
| test_owe_ptk_hash |
main.go | package main
import (
"HttpServer/config"
"encoding/json"
"fmt"
"io"
"io/ioutil"
"net/http"
"os"
"path"
"path/filepath"
"time"
log1 "github.com/sirupsen/logrus"
)
var conf *config.Config
var (
log *log1.Logger
logFile *os.File
logDir string = "logs"
)
func init() {
initLog()
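	// Build a default config and write conf.json to the working directory on
	// first run.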
conf = &config.Config{Server: ":8080", Uris: []config.Uri{
{Path: "/", Body: "{\"code\":\"200\",\"msg\":\"success\"}", Headers: map[string]string{"Content-Type": "application/json"}},
}}
dir, _ := os.Getwd()
confPath := filepath.Join(dir, "conf.json")
if !IsExist(confPath) {
s2, _ := json.MarshalIndent(*conf, "", " ")
		ioutil.WriteFile(confPath, s2, os.FileMode(0666))
}
bConf, e := ioutil.ReadFile(confPath)
if e != nil {
log.Println("Error", e)
}
json.Unmarshal(bConf, conf)
// sConf := string(bConf)
// log.Println("conf", confPath, sConf)
// s1, _ := json.MarshalIndent(*conf, "", " ")
// log.Println(string(s1))
s1, _ := json.Marshal(*conf)
log.Info(string(s1))
initHandle()
}
func initLog() |
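// initHandle registers an HTTP handler for every URI in the config.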
func initHandle() {
for _, _uri := range conf.Uris {
http.HandleFunc(_uri.Path, func(w http.ResponseWriter, r *http.Request) {
rBody, _ := ioutil.ReadAll(r.Body)
sBody := string(rBody)
// log.Println("Req: ", r.URL.Path, r.Method, r.Header, sBody)
log.WithFields((log1.Fields{
"Path": r.URL.Path,
"Method": r.Method,
"Headers": r.Header,
"Body": sBody,
})).Info("Req")
uri := findURIByPath(r.URL.Path)
if uri == nil {
w.Header().Set("Content-Type", "application/json")
rspBody := "{\"code\":\"200\",\"msg\":\"success\"}"
				fmt.Fprint(w, rspBody)
// log.Println("Unknown Path: " + r.URL.Path + ", Use Default")
log.WithFields((log1.Fields{
"Path": r.URL.Path,
"Headers": w.Header(),
"Body": rspBody,
})).Info("Rsp")
return
}
for k, v := range uri.Headers {
w.Header().Set(k, v)
}
			fmt.Fprint(w, uri.Body)
// log.Println("Rsp", w.Header(), uri.Body)
log.WithFields((log1.Fields{
"Path": r.URL.Path,
"Headers": w.Header(),
"Body": uri.Body,
})).Info("Rsp")
})
}
}
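// findURIByPath returns the configured Uri whose Path equals path, or nil.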
func findURIByPath(path string) *config.Uri {
	for i := range conf.Uris {
		if conf.Uris[i].Path == path {
			// Return a pointer into the slice rather than the address of the
			// loop variable, which is reused across iterations.
			return &conf.Uris[i]
		}
	}
	return nil
}
func main() {
defer logFile.Close()
log.WithFields(log1.Fields{"Server": conf.Server}).Info("Start")
log.Fatal(http.ListenAndServe(conf.Server, nil))
}
// IsExist checks whether a file or directory exists.
// It returns false when the file or directory does not exist.
func IsExist(f string) bool {
_, err := os.Stat(f)
return err == nil || os.IsExist(err)
}
| {
os.MkdirAll(logDir, os.ModePerm)
// t := time.Now().Format("20060102150405")
t := time.Now().Format("2006010215")
logPath := path.Join(logDir, "log_"+t+".log")
	var err error
	logFile, err = os.OpenFile(logPath, os.O_CREATE|os.O_WRONLY|os.O_APPEND, 0666)
	if err != nil {
		log1.Fatalln("Open Log File Error:", err)
	}
log = log1.New()
log.Out = io.MultiWriter(os.Stdout, logFile)
} |
cPatches.py | # -*- coding: utf-8 -*-
# Form implementation generated from reading ui file 'cPatches.ui'
#
# Created by: PyQt5 UI code generator 5.6
#
# WARNING! All changes made in this file will be lost!
from PyQt5 import QtCore, QtGui, QtWidgets
class | (object):
def setupUi(self, Patches):
Patches.setObjectName("Patches")
Patches.resize(497, 492)
self.verticalLayout = QtWidgets.QVBoxLayout(Patches)
self.verticalLayout.setObjectName("verticalLayout")
self.gridLayout = QtWidgets.QGridLayout()
self.gridLayout.setObjectName("gridLayout")
self.label_23 = QtWidgets.QLabel(Patches)
self.label_23.setFrameShadow(QtWidgets.QFrame.Raised)
self.label_23.setObjectName("label_23")
self.gridLayout.addWidget(self.label_23, 0, 0, 1, 1)
self.listWidget = QtWidgets.QListWidget(Patches)
sizePolicy = QtWidgets.QSizePolicy(QtWidgets.QSizePolicy.Expanding, QtWidgets.QSizePolicy.Expanding)
sizePolicy.setHorizontalStretch(0)
sizePolicy.setVerticalStretch(0)
sizePolicy.setHeightForWidth(self.listWidget.sizePolicy().hasHeightForWidth())
self.listWidget.setSizePolicy(sizePolicy)
self.listWidget.setObjectName("listWidget")
self.gridLayout.addWidget(self.listWidget, 1, 0, 1, 1)
self.stackedWidget = QtWidgets.QStackedWidget(Patches)
sizePolicy = QtWidgets.QSizePolicy(QtWidgets.QSizePolicy.Maximum, QtWidgets.QSizePolicy.Preferred)
sizePolicy.setHorizontalStretch(0)
sizePolicy.setVerticalStretch(0)
sizePolicy.setHeightForWidth(self.stackedWidget.sizePolicy().hasHeightForWidth())
self.stackedWidget.setSizePolicy(sizePolicy)
self.stackedWidget.setObjectName("stackedWidget")
self.page = QtWidgets.QWidget()
self.page.setObjectName("page")
self.verticalLayout_4 = QtWidgets.QVBoxLayout(self.page)
self.verticalLayout_4.setContentsMargins(0, 0, 0, 0)
self.verticalLayout_4.setObjectName("verticalLayout_4")
self.gridLayout_3 = QtWidgets.QGridLayout()
self.gridLayout_3.setObjectName("gridLayout_3")
self.label_27 = QtWidgets.QLabel(self.page)
self.label_27.setText("")
self.label_27.setObjectName("label_27")
self.gridLayout_3.addWidget(self.label_27, 7, 1, 1, 1)
self.b_21 = QtWidgets.QPushButton(self.page)
self.b_21.setText("")
self.b_21.setObjectName("b_21")
self.gridLayout_3.addWidget(self.b_21, 3, 1, 1, 1)
self.label_15 = QtWidgets.QLabel(self.page)
self.label_15.setObjectName("label_15")
self.gridLayout_3.addWidget(self.label_15, 5, 0, 1, 1)
self.label_17 = QtWidgets.QLabel(self.page)
self.label_17.setObjectName("label_17")
self.gridLayout_3.addWidget(self.label_17, 0, 0, 1, 1)
self.b_22 = QtWidgets.QPushButton(self.page)
self.b_22.setText("")
self.b_22.setObjectName("b_22")
self.gridLayout_3.addWidget(self.b_22, 4, 1, 1, 1)
self.t2 = QtWidgets.QRadioButton(self.page)
self.t2.setObjectName("t2")
self.gridLayout_3.addWidget(self.t2, 2, 0, 1, 1)
self.label_16 = QtWidgets.QLabel(self.page)
self.label_16.setObjectName("label_16")
self.gridLayout_3.addWidget(self.label_16, 4, 0, 1, 1)
self.label_14 = QtWidgets.QLabel(self.page)
self.label_14.setObjectName("label_14")
self.gridLayout_3.addWidget(self.label_14, 3, 0, 1, 1)
self.t1 = QtWidgets.QRadioButton(self.page)
self.t1.setObjectName("t1")
self.gridLayout_3.addWidget(self.t1, 1, 0, 1, 1)
self.trans1 = QtWidgets.QLineEdit(self.page)
self.trans1.setObjectName("trans1")
self.gridLayout_3.addWidget(self.trans1, 5, 1, 1, 1)
self.label_19 = QtWidgets.QLabel(self.page)
self.label_19.setText("")
self.label_19.setObjectName("label_19")
self.gridLayout_3.addWidget(self.label_19, 9, 0, 1, 1)
self.label_26 = QtWidgets.QLabel(self.page)
self.label_26.setText("")
self.label_26.setObjectName("label_26")
self.gridLayout_3.addWidget(self.label_26, 7, 0, 1, 1)
self.label_20 = QtWidgets.QLabel(self.page)
self.label_20.setText("")
self.label_20.setObjectName("label_20")
self.gridLayout_3.addWidget(self.label_20, 9, 1, 1, 1)
self.label_25 = QtWidgets.QLabel(self.page)
self.label_25.setText("")
self.label_25.setObjectName("label_25")
self.gridLayout_3.addWidget(self.label_25, 8, 1, 1, 1)
self.label_24 = QtWidgets.QLabel(self.page)
self.label_24.setText("")
self.label_24.setObjectName("label_24")
self.gridLayout_3.addWidget(self.label_24, 8, 0, 1, 1)
self.label_28 = QtWidgets.QLabel(self.page)
self.label_28.setText("")
self.label_28.setObjectName("label_28")
self.gridLayout_3.addWidget(self.label_28, 6, 0, 1, 1)
self.label_29 = QtWidgets.QLabel(self.page)
self.label_29.setText("")
self.label_29.setObjectName("label_29")
self.gridLayout_3.addWidget(self.label_29, 6, 1, 1, 1)
self.verticalLayout_4.addLayout(self.gridLayout_3)
self.stackedWidget.addWidget(self.page)
self.page_2 = QtWidgets.QWidget()
self.page_2.setObjectName("page_2")
self.verticalLayout_2 = QtWidgets.QVBoxLayout(self.page_2)
self.verticalLayout_2.setContentsMargins(0, 0, 0, 0)
self.verticalLayout_2.setObjectName("verticalLayout_2")
self.ggglayout = QtWidgets.QGridLayout()
self.ggglayout.setObjectName("ggglayout")
self.label_34 = QtWidgets.QLabel(self.page_2)
self.label_34.setText("")
self.label_34.setObjectName("label_34")
self.ggglayout.addWidget(self.label_34, 7, 0, 1, 1)
self.i2 = QtWidgets.QRadioButton(self.page_2)
self.i2.setObjectName("i2")
self.ggglayout.addWidget(self.i2, 2, 0, 1, 1)
self.label_2 = QtWidgets.QLabel(self.page_2)
self.label_2.setObjectName("label_2")
self.ggglayout.addWidget(self.label_2, 4, 0, 1, 1)
self.label = QtWidgets.QLabel(self.page_2)
self.label.setObjectName("label")
self.ggglayout.addWidget(self.label, 3, 0, 1, 1)
self.i1 = QtWidgets.QRadioButton(self.page_2)
self.i1.setObjectName("i1")
self.ggglayout.addWidget(self.i1, 1, 0, 1, 1)
self.label_21 = QtWidgets.QLabel(self.page_2)
self.label_21.setObjectName("label_21")
self.ggglayout.addWidget(self.label_21, 0, 0, 1, 1)
self.cb_81 = QtWidgets.QComboBox(self.page_2)
self.cb_81.setObjectName("cb_81")
self.cb_81.addItem("")
self.cb_81.addItem("")
self.cb_81.addItem("")
self.cb_81.addItem("")
self.cb_81.addItem("")
self.ggglayout.addWidget(self.cb_81, 3, 1, 1, 1)
self.cb_83 = QtWidgets.QComboBox(self.page_2)
self.cb_83.setObjectName("cb_83")
self.cb_83.addItem("")
self.cb_83.addItem("")
self.cb_83.addItem("")
self.cb_83.addItem("")
self.cb_83.addItem("")
self.ggglayout.addWidget(self.cb_83, 5, 1, 1, 1)
self.label_3 = QtWidgets.QLabel(self.page_2)
self.label_3.setObjectName("label_3")
self.ggglayout.addWidget(self.label_3, 5, 0, 1, 1)
self.cb_82 = QtWidgets.QComboBox(self.page_2)
self.cb_82.setObjectName("cb_82")
self.cb_82.addItem("")
self.cb_82.addItem("")
self.cb_82.addItem("")
self.cb_82.addItem("")
self.cb_82.addItem("")
self.ggglayout.addWidget(self.cb_82, 4, 1, 1, 1)
self.label_33 = QtWidgets.QLabel(self.page_2)
self.label_33.setText("")
self.label_33.setObjectName("label_33")
self.ggglayout.addWidget(self.label_33, 8, 1, 1, 1)
self.label_30 = QtWidgets.QLabel(self.page_2)
self.label_30.setText("")
self.label_30.setObjectName("label_30")
self.ggglayout.addWidget(self.label_30, 9, 0, 1, 1)
self.label_35 = QtWidgets.QLabel(self.page_2)
self.label_35.setText("")
self.label_35.setObjectName("label_35")
self.ggglayout.addWidget(self.label_35, 7, 1, 1, 1)
self.label_31 = QtWidgets.QLabel(self.page_2)
self.label_31.setText("")
self.label_31.setObjectName("label_31")
self.ggglayout.addWidget(self.label_31, 9, 1, 1, 1)
self.label_36 = QtWidgets.QLabel(self.page_2)
self.label_36.setText("")
self.label_36.setObjectName("label_36")
self.ggglayout.addWidget(self.label_36, 6, 0, 1, 1)
self.label_37 = QtWidgets.QLabel(self.page_2)
self.label_37.setText("")
self.label_37.setObjectName("label_37")
self.ggglayout.addWidget(self.label_37, 6, 1, 1, 1)
self.verticalLayout_2.addLayout(self.ggglayout)
self.stackedWidget.addWidget(self.page_2)
self.page_3 = QtWidgets.QWidget()
self.page_3.setObjectName("page_3")
self.verticalLayout_3 = QtWidgets.QVBoxLayout(self.page_3)
self.verticalLayout_3.setContentsMargins(0, 0, 0, 0)
self.verticalLayout_3.setObjectName("verticalLayout_3")
self.gridLayout_2 = QtWidgets.QGridLayout()
self.gridLayout_2.setObjectName("gridLayout_2")
self.cb_111 = QtWidgets.QComboBox(self.page_3)
self.cb_111.setObjectName("cb_111")
self.cb_111.addItem("")
self.cb_111.addItem("")
self.cb_111.addItem("")
self.cb_111.addItem("")
self.cb_111.addItem("")
self.gridLayout_2.addWidget(self.cb_111, 9, 1, 1, 1)
self.e1 = QtWidgets.QRadioButton(self.page_3)
self.e1.setObjectName("e1")
self.gridLayout_2.addWidget(self.e1, 1, 0, 1, 1)
self.label_5 = QtWidgets.QLabel(self.page_3)
self.label_5.setObjectName("label_5")
self.gridLayout_2.addWidget(self.label_5, 3, 0, 1, 1)
self.b_112 = QtWidgets.QPushButton(self.page_3)
self.b_112.setText("")
self.b_112.setObjectName("b_112")
self.gridLayout_2.addWidget(self.b_112, 4, 1, 1, 1)
self.label_6 = QtWidgets.QLabel(self.page_3)
self.label_6.setObjectName("label_6")
self.gridLayout_2.addWidget(self.label_6, 4, 0, 1, 1)
self.label_10 = QtWidgets.QLabel(self.page_3)
self.label_10.setObjectName("label_10")
self.gridLayout_2.addWidget(self.label_10, 8, 0, 1, 1)
self.cb_113 = QtWidgets.QComboBox(self.page_3)
self.cb_113.setObjectName("cb_113")
self.cb_113.addItem("")
self.cb_113.addItem("")
self.cb_113.addItem("")
self.cb_113.addItem("")
self.cb_113.addItem("")
self.gridLayout_2.addWidget(self.cb_113, 11, 1, 1, 1)
self.label_7 = QtWidgets.QLabel(self.page_3)
self.label_7.setObjectName("label_7")
self.gridLayout_2.addWidget(self.label_7, 5, 0, 1, 1)
self.b_114 = QtWidgets.QPushButton(self.page_3)
self.b_114.setText("")
self.b_114.setObjectName("b_114")
self.gridLayout_2.addWidget(self.b_114, 6, 1, 1, 1)
self.cb_112 = QtWidgets.QComboBox(self.page_3)
self.cb_112.setObjectName("cb_112")
self.cb_112.addItem("")
self.cb_112.addItem("")
self.cb_112.addItem("")
self.cb_112.addItem("")
self.cb_112.addItem("")
self.gridLayout_2.addWidget(self.cb_112, 10, 1, 1, 1)
self.b_113 = QtWidgets.QPushButton(self.page_3)
self.b_113.setText("")
self.b_113.setObjectName("b_113")
self.gridLayout_2.addWidget(self.b_113, 5, 1, 1, 1)
self.b_115 = QtWidgets.QPushButton(self.page_3)
self.b_115.setText("")
self.b_115.setObjectName("b_115")
self.gridLayout_2.addWidget(self.b_115, 7, 1, 1, 1)
self.b_111 = QtWidgets.QPushButton(self.page_3)
self.b_111.setText("")
self.b_111.setObjectName("b_111")
self.gridLayout_2.addWidget(self.b_111, 3, 1, 1, 1)
self.label_13 = QtWidgets.QLabel(self.page_3)
self.label_13.setObjectName("label_13")
self.gridLayout_2.addWidget(self.label_13, 10, 0, 1, 1)
self.label_4 = QtWidgets.QLabel(self.page_3)
self.label_4.setObjectName("label_4")
self.gridLayout_2.addWidget(self.label_4, 0, 0, 1, 1)
self.label_9 = QtWidgets.QLabel(self.page_3)
self.label_9.setObjectName("label_9")
self.gridLayout_2.addWidget(self.label_9, 7, 0, 1, 1)
self.e2 = QtWidgets.QRadioButton(self.page_3)
self.e2.setObjectName("e2")
self.gridLayout_2.addWidget(self.e2, 2, 0, 1, 1)
self.label_12 = QtWidgets.QLabel(self.page_3)
self.label_12.setObjectName("label_12")
self.gridLayout_2.addWidget(self.label_12, 9, 0, 1, 1)
self.trans3 = QtWidgets.QLineEdit(self.page_3)
self.trans3.setObjectName("trans3")
self.gridLayout_2.addWidget(self.trans3, 8, 1, 1, 1)
self.label_22 = QtWidgets.QLabel(self.page_3)
self.label_22.setObjectName("label_22")
self.gridLayout_2.addWidget(self.label_22, 11, 0, 1, 1)
self.label_8 = QtWidgets.QLabel(self.page_3)
self.label_8.setObjectName("label_8")
self.gridLayout_2.addWidget(self.label_8, 6, 0, 1, 1)
self.label_11 = QtWidgets.QLabel(self.page_3)
self.label_11.setText("")
self.label_11.setObjectName("label_11")
self.gridLayout_2.addWidget(self.label_11, 12, 0, 1, 1)
self.label_18 = QtWidgets.QLabel(self.page_3)
self.label_18.setText("")
self.label_18.setObjectName("label_18")
self.gridLayout_2.addWidget(self.label_18, 12, 1, 1, 1)
self.verticalLayout_3.addLayout(self.gridLayout_2)
self.stackedWidget.addWidget(self.page_3)
self.gridLayout.addWidget(self.stackedWidget, 1, 1, 1, 1)
self.verticalLayout.addLayout(self.gridLayout)
self.buttonBox = QtWidgets.QDialogButtonBox(Patches)
self.buttonBox.setOrientation(QtCore.Qt.Horizontal)
self.buttonBox.setStandardButtons(QtWidgets.QDialogButtonBox.Cancel|QtWidgets.QDialogButtonBox.Ok)
self.buttonBox.setObjectName("buttonBox")
self.verticalLayout.addWidget(self.buttonBox)
self.retranslateUi(Patches)
self.stackedWidget.setCurrentIndex(0)
self.buttonBox.accepted.connect(Patches.accept)
self.buttonBox.rejected.connect(Patches.reject)
QtCore.QMetaObject.connectSlotsByName(Patches)
def retranslateUi(self, Patches):
_translate = QtCore.QCoreApplication.translate
Patches.setWindowTitle(_translate("Patches", "Colors"))
self.label_23.setText(_translate("Patches", "classes"))
self.label_15.setText(_translate("Patches", "transparency (0~1)"))
self.label_17.setText(_translate("Patches", "2 classes"))
self.t2.setText(_translate("Patches", "DIY"))
self.label_16.setText(_translate("Patches", "artifacts"))
self.label_14.setText(_translate("Patches", "reference"))
self.t1.setText(_translate("Patches", "default"))
self.i2.setText(_translate("Patches", "DIY"))
self.label_2.setText(_translate("Patches", "shim"))
self.label.setText(_translate("Patches", "movement"))
self.i1.setText(_translate("Patches", "default"))
self.label_21.setText(_translate("Patches", "8 classes"))
self.cb_81.setItemText(0, _translate("Patches", "//"))
self.cb_81.setItemText(1, _translate("Patches", "\\\\"))
self.cb_81.setItemText(2, _translate("Patches", "XX"))
self.cb_81.setItemText(3, _translate("Patches", "--"))
self.cb_81.setItemText(4, _translate("Patches", "**"))
self.cb_83.setItemText(0, _translate("Patches", "//"))
self.cb_83.setItemText(1, _translate("Patches", "\\\\"))
self.cb_83.setItemText(2, _translate("Patches", "XX"))
self.cb_83.setItemText(3, _translate("Patches", "--"))
self.cb_83.setItemText(4, _translate("Patches", "**"))
self.label_3.setText(_translate("Patches", "noise"))
self.cb_82.setItemText(0, _translate("Patches", "//"))
self.cb_82.setItemText(1, _translate("Patches", "\\\\"))
self.cb_82.setItemText(2, _translate("Patches", "XX"))
self.cb_82.setItemText(3, _translate("Patches", "--"))
self.cb_82.setItemText(4, _translate("Patches", "**"))
self.cb_111.setItemText(0, _translate("Patches", "//"))
self.cb_111.setItemText(1, _translate("Patches", "\\\\"))
self.cb_111.setItemText(2, _translate("Patches", "XX"))
self.cb_111.setItemText(3, _translate("Patches", "--"))
self.cb_111.setItemText(4, _translate("Patches", "**"))
self.e1.setText(_translate("Patches", "default"))
self.label_5.setText(_translate("Patches", "t1 head"))
self.label_6.setText(_translate("Patches", "t1 abdomen"))
self.label_10.setText(_translate("Patches", "transparency (0~1)"))
self.cb_113.setItemText(0, _translate("Patches", "//"))
self.cb_113.setItemText(1, _translate("Patches", "\\\\"))
self.cb_113.setItemText(2, _translate("Patches", "XX"))
self.cb_113.setItemText(3, _translate("Patches", "--"))
self.cb_113.setItemText(4, _translate("Patches", "**"))
self.label_7.setText(_translate("Patches", "t2 abdomen"))
self.cb_112.setItemText(0, _translate("Patches", "//"))
self.cb_112.setItemText(1, _translate("Patches", "\\\\"))
self.cb_112.setItemText(2, _translate("Patches", "XX"))
self.cb_112.setItemText(3, _translate("Patches", "--"))
self.cb_112.setItemText(4, _translate("Patches", "**"))
self.label_13.setText(_translate("Patches", "shim"))
self.label_4.setText(_translate("Patches", "11 classes"))
self.label_9.setText(_translate("Patches", "t2 liver"))
self.e2.setText(_translate("Patches", "DIY"))
self.label_12.setText(_translate("Patches", "movement"))
self.label_22.setText(_translate("Patches", "both"))
self.label_8.setText(_translate("Patches", "t1 liver"))
| Ui_Patches |
base.py | #!/usr/bin/python3
"""Base functionality for Cinch AI agents.
Method reference:
class AIBase
--send_data(data)
--handle_command(command)
--run()
--start()
--stop()
--bid(bid)
--chat(chat_msg)
--play(card_val)
--is_legal_bid(bid)
--is_legal_play(card)
--act()
TODO:
- have 'thinking' timeout value (halt thinking after certain interval)
--- mandate a timeout; can be a Timer call to change a loop value & publish
--- can specify timeout in this file? want all agent models equal in this
--- To devs: don't publish an AI that takes forever to do anything!
- if AI are allowed to use DB, impl methods here
"""
from multiprocessing import Pipe
import logging
log = logging.getLogger(__name__)
from core.cards import RANKS_SHORT, SUITS_SHORT, NUM_RANKS
# Settings
SERVER_HOST = "localhost"
SERVER_PORT = 2424
SERVER_URL = "{0}:{1}".format(SERVER_HOST, SERVER_PORT)
# Not currently used
THINKING_TIMEOUT = 10.0 # Secs to allow AI to think before demanding response
# Constants
EVENT_NEW_GAME = 0 # Integer constants for error handling
EVENT_JOIN_GAME = 1 #
EVENT_BID = 2 #
EVENT_PLAY = 3 #
# Hardcoded values to increase performance in decoding cards
NUM_TEAMS = 2
NUM_PLAYERS = 4
class AIBase:
"""Common features of all Cinch AI Agents."""
####################
# Agent Management -- Creation/maintenance of Agent; metagame functions
####################
def __init__(self, pipe, identity):
# Instance variables
self.uid = 0
self.manager = None
self.running = False
self.pipe = pipe # type = multiprocessing.Pipe
self.queue = None # will be a multiprocessing.Queue for sending to Mgr
self.name = identity['name']
self.label = self.name
# Game
self.in_game = False
self.pNum = -1
self.hand = [] # list of Card objects will get cloned from the Game
# Game state
self.gs = None
log.info("{0}AI loaded".format(self.name))
def __del__(self):
"""Safely shutdown AI Agent subprocess."""
# Let manager know agent is shutting down
pass #TODO
self.running = False
#TODO - log final state?
####################
# Message interface
####################
def send_data(self, data):
"""Send information to game via AI Manager queue
data (dict): data to send
"""
self.queue.put(data)
def handle_command(self, command):
"""Process command from input pipe.
command (dict): data sent with following values:
- {'cmd': command number (int)} - command indicator from the AI manager
- {'gs': (message, game)} - a message and a new game state
command numbers:
-1: shutdown
1: enter game, includes uid and pNum
"""
if 'cmd' in command:
op = command['cmd'][0]
if op == -1: # Shutdown
log.info("AI Agent {0} received shutdown command".format(
self.label))
self.stop()
elif op == 1: # New game
self.uid = command['cmd'][1]
self.pNum = command['cmd'][2]
self.label = "{0}/{1}".format(self.name, self.pNum)
elif 'gs' in command:
self.msg = command['gs'][0] # May contain chats
self.game = command['gs'][1] # TODO: Protect hands of other players
self.gs = self.game.gs
self.hand = self.game.players[self.pNum].hand # Refresh hand
self.act()
else:
log.warn("Unknown daemon command: {0}".format(str(command)))
def run(self):
# Read from pipe -- does block ai thread, but start() is final action
readline = self.pipe.recv # Function references for speed++
handle_command = self.handle_command
while self.running:
try:
data = readline()
handle_command(data)
except KeyboardInterrupt:
self.stop()
except Exception as e:
self.stop()
log.exception("Killing daemon loop...")
return
def start(self, queue):
log.debug("AI Agent {0} listening to Manager".format(self.label))
self.queue = queue
self.running = True
self.run()
def stop(self):
log.debug("AI Agent {0} stopped listening to Manager"
"".format(self.label))
self.running = False
####################
# Message Transmitters -- Convenience methods for sending messages
####################
def bid(self, bid):
"""Send bid to server. Handle error response.
bid (int): bid value (0-5), assumed to have been legality checked
"""
res = self.send_data({'uid':self.uid, 'bid':bid}) # res=None is OK
# Bid may be illegal anyway
if res:
log.error("Agent made illegal bid of {0}; adjusting bid to PASS."
"".format(bid))
self.bid(0) # Pass
else:
if bid > 0:
log.info("{0} bids {1}".format(self.label, bid))
else:
log.info("{0} passes".format(self.label))
def chat(self, chat_msg):
"""Send chat-style message to Comet server (for debugging & hijinks).
chat_msg (str): message to send via chat channels
"""
self.send_data({'uid':self.uid, 'msg':chat_msg})
def play(self, card):
"""Send proposed play to server. Handle error response.
card (Card): card object, assumed to have been legality checked
"""
card_val = card.code
res = self.send_data({'uid':self.uid, 'card':card_val}) # res=None is OK
| "".format(str(card), self.label))
else:
log.info("{0} plays {1}".format(self.label,
str(card)))
self.hand.remove(card) # Complete play
####################
# Game Rules -- Adapted versions of core game functionality
####################
def is_legal_bid(self, bid):
"""Check if proposed bid is legal.
bid (int): bid value (0=PASS, 5=CINCH)
"""
if bid == 0:
return True # Always legal to pass
elif bid < 0 or bid > 5:
return False # Bid out of bounds
elif bid > self.gs.high_bid:
return True
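        # Dealer may always bid Cinch (5), even without exceeding the current
        # high bid.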
elif bid == 5 & self.pNum == self.gs.dealer:
return True
else:
return False
def is_legal_play(self, card):
"""Check if proposed play is legal.
card (Card): proposed play
"""
if len(self.gs.cards_in_play) == 0:
return True # No restriction on what can be led
else:
if card.suit == self.gs.trump:
return True # Trump is always OK
else:
led = self.gs.cards_in_play[0].suit
if card.suit == led:
return True # Followed suit
else:
for c in self.hand:
if led == c.suit:
return False # Could've followed suit but didn't
return True # Throwing off
# get_legal_plays() and get_winning_card() will be reimplemented when HAL is repaired.
####################
# Intelligence -- Implement in subclasses
####################
def act(self):
"""Initiate action.
Called after processing each message block from Comet server.
Typical implementation is to check if AI is active player, and if so,
trigger the appropriate action (bid or play) and related analysis.
This is called regardless of who is active player. This allows, for
example, the AI to do preliminary play analysis before its own turn.
Subclasses are responsible for performing any needed checks.
Also, the current game mode should be considered.
"""
raise NotImplementedError("act() needs to be implemented in subclass.") | # Play may be deemed illegal by server anyway
if res:
# No fallback option defined for an illegal play
log.error("{1} made illegal play with card_val {0}" |
CreateUserCommand.ts | import { AppStreamClientResolvedConfig, ServiceInputTypes, ServiceOutputTypes } from "../AppStreamClient";
import { CreateUserRequest, CreateUserResult } from "../models/models_0";
import {
deserializeAws_json1_1CreateUserCommand,
serializeAws_json1_1CreateUserCommand,
} from "../protocols/Aws_json1_1";
import { getSerdePlugin } from "@aws-sdk/middleware-serde";
import { HttpRequest as __HttpRequest, HttpResponse as __HttpResponse } from "@aws-sdk/protocol-http";
import { Command as $Command } from "@aws-sdk/smithy-client";
import {
FinalizeHandlerArguments,
Handler,
HandlerExecutionContext,
MiddlewareStack,
HttpHandlerOptions as __HttpHandlerOptions,
MetadataBearer as __MetadataBearer, | export interface CreateUserCommandInput extends CreateUserRequest {}
export interface CreateUserCommandOutput extends CreateUserResult, __MetadataBearer {}
/**
* <p>Creates a new user in the user pool.</p>
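 * @example
 * A minimal usage sketch; the region and input field values shown here are
 * assumptions for illustration, not values taken from this file:
 * ```javascript
 * import { AppStreamClient, CreateUserCommand } from "@aws-sdk/client-appstream";
 * const client = new AppStreamClient({ region: "us-east-1" });
 * const command = new CreateUserCommand({
 *   UserName: "user@example.com",
 *   AuthenticationType: "USERPOOL",
 * });
 * const response = await client.send(command);
 * ```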
*/
export class CreateUserCommand extends $Command<
CreateUserCommandInput,
CreateUserCommandOutput,
AppStreamClientResolvedConfig
> {
// Start section: command_properties
// End section: command_properties
constructor(readonly input: CreateUserCommandInput) {
// Start section: command_constructor
super();
// End section: command_constructor
}
/**
* @internal
*/
resolveMiddleware(
clientStack: MiddlewareStack<ServiceInputTypes, ServiceOutputTypes>,
configuration: AppStreamClientResolvedConfig,
options?: __HttpHandlerOptions
): Handler<CreateUserCommandInput, CreateUserCommandOutput> {
this.middlewareStack.use(getSerdePlugin(configuration, this.serialize, this.deserialize));
const stack = clientStack.concat(this.middlewareStack);
const { logger } = configuration;
const clientName = "AppStreamClient";
const commandName = "CreateUserCommand";
const handlerExecutionContext: HandlerExecutionContext = {
logger,
clientName,
commandName,
inputFilterSensitiveLog: CreateUserRequest.filterSensitiveLog,
outputFilterSensitiveLog: CreateUserResult.filterSensitiveLog,
};
const { requestHandler } = configuration;
return stack.resolve(
(request: FinalizeHandlerArguments<any>) =>
requestHandler.handle(request.request as __HttpRequest, options || {}),
handlerExecutionContext
);
}
private serialize(input: CreateUserCommandInput, context: __SerdeContext): Promise<__HttpRequest> {
return serializeAws_json1_1CreateUserCommand(input, context);
}
private deserialize(output: __HttpResponse, context: __SerdeContext): Promise<CreateUserCommandOutput> {
return deserializeAws_json1_1CreateUserCommand(output, context);
}
// Start section: command_body_extra
// End section: command_body_extra
} | SerdeContext as __SerdeContext,
} from "@aws-sdk/types";
|
xzip.go | // Package xzip provides higher level types and functions on top of "arvhive/zip" package.
package xzip
import (
"archive/zip"
"fmt"
"io"
"net/http"
"os"
"path"
"time"
"github.com/yssk22/go/x/xerrors"
"github.com/yssk22/go/x/xtime"
)
const uint32max = (1 << 32) - 1
// Archiver is an object that builds a zip archive. It reads multiple source
// streams and forwards them as a single zip stream to a destination such as a
// file or an HTTP form. Archiver is implemented on top of io.Pipe, so zip
// compression is done with little buffering in memory.
//
// The basic usage is like:
//
// a, _ := NewRawSourceFromFile("a.txt")
// defer a.Close()
// b, _ := NewRawSourceFromFile("b.txt")
// defer b.Close()
//
// builder := NewArchiver(a, b)
// zip, _ := os.Create("ab.zip")
// defer zip.Close()
// io.Copy(zip, builder)
//
type Archiver struct {
sources []*RawSource
errors []error
pipeReader io.Reader
}
// Read reads the zip content from source.
func (s *Archiver) Read(p []byte) (int, error) {
n, err := s.pipeReader.Read(p)
if err != nil {
return n, err
}
if len(s.errors) != 0 {
return n, fmt.Errorf("zip source is broken: %v", s.errors[0])
}
return n, nil
}
// Close closes underlying sources
func (s *Archiver) Close() error {
me := xerrors.NewMultiError(len(s.sources))
for i, s := range s.sources {
me[i] = s.Close()
}
if me.HasError() {
return me
}
return nil
}
// NewArchiver returns a new *Archiver from multiple raw sources.
func NewArchiver(sources ...*RawSource) *Archiver {
archiver := &Archiver{
sources: sources,
errors: make([]error, 0),
}
pipeReader, pipeWriter := io.Pipe()
archiver.pipeReader = pipeReader
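	// Produce the zip stream in a goroutine that writes into the pipe; the
	// consumer pulls compressed bytes back out through Archiver.Read.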
go func() {
zipWriter := zip.NewWriter(pipeWriter)
defer pipeWriter.Close()
defer zipWriter.Close()
for _, source := range archiver.sources {
header := source.ZipHeader()
header.Method = zip.Deflate
writer, err := zipWriter.CreateHeader(header)
if err != nil {
archiver.errors = append(archiver.errors, err)
continue
}
if _, err := io.Copy(writer, source); err != nil {
archiver.errors = append(archiver.errors, err)
continue
}
}
}()
return archiver
}
// RawSource is a source to create a Zip stream.
type RawSource struct {
Name string
Size uint64
ModifiedAt time.Time
Mode os.FileMode
source io.ReadCloser
}
// Read reads data from the source stream.
func (s *RawSource) Read(p []byte) (int, error) {
return s.source.Read(p)
}
// Close close the source stream
func (s *RawSource) Close() error {
return s.source.Close()
}
// ZipHeader returns *zip.FileHeader to create a zip stream
func (s *RawSource) ZipHeader() *zip.FileHeader {
fh := &zip.FileHeader{
Name: s.Name,
UncompressedSize64: s.Size,
}
fh.SetModTime(s.ModifiedAt)
fh.SetMode(s.Mode) | }
return fh
}
// NewRawSourceFromFile returns a new *RawSource from file path
func NewRawSourceFromFile(path string) (*RawSource, error) {
info, err := os.Stat(path)
if err != nil {
return nil, err
}
if info.IsDir() {
return nil, fmt.Errorf("%s is not a file", path)
}
file, err := os.Open(path)
if err != nil {
return nil, err
}
source := &RawSource{
Name: info.Name(),
Size: uint64(info.Size()),
ModifiedAt: info.ModTime(),
Mode: info.Mode(),
source: file,
}
return source, nil
}
// NewRawSourceFromURL returns a new *RawSource from URL
func NewRawSourceFromURL(url string, client *http.Client) (*RawSource, error) {
resp, err := client.Get(url)
if err != nil {
return nil, err
}
	if resp.StatusCode != 200 {
		resp.Body.Close()
		return nil, fmt.Errorf("non-200 response (%d)", resp.StatusCode)
	}
	size := resp.ContentLength
	if size < 0 {
		// Content-Length unknown; declare zero rather than converting -1 to a
		// huge uint64 (the zip writer records actual sizes as it streams).
		size = 0
	}
	return &RawSource{
		Name:       path.Base(url),
		Size:       uint64(size),
		Mode:       os.FileMode(0644),
		ModifiedAt: xtime.Now(),
		source:     resp.Body,
}, nil
} | if s.Size > uint32max {
fh.UncompressedSize = uint32max
} else {
fh.UncompressedSize = uint32(fh.UncompressedSize64) |
alg-floyd_test.go | package core
import (
"fmt"
"log"
"testing"
"time"
)
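// makeMockMap parses mapData and initializes the Map with the default
// vision and dimensions; it returns nil if parsing or init fails.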
func makeMockMap(mapData string) *Map {
m, err := NewMapFromString(mapData)
if err != nil {
log.Println(err)
return nil
}
err = m.Init(defaultVision, defaultWidth, defaultHeight)
if err != nil {
log.Println(err)
return nil
}
return m
}
func Test_initTnO(t *testing.T) {
type args struct {
m *Map
}
tests := []struct {
name string
args args
want []byte
want1 []int
}{
{
name: "initTnO - print map 1",
args: args{
m: makeMockMap(Map3),
},
want: nil,
want1: nil,
},
}
for _, tt := range tests {
t.Run(tt.name, func(t *testing.T) {
T, O := initTnO(tt.args.m)
m := tt.args.m
printMap(m, T, O, m.Width, m.Height)
// printObjectIndexes(O, m.Width, m.Height)
updateTunnelExits(m, T, O)
printMapObjects(m)
// if !reflect.DeepEqual(T, tt.want) {
// t.Errorf("initTnO() T = %v, want %v", T, tt.want)
// }
// if !reflect.DeepEqual(O, tt.want1) {
// t.Errorf("initTnO() O = %v, want %v", O, tt.want1)
// }
})
}
}
func Test_createGraph(t *testing.T) {
type args struct {
m *Map
}
tests := []struct {
name string
args args
want [][]int
want1 []byte
want2 []int
}{
{
name: "createGraph - print map 1",
args: args{
m: makeMockMap(Map1),
},
want: nil,
want1: nil,
want2: nil,
},
}
for _, tt := range tests {
t.Run(tt.name, func(t *testing.T) {
m := tt.args.m
start := time.Now()
G, T, O := createGraph(m)
//printGraph(G)
n := m.Width * m.Height
P := createMatrix(n)
floyd(G, P)
fmt.Printf("%v\n", time.Since(start))
printMap(m, T, O, m.Width, m.Height)
printPath(G, P, 0, 0)
printPath(G, P, 360, 360)
printPath(G, P, 0, 360)
printPath(G, P, 60, 86)
printPath(G, P, 0, 4)
printPath(G, P, 0, 385)
printPath(G, P, 14, 246)
printPath(G, P, 250, 150)
// if !reflect.DeepEqual(G, tt.want) {
// t.Errorf("createGraph() G = %v, want %v", G, tt.want)
// } | // if !reflect.DeepEqual(T, tt.want1) {
// t.Errorf("createGraph() T = %v, want %v", T, tt.want1)
// }
// if !reflect.DeepEqual(O, tt.want2) {
// t.Errorf("createGraph() O = %v, want %v", O, tt.want2)
// }
})
}
}
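// printPath prints the shortest-path distance G[i][j] together with the
// node sequence reconstructed from the path matrix P.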
func printPath(G [][]int, P [][]int, i, j int) {
path := floydPath(P, i, j)
fmt.Printf("%v: %v\n", G[i][j], path)
} | |
closer.go | // Copyright 2018 Terence Tarvis. All rights reserved.
package main
import (
"go/ast"
)
func init() {
register("closeCheck",
"this tests if things with .Close() method have .Close() actually called on them",
closeCheck,
funcDecl)
}
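// opensFile reports whether the expression x evaluates to the result type of
// os.Open, i.e. (*os.File, error).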
func opensFile(f *File, x ast.Expr) bool {
/*
if(f.pkg.info.TypeOf(x) == nil) {
// should probably print something out here to notify the user
return false;
}
*/
/*
if(f.pkg.info.TypeOf(x).String() == "(*os.File, error)") {
return true
}
*/
	if typeValue := f.pkg.info.TypeOf(x); typeValue != nil {
		if typeValue.String() == "(*os.File, error)" {
			return true
		}
	}
	return false
}
// closesFile checks the remaining statements in a function body for a .Close() method
func | (f *File, stmts []ast.Stmt) bool {
for _, stmt := range stmts {
switch expr := stmt.(type) {
		case *ast.AssignStmt:
			rhs := expr.Rhs
			for _, x := range rhs {
				name, err := getFullFuncName(x)
				if err != nil {
					warnf("issue, %v", err)
				}
				if name == "file/Close" {
					return true
				}
			}
		case *ast.ExprStmt:
			name, err := getFullFuncName(expr.X)
			if err != nil {
				warnf("issue, %v", err)
			}
			if name == "file/Close" {
				return true
			}
}
}
return false
}
// for the time being this just checks a function to see if an opened file is closed
// http.MaxBytesReader should also be checked for a close
func closeCheck(f *File, node ast.Node) {
var formatString string = "Audit for Close() method called on opened file, %s"
// loop through block
// look for file open
// look for file close
// if no file close, report
// consider checking if the opened file is returned
// consider checking if an open file is an input and no file is returned
// it turns out you can open a file and not use it. What then?
// ugh I really hate this
// is walking the statements a better option?
if fun, ok := node.(*ast.FuncDecl); ok {
for i, stmts := range fun.Body.List {
switch stmt := stmts.(type) {
case *ast.AssignStmt:
				rhs := stmt.Rhs
				for _, x := range rhs {
					if opensFile(f, x) {
						if !closesFile(f, fun.Body.List[i:]) {
							f.Reportf(stmt.Pos(), formatString, f.ASTString(x))
						}
					}
}
case *ast.ExprStmt:
				if opensFile(f, stmt.X) {
					if !closesFile(f, fun.Body.List[i:]) {
f.Reportf(stmt.Pos(), formatString, f.ASTString(stmt.X))
}
}
case *ast.IfStmt:
if s, ok := stmt.Init.(*ast.AssignStmt); ok {
					rhs := s.Rhs
					for _, x := range rhs {
						if opensFile(f, x) {
							if !closesFile(f, fun.Body.List[i:]) {
f.Reportf(stmt.Pos(), formatString, f.ASTString(x))
}
}
}
}
default:
// do nothing for time being
}
}
}
	return
}
| closesFile |
apply.js | import {
getCommonContainer,
getCommonHeader,
getStepperObject
} from "egov-ui-framework/ui-config/screens/specs/utils";
import { getCurrentFinancialYear } from "../utils";
import { footer } from "./applyResource/footer";
import { nocDetails } from "./applyResource/nocDetails";
import { propertyDetails } from "./applyResource/propertyDetails";
import { propertyLocationDetails } from "./applyResource/propertyLocationDetails";
import { applicantDetails } from "./applyResource/applicantDetails";
import { documentDetails } from "./applyResource/documentDetails";
import { getQueryArg } from "egov-ui-framework/ui-utils/commons";
import {
prepareFinalObject,
handleScreenConfigurationFieldChange as handleField
} from "egov-ui-framework/ui-redux/screen-configuration/actions";
import { getTenantId } from "egov-ui-kit/utils/localStorageUtils";
import { httpRequest } from "../../../../ui-utils";
import {
sampleSearch,
sampleSingleSearch,
sampleDocUpload
} from "../../../../ui-utils/sampleResponses";
import set from "lodash/set";
import get from "lodash/get";
import {
prepareDocumentsUploadData,
getSearchResults,
furnishNocResponse,
setApplicationNumberBox
} from "../../../../ui-utils/commons";
import "./index.css";
export const stepsData = [
{ labelName: "NOC Details", labelKey: "NOC_COMMON_NOC_DETAILS" },
{ labelName: "Property Details", labelKey: "NOC_COMMON_PROPERTY_DETAILS" }, | export const stepper = getStepperObject(
{ props: { activeStep: 0 } },
stepsData
);
const applicationNumberContainer = () => {
const applicationNumber = getQueryArg(
window.location.href,
"applicationNumber"
);
if (applicationNumber)
return {
uiFramework: "custom-atoms-local",
moduleName: "egov-noc",
componentPath: "ApplicationNoContainer",
props: {
number: `${applicationNumber}`,
visibility: "hidden"
},
visible: true
};
else return {};
};
export const header = getCommonContainer({
header: getCommonHeader({
labelName: `Application for Fire NOC (${getCurrentFinancialYear()})`, //later use getFinancialYearDates
labelKey: "NOC_COMMON_APPLY_NOC"
}),
//applicationNumber: applicationNumberContainer()
applicationNumber: {
uiFramework: "custom-atoms-local",
moduleName: "egov-noc",
componentPath: "ApplicationNoContainer",
props: {
number: "NA"
},
visible: false
}
});
export const formwizardFirstStep = {
uiFramework: "custom-atoms",
componentPath: "Form",
props: {
id: "apply_form1"
},
children: {
nocDetails
}
};
export const formwizardSecondStep = {
uiFramework: "custom-atoms",
componentPath: "Form",
props: {
id: "apply_form2"
},
children: {
propertyDetails,
propertyLocationDetails
},
visible: false
};
export const formwizardThirdStep = {
uiFramework: "custom-atoms",
componentPath: "Form",
props: {
id: "apply_form3"
},
children: {
applicantDetails
},
visible: false
};
export const formwizardFourthStep = {
uiFramework: "custom-atoms",
componentPath: "Form",
props: {
id: "apply_form4"
},
children: {
documentDetails
},
visible: false
};
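// getMdmsData fetches the MDMS master data needed by the form and stores it
// under "applyScreenMdmsData" in the prepared final object.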
const getMdmsData = async (action, state, dispatch) => {
let tenantId =
get(
state.screenConfiguration.preparedFinalObject,
"FireNOCs[0].fireNOCDetails.propertyDetails.address.city"
) || getTenantId();
let mdmsBody = {
MdmsCriteria: {
tenantId: tenantId,
moduleDetails: [
{
moduleName: "common-masters",
masterDetails: [{ name: "OwnerType" }, { name: "OwnerShipCategory" }]
},
{
moduleName: "firenoc",
masterDetails: [{ name: "BuildingType" }, { name: "FireStations" }]
},
{
moduleName: "egov-location",
masterDetails: [
{
name: "TenantBoundary"
}
]
},
{
moduleName: "tenant",
masterDetails: [
{
name: "tenants"
}
]
},
{ moduleName: "FireNoc", masterDetails: [{ name: "Documents" }] }
]
}
};
try {
let payload = null;
payload = await httpRequest(
"post",
"/egov-mdms-service/v1/_search",
"_search",
[],
mdmsBody
);
dispatch(prepareFinalObject("applyScreenMdmsData", payload.MdmsRes));
} catch (e) {
console.log(e);
}
};
const getFirstListFromDotSeparated = list => {
  // Keep only active entries and reduce dot-separated codes ("A.B.C") to
  // their unique first segments.
  list = list
    .filter(item => item.active)
    .map(item => item.code.split(".")[0]);
  list = [...new Set(list)].map(item => {
    return { code: item };
  });
  return list;
};
const setCardsIfMultipleBuildings = (state, dispatch) => {
if (
get(
state,
"screenConfiguration.preparedFinalObject.FireNOCs[0].fireNOCDetails.noOfBuildings"
) === "MULTIPLE"
) {
dispatch(
handleField(
"apply",
"components.div.children.formwizardSecondStep.children.propertyDetails.children.cardContent.children.propertyDetailsConatiner.children.buildingDataCard.children.singleBuildingContainer",
"props.style",
{ display: "none" }
)
);
dispatch(
handleField(
"apply",
"components.div.children.formwizardSecondStep.children.propertyDetails.children.cardContent.children.propertyDetailsConatiner.children.buildingDataCard.children.multipleBuildingContainer",
"props.style",
{}
)
);
}
};
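// prepareEditFlow hydrates the form with an existing application (searched by
// application number) when the user enters the EDIT flow.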
export const prepareEditFlow = async (
state,
dispatch,
applicationNumber,
tenantId
) => {
const buildings = get(
state,
"screenConfiguration.preparedFinalObject.FireNOCs[0].fireNOCDetails.buildings",
[]
);
if (applicationNumber && buildings.length == 0) {
let response = await getSearchResults([
{
key: "tenantId",
value: tenantId
},
{ key: "applicationNumber", value: applicationNumber }
]);
// let response = sampleSingleSearch();
response = furnishNocResponse(response);
dispatch(prepareFinalObject("FireNOCs", get(response, "FireNOCs", [])));
if (applicationNumber) {
setApplicationNumberBox(state, dispatch, applicationNumber);
}
// Set no of buildings radiobutton and eventually the cards
let noOfBuildings =
get(response, "FireNOCs[0].fireNOCDetails.noOfBuildings", "SINGLE") ===
"MULTIPLE"
? "MULTIPLE"
: "SINGLE";
dispatch(
handleField(
"apply",
"components.div.children.formwizardSecondStep.children.propertyDetails.children.cardContent.children.propertyDetailsConatiner.children.buildingRadioGroup",
"props.value",
noOfBuildings
)
);
    // Set the NOC type radiobutton
let nocType =
get(response, "FireNOCs[0].fireNOCDetails.fireNOCType", "NEW") === "NEW"
? "NEW"
: "PROVISIONAL";
dispatch(
handleField(
"apply",
"components.div.children.formwizardFirstStep.children.nocDetails.children.cardContent.children.nocDetailsContainer.children.nocRadioGroup",
"props.value",
nocType
)
);
// setCardsIfMultipleBuildings(state, dispatch);
// Set sample docs upload
// dispatch(prepareFinalObject("documentsUploadRedux", sampleDocUpload()));
}
};
const screenConfig = {
uiFramework: "material-ui",
name: "apply",
beforeInitScreen: (action, state, dispatch) => {
const applicationNumber = getQueryArg(
window.location.href,
"applicationNumber"
);
const tenantId = getQueryArg(window.location.href, "tenantId");
const step = getQueryArg(window.location.href, "step");
//Set Module Name
set(state, "screenConfiguration.moduleName", "fire-noc");
// Set MDMS Data
getMdmsData(action, state, dispatch).then(response => {
// Set Dropdowns Data
let buildingUsageTypeData = get(
state,
"screenConfiguration.preparedFinalObject.applyScreenMdmsData.firenoc.BuildingType",
[]
);
buildingUsageTypeData = getFirstListFromDotSeparated(
buildingUsageTypeData
);
dispatch(
prepareFinalObject(
"applyScreenMdmsData.DropdownsData.BuildingUsageType",
buildingUsageTypeData
)
);
let ownershipCategory = get(
state,
"screenConfiguration.preparedFinalObject.applyScreenMdmsData.common-masters.OwnerShipCategory",
[]
);
ownershipCategory = getFirstListFromDotSeparated(ownershipCategory);
dispatch(
prepareFinalObject(
"applyScreenMdmsData.DropdownsData.OwnershipCategory",
ownershipCategory
)
);
// Set Documents Data (TEMP)
prepareDocumentsUploadData(state, dispatch);
});
// Search in case of EDIT flow
prepareEditFlow(state, dispatch, applicationNumber, tenantId);
// // Set Property City
// dispatch(prepareFinalObject("FireNOCs[0].fireNOCDetails.propertyDetails.address.city", getTenantId()));
// // Handle dependent dropdowns in edit flow
// set(
// "apply",
// "components.div.children.formwizardSecondStep.children.propertyDetails.children.cardContent.children.propertyDetailsConatiner.children.buildingDataCard.children.singleBuildingContainer.children.singleBuilding.children.cardContent.children.singleBuildingCard.children.buildingSubUsageType",
// { display: "none" }
// );
// let pfo = {};
// if (applicationNumber && !step) {
// pfo = searchSampleResponse();
// dispatch(prepareFinalObject("FireNOCs[0]", get(pfo, "FireNOCs[0]")));
// }
// if (step && get(state, "screenConfiguration.preparedFinalObject")) {
// pfo = get(
// state,
// "screenConfiguration.preparedFinalObject.FireNOCs[0]",
// {}
// );
// }
// Code to goto a specific step through URL
if (step && step.match(/^\d+$/)) {
let intStep = parseInt(step);
set(
action.screenConfig,
"components.div.children.stepper.props.activeStep",
intStep
);
let formWizardNames = [
"formwizardFirstStep",
"formwizardSecondStep",
"formwizardThirdStep",
"formwizardFourthStep"
];
      for (let i = 0; i < formWizardNames.length; i++) {
        set(
          action.screenConfig,
          `components.div.children.${formWizardNames[i]}.visible`,
          i === intStep
        );
      }
      set(
        action.screenConfig,
        `components.div.children.footer.children.previousButton.visible`,
        intStep !== 0
      );
}
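    // For example, a URL ending in "?step=2" (hypothetical) would make only the
    // third form wizard step visible and show the previous button.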
// Set defaultValues of radiobuttons and selectors
let noOfBuildings = get(
state,
"screenConfiguration.preparedFinalObject.FireNOCs[0].fireNOCDetails.noOfBuildings",
"SINGLE"
);
set(
state,
"screenConfiguration.preparedFinalObject.FireNOCs[0].fireNOCDetails.noOfBuildings",
noOfBuildings
);
let nocType = get(
state,
"screenConfiguration.preparedFinalObject.FireNOCs[0].fireNOCDetails.fireNOCType",
"PROVISIONAL"
);
set(
state,
"screenConfiguration.preparedFinalObject.FireNOCs[0].fireNOCDetails.fireNOCType",
nocType
);
// Preset multi-cards (CASE WHEN DATA PRE-LOADED)
if (
get(
state,
"screenConfiguration.preparedFinalObject.FireNOCs[0].fireNOCDetails.noOfBuildings"
) === "MULTIPLE"
) {
set(
action.screenConfig,
"components.div.children.formwizardSecondStep.children.propertyDetails.children.cardContent.children.propertyDetailsConatiner.children.buildingDataCard.children.singleBuildingContainer.props.style",
{ display: "none" }
);
set(
action.screenConfig,
"components.div.children.formwizardSecondStep.children.propertyDetails.children.cardContent.children.propertyDetailsConatiner.children.buildingDataCard.children.multipleBuildingContainer.props.style",
{}
);
}
if (
get(
state,
"screenConfiguration.preparedFinalObject.FireNOCs[0].fireNOCDetails.fireNOCType"
) === "PROVISIONAL"
) {
set(
action.screenConfig,
"components.div.children.formwizardFirstStep.children.nocDetails.children.cardContent.children.nocDetailsContainer.children.provisionalNocNumber.props.style",
{ visibility: "hidden" }
);
}
// if (
// get(
// state,
// "screenConfiguration.preparedFinalObject.FireNOCs[0].fireNOCDetails.applicantDetails.ownerShipType",
// ""
// ).includes("MULTIPLEOWNERS")
// ) {
// set(
// action.screenConfig,
// "components.div.children.formwizardThirdStep.children.applicantDetails.children.cardContent.children.applicantTypeContainer.children.singleApplicantContainer.props.style",
// { display: "none" }
// );
// set(
// action.screenConfig,
// "components.div.children.formwizardThirdStep.children.applicantDetails.children.cardContent.children.applicantTypeContainer.children.multipleApplicantContainer.props.style",
// {}
// );
// } else if (
// get(
// state,
// "screenConfiguration.preparedFinalObject.FireNOCs[0].fireNOCDetails.applicantDetails.ownerShipType",
// ""
// ).includes("INSTITUTIONAL")
// ) {
// set(
// action.screenConfig,
// "components.div.children.formwizardThirdStep.children.applicantDetails.children.cardContent.children.applicantTypeContainer.children.singleApplicantContainer.props.style",
// { display: "none" }
// );
// set(
// action.screenConfig,
// "components.div.children.formwizardThirdStep.children.applicantDetails.children.cardContent.children.applicantTypeContainer.children.institutionContainer.props.style",
// {}
// );
// set(
// action.screenConfig,
// "components.div.children.formwizardThirdStep.children.applicantDetails.children.cardContent.children.applicantTypeContainer.children.applicantSubType.props.style",
// {}
// );
// }
return action;
},
components: {
div: {
uiFramework: "custom-atoms",
componentPath: "Div",
props: {
className: "common-div-css"
},
children: {
headerDiv: {
uiFramework: "custom-atoms",
componentPath: "Container",
children: {
header: {
gridDefination: {
xs: 12,
sm: 10
},
...header
}
}
},
stepper,
formwizardFirstStep,
formwizardSecondStep,
formwizardThirdStep,
formwizardFourthStep,
footer
}
}
}
};
export default screenConfig; | { labelName: "Applicant Details", labelKey: "NOC_COMMON_APPLICANT_DETAILS" },
{ labelName: "Documents", labelKey: "NOC_COMMON_DOCUMENTS" }
]; |
user.go | package models
import (
"time"
"github.com/buzhiyun/gocron/internal/modules/utils"
)
const PasswordSaltLength = 6
// User model
type User struct {
Id int `json:"id" xorm:"pk autoincr notnull "`
Name string `json:"name" xorm:"varchar(32) notnull unique"` // username
Password string `json:"-" xorm:"char(32) notnull "` // password (salted hash)
Salt string `json:"-" xorm:"char(6) notnull "` // password salt
Email string `json:"email" xorm:"varchar(50) notnull unique default '' "` // email address
Created time.Time `json:"created" xorm:"datetime notnull created"`
Updated time.Time `json:"updated" xorm:"datetime updated"`
IsAdmin int8 `json:"is_admin" xorm:"tinyint notnull default 0"` // admin flag, 1: admin, 0: regular user
Status Status `json:"status" xorm:"tinyint notnull default 1"` // 1: active, 0: disabled
BaseModel `json:"-" xorm:"-"`
}
// Create inserts a new user and returns its id.
func (user *User) Create() (insertId int, err error) {
user.Status = Enabled
user.Salt = user.generateSalt()
user.Password = user.encryptPassword(user.Password, user.Salt)
_, err = Db.Insert(user)
if err == nil { | }
// Update modifies the given fields of the user with the given id.
func (user *User) Update(id int, data CommonMap) (int64, error) {
return Db.Table(user).ID(id).Update(data)
}
func (user *User) UpdatePassword(id int, password string) (int64, error) {
salt := user.generateSalt()
safePassword := user.encryptPassword(password, salt)
return user.Update(id, CommonMap{"password": safePassword, "salt": salt})
}
// Delete removes the user with the given id.
func (user *User) Delete(id int) (int64, error) {
return Db.Id(id).Delete(user)
}
// Disable marks the user as disabled.
func (user *User) Disable(id int) (int64, error) {
return user.Update(id, CommonMap{"status": Disabled})
}
// Enable marks the user as active.
func (user *User) Enable(id int) (int64, error) {
return user.Update(id, CommonMap{"status": Enabled})
}
// Built-in system users that skip the external portal authentication.
var sysUser = map[string]interface{}{"root": nil, "admin": nil}
// Match verifies the username (or email) and password.
func (user *User) Match(username, password string) bool {
where := "(name = ? OR email = ?) AND status =? "
_, err := Db.Where(where, username, username, Enabled).Get(user)
if err != nil {
return false
}
_, rootUser := sysUser[username]
if !rootUser {
// not a privileged user: authenticate through the Septnet portal
ok, portalErr := user.SeptnetAuth(username, password)
if ok && portalErr == nil {
return true
} else {
return false
}
}
hashPassword := user.encryptPassword(password, user.Salt)
return hashPassword == user.Password
}
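// Illustrative Match usage (assumes an initialized Db):
//   u := &User{}
//   ok := u.Match("alice", "secret") // portal auth for normal users, salted hash for root/admin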
// Find fetches the details of the user with the given id.
func (user *User) Find(id int) error {
_, err := Db.Id(id).Get(user)
return err
}
// UsernameExists reports whether the username is already taken.
func (user *User) UsernameExists(username string, uid int) (int64, error) {
if uid > 0 {
return Db.Where("name = ? AND id != ?", username, uid).Count(user)
}
return Db.Where("name = ?", username).Count(user)
}
// EmailExists reports whether the email address is already taken.
func (user *User) EmailExists(email string, uid int) (int64, error) {
if uid > 0 {
return Db.Where("email = ? AND id != ?", email, uid).Count(user)
}
return Db.Where("email = ?", email).Count(user)
}
func (user *User) List(params CommonMap) ([]User, error) {
user.parsePageAndPageSize(params)
list := make([]User, 0)
err := Db.Desc("id").Limit(user.PageSize, user.pageLimitOffset()).Find(&list)
return list, err
}
func (user *User) Total() (int64, error) {
return Db.Count(user)
}
// encryptPassword hashes the password together with the salt.
func (user *User) encryptPassword(password, salt string) string {
return utils.Md5(password + salt)
}
// generateSalt produces a random password salt.
func (user *User) generateSalt() string {
return utils.RandString(PasswordSaltLength)
} | insertId = user.Id
}
return |
lib.rs | pub fn build_proverb(list: Vec<&str>) -> String {
let mut output = String::new();
if list.is_empty() {
return output; | if list.len() > 1 {
for i in 0..(list.len() - 1) {
output.push_str(&format!(
"For want of a {} the {} was lost.\n",
list[i],
list[i + 1]
));
}
}
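    // The closing line always names the first item in the chain.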
output.push_str(&format!("And all for the want of a {}.", list[0]));
return output;
} | } |
azureAdlsGen2Mount.ts | // *** WARNING: this file was generated by the Pulumi Terraform Bridge (tfgen) Tool. ***
// *** Do not edit by hand unless you're certain you know what you are doing! ***
import * as pulumi from "@pulumi/pulumi";
import * as utilities from "./utilities";
export class AzureAdlsGen2Mount extends pulumi.CustomResource {
/**
* Get an existing AzureAdlsGen2Mount resource's state with the given name, ID, and optional extra
* properties used to qualify the lookup.
*
* @param name The _unique_ name of the resulting resource.
* @param id The _unique_ provider ID of the resource to lookup.
* @param state Any extra arguments used during the lookup.
* @param opts Optional settings to control the behavior of the CustomResource.
*/
public static get(name: string, id: pulumi.Input<pulumi.ID>, state?: AzureAdlsGen2MountState, opts?: pulumi.CustomResourceOptions): AzureAdlsGen2Mount {
return new AzureAdlsGen2Mount(name, <any>state, { ...opts, id: id });
}
/** @internal */
public static readonly __pulumiType = 'databricks:index/azureAdlsGen2Mount:AzureAdlsGen2Mount';
/**
* Returns true if the given object is an instance of AzureAdlsGen2Mount. This is designed to work even
* when multiple copies of the Pulumi SDK have been loaded into the same process.
*/
public static isInstance(obj: any): obj is AzureAdlsGen2Mount {
if (obj === undefined || obj === null) {
return false;
}
return obj['__pulumiType'] === AzureAdlsGen2Mount.__pulumiType;
}
public readonly clientId!: pulumi.Output<string>;
public readonly clientSecretKey!: pulumi.Output<string>;
public readonly clientSecretScope!: pulumi.Output<string>;
public readonly clusterId!: pulumi.Output<string | undefined>;
public readonly containerName!: pulumi.Output<string>;
public readonly directory!: pulumi.Output<string>;
public readonly initializeFileSystem!: pulumi.Output<boolean>;
public readonly mountName!: pulumi.Output<string>;
public /*out*/ readonly source!: pulumi.Output<string>;
public readonly storageAccountName!: pulumi.Output<string>;
public readonly tenantId!: pulumi.Output<string>;
/**
* Create a AzureAdlsGen2Mount resource with the given unique name, arguments, and options.
*
* @param name The _unique_ name of the resource.
* @param args The arguments to use to populate this resource's properties.
* @param opts A bag of options that control this resource's behavior.
*/
constructor(name: string, args: AzureAdlsGen2MountArgs, opts?: pulumi.CustomResourceOptions)
constructor(name: string, argsOrState?: AzureAdlsGen2MountArgs | AzureAdlsGen2MountState, opts?: pulumi.CustomResourceOptions) {
let resourceInputs: pulumi.Inputs = {};
opts = opts || {};
if (opts.id) {
const state = argsOrState as AzureAdlsGen2MountState | undefined;
resourceInputs["clientId"] = state ? state.clientId : undefined;
resourceInputs["clientSecretKey"] = state ? state.clientSecretKey : undefined;
resourceInputs["clientSecretScope"] = state ? state.clientSecretScope : undefined;
resourceInputs["clusterId"] = state ? state.clusterId : undefined;
resourceInputs["containerName"] = state ? state.containerName : undefined;
resourceInputs["directory"] = state ? state.directory : undefined;
resourceInputs["initializeFileSystem"] = state ? state.initializeFileSystem : undefined;
resourceInputs["mountName"] = state ? state.mountName : undefined;
resourceInputs["source"] = state ? state.source : undefined;
resourceInputs["storageAccountName"] = state ? state.storageAccountName : undefined;
resourceInputs["tenantId"] = state ? state.tenantId : undefined;
} else {
const args = argsOrState as AzureAdlsGen2MountArgs | undefined;
if ((!args || args.clientId === undefined) && !opts.urn) {
throw new Error("Missing required property 'clientId'");
}
if ((!args || args.clientSecretKey === undefined) && !opts.urn) {
throw new Error("Missing required property 'clientSecretKey'");
}
if ((!args || args.clientSecretScope === undefined) && !opts.urn) {
throw new Error("Missing required property 'clientSecretScope'");
}
if ((!args || args.containerName === undefined) && !opts.urn) {
throw new Error("Missing required property 'containerName'");
}
if ((!args || args.initializeFileSystem === undefined) && !opts.urn) {
throw new Error("Missing required property 'initializeFileSystem'");
}
if ((!args || args.mountName === undefined) && !opts.urn) {
throw new Error("Missing required property 'mountName'"); | if ((!args || args.storageAccountName === undefined) && !opts.urn) {
throw new Error("Missing required property 'storageAccountName'");
}
if ((!args || args.tenantId === undefined) && !opts.urn) {
throw new Error("Missing required property 'tenantId'");
}
resourceInputs["clientId"] = args ? args.clientId : undefined;
resourceInputs["clientSecretKey"] = args ? args.clientSecretKey : undefined;
resourceInputs["clientSecretScope"] = args ? args.clientSecretScope : undefined;
resourceInputs["clusterId"] = args ? args.clusterId : undefined;
resourceInputs["containerName"] = args ? args.containerName : undefined;
resourceInputs["directory"] = args ? args.directory : undefined;
resourceInputs["initializeFileSystem"] = args ? args.initializeFileSystem : undefined;
resourceInputs["mountName"] = args ? args.mountName : undefined;
resourceInputs["storageAccountName"] = args ? args.storageAccountName : undefined;
resourceInputs["tenantId"] = args ? args.tenantId : undefined;
resourceInputs["source"] = undefined /*out*/;
}
opts = pulumi.mergeOptions(utilities.resourceOptsDefaults(), opts);
super(AzureAdlsGen2Mount.__pulumiType, name, resourceInputs, opts);
}
}
/**
* Input properties used for looking up and filtering AzureAdlsGen2Mount resources.
*/
export interface AzureAdlsGen2MountState {
clientId?: pulumi.Input<string>;
clientSecretKey?: pulumi.Input<string>;
clientSecretScope?: pulumi.Input<string>;
clusterId?: pulumi.Input<string>;
containerName?: pulumi.Input<string>;
directory?: pulumi.Input<string>;
initializeFileSystem?: pulumi.Input<boolean>;
mountName?: pulumi.Input<string>;
source?: pulumi.Input<string>;
storageAccountName?: pulumi.Input<string>;
tenantId?: pulumi.Input<string>;
}
/**
* The set of arguments for constructing a AzureAdlsGen2Mount resource.
*/
export interface AzureAdlsGen2MountArgs {
clientId: pulumi.Input<string>;
clientSecretKey: pulumi.Input<string>;
clientSecretScope: pulumi.Input<string>;
clusterId?: pulumi.Input<string>;
containerName: pulumi.Input<string>;
directory?: pulumi.Input<string>;
initializeFileSystem: pulumi.Input<boolean>;
mountName: pulumi.Input<string>;
storageAccountName: pulumi.Input<string>;
tenantId: pulumi.Input<string>;
} | } |
app.component.spec.ts | /* tslint:disable:no-unused-variable */
import { TestBed, async } from '@angular/core/testing';
import { RouterModule, Router } from '@angular/router';
import { APP_BASE_HREF } from '@angular/common';
import { AppComponent } from './app.component';
import { AuthService } from './auth/auth.service';
describe('AppComponent', () => {
beforeEach(() => {
TestBed.configureTestingModule({
imports: [
RouterModule.forRoot([])
],
providers: [
{ provide: AuthService, useValue: { handleAuthentication: () => {}, isAuthenticated: () => {} } },
{ provide: APP_BASE_HREF, useValue: '/' }
],
declarations: [
AppComponent
],
});
TestBed.compileComponents();
});
it('should create the app', async(() => { | const app = fixture.debugElement.componentInstance;
expect(app).toBeTruthy();
}));
}); | const fixture = TestBed.createComponent(AppComponent); |
caller.py | # -*- coding: utf-8 -*-
'''
The caller module is used as a front-end to manage direct calls to the salt
minion modules.
'''
# Import python libs
from __future__ import print_function
import os
import sys
import logging
import datetime
import traceback
# Import salt libs
import salt.exitcodes
import salt.loader
import salt.minion
import salt.output
import salt.payload
import salt.transport
import salt.utils.args
from salt._compat import string_types
from salt.log import LOG_LEVELS
from salt.utils import print_cli
log = logging.getLogger(__name__)
try:
from raet import raeting, nacling
from raet.lane.stacking import LaneStack
from raet.lane.yarding import RemoteYard
except ImportError:
# Don't die on missing transport libs since only one transport is required
pass
# Custom exceptions
from salt.exceptions import (
SaltClientError,
CommandNotFoundError,
CommandExecutionError,
SaltInvocationError,
)
class Caller(object):
'''
Factory class to create salt-call callers for different transport
'''
@staticmethod
def factory(opts, **kwargs):
# Default to ZeroMQ for now
ttype = 'zeromq'
# determine the ttype
if 'transport' in opts:
ttype = opts['transport']
elif 'transport' in opts.get('pillar', {}).get('master', {}):
ttype = opts['pillar']['master']['transport']
# switch on available ttypes
if ttype == 'zeromq':
return ZeroMQCaller(opts, **kwargs)
elif ttype == 'raet':
return RAETCaller(opts, **kwargs)
else:
raise Exception('Callers are only defined for ZeroMQ and raet')
# return NewKindOfCaller(opts, **kwargs)
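# Illustrative factory usage (assumes a fully loaded minion opts dict):
#   caller = Caller.factory(opts)
#   caller.run()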
class ZeroMQCaller(object):
'''
Object to wrap the calling of local salt modules for the salt-call command
'''
def __init__(self, opts):
'''
Pass in the command line options
'''
self.opts = opts
self.opts['caller'] = True
self.serial = salt.payload.Serial(self.opts)
# Handle this here so other deeper code which might
# be imported as part of the salt api doesn't do a
# nasty sys.exit() and tick off our developer users
try:
self.minion = salt.minion.SMinion(opts)
except SaltClientError as exc:
raise SystemExit(str(exc))
def call(self):
'''
Call the module
'''
# raet channel here
ret = {}
fun = self.opts['fun']
ret['jid'] = '{0:%Y%m%d%H%M%S%f}'.format(datetime.datetime.now())
proc_fn = os.path.join(
salt.minion.get_proc_dir(self.opts['cachedir']),
ret['jid']
)
if fun not in self.minion.functions:
sys.stderr.write('Function {0} is not available\n'.format(fun))
sys.exit(-1)
try:
sdata = {
'fun': fun,
'pid': os.getpid(),
'jid': ret['jid'],
'tgt': 'salt-call'}
args, kwargs = salt.minion.load_args_and_kwargs(
self.minion.functions[fun],
salt.utils.args.parse_input(self.opts['arg']),
data=sdata)
try:
with salt.utils.fopen(proc_fn, 'w+b') as fp_:
fp_.write(self.serial.dumps(sdata))
except NameError:
# Don't require msgpack with local
pass
except IOError:
sys.stderr.write(
'Cannot write to process directory. '
'Do you have permissions to '
'write to {0} ?\n'.format(proc_fn))
func = self.minion.functions[fun]
try:
ret['return'] = func(*args, **kwargs)
except TypeError as exc:
trace = traceback.format_exc()
sys.stderr.write('Passed invalid arguments: {0}\n'.format(exc))
active_level = LOG_LEVELS.get(
self.opts['log_level'].lower(), logging.ERROR)
if active_level <= logging.DEBUG:
sys.stderr.write(trace)
sys.exit(salt.exitcodes.EX_GENERIC)
try:
ret['retcode'] = sys.modules[
func.__module__].__context__.get('retcode', 0)
except AttributeError:
ret['retcode'] = 1
except (CommandExecutionError) as exc:
msg = 'Error running \'{0}\': {1}\n'
active_level = LOG_LEVELS.get(
self.opts['log_level'].lower(), logging.ERROR)
if active_level <= logging.DEBUG:
sys.stderr.write(traceback.format_exc())
sys.stderr.write(msg.format(fun, str(exc)))
sys.exit(salt.exitcodes.EX_GENERIC)
except CommandNotFoundError as exc:
msg = 'Command required for \'{0}\' not found: {1}\n'
sys.stderr.write(msg.format(fun, str(exc)))
sys.exit(salt.exitcodes.EX_GENERIC)
try:
os.remove(proc_fn)
except (IOError, OSError):
pass
if hasattr(self.minion.functions[fun], '__outputter__'):
oput = self.minion.functions[fun].__outputter__
if isinstance(oput, string_types):
ret['out'] = oput
is_local = self.opts['local'] or self.opts.get(
'file_client', False) == 'local'
returners = self.opts.get('return', '').split(',')
if (not is_local) or returners:
ret['id'] = self.opts['id']
ret['fun'] = fun
ret['fun_args'] = self.opts['arg']
for returner in returners:
try:
ret['success'] = True
self.minion.returners['{0}.returner'.format(returner)](ret)
except Exception:
pass
# return the job infos back up to the respective minion's master
if not is_local:
try:
mret = ret.copy()
mret['jid'] = 'req'
self.return_pub(mret)
except Exception:
pass
# close raet channel here
return ret
def return_pub(self, ret):
'''
Return the data up to the master
'''
channel = salt.transport.Channel.factory(self.opts, usage='salt_call')
load = {'cmd': '_return', 'id': self.opts['id']}
for key, value in ret.items():
load[key] = value
channel.send(load)
def print_docs(self):
'''
Pick up the documentation for all of the modules and print it out.
'''
docs = {}
for name, func in self.minion.functions.items():
if name not in docs:
if func.__doc__:
docs[name] = func.__doc__
for name in sorted(docs):
if name.startswith(self.opts.get('fun', '')):
print_cli('{0}:\n{1}\n'.format(name, docs[name]))
def print_grains(self):
'''
Print out the grains
'''
grains = salt.loader.grains(self.opts)
salt.output.display_output({'local': grains}, 'grains', self.opts)
def run(self):
'''
Execute the salt call logic
'''
try:
ret = self.call()
out = ret.get('out', 'nested')
if self.opts['metadata']:
print_ret = ret
out = 'nested'
else:
print_ret = ret.get('return', {})
salt.output.display_output(
{'local': print_ret},
out,
self.opts)
if self.opts.get('retcode_passthrough', False):
sys.exit(ret['retcode'])
except SaltInvocationError as err:
raise SystemExit(err)
class RAETCaller(ZeroMQCaller):
'''
Object to wrap the calling of local salt modules for the salt-call command
when transport is raet
'''
def | (self, opts):
'''
Pass in the command line options
'''
self.stack = self._setup_caller_stack(opts)
salt.transport.jobber_stack = self.stack
super(RAETCaller, self).__init__(opts)
def run(self):
'''
Execute the salt call logic
'''
try:
ret = self.call()
self.stack.server.close()
salt.transport.jobber_stack = None
if self.opts['metadata']:
print_ret = ret
else:
print_ret = ret.get('return', {})
salt.output.display_output(
{'local': print_ret},
ret.get('out', 'nested'),
self.opts)
if self.opts.get('retcode_passthrough', False):
sys.exit(ret['retcode'])
except SaltInvocationError as err:
raise SystemExit(err)
def _setup_caller_stack(self, opts):
'''
        Setup and return the LaneStack and Yard used by the channel when the
        global is not already set up, such as in salt-call, to communicate to and from the minion
'''
mid = opts['id']
sockdirpath = opts['sock_dir']
uid = nacling.uuid(size=18)
name = 'caller' + uid
stack = LaneStack(name=name,
lanename=mid,
sockdirpath=sockdirpath)
stack.Pk = raeting.packKinds.pack
stack.addRemote(RemoteYard(stack=stack,
name='manor',
lanename=mid,
dirpath=sockdirpath))
log.debug("Created Caller Jobber Stack {0}\n".format(stack.name))
return stack
| __init__ |
NavigationLink.tsx | import React from 'react'
import { NavLink, NavLinkProps } from 'react-router-dom'
interface Props extends NavLinkProps {
to: string
}
export class | extends React.Component<Props> {
render() {
const { to, children } = this.props
return (
<NavLink to={to}
className="ml-4 px-3 py-2 rounded-md text-sm font-medium text-black hover:text-white hover:bg-gray-700 focus:outline-none focus:text-white focus:bg-gray-700"
activeClassName="bg-gray-900 text-white"
>
{children}
</NavLink>
)
}
} | NavigationLink |
actions-navigation.tsx | import React from 'react'
import { Flex, NcssNode, Wheel } from '@wheelroom/core'
import { NavigationSegment } from '../../navigation-segment/navigation-segment'
import { NavigationSegmentNcssTree } from '../../navigation-segment/ncss-tree'
import { NavigationModel } from '../model'
import { getNavSegments } from '../get-nav-segments'
import { ThemeButton } from '../../navigation-section/header/theme-button'
export interface ActionsNavigationNcssTree extends NcssNode {
segment: NavigationSegmentNcssTree
themeButton: NcssNode
} | style: ActionsNavigationNcssTree
}
export interface ActionsNavigationProps {
buttonName: string | undefined
hideActions?: boolean
hideThemeButton?: boolean
maxSegments: number
navigation: NavigationModel[]
toggleTheme: () => void
wheel: ActionsNavigationWheel
}
export const ActionsNavigation = (props: ActionsNavigationProps) => {
if (props.hideActions) {
return null
}
const segments = getNavSegments(props.navigation, 'actions')
return (
<Flex is="div" wheel={props.wheel}>
<NavigationSegment
headingElementName="h3"
hideActionHeading={false}
hideActionIcon={true}
hideSegmentAbstract={true}
hideSegmentHeading={true}
maxSegments={props.maxSegments}
navigationSegment={segments}
wheel={{ ...props.wheel, style: props.wheel.style.segment }}
/>
<ThemeButton
buttonName={props.buttonName}
hideThemeButton={props.hideThemeButton}
toggleTheme={props.toggleTheme}
wheel={{ ...props.wheel, style: props.wheel.style.themeButton }}
/>
</Flex>
)
} |
export interface ActionsNavigationWheel extends Wheel { |
clear.rs | use token::CommandData;
use std::io::Write;
pub fn run(cmd: CommandData) -> Result<(), String> |
#[test]
fn test_clear_run() {
use std::fs::File;
let cmd = CommandData {
program: "clear".to_string(),
options: vec![],
out: Some(File::create("/dev/null").unwrap()),
input: None,
};
assert!(run(cmd).is_ok());
}
| {
let mut out = cmd.out.unwrap();
out.write_all("\x1b[2J\x1b[1;1H".as_bytes()).unwrap();
return Ok(());
} |
renderer.py | from rest_framework.renderers import TemplateHTMLRenderer
class MyTemplateHTMLRenderer(TemplateHTMLRenderer):
| def get_template_context(self, data, renderer_context):
response = renderer_context['response']
if response.exception:
data['status_code'] = response.status_code
return {'data': data} |
|
test_helpers.py | from ssmpfwd.helpers import verify_plugin_version, verbose_debug_quiet, time_decorator
from unittest.mock import MagicMock, patch
import unittest
class TestVerifyPluginVersion(unittest.TestCase):
@patch("ssmpfwd.helpers.subprocess")
def test_verify_plugin_version_success(self, mock_subprocess):
result = mock_subprocess.run()
result.stdout = b"9.8.3"
self.assertTrue(verify_plugin_version("9.8.3"))
@patch("ssmpfwd.helpers.subprocess")
def test_verify_plugin_version_fail(self, mock_subprocess):
with self.assertLogs("ssmpfwd.helpers", level="INFO") as cm:
result = mock_subprocess.run()
result.stdout = b"1.8.1"
self.assertFalse(verify_plugin_version("9.2.3"))
self.assertEqual(cm.output[0], "ERROR:ssmpfwd.helpers:session-manager-plugin version 1.8.1 is installed, 9.2.3 is required")
class TestVerboseDebugQuiet(unittest.TestCase):
import logging
def setUp(self):
@verbose_debug_quiet
def test_func():
pass
self.vdq = test_func
self.vdq()
def test_quiet(self):
option_name = "quiet"
| flag_value = self.logging.DEBUG
self.assertTrue(any([p.flag_value == flag_value for p in self.vdq.__click_params__]), msg=f"Can not find {flag_value} in option flag values")
def test_verbose(self):
flag_value = self.logging.INFO
self.assertTrue(any([p.flag_value == flag_value for p in self.vdq.__click_params__]), msg=f"Can not find {flag_value} in option flag values")
def test_default_loglevel(self):
flag_value = self.logging.WARN
self.assertTrue(any([p.flag_value == flag_value for p in self.vdq.__click_params__]), msg=f"Can not find {flag_value} in option flag values")
class TestTimeDecorator(unittest.TestCase):
from time import sleep
def setUp(self):
@time_decorator
def test_func():
self.sleep(0.5)
self.time_decorated_method = test_func
def test_time_decorartor(self):
with self.assertLogs("ssmpfwd.helpers", level="INFO") as cm:
self.time_decorated_method()
self.assertEqual(cm.output[0], "INFO:ssmpfwd.helpers:[*] starting test_func") | self.assertTrue(any([p.name == option_name for p in self.vdq.__click_params__]), msg=f"Can not find {option_name} in option parameters")
def test_debug(self):
|
string1.py | #!/usr/bin/python -tt
# Copyright 2010 Google Inc.
# Licensed under the Apache License, Version 2.0
# http://www.apache.org/licenses/LICENSE-2.0
# Google's Python Class
# http://code.google.com/edu/languages/google-python-class/
# Basic string exercises
# Fill in the code for the functions below. main() is already set up
# to call the functions with a few different inputs,
# printing 'OK' when each function is correct.
# The starter code for each function includes a 'return'
# which is just a placeholder for your code.
# It's ok if you do not complete all the functions, and there
# are some additional functions to try in string2.py.
# A. donuts
# Given an int count of a number of donuts, return a string
# of the form 'Number of donuts: <count>', where <count> is the number
# passed in. However, if the count is 10 or more, then use the word 'many'
# instead of the actual count.
# So donuts(5) returns 'Number of donuts: 5'
# and donuts(23) returns 'Number of donuts: many'
def donuts(count):
if count >= 10:
count = 'many'
return 'Number of donuts: '+str(count)
# B. both_ends
# Given a string s, return a string made of the first 2
# and the last 2 chars of the original string,
# so 'spring' yields 'spng'. However, if the string length
# is less than 2, return instead the empty string.
def both_ends(s):
slen = len(s)
if slen >= 2:
return s[0]+s[1]+s[-2]+s[-1]
else:
return ''
# C. fix_start
# Given a string s, return a string
# where all occurrences of its first char have
# been changed to '*', except do not change
# the first char itself.
# e.g. 'babble' yields 'ba**le'
# Assume that the string is length 1 or more.
# Hint: s.replace(stra, strb) returns a version of string s
# where all instances of stra have been replaced by strb.
def fix_start(s):
save = s[0]
s_modified = s.replace(s[0],'*')
return save+s_modified[1:]
# D. MixUp
# Given strings a and b, return a single string with a and b separated
# by a space '<a> <b>', except swap the first 2 chars of each string.
# e.g.
# 'mix', 'pod' -> 'pox mid'
# 'dog', 'dinner' -> 'dig donner'
# Assume a and b are length 2 or more.
def | (a, b):
return b[0]+b[1]+a[2:]+" "+a[0]+a[1]+b[2:]
# Provided simple test() function used in main() to print
# what each function returns vs. what it's supposed to return.
def test(got, expected):
if got == expected:
prefix = ' OK '
else:
prefix = ' X '
print '%s got: %s expected: %s' % (prefix, repr(got), repr(expected))
# Provided main() calls the above functions with interesting inputs,
# using test() to check if each result is correct or not.
def main():
print 'donuts'
# Each line calls donuts, compares its result to the expected for that call.
test(donuts(4), 'Number of donuts: 4')
test(donuts(9), 'Number of donuts: 9')
test(donuts(10), 'Number of donuts: many')
test(donuts(99), 'Number of donuts: many')
print
print 'both_ends'
test(both_ends('spring'), 'spng')
test(both_ends('Hello'), 'Helo')
test(both_ends('a'), '')
test(both_ends('xyz'), 'xyyz')
print
print 'fix_start'
test(fix_start('babble'), 'ba**le')
test(fix_start('aardvark'), 'a*rdv*rk')
test(fix_start('google'), 'goo*le')
test(fix_start('donut'), 'donut')
print
print 'mix_up'
test(mix_up('mix', 'pod'), 'pox mid')
test(mix_up('dog', 'dinner'), 'dig donner')
test(mix_up('gnash', 'sport'), 'spash gnort')
test(mix_up('pezzy', 'firm'), 'fizzy perm')
# Standard boilerplate to call the main() function.
if __name__ == '__main__':
main()
| mix_up |
typing.py | """
hydrofunctions.typing
~~~~~~~~~~~~~~~~~~~~~
This module contains functions for testing that user input is valid.
Why 'pre-check' user imputs, instead of using standard
python duck typing? These functions are meant to enhance an interactive
session for the user, and will check a user's parameters
before requesting data from an online resource. Otherwise, the server will
return a 404 code and the user will have no idea why. Hydrofunctions tries to raise
an exception (usually a TypeError) before a request is made, so that the user
can fix their request. It also tries to provide a helpful error message to an
interactive session user.
Suggested format for these functions:
* first check that the input is a string,
* then do a regular expression to check that the input is more or less valid.
* raise exceptions when user input breaks format.
-----
"""
from __future__ import absolute_import, print_function, division, unicode_literals
import re
def check_parameter_string(candidate, param):
"""Checks that a parameter is a string or a list of strings.
"""
parameters = {
"site": "NWIS station id(s) should be a string or list of strings,"
+ "often in the form of an eight digit number enclosed in quotes.",
"parameterCd": "NWIS parameter codes are five-digit strings that specify "
+ "the parameter that is being measured at the site. Common "
+ "codes are '00060' for stream stage in feet, '00065' for "
+ "stream discharge in cubic feet per second, and '72019' for "
+ "groundwater levels. Not all sites collect data for all "
+ "parameters. See a complete list of physical parameters here: "
+ "https://help.waterdata.usgs.gov/parameter_cd?group_cd=PHY "
+ "You may request multiple parameters by submitting a comma-"
+ "delimited string of codes with no spaces, or by submitting "
+ "a list of codes, like this: parameterCd = '00065,00060' or "
+ "parameterCd = ['00065', '00060'] ",
"county": "The NWIS county parameter accepts a five-digit string or "
+ "a list of five-digit strings to select all of the sites "
+ "within a county or list of counties. "
+ "Example: '51059' or ['51059', '51061'] are acceptable.",
"state": "This parameter uses US two-letter postal codes "
+ "such as 'MD' for Maryland or 'AZ' for Arizona.",
"default": "This parameter should be a string or a list of strings.",
}
if param in parameters:
msg = parameters[param] + " Actual value: {}".format(candidate)
else:
msg = (
"This parameter should be a string or a list of strings."
+ " Actual value: {}".format(candidate)
)
if candidate is None:
return None
elif isinstance(candidate, str) and candidate:
return candidate
elif (isinstance(candidate, list) or isinstance(candidate, tuple)) and candidate:
for s in candidate:
if not isinstance(s, str):
raise TypeError(msg + " bad element: {}".format(s))
return ",".join([str(s) for s in candidate])
else:
raise TypeError(msg)
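# For illustration (made-up site ids):
#   check_parameter_string(["01585200", "01581500"], "site")  # -> "01585200,01581500"
#   check_parameter_string(8, "site")  # raises TypeError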
def check_NWIS_bBox(input):
"""Checks that the USGS bBox is valid.
"""
msg = (
"NWIS bBox should be a string, list of strings, or tuple "
+ "containing the longitude and latitude of the lower left corner "
+ "of the bounding box, followed by the longitude and latitude "
+ "of the upper right corner of the bounding box. Most often in "
+ 'the form of "ll_long,ll_lat,ur_long,ur_lat" . '
+ "All latitude and longitude values should have less than 8 "
+ "places. "
+ "Actual value: {}".format(input)
)
if input is None:
return None
# assume that if it is a string it will be fine as is.
# don't accept a series of sites in a single string.
# Test for and reject empty strings: empty strings are false.
if isinstance(input, str) and input:
t = input.split(",")
if len(t) < 4:
raise TypeError(msg)
return input
# test for input is a list and it is not empty
elif (isinstance(input, list) or isinstance(input, tuple)) and input:
if len(input) < 4:
raise TypeError(msg)
# format: [-83.000000, 36.500000, -81.000000, 38.500000] ==> '-83.000000,36.500000,-81.000000,38.500000'
return ",".join([str(s) for s in input])
else:
raise TypeError(msg)
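# For illustration, both forms yield the same comma-delimited string:
#   check_NWIS_bBox("-83.0,36.5,-81.0,38.5")
#   check_NWIS_bBox([-83.0, 36.5, -81.0, 38.5])  # -> "-83.0,36.5,-81.0,38.5"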
def check_NWIS_service(input):
"""Checks that the service is valid: either 'iv' or 'dv'"""
if input is None:
return None
if input == "iv" or input == "dv":
return input
else:
raise TypeError(
"The NWIS service type accepts 'dv' for daily values, "
"or 'iv' for instantaneous values. Actual value: "
"{}".format(input)
)
def check_datestr(input):
"""Checks that the start_date or end_date parameter is in yyyy-mm-dd format.
"""
# Use a regular expression to ensure in form of yyyy-mm-dd
if input is None:
return None
pattern = r"[1-2]\d\d\d-[0-1]\d-[0-3]\d\Z"
datestr = re.compile(pattern)
if isinstance(input, str) and datestr.match(input):
return input
else:
raise TypeError(
"Dates should be a string in the form of 'YYYY-MM-DD' "
"enclosed in quotes. Actual value: {}".format(input)
)
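# For illustration:
#   check_datestr("2020-01-15")  # -> "2020-01-15"
#   check_datestr("01/15/2020")  # raises TypeError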
def check_period(input):
"""Checks that the period parameter in is the P##D format, where ## is
the number of days before now.
"""
if input is None:
return None
# TODO: check how many days maximum NWIS is willing to respond to.
# This pattern sets a maximum of 999 days (between 1 and 3 digits).
pattern = r"^P\d{1,3}D$"
periodstr = re.compile(pattern)
if isinstance(input, str) and periodstr.match(input):
return input
else:
| raise TypeError(
"Period should be a string in the form of 'PxD', "
"where x represents the number of days before today, "
"with a maximum of 999 days. "
"Example: to request the previous 10 days, "
"enter 'period=P10D'. Actual value entered: {}".format(input)
) |
|
test_gelu_op.py | # Copyright (c) 2021 PaddlePaddle Authors. All Rights Reserved.
# | # you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import sys
sys.path.append('../../common')
sys.path.append('../../../')
import test_gelu_op_base
from auto_scan_test import AutoScanTest, IgnoreReasons
from program_config import TensorConfig, ProgramConfig, OpConfig, CxxConfig, TargetType, PrecisionType, DataLayoutType, Place
import unittest
import hypothesis
from hypothesis import given, settings, seed, example, assume
class TestGeluOp(AutoScanTest):
def is_program_valid(self, program_config: ProgramConfig) -> bool:
return True
def sample_program_configs(self, draw):
return test_gelu_op_base.sample_program_configs(draw)
def sample_predictor_configs(self):
config = CxxConfig()
config.set_valid_places({Place(TargetType.X86, PrecisionType.FP32, DataLayoutType.NCHW)})
yield config, ["gelu"], (1e-5, 1e-5)
def add_ignore_pass_case(self):
pass
def test(self, *args, **kwargs):
self.run_and_statis(quant=False, max_examples=25)
if __name__ == "__main__":
unittest.main() | # Licensed under the Apache License, Version 2.0 (the "License"); |
db.go | package host
import (
"fmt"
"time"
"github.com/evergreen-ci/evergreen"
"github.com/evergreen-ci/evergreen/db"
"github.com/evergreen-ci/evergreen/model/build"
"github.com/evergreen-ci/evergreen/model/credentials"
"github.com/evergreen-ci/evergreen/model/distro"
"github.com/evergreen-ci/evergreen/model/task"
"github.com/evergreen-ci/evergreen/util"
"github.com/mongodb/anser/bsonutil"
adb "github.com/mongodb/anser/db"
"github.com/mongodb/grip"
"github.com/pkg/errors"
"go.mongodb.org/mongo-driver/bson"
mgobson "gopkg.in/mgo.v2/bson"
)
const (
// Collection is the name of the MongoDB collection that stores hosts.
Collection = "hosts"
)
var (
IdKey = bsonutil.MustHaveTag(Host{}, "Id")
DNSKey = bsonutil.MustHaveTag(Host{}, "Host")
SecretKey = bsonutil.MustHaveTag(Host{}, "Secret")
UserKey = bsonutil.MustHaveTag(Host{}, "User")
TagKey = bsonutil.MustHaveTag(Host{}, "Tag")
DistroKey = bsonutil.MustHaveTag(Host{}, "Distro")
ProviderKey = bsonutil.MustHaveTag(Host{}, "Provider")
IPKey = bsonutil.MustHaveTag(Host{}, "IP")
ProvisionedKey = bsonutil.MustHaveTag(Host{}, "Provisioned")
ProvisionTimeKey = bsonutil.MustHaveTag(Host{}, "ProvisionTime")
ExtIdKey = bsonutil.MustHaveTag(Host{}, "ExternalIdentifier")
RunningTaskKey = bsonutil.MustHaveTag(Host{}, "RunningTask")
RunningTaskGroupKey = bsonutil.MustHaveTag(Host{}, "RunningTaskGroup")
RunningTaskBuildVariantKey = bsonutil.MustHaveTag(Host{}, "RunningTaskBuildVariant")
RunningTaskVersionKey = bsonutil.MustHaveTag(Host{}, "RunningTaskVersion")
RunningTaskProjectKey = bsonutil.MustHaveTag(Host{}, "RunningTaskProject")
TaskDispatchTimeKey = bsonutil.MustHaveTag(Host{}, "TaskDispatchTime")
CreateTimeKey = bsonutil.MustHaveTag(Host{}, "CreationTime")
ExpirationTimeKey = bsonutil.MustHaveTag(Host{}, "ExpirationTime")
TerminationTimeKey = bsonutil.MustHaveTag(Host{}, "TerminationTime")
LTCTimeKey = bsonutil.MustHaveTag(Host{}, "LastTaskCompletedTime")
LTCTaskKey = bsonutil.MustHaveTag(Host{}, "LastTask")
LTCGroupKey = bsonutil.MustHaveTag(Host{}, "LastGroup")
LTCBVKey = bsonutil.MustHaveTag(Host{}, "LastBuildVariant")
LTCVersionKey = bsonutil.MustHaveTag(Host{}, "LastVersion")
LTCProjectKey = bsonutil.MustHaveTag(Host{}, "LastProject")
StatusKey = bsonutil.MustHaveTag(Host{}, "Status")
AgentRevisionKey = bsonutil.MustHaveTag(Host{}, "AgentRevision")
NeedsNewAgentKey = bsonutil.MustHaveTag(Host{}, "NeedsNewAgent")
NeedsNewAgentMonitorKey = bsonutil.MustHaveTag(Host{}, "NeedsNewAgentMonitor")
JasperCredentialsIDKey = bsonutil.MustHaveTag(Host{}, "JasperCredentialsID")
JasperDeployAttemptsKey = bsonutil.MustHaveTag(Host{}, "JasperDeployAttempts")
StartedByKey = bsonutil.MustHaveTag(Host{}, "StartedBy")
InstanceTypeKey = bsonutil.MustHaveTag(Host{}, "InstanceType")
VolumeSizeKey = bsonutil.MustHaveTag(Host{}, "VolumeTotalSize")
VolumeIDsKey = bsonutil.MustHaveTag(Host{}, "VolumeIDs")
NotificationsKey = bsonutil.MustHaveTag(Host{}, "Notifications")
LastCommunicationTimeKey = bsonutil.MustHaveTag(Host{}, "LastCommunicationTime")
UserHostKey = bsonutil.MustHaveTag(Host{}, "UserHost")
ZoneKey = bsonutil.MustHaveTag(Host{}, "Zone")
ProjectKey = bsonutil.MustHaveTag(Host{}, "Project")
ProvisionOptionsKey = bsonutil.MustHaveTag(Host{}, "ProvisionOptions")
ProvisionAttemptsKey = bsonutil.MustHaveTag(Host{}, "ProvisionAttempts")
TaskCountKey = bsonutil.MustHaveTag(Host{}, "TaskCount")
StartTimeKey = bsonutil.MustHaveTag(Host{}, "StartTime")
AgentStartTimeKey = bsonutil.MustHaveTag(Host{}, "AgentStartTime")
ComputeCostPerHourKey = bsonutil.MustHaveTag(Host{}, "ComputeCostPerHour")
TotalCostKey = bsonutil.MustHaveTag(Host{}, "TotalCost")
TotalIdleTimeKey = bsonutil.MustHaveTag(Host{}, "TotalIdleTime")
HasContainersKey = bsonutil.MustHaveTag(Host{}, "HasContainers")
ParentIDKey = bsonutil.MustHaveTag(Host{}, "ParentID")
ContainerImagesKey = bsonutil.MustHaveTag(Host{}, "ContainerImages")
ContainerBuildAttempt = bsonutil.MustHaveTag(Host{}, "ContainerBuildAttempt")
LastContainerFinishTimeKey = bsonutil.MustHaveTag(Host{}, "LastContainerFinishTime")
SpawnOptionsKey = bsonutil.MustHaveTag(Host{}, "SpawnOptions")
ContainerPoolSettingsKey = bsonutil.MustHaveTag(Host{}, "ContainerPoolSettings")
RunningTeardownForTaskKey = bsonutil.MustHaveTag(Host{}, "RunningTeardownForTask")
RunningTeardownSinceKey = bsonutil.MustHaveTag(Host{}, "RunningTeardownSince")
InstanceTagsKey = bsonutil.MustHaveTag(Host{}, "InstanceTags")
SpawnOptionsTaskIDKey = bsonutil.MustHaveTag(SpawnOptions{}, "TaskID")
SpawnOptionsBuildIDKey = bsonutil.MustHaveTag(SpawnOptions{}, "BuildID")
SpawnOptionsTimeoutKey = bsonutil.MustHaveTag(SpawnOptions{}, "TimeoutTeardown")
SpawnOptionsSpawnedByTaskKey = bsonutil.MustHaveTag(SpawnOptions{}, "SpawnedByTask")
)
var (
HostsByDistroDistroIDKey = bsonutil.MustHaveTag(IdleHostsByDistroID{}, "DistroID")
HostsByDistroIdleHostsKey = bsonutil.MustHaveTag(IdleHostsByDistroID{}, "IdleHosts")
HostsByDistroRunningHostsCountKey = bsonutil.MustHaveTag(IdleHostsByDistroID{}, "RunningHostsCount")
)
// === Queries ===
// All is a query that returns all hosts
var All = db.Query(struct{}{})
// ByUserWithRunningStatus produces a query that returns all
// non-terminated hosts started by the given user id.
func ByUserWithRunningStatus(user string) db.Q {
return db.Query(
bson.M{
StartedByKey: user,
StatusKey: bson.M{"$ne": evergreen.HostTerminated},
})
}
// IsLive is a query that returns all working hosts started by Evergreen
func IsLive() bson.M {
return bson.M{
StartedByKey: evergreen.User,
StatusKey: bson.M{"$in": evergreen.UpHostStatus},
}
}
// ByUserWithUnterminatedStatus produces a query that returns all running hosts
// for the given user id.
func ByUserWithUnterminatedStatus(user string) db.Q {
return db.Query(
bson.M{
StartedByKey: user,
StatusKey: bson.M{"$ne": evergreen.HostTerminated},
},
)
}
// AllIdleEphemeral finds all running ephemeral hosts without containers
// that have no running tasks.
func AllIdleEphemeral() ([]Host, error) |
// IdleEphemeralGroupedByDistroID groups and collates the following by distro.Id:
// - []host.Host of ephemeral hosts without containers which having no running task, ordered by {host.CreationTime: 1}
// - the total number of ephemeral hosts with status: evergreen.HostRunning
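// Each result document thus carries one distro ID, its running-host count,
// and its idle hosts ordered by creation time (oldest first).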
func IdleEphemeralGroupedByDistroID() ([]IdleHostsByDistroID, error) {
var idlehostsByDistroID []IdleHostsByDistroID
pipeline := []mgobson.M{
{
"$match": mgobson.M{
StartedByKey: evergreen.User,
StatusKey: evergreen.HostRunning,
ProviderKey: mgobson.M{"$in": evergreen.ProviderSpawnable},
HasContainersKey: mgobson.M{"$ne": true},
},
},
{
"$sort": mgobson.M{CreateTimeKey: 1},
},
{
"$group": mgobson.M{
"_id": "$" + bsonutil.GetDottedKeyName(DistroKey, distro.IdKey),
HostsByDistroRunningHostsCountKey: mgobson.M{"$sum": 1},
HostsByDistroIdleHostsKey: mgobson.M{"$push": bson.M{"$cond": []interface{}{mgobson.M{"$eq": []interface{}{"$running_task", mgobson.Undefined}}, "$$ROOT", mgobson.Undefined}}},
},
},
{
"$project": mgobson.M{"_id": 0, HostsByDistroDistroIDKey: "$_id", HostsByDistroIdleHostsKey: 1, HostsByDistroRunningHostsCountKey: 1},
},
}
if err := db.Aggregate(Collection, pipeline, &idlehostsByDistroID); err != nil {
return nil, errors.Wrap(err, "problem grouping idle hosts by Distro.Id")
}
return idlehostsByDistroID, nil
}
func runningHostsQuery(distroID string) bson.M {
query := IsLive()
if distroID != "" {
key := bsonutil.GetDottedKeyName(DistroKey, distro.IdKey)
query[key] = distroID
}
return query
}
func CountRunningHosts(distroID string) (int, error) {
num, err := Count(db.Query(runningHostsQuery(distroID)))
return num, errors.Wrap(err, "problem finding running hosts")
}
func AllRunningHosts(distroID string) (HostGroup, error) {
allHosts, err := Find(db.Query(runningHostsQuery(distroID)))
if err != nil {
return nil, errors.Wrap(err, "Error finding live hosts")
}
return allHosts, nil
}
// AllActiveHosts produces a HostGroup for all hosts with UpHost
// status as well as quarantined hosts. These do not count spawn
// hosts.
func AllActiveHosts(distroID string) (HostGroup, error) {
q := bson.M{
StartedByKey: evergreen.User,
StatusKey: bson.M{"$in": append(evergreen.UpHostStatus, evergreen.HostQuarantined)},
}
if distroID != "" {
q[bsonutil.GetDottedKeyName(DistroKey, distro.IdKey)] = distroID
}
activeHosts, err := Find(db.Query(q))
if err != nil {
return nil, errors.Wrap(err, "problem finding active hosts")
}
return activeHosts, nil
}
// AllHostsSpawnedByTasksToTerminate finds all hosts spawned by tasks that should be terminated.
func AllHostsSpawnedByTasksToTerminate() ([]Host, error) {
catcher := grip.NewBasicCatcher()
var hosts []Host
timedOutHosts, err := allHostsSpawnedByTasksTimedOut()
hosts = append(hosts, timedOutHosts...)
catcher.Add(err)
taskHosts, err := allHostsSpawnedByFinishedTasks()
hosts = append(hosts, taskHosts...)
catcher.Add(err)
buildHosts, err := allHostsSpawnedByFinishedBuilds()
hosts = append(hosts, buildHosts...)
catcher.Add(err)
if catcher.HasErrors() {
return nil, catcher.Resolve()
}
return hosts, nil
}
// allHostsSpawnedByTasksTimedOut finds hosts spawned by tasks that should be terminated because they are past their timeout.
func allHostsSpawnedByTasksTimedOut() ([]Host, error) {
query := db.Query(bson.M{
StatusKey: evergreen.HostRunning,
bsonutil.GetDottedKeyName(SpawnOptionsKey, SpawnOptionsSpawnedByTaskKey): true,
bsonutil.GetDottedKeyName(SpawnOptionsKey, SpawnOptionsTimeoutKey): bson.M{"$lte": time.Now()},
})
return Find(query)
}
// allHostsSpawnedByFinishedTasks finds hosts spawned by tasks that should be terminated because their tasks have finished.
func allHostsSpawnedByFinishedTasks() ([]Host, error) {
const runningTasks = "running_tasks"
pipeline := []bson.M{
{"$match": bson.M{
StatusKey: bson.M{"$in": evergreen.UpHostStatus},
bsonutil.GetDottedKeyName(SpawnOptionsKey, SpawnOptionsSpawnedByTaskKey): true}},
{"$lookup": bson.M{
"from": task.Collection,
"localField": bsonutil.GetDottedKeyName(SpawnOptionsKey, SpawnOptionsTaskIDKey),
"foreignField": task.IdKey,
"as": runningTasks,
}},
{"$unwind": "$" + runningTasks},
{"$match": bson.M{bsonutil.GetDottedKeyName(runningTasks, task.StatusKey): bson.M{"$in": task.CompletedStatuses}}},
{"$project": bson.M{runningTasks: 0}},
}
var hosts []Host
if err := db.Aggregate(Collection, pipeline, &hosts); err != nil {
return nil, errors.Wrap(err, "error getting hosts spawned by finished tasks")
}
return hosts, nil
}
// allHostsSpawnedByFinishedBuilds finds hosts spawned by tasks that should be terminated because their builds have finished.
func allHostsSpawnedByFinishedBuilds() ([]Host, error) {
const runningBuilds = "running_builds"
pipeline := []bson.M{
{"$match": bson.M{
StatusKey: bson.M{"$in": evergreen.UpHostStatus},
bsonutil.GetDottedKeyName(SpawnOptionsKey, SpawnOptionsSpawnedByTaskKey): true}},
{"$lookup": bson.M{
"from": build.Collection,
"localField": bsonutil.GetDottedKeyName(SpawnOptionsKey, SpawnOptionsBuildIDKey),
"foreignField": build.IdKey,
"as": runningBuilds,
}},
{"$unwind": "$" + runningBuilds},
{"$match": bson.M{bsonutil.GetDottedKeyName(runningBuilds, build.StatusKey): bson.M{"$in": build.CompletedStatuses}}},
{"$project": bson.M{runningBuilds: 0}},
}
var hosts []Host
if err := db.Aggregate(Collection, pipeline, &hosts); err != nil {
return nil, errors.Wrap(err, "error getting hosts spawned by finished builds")
}
return hosts, nil
}
// ByUnprovisionedSince produces a query that returns all hosts
// Evergreen never finished setting up that were created before
// the given time.
func ByUnprovisionedSince(threshold time.Time) db.Q {
return db.Query(bson.M{
"$or": []bson.M{
bson.M{ProvisionedKey: false},
bson.M{StatusKey: evergreen.HostProvisioning},
},
CreateTimeKey: bson.M{"$lte": threshold},
StatusKey: bson.M{"$ne": evergreen.HostTerminated},
StartedByKey: evergreen.User,
})
}
// NumHostsByTaskSpec returns the number of running hosts that are running a
// task with the given group, buildvariant, project, and version.
func NumHostsByTaskSpec(group, buildVariant, project, version string) (int, error) {
if group == "" || buildVariant == "" || project == "" || version == "" {
s := "all arguments passed to host.NumHostsByTaskSpec must be non-empty strings: "
s += fmt.Sprintf("group is '%s', buildVariant is '%s', project is '%s' and version is '%s'", group, buildVariant, project, version)
return 0, errors.New(s)
}
q := db.Query(
bson.M{
StatusKey: evergreen.HostRunning,
"$or": []bson.M{
{
RunningTaskKey: bson.M{"$exists": "true"},
RunningTaskGroupKey: group,
RunningTaskBuildVariantKey: buildVariant,
RunningTaskProjectKey: project,
RunningTaskVersionKey: version,
},
{
LTCTaskKey: bson.M{"$exists": "true"},
LTCGroupKey: group,
LTCBVKey: buildVariant,
LTCProjectKey: project,
LTCVersionKey: version,
},
},
},
)
hosts, err := Find(q)
if err != nil {
return 0, errors.Wrap(err, "error querying database for hosts")
}
return len(hosts), nil
}
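// Illustrative NumHostsByTaskSpec call (placeholder values):
//   n, err := NumHostsByTaskSpec("my_group", "ubuntu1604", "my_project", "abc123")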
// IsUninitialized is a query that returns all unstarted + uninitialized Evergreen hosts.
var IsUninitialized = db.Query(
bson.M{StatusKey: evergreen.HostUninitialized},
)
// Starting returns a query that finds hosts that we do not yet know to be running.
func Starting() db.Q {
return db.Query(bson.M{StatusKey: evergreen.HostStarting})
}
// Provisioning returns a query used by the hostinit process to determine hosts that are
// started according to the cloud provider, but have not yet been provisioned by Evergreen.
func Provisioning() db.Q {
return db.Query(bson.M{StatusKey: evergreen.HostProvisioning})
}
func FindByFirstProvisioningAttempt() ([]Host, error) {
return Find(db.Query(bson.M{
ProvisionAttemptsKey: 0,
StatusKey: evergreen.HostProvisioning,
}))
}
// FindByExpiringJasperCredentials finds all hosts whose Jasper service
// credentials will expire within the given cutoff.
func FindByExpiringJasperCredentials(cutoff time.Duration) ([]Host, error) {
deadline := time.Now().Add(cutoff)
bootstrapKey := bsonutil.GetDottedKeyName(DistroKey, distro.BootstrapSettingsKey, distro.BootstrapSettingsMethodKey)
credentialsKey := credentials.Collection
expirationKey := bsonutil.GetDottedKeyName(credentialsKey, credentials.TTLKey)
var hosts []Host
pipeline := []bson.M{
bson.M{"$match": bson.M{
bootstrapKey: bson.M{
"$exists": true,
"$ne": distro.BootstrapMethodLegacySSH,
},
StatusKey: evergreen.HostRunning,
HasContainersKey: bson.M{"$ne": true},
ParentIDKey: bson.M{"$exists": false},
}},
bson.M{"$lookup": bson.M{
"from": credentials.Collection,
"localField": JasperCredentialsIDKey,
"foreignField": credentials.IDKey,
"as": credentialsKey,
}},
bson.M{"$match": bson.M{
expirationKey: bson.M{"$lte": deadline},
}},
bson.M{"$project": bson.M{
credentialsKey: 0,
}},
}
err := db.Aggregate(Collection, pipeline, &hosts)
if adb.ResultsNotFound(err) {
return nil, nil
}
return hosts, err
}
// IsRunningAndSpawned is a query that returns all running hosts
// spawned by an Evergreen user.
var IsRunningAndSpawned = db.Query(
bson.M{
StartedByKey: bson.M{"$ne": evergreen.User},
StatusKey: bson.M{"$ne": evergreen.HostTerminated},
},
)
// IsRunningTask is a query that returns all running hosts with a running task
var IsRunningTask = db.Query(
bson.M{
RunningTaskKey: bson.M{"$exists": true},
StatusKey: bson.M{
"$ne": evergreen.HostTerminated,
},
},
)
// IsTerminated is a query that returns all hosts that are terminated
// (and not running a task).
var IsTerminated = db.Query(
	bson.M{
		RunningTaskKey: bson.M{"$exists": false},
		StatusKey:      evergreen.HostTerminated,
	},
)
func ByDistroIdDoc(distroId string) bson.M {
dId := fmt.Sprintf("%v.%v", DistroKey, distro.IdKey)
return bson.M{
dId: distroId,
StartedByKey: evergreen.User,
StatusKey: bson.M{"$in": evergreen.UpHostStatus},
}
}
// ByDistroId produces a query that returns all working hosts (not terminated and
// not quarantined) of the given distro.
func ByDistroId(distroId string) db.Q {
return db.Query(ByDistroIdDoc(distroId))
}
// ById produces a query that returns a host with the given id.
func ById(id string) db.Q {
return db.Query(bson.D{{Key: IdKey, Value: id}})
}
func ByDistroIDRunning(distroID string) db.Q {
distroIDKey := bsonutil.GetDottedKeyName(DistroKey, distro.IdKey)
return db.Query(bson.M{
distroIDKey: distroID,
StatusKey: evergreen.HostRunning,
StartedByKey: evergreen.User,
})
}
// ByIds produces a query that returns all hosts in the given list of ids.
func ByIds(ids []string) db.Q {
return db.Query(bson.D{
{
Key: IdKey,
Value: bson.D{
{
Key: "$in",
Value: ids,
},
},
},
})
}
// FindOneByJasperCredentialsID finds the host with the given Jasper credentials ID.
func FindOneByJasperCredentialsID(id string) (*Host, error) {
h := &Host{}
query := bson.M{JasperCredentialsIDKey: id}
if err := db.FindOne(Collection, query, db.NoProjection, db.NoSort, h); err != nil {
return nil, errors.Wrapf(err, "could not find host with Jasper credentials ID '%s'", id)
}
return h, nil
}
// ByRunningTaskId returns a host running the task with the given id.
func ByRunningTaskId(taskId string) db.Q {
return db.Query(bson.D{{Key: RunningTaskKey, Value: taskId}})
}
// ByDynamicWithinTime is a query that returns all dynamic hosts that were running at any point between startTime and endTime.
func ByDynamicWithinTime(startTime, endTime time.Time) db.Q {
return db.Query(
bson.M{
"$or": []bson.M{
bson.M{
CreateTimeKey: bson.M{"$lt": endTime},
TerminationTimeKey: bson.M{"$gt": startTime},
ProviderKey: bson.M{"$ne": evergreen.HostTypeStatic},
},
bson.M{
CreateTimeKey: bson.M{"$lt": endTime},
TerminationTimeKey: util.ZeroTime,
StatusKey: evergreen.HostRunning,
ProviderKey: bson.M{"$ne": evergreen.HostTypeStatic},
},
},
})
}
var AllStatic = db.Query(
bson.M{
ProviderKey: evergreen.HostTypeStatic,
})
// IsIdle is a query that returns all running Evergreen hosts with no task.
var IsIdle = db.Query(
bson.M{
RunningTaskKey: bson.M{"$exists": false},
StatusKey: evergreen.HostRunning,
StartedByKey: evergreen.User,
},
)
// ByNotMonitoredSince produces a query that returns all hosts whose
// last reachability check was before the specified threshold,
// filtering out user-spawned hosts and hosts currently running tasks.
func ByNotMonitoredSince(threshold time.Time) db.Q {
return db.Query(bson.M{
"$and": []bson.M{
{RunningTaskKey: bson.M{"$exists": false}},
{StatusKey: evergreen.HostRunning},
{StartedByKey: evergreen.User},
{"$or": []bson.M{
{LastCommunicationTimeKey: bson.M{"$lte": threshold}},
{LastCommunicationTimeKey: bson.M{"$exists": false}},
}},
},
})
}
// ByExpiringBetween produces a query that returns any user-spawned hosts
// that will expire between the specified times.
func ByExpiringBetween(lowerBound time.Time, upperBound time.Time) db.Q {
return db.Query(bson.M{
StartedByKey: bson.M{"$ne": evergreen.User},
StatusKey: bson.M{
"$nin": []string{evergreen.HostTerminated, evergreen.HostQuarantined},
},
ExpirationTimeKey: bson.M{"$gte": lowerBound, "$lte": upperBound},
})
}
// FindStaleRunningTasks returns the task documents that are currently assigned to a host but whose heartbeats have gone stale.
func FindStaleRunningTasks(cutoff time.Duration) ([]task.Task, error) {
pipeline := []bson.M{}
pipeline = append(pipeline, bson.M{
"$match": bson.M{
RunningTaskKey: bson.M{
"$exists": true,
},
StatusKey: bson.M{
"$in": evergreen.UpHostStatus,
},
},
})
pipeline = append(pipeline, bson.M{
"$lookup": bson.M{
"from": task.Collection,
"localField": RunningTaskKey,
"foreignField": task.IdKey,
"as": "_task",
},
})
pipeline = append(pipeline, bson.M{
"$project": bson.M{
"_task": 1,
"_id": 0,
},
})
pipeline = append(pipeline, bson.M{
"$replaceRoot": bson.M{
"newRoot": bson.M{
"$mergeObjects": []interface{}{
bson.M{"$arrayElemAt": []interface{}{"$_task", 0}},
"$$ROOT",
},
},
},
})
pipeline = append(pipeline, bson.M{
"$project": bson.M{
"_task": 0,
},
})
pipeline = append(pipeline, bson.M{
"$match": bson.M{
"$or": []bson.M{
{
task.StatusKey: task.SelectorTaskInProgress,
task.LastHeartbeatKey: bson.M{"$lte": time.Now().Add(-cutoff)},
},
{
				task.StatusKey: evergreen.TaskUndispatched,
				// an undispatched task counts as stale only if it has a real
				// (nonzero) heartbeat that has passed the cutoff
				task.LastHeartbeatKey: bson.M{
					"$lte": time.Now().Add(-cutoff),
					"$ne":  util.ZeroTime,
				},
},
},
},
})
pipeline = append(pipeline, bson.M{
"$project": bson.M{
task.IdKey: 1,
task.ExecutionKey: 1,
},
})
tasks := []task.Task{}
err := db.Aggregate(Collection, pipeline, &tasks)
if err != nil {
return nil, errors.Wrap(err, "error finding stale running tasks")
}
return tasks, nil
}
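// logStaleRunningTasksExample is an illustrative sketch (not part of the
// original API) of consuming the result; the final $project stage above keeps
// only the task ID and execution, so that is all a caller may rely on.
func logStaleRunningTasksExample(cutoff time.Duration) error {
	staleTasks, err := FindStaleRunningTasks(cutoff)
	if err != nil {
		return errors.Wrap(err, "finding stale running tasks")
	}
	for _, t := range staleTasks {
		fmt.Printf("stale task '%s' (execution %d) is still assigned to a host\n", t.Id, t.Execution)
	}
	return nil
}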
// AgentLastCommunicationTimeElapsed finds legacy hosts which do not have an
// agent or whose agents have not communicated recently.
func AgentLastCommunicationTimeElapsed(currentTime time.Time) bson.M {
bootstrapKey := bsonutil.GetDottedKeyName(DistroKey, distro.BootstrapSettingsKey, distro.BootstrapSettingsMethodKey)
cutoffTime := currentTime.Add(-MaxLCTInterval)
return bson.M{
StatusKey: evergreen.HostRunning,
StartedByKey: evergreen.User,
HasContainersKey: bson.M{"$ne": true},
ParentIDKey: bson.M{"$exists": false},
RunningTaskKey: bson.M{"$exists": false},
"$and": []bson.M{
bson.M{"$or": []bson.M{
{LastCommunicationTimeKey: util.ZeroTime},
{LastCommunicationTimeKey: bson.M{"$lte": cutoffTime}},
{LastCommunicationTimeKey: bson.M{"$exists": false}},
}},
bson.M{"$or": []bson.M{
{bootstrapKey: bson.M{"$exists": false}},
{bootstrapKey: bson.M{"$in": []string{"", distro.BootstrapMethodLegacySSH}}},
}},
},
}
}
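// findLegacyHostsNeedingAgentExample is an illustrative sketch (not part of
// the original API): AgentLastCommunicationTimeElapsed returns a raw bson.M
// rather than a db.Q, so a caller might wrap it like this to run the query.
func findLegacyHostsNeedingAgentExample(now time.Time) ([]Host, error) {
	// Wrap the raw selector in a db.Q and execute it against the collection.
	return Find(db.Query(AgentLastCommunicationTimeElapsed(now)))
}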
// AgentMonitorLastCommunicationTimeElapsed finds hosts which do not have an
// agent monitor or which should have an agent monitor but their agent has not
// communicated recently.
func AgentMonitorLastCommunicationTimeElapsed(currentTime time.Time) bson.M {
bootstrapKey := bsonutil.GetDottedKeyName(DistroKey, distro.BootstrapSettingsKey, distro.BootstrapSettingsMethodKey)
cutoffTime := currentTime.Add(-MaxLCTInterval)
return bson.M{
StatusKey: evergreen.HostRunning,
StartedByKey: evergreen.User,
HasContainersKey: bson.M{"$ne": true},
ParentIDKey: bson.M{"$exists": false},
RunningTaskKey: bson.M{"$exists": false},
"$or": []bson.M{
{LastCommunicationTimeKey: util.ZeroTime},
{LastCommunicationTimeKey: bson.M{"$lte": cutoffTime}},
{LastCommunicationTimeKey: bson.M{"$exists": false}},
},
bootstrapKey: bson.M{"$in": []string{
distro.BootstrapMethodSSH,
distro.BootstrapMethodUserData,
distro.BootstrapMethodPreconfiguredImage,
}},
}
}
// NeedsNewAgentFlagSet returns a query for legacy hosts with NeedsNewAgent set to true.
func NeedsNewAgentFlagSet() db.Q {
bootstrapKey := bsonutil.GetDottedKeyName(DistroKey, distro.BootstrapSettingsKey, distro.BootstrapSettingsMethodKey)
return db.Query(bson.M{
"$or": []bson.M{
{bootstrapKey: bson.M{"$exists": false}},
{bootstrapKey: bson.M{"$in": []string{"", distro.BootstrapMethodLegacySSH}}},
},
StatusKey: evergreen.HostRunning,
StartedByKey: evergreen.User,
HasContainersKey: bson.M{"$ne": true},
ParentIDKey: bson.M{"$exists": false},
RunningTaskKey: bson.M{"$exists": false},
NeedsNewAgentKey: true,
})
}
// FindByNeedsNewAgentMonitor returns running hosts that need a new agent
// monitor.
func FindByNeedsNewAgentMonitor() ([]Host, error) {
bootstrapKey := bsonutil.GetDottedKeyName(DistroKey, distro.BootstrapSettingsKey, distro.BootstrapSettingsMethodKey)
hosts := []Host{}
query := bson.M{
bootstrapKey: bson.M{
"$exists": true,
"$ne": distro.BootstrapMethodLegacySSH,
},
StatusKey: evergreen.HostRunning,
StartedByKey: evergreen.User,
HasContainersKey: bson.M{"$ne": true},
ParentIDKey: bson.M{"$exists": false},
RunningTaskKey: bson.M{"$exists": false},
NeedsNewAgentMonitorKey: true,
}
err := db.FindAll(Collection, query, db.NoProjection, db.NoSort, db.NoSkip, db.NoLimit, &hosts)
if adb.ResultsNotFound(err) {
return nil, nil
}
return hosts, err
}
// FindUserDataSpawnHostsProvisioning finds all spawn hosts that have been
// provisioned by the app server but are still being provisioned by user data.
func FindUserDataSpawnHostsProvisioning() ([]Host, error) {
bootstrapKey := bsonutil.GetDottedKeyName(DistroKey, distro.BootstrapSettingsKey, distro.BootstrapSettingsMethodKey)
hosts, err := Find(db.Query(bson.M{
StatusKey: evergreen.HostProvisioning,
ProvisionedKey: true,
StartedByKey: bson.M{"$ne": evergreen.User},
bootstrapKey: distro.BootstrapMethodUserData,
}))
if err != nil {
return nil, errors.Wrap(err, "could not find user data spawn hosts that are still provisioning themselves")
}
return hosts, nil
}
// RemoveStaleInitializing removes host intents that have been uninitialized
// for more than 3 minutes or building (but not started) for more than 15
// minutes for the specified distro.
//
// If you pass the empty string as a distroID, it will remove stale
// host intents for *all* distros.
func RemoveStaleInitializing(distroID string) error {
query := bson.M{
UserHostKey: false,
ProviderKey: bson.M{"$in": evergreen.ProviderSpawnable},
"$or": []bson.M{
{
StatusKey: evergreen.HostUninitialized,
CreateTimeKey: bson.M{"$lt": time.Now().Add(-3 * time.Minute)},
},
{
StatusKey: evergreen.HostBuilding,
CreateTimeKey: bson.M{"$lt": time.Now().Add(-15 * time.Minute)},
},
},
}
if distroID != "" {
key := bsonutil.GetDottedKeyName(DistroKey, distro.IdKey)
query[key] = distroID
}
return db.RemoveAll(Collection, query)
}
// === DB Logic ===
// FindOne gets one Host for the given query.
func FindOne(query db.Q) (*Host, error) {
host := &Host{}
err := db.FindOneQ(Collection, query, host)
if adb.ResultsNotFound(err) {
return nil, nil
}
return host, err
}
func FindOneId(id string) (*Host, error) {
return FindOne(ById(id))
}
// FindOneByIdOrTag finds a host where the given id is stored in either the _id or tag field.
// (The tag field is used for the id from the host's original intent host.)
func FindOneByIdOrTag(id string) (*Host, error) {
query := db.Query(bson.M{
"$or": []bson.M{
bson.M{TagKey: id},
bson.M{IdKey: id},
},
})
	host, err := FindOne(query)
if err != nil {
return nil, errors.Wrapf(err, "error finding '%s' by _id or tag field", id)
}
return host, nil
}
// Find gets all Hosts for the given query.
func Find(query db.Q) ([]Host, error) {
hosts := []Host{}
return hosts, errors.WithStack(db.FindAllQ(Collection, query, &hosts))
}
// Count returns the number of hosts that satisfy the given query.
func Count(query db.Q) (int, error) {
return db.CountQ(Collection, query)
}
// UpdateOne updates one host.
func UpdateOne(query interface{}, update interface{}) error {
return db.Update(
Collection,
query,
update,
)
}
// UpdateAll updates all hosts.
func UpdateAll(query interface{}, update interface{}) error {
_, err := db.UpdateAll(
Collection,
query,
update,
)
return err
}
// UpsertOne upserts a host.
func UpsertOne(query interface{}, update interface{}) (*adb.ChangeInfo, error) {
return db.Upsert(
Collection,
query,
update,
)
}
func GetHostsByFromIDWithStatus(id, status, user string, limit int) ([]Host, error) {
var statusMatch interface{}
if status != "" {
statusMatch = status
} else {
statusMatch = bson.M{"$in": evergreen.UpHostStatus}
}
filter := bson.M{
IdKey: bson.M{"$gte": id},
StatusKey: statusMatch,
}
if user != "" {
filter[StartedByKey] = user
}
var query db.Q
hosts, err := Find(query.Filter(filter).Sort([]string{IdKey}).Limit(limit))
if err != nil {
		return nil, errors.Wrap(err, "error querying database")
}
return hosts, nil
}
type InactiveHostCounts struct {
HostType string `bson:"_id"`
Count int `bson:"count"`
}
func inactiveHostCountPipeline() []bson.M {
return []bson.M{
{
"$match": bson.M{
StatusKey: bson.M{
"$in": []string{evergreen.HostDecommissioned, evergreen.HostQuarantined},
},
},
},
{
"$project": bson.M{
IdKey: 0,
StatusKey: 1,
ProviderKey: 1,
},
},
{
"$group": bson.M{
"_id": "$" + ProviderKey,
"count": bson.M{
"$sum": 1,
},
},
},
}
}
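// countInactiveHostsExample is an illustrative sketch (not part of the
// original API) showing how the pipeline above would typically be run: the
// $group stage emits one document per provider, which decodes into the
// InactiveHostCounts struct defined above.
func countInactiveHostsExample() ([]InactiveHostCounts, error) {
	counts := []InactiveHostCounts{}
	if err := db.Aggregate(Collection, inactiveHostCountPipeline(), &counts); err != nil {
		return nil, errors.Wrap(err, "aggregating inactive host counts")
	}
	return counts, nil
}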
// FinishTime is a struct for storing pairs of host IDs and last container finish times
type FinishTime struct {
Id string `bson:"_id"`
FinishTime time.Time `bson:"finish_time"`
}
// lastContainerFinishTimePipeline returns an aggregation pipeline that
// computes the latest predicted container finish time for each running
// host with child containers.
func lastContainerFinishTimePipeline() []bson.M {
const output string = "finish_time"
return []bson.M{
{
// matches all running containers
"$match": bson.M{
ParentIDKey: bson.M{"$exists": true},
StatusKey: evergreen.HostRunning,
},
},
{
// joins hosts and tasks collections on task ID
"$lookup": bson.M{
"from": task.Collection,
"localField": RunningTaskKey,
"foreignField": IdKey,
"as": "task",
},
},
{
// deconstructs $lookup array
"$unwind": "$task",
},
{
// groups containers by parent host ID
"$group": bson.M{
"_id": "$" + ParentIDKey,
output: bson.M{
// computes last container finish time for each host
"$max": bson.M{
"$add": []interface{}{bsonutil.GetDottedKeyName("$task", "start_time"),
						// divide by 1000000 to convert the predicted duration from
						// nanoseconds to milliseconds, since $add with a date operates in milliseconds
bson.M{"$divide": []interface{}{bsonutil.GetDottedKeyName("$task", "duration_prediction", "value"), 1000000}},
},
},
},
},
},
{
// projects only ID and finish time
"$project": bson.M{
output: 1,
},
},
}
}
// AggregateLastContainerFinishTimes returns the latest finish time for each host with containers
func AggregateLastContainerFinishTimes() ([]FinishTime, error) {
var times []FinishTime
	err := db.Aggregate(Collection, lastContainerFinishTimePipeline(), &times)
if err != nil {
return nil, errors.Wrap(err, "error aggregating parent finish times")
}
return times, nil
}
| {
query := db.Query(bson.M{
RunningTaskKey: bson.M{"$exists": false},
StartedByKey: evergreen.User,
StatusKey: evergreen.HostRunning,
ProviderKey: bson.M{"$in": evergreen.ProviderSpawnable},
HasContainersKey: bson.M{"$ne": true},
})
return Find(query)
} |
utils_darwin.go | package utils
import (
"fmt"
"os"
"os/exec"
"runtime"
"syscall"
"github.com/pkg/errors"
"golang.org/x/image/font"
)
func HideWindow(cmd *exec.Cmd) {
}
func BreakAwayFromParent(cmd *exec.Cmd) {
var sysProcAttr *syscall.SysProcAttr
if cmd.SysProcAttr != nil {
sysProcAttr = cmd.SysProcAttr
} else {
sysProcAttr = &syscall.SysProcAttr{}
cmd.SysProcAttr = sysProcAttr
}
sysProcAttr.Setsid = true
}
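// startDetachedExample is an illustrative sketch (not part of the original
// API): BreakAwayFromParent must be applied before the command is started so
// that the child runs in its own session (Setsid) and survives the parent's
// exit. The path and arguments here are hypothetical.
func startDetachedExample(path string, args ...string) (*exec.Cmd, error) {
	cmd := exec.Command(path, args...)
	BreakAwayFromParent(cmd)
	if err := cmd.Start(); err != nil {
		return nil, errors.Wrap(err, "starting detached process")
	}
	return cmd, nil
}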
func HideJavaWindowIfNeeded(cmd *exec.Cmd) {
}
func LoadIconAndSetForWindow(windowTitle string) error {
return nil
}
func LoadFont(fontFace string, size int, scaling float64) (font.Face, error) {
return nil, errors.Errorf("LoadFont not implemented for platform %s", runtime.GOOS)
}
func CallLibrary(path string, funcName string, arg string) (err error) {
return nil
}
func CreateDesktopShortcut(src, title, description, iconSrc string, arguments ...string) error {
return nil
}
func CreateStartMenuShortcut(src, folder, title, description, iconSrc string, arguments ...string) error {
return nil
}
func RemoveDesktopShortcut(title string) error {
return nil
}
func RemoveStartMenuFolder(folder string) error {
return nil
}
func ShowUsage(productTitle, productVersion, text string) {
	fmt.Fprint(os.Stderr, text)
}
func ShowFatalError(text string) {
	fmt.Fprint(os.Stderr, text)
}
func InstallApp(app *AppInfo) error |
func UninstallApp(title string) error {
return nil
}
func OpenTextFile(filename string) error {
	cmd := exec.Command("open", "-t", filename)
return cmd.Start()
}
| {
return nil
} |
server.py | #!/usr/bin/env python
import argparse
import hashlib
import requests
import os
from http.server import HTTPServer, BaseHTTPRequestHandler
from socketserver import ThreadingMixIn
host_target = os.environ['AIS_TARGET_URL']
class Handler(BaseHTTPRequestHandler):
def log_request(self, code='-', size='-'):
        # Don't log info for successful requests; unsuccessful ones are logged by log_error().
pass
def _set_headers(self):
self.send_response(200)
self.send_header("Content-Type", "text/plain")
self.end_headers()
def do_PUT(self):
content_length = int(self.headers['Content-Length'])
post_data = self.rfile.read(content_length)
md5 = hashlib.md5()
md5.update(post_data)
self._set_headers()
self.wfile.write(md5.hexdigest().encode())
def do_GET(self):
if self.path == "/health":
self._set_headers()
self.wfile.write(b"OK")
return
x = requests.get(host_target + self.path)
md5 = hashlib.md5()
md5.update(x.content)
self._set_headers()
self.wfile.write(md5.hexdigest().encode())
class ThreadedHTTPServer(ThreadingMixIn, HTTPServer):
"""Handle requests in a separate thread."""
def run(addr="localhost", port=8000):
server = ThreadedHTTPServer((addr, port), Handler)
print(f"Starting HTTP server on {addr}:{port}")
server.serve_forever()
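# Illustrative usage sketch (hostnames and ports here are hypothetical): the
# proxy target must be provided via the AIS_TARGET_URL environment variable
# before the module is imported, e.g.
#
#   AIS_TARGET_URL=http://ais-target:8080 python server.py -l 0.0.0.0 -p 8000
#
# GET /<path> fetches the object from the target and responds with the MD5 of
# its body; PUT /<path> responds with the MD5 of the uploaded body.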
if __name__ == "__main__":
parser = argparse.ArgumentParser(description="Run a simple HTTP server")
parser.add_argument(
"-l",
"--listen",
default="localhost",
help="Specify the IP address on which the server listens",
)
parser.add_argument(
"-p",
"--port",
type=int,
default=8000, | args = parser.parse_args()
run(addr=args.listen, port=args.port) | help="Specify the port on which the server listens",
) |
stmtctx_test.go | // Copyright 2020 WHTCORPS INC, Inc.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
package stmtctx_test
import (
"fmt"
"testing"
"time"
. "github.com/whtcorpsinc/check"
"github.com/whtcorpsinc/milevadb/stochastikctx/stmtctx"
"github.com/whtcorpsinc/milevadb/soliton/execdetails"
)
func | (t *testing.T) {
TestingT(t)
}
type stmtctxSuit struct{}
var _ = Suite(&stmtctxSuit{})
func (s *stmtctxSuit) TestCausetTasksDetails(c *C) {
ctx := new(stmtctx.StatementContext)
backoffs := []string{"einsteindbRPC", "FIDelRPC", "regionMiss"}
for i := 0; i < 100; i++ {
d := &execdetails.InterDircDetails{
CalleeAddress: fmt.Sprintf("%v", i+1),
ProcessTime: time.Second * time.Duration(i+1),
WaitTime: time.Millisecond * time.Duration(i+1),
BackoffSleep: make(map[string]time.Duration),
BackoffTimes: make(map[string]int),
}
for _, backoff := range backoffs {
d.BackoffSleep[backoff] = time.Millisecond * 100 * time.Duration(i+1)
d.BackoffTimes[backoff] = i + 1
}
ctx.MergeInterDircDetails(d, nil)
}
d := ctx.CausetTasksDetails()
c.Assert(d.NumCausetTasks, Equals, 100)
c.Assert(d.AvgProcessTime, Equals, time.Second*101/2)
c.Assert(d.P90ProcessTime, Equals, time.Second*91)
c.Assert(d.MaxProcessTime, Equals, time.Second*100)
c.Assert(d.MaxProcessAddress, Equals, "100")
c.Assert(d.AvgWaitTime, Equals, time.Millisecond*101/2)
c.Assert(d.P90WaitTime, Equals, time.Millisecond*91)
c.Assert(d.MaxWaitTime, Equals, time.Millisecond*100)
c.Assert(d.MaxWaitAddress, Equals, "100")
fields := d.ToZapFields()
c.Assert(len(fields), Equals, 9)
for _, backoff := range backoffs {
c.Assert(d.MaxBackoffAddress[backoff], Equals, "100")
c.Assert(d.MaxBackoffTime[backoff], Equals, 100*time.Millisecond*100)
c.Assert(d.P90BackoffTime[backoff], Equals, time.Millisecond*100*91)
c.Assert(d.AvgBackoffTime[backoff], Equals, time.Millisecond*100*101/2)
c.Assert(d.TotBackoffTimes[backoff], Equals, 101*50)
c.Assert(d.TotBackoffTime[backoff], Equals, 101*50*100*time.Millisecond)
}
}
func (s *stmtctxSuit) TestStatementContextPushDownFLags(c *C) {
testCases := []struct {
in *stmtctx.StatementContext
out uint64
}{
{&stmtctx.StatementContext{InInsertStmt: true}, 8},
{&stmtctx.StatementContext{InUFIDelateStmt: true}, 16},
{&stmtctx.StatementContext{InDeleteStmt: true}, 16},
{&stmtctx.StatementContext{InSelectStmt: true}, 32},
{&stmtctx.StatementContext{IgnoreTruncate: true}, 1},
{&stmtctx.StatementContext{TruncateAsWarning: true}, 2},
{&stmtctx.StatementContext{OverflowAsWarning: true}, 64},
{&stmtctx.StatementContext{IgnoreZeroInDate: true}, 128},
{&stmtctx.StatementContext{DividedByZeroAsWarning: true}, 256},
{&stmtctx.StatementContext{InLoadDataStmt: true}, 1024},
{&stmtctx.StatementContext{InSelectStmt: true, TruncateAsWarning: true}, 34},
{&stmtctx.StatementContext{DividedByZeroAsWarning: true, IgnoreTruncate: true}, 257},
{&stmtctx.StatementContext{InUFIDelateStmt: true, IgnoreZeroInDate: true, InLoadDataStmt: true}, 1168},
}
for _, tt := range testCases {
got := tt.in.PushDownFlags()
c.Assert(got, Equals, tt.out, Commentf("get %v, want %v", got, tt.out))
}
}
| TestT |